diff --git a/src/intra.c b/src/intra.c
index 6898bae5..270bb201 100644
--- a/src/intra.c
+++ b/src/intra.c
@@ -246,7 +246,7 @@ void kvz_intra_predict(
   }
   else {
     // Angular modes use smoothed reference pixels, unless the mode is close
     // to being either vertical or horizontal.
-    static const int kvz_intra_hor_ver_dist_thres[10] = {20, 20, 20, 14, 2, 0, 20, 0, 0, 0 };
+    static const int kvz_intra_hor_ver_dist_thres[10] = {24, 24, 24, 14, 2, 0, 20, 0, 0, 0 };
     int filter_threshold = kvz_intra_hor_ver_dist_thres[kvz_math_floor_log2(width)];
     int dist_from_vert_or_hor = MIN(abs(mode - 50), abs(mode - 18));
     if (dist_from_vert_or_hor > filter_threshold) {
diff --git a/src/strategies/generic/intra-generic.c b/src/strategies/generic/intra-generic.c
index 67c087a8..21cb9cf1 100644
--- a/src/strategies/generic/intra-generic.c
+++ b/src/strategies/generic/intra-generic.c
@@ -175,13 +175,10 @@ static void kvz_angular_pred_generic(
         p[0] = ref_main[ref_main_index - 1];
         p[1] = ref_main[ref_main_index];
         p[2] = ref_main[ref_main_index + 1];
-        p[3] = f[3] != 0 ? ref_main[ref_main_index + 2] : 0;
-        if (use_cubic) {
-          dst[y * width + x] = CLIP_TO_PIXEL(((int32_t)(f[0] * p[0]) + (int32_t)(f[1] * p[1]) + (int32_t)(f[2] * p[2]) + (int32_t)(f[3] * p[3]) + 32) >> 6);
-        }
-        else {
-          dst[y * width + x] = ((int32_t)(f[0]*p[0]) + (int32_t)(f[1]*p[1]) + (int32_t)(f[2]*p[2]) + (int32_t)(f[3]*p[3]) + 32) >> 6;
-        }
+        p[3] = ref_main[ref_main_index + 2];
+
+        dst[y * width + x] = CLIP_TO_PIXEL(((int32_t)(f[0] * p[0]) + (int32_t)(f[1] * p[1]) + (int32_t)(f[2] * p[2]) + (int32_t)(f[3] * p[3]) + 32) >> 6);
+
       }
     }
     else {
@@ -190,7 +187,7 @@
       for (int_fast32_t x = 0; x < width; ++x) {
         kvz_pixel ref1 = ref_main[x + delta_int];
         kvz_pixel ref2 = ref_main[x + delta_int + 1];
-        dst[y * width + x] = ((32 - delta_fract) * ref1 + delta_fract * ref2 + 16) >> 5;
+        dst[y * width + x] = ref1 + ((delta_fract * (ref2-ref1) + 16) >> 5);
       }
     }
   }
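
Note on the last hunk: the new linear-interpolation expression is an arithmetic simplification of the old one, since ((32 - delta_fract) * ref1 + delta_fract * ref2 + 16) >> 5 and ref1 + ((delta_fract * (ref2 - ref1) + 16) >> 5) compute the same rounded weighted average. The rewrite trades one multiplication for a subtraction, but its shift operand can now go negative, so it relies on arithmetic right shift of signed values (implementation-defined in C, arithmetic on common compilers). The standalone sketch below is not part of the patch; it assumes the default 8-bit kvz_pixel and brute-forces the equivalence over all operand combinations.

/* Exhaustive check: old vs. new linear interpolation used by
 * kvz_angular_pred_generic for the fractional-sample case.
 * Assumes 8-bit samples and arithmetic right shift of negative ints. */
#include <stdio.h>

int main(void)
{
  for (int delta_fract = 0; delta_fract < 32; ++delta_fract) {
    for (int ref1 = 0; ref1 < 256; ++ref1) {
      for (int ref2 = 0; ref2 < 256; ++ref2) {
        int old_val = ((32 - delta_fract) * ref1 + delta_fract * ref2 + 16) >> 5;
        int new_val = ref1 + ((delta_fract * (ref2 - ref1) + 16) >> 5);
        if (old_val != new_val) {
          printf("mismatch: d=%d r1=%d r2=%d old=%d new=%d\n",
                 delta_fract, ref1, ref2, old_val, new_val);
          return 1;
        }
      }
    }
  }
  printf("old and new linear interpolation agree on all inputs\n");
  return 0;
}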