Diffstat (limited to 'vp9/encoder/vp9_rdopt.c')
-rw-r--r--  vp9/encoder/vp9_rdopt.c | 36 ++++++++++++++++++++++++++++++------
1 file changed, 30 insertions(+), 6 deletions(-)
diff --git a/vp9/encoder/vp9_rdopt.c b/vp9/encoder/vp9_rdopt.c
index 2a6b70703..b8d17205d 100644
--- a/vp9/encoder/vp9_rdopt.c
+++ b/vp9/encoder/vp9_rdopt.c
@@ -1349,11 +1349,25 @@ static int64_t encode_inter_mb_segment(VP9_COMP *cpi,
const InterpKernel *kernel = vp9_filter_kernels[mi->mbmi.interp_filter];
for (ref = 0; ref < 1 + is_compound; ++ref) {
- const uint8_t *pre = &pd->pre[ref].buf[vp9_raster_block_offset(BLOCK_8X8, i,
- pd->pre[ref].stride)];
+ const int bw = b_width_log2_lookup[BLOCK_8X8];
+ const int h = 4 * (i >> bw);
+ const int w = 4 * (i & ((1 << bw) - 1));
+ const struct scale_factors *sf = &xd->block_refs[ref]->sf;
+ int y_stride = pd->pre[ref].stride;
+ uint8_t *pre = pd->pre[ref].buf + (h * pd->pre[ref].stride + w);
+
+ if (vp9_is_scaled(sf)) {
+ const int x_start = (-xd->mb_to_left_edge >> (3 + pd->subsampling_x));
+ const int y_start = (-xd->mb_to_top_edge >> (3 + pd->subsampling_y));
+
+ y_stride = xd->block_refs[ref]->buf->y_stride;
+ pre = xd->block_refs[ref]->buf->y_buffer;
+ pre += scaled_buffer_offset(x_start + w, y_start + h,
+ y_stride, sf);
+ }
#if CONFIG_VP9_HIGHBITDEPTH
if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
- vp9_highbd_build_inter_predictor(pre, pd->pre[ref].stride,
+ vp9_highbd_build_inter_predictor(pre, y_stride,
dst, pd->dst.stride,
&mi->bmi[i].as_mv[ref].as_mv,
&xd->block_refs[ref]->sf, width, height,
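
For context, not part of the patch: the deleted lines located each 4x4 sub-block of the 8x8 via vp9_raster_block_offset(); the replacement computes the same row/column offsets by hand so the scaled branch can reuse them. A minimal standalone sketch of that index math, assuming b_width_log2_lookup[BLOCK_8X8] == 1 as in vp9/common/vp9_common_data.c:

    /* Sketch: where the four 4x4 sub-blocks of a BLOCK_8X8 land, using
     * the same shift/mask arithmetic the patch introduces. */
    #include <stdio.h>

    int main(void) {
      const int bw = 1; /* b_width_log2_lookup[BLOCK_8X8] */
      int i;
      for (i = 0; i < 4; ++i) {
        const int h = 4 * (i >> bw);             /* row offset in pixels */
        const int w = 4 * (i & ((1 << bw) - 1)); /* col offset in pixels */
        printf("i=%d -> row %d, col %d\n", i, h, w);
      }
      return 0; /* prints (0,0), (0,4), (4,0), (4,4) in raster order */
    }
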
@@ -1361,7 +1375,7 @@ static int64_t encode_inter_mb_segment(VP9_COMP *cpi,
mi_col * MI_SIZE + 4 * (i % 2),
mi_row * MI_SIZE + 4 * (i / 2), xd->bd);
} else {
- vp9_build_inter_predictor(pre, pd->pre[ref].stride,
+ vp9_build_inter_predictor(pre, y_stride,
dst, pd->dst.stride,
&mi->bmi[i].as_mv[ref].as_mv,
&xd->block_refs[ref]->sf, width, height, ref,
@@ -1370,7 +1384,7 @@ static int64_t encode_inter_mb_segment(VP9_COMP *cpi,
mi_row * MI_SIZE + 4 * (i / 2));
}
#else
- vp9_build_inter_predictor(pre, pd->pre[ref].stride,
+ vp9_build_inter_predictor(pre, y_stride,
dst, pd->dst.stride,
&mi->bmi[i].as_mv[ref].as_mv,
&xd->block_refs[ref]->sf, width, height, ref,
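
When the reference is scaled, the patch re-bases pre on the scaled frame's own buffer and converts the block's current-frame coordinates with scaled_buffer_offset(). Paraphrased from vp9/common/vp9_reconinter.h of this era (a sketch for orientation, not authoritative):

    static INLINE int scaled_buffer_offset(int x_offset, int y_offset,
                                           int stride,
                                           const struct scale_factors *sf) {
      /* scale_value_x/y are the fixed-point coordinate scalers stored in
       * struct scale_factors; with no scaling they are the identity. */
      const int x = sf ? sf->scale_value_x(x_offset, sf) : x_offset;
      const int y = sf ? sf->scale_value_y(y_offset, sf) : y_offset;
      return y * stride + x;
    }

This is also why y_stride replaces pd->pre[ref].stride in the three build_inter_predictor calls above: the offset is formed against the scaled buffer's stride, so pairing it with the unscaled stride would read from the wrong rows.
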
@@ -3021,7 +3035,7 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi,
for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
if (!(cpi->ref_frame_flags & flag_list[ref_frame])) {
// Skip checking missing references in both single and compound reference
- // modes. Note that a mode will be skipped iff both reference frames
+ // modes. Note that a mode will be skipped if both reference frames
// are masked out.
ref_frame_skip_mask[0] |= (1 << ref_frame);
ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
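
The comment fix matches how the two masks are consulted later in the same function: a candidate mode is dropped only when both of its reference frames are masked. Roughly (paraphrasing the mode loop; VPXMAX clamps the NONE second reference, -1, to bit 0):

    if (ref_frame_skip_mask[0] & (1 << ref_frame) &&
        ref_frame_skip_mask[1] & (1 << VPXMAX(0, second_ref_frame)))
      continue;

Single-reference modes are still caught because SECOND_REF_FRAME_MASK includes bit 0.
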
@@ -3804,6 +3818,16 @@ void vp9_rd_pick_inter_mode_sub8x8(VP9_COMP *cpi,
ref_frame = vp9_ref_order[ref_index].ref_frame[0];
second_ref_frame = vp9_ref_order[ref_index].ref_frame[1];
+#if CONFIG_BETTER_HW_COMPATIBILITY
+ // Forbid 8X4 and 4X8 partitions if any reference frame is scaled.
+ if (bsize == BLOCK_8X4 || bsize == BLOCK_4X8) {
+ int ref_scaled = vp9_is_scaled(&cm->frame_refs[ref_frame - 1].sf);
+ if (second_ref_frame > INTRA_FRAME)
+ ref_scaled += vp9_is_scaled(&cm->frame_refs[second_ref_frame - 1].sf);
+ if (ref_scaled)
+ continue;
+ }
+#endif
// Look at the reference frame of the best mode so far and set the
// skip mask to look at a subset of the remaining modes.
if (ref_index > 2 && sf->mode_skip_start < MAX_MODES) {
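
The new guard keys entirely off vp9_is_scaled(). A self-contained sketch of what that predicate checks, with simplified stand-ins (is_scaled/is_valid_scale mirror vp9_is_scaled/vp9_is_valid_scale from vp9/common/vp9_scale.h; the constants match that header):

    #include <stdio.h>

    #define REF_SCALE_SHIFT 14
    #define REF_NO_SCALE (1 << REF_SCALE_SHIFT) /* 1:1 in fixed point */
    #define REF_INVALID_SCALE -1

    struct scale_factors {
      int x_scale_fp; /* horizontal fixed-point scale factor */
      int y_scale_fp; /* vertical fixed-point scale factor */
    };

    static int is_valid_scale(const struct scale_factors *sf) {
      return sf->x_scale_fp != REF_INVALID_SCALE &&
             sf->y_scale_fp != REF_INVALID_SCALE;
    }

    /* A reference counts as "scaled" when its factors are valid but not
     * 1:1, i.e. its resolution differs from the current frame's. */
    static int is_scaled(const struct scale_factors *sf) {
      return is_valid_scale(sf) &&
             (sf->x_scale_fp != REF_NO_SCALE ||
              sf->y_scale_fp != REF_NO_SCALE);
    }

    int main(void) {
      const struct scale_factors same = { REF_NO_SCALE, REF_NO_SCALE };
      const struct scale_factors twice = { 2 * REF_NO_SCALE, 2 * REF_NO_SCALE };
      printf("same resolution: %d\n", is_scaled(&same));  /* 0: allowed */
      printf("2x reference:    %d\n", is_scaled(&twice)); /* 1: 8x4/4x8 skipped */
      return 0;
    }

So with CONFIG_BETTER_HW_COMPATIBILITY set, the smallest sub-8x8 partitions are never combined with a scaled reference in either single or compound prediction.
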