diff options
author | Jingning Han <jingning@google.com> | 2018-05-23 12:13:50 -0700 |
---|---|---|
committer | Jingning Han <jingning@google.com> | 2018-06-14 20:31:26 -0700 |
commit | dda0611008b59bd7065613ccca81a85b07e138c5 (patch) | |
tree | a127401c5a1f741a88ad7865d274fd628f32acc9 | |
parent | d94f1c84ccef8b9ac9765aa337865ec8c01fbcce (diff) | |
download | libvpx-dda0611008b59bd7065613ccca81a85b07e138c5.tar libvpx-dda0611008b59bd7065613ccca81a85b07e138c5.tar.gz libvpx-dda0611008b59bd7065613ccca81a85b07e138c5.tar.bz2 libvpx-dda0611008b59bd7065613ccca81a85b07e138c5.zip |
Prepare motion estimation process for temporal dependency model
Set up needed stack for the motion estimation process to build up
the temporal dependency model.
Change-Id: I3436302c916a686e8c82572ffc106bf8023404b6
-rw-r--r-- | vp9/encoder/vp9_encoder.c | 36 |
1 file changed, 36 insertions, 0 deletions
diff --git a/vp9/encoder/vp9_encoder.c b/vp9/encoder/vp9_encoder.c index 6abd320fc..2b1f2237f 100644 --- a/vp9/encoder/vp9_encoder.c +++ b/vp9/encoder/vp9_encoder.c @@ -5334,8 +5334,25 @@ void mc_flow_dispenser(VP9_COMP *cpi, GF_PICTURE *gf_picture, int frame_idx) { YV12_BUFFER_CONFIG *this_frame = gf_picture[frame_idx].frame; YV12_BUFFER_CONFIG *ref_frame[3] = { NULL, NULL, NULL }; + VP9_COMMON *cm = &cpi->common; struct scale_factors sf; int rdmult, idx; + ThreadData *td = &cpi->td; + MACROBLOCK *x = &td->mb; + MACROBLOCKD *xd = &x->e_mbd; + int mi_row, mi_col; + +#if CONFIG_VP9_HIGHBITDEPTH + DECLARE_ALIGNED(16, uint16_t, predictor16[16 * 16 * 3]); + DECLARE_ALIGNED(16, uint8_t, predictor8[16 * 16 * 3]); + uint8_t *predictor; + (void)predictor; + (void)predictor16; + (void)predictor8; +#else + DECLARE_ALIGNED(16, uint8_t, predictor[16 * 16 * 3]); + (void)predictor; +#endif // Setup scaling factor #if CONFIG_VP9_HIGHBITDEPTH @@ -5362,6 +5379,25 @@ void mc_flow_dispenser(VP9_COMP *cpi, GF_PICTURE *gf_picture, int frame_idx) { set_error_per_bit(&cpi->td.mb, rdmult); vp9_initialize_me_consts(cpi, &cpi->td.mb, ARNR_FILT_QINDEX); + for (mi_row = 0; mi_row < cm->mi_rows; ++mi_row) { + // Motion estimation row boundary + x->mv_limits.row_min = -((mi_row * MI_SIZE) + (17 - 2 * VP9_INTERP_EXTEND)); + x->mv_limits.row_max = + (cm->mi_rows - 1 - mi_row) * MI_SIZE + (17 - 2 * VP9_INTERP_EXTEND); + for (mi_col = 0; mi_col < cm->mi_cols; ++mi_col) { + int mb_y_offset = + mi_row * MI_SIZE * this_frame->y_stride + mi_col * MI_SIZE; + + (void)mb_y_offset; + // Motion estimation column boundary + x->mv_limits.col_min = + -((mi_col * MI_SIZE) + (17 - 2 * VP9_INTERP_EXTEND)); + x->mv_limits.col_max = + ((cm->mi_cols - 1 - mi_col) * MI_SIZE) + (17 - 2 * VP9_INTERP_EXTEND); + } + } + + (void)xd; (void)tpl_frame; (void)this_frame; (void)ref_frame; |