From 147b125b1596df9bd0c8141b9d09229ab65d3e0f Mon Sep 17 00:00:00 2001
From: John Koleszar
Date: Thu, 16 Sep 2010 10:00:04 -0400
Subject: Reduce size of tokenizer tables

This patch reduces the size of the global tables maintained by the
tokenizer to 16k from 80k-96k. See issue #177.

Change-Id: If0275d5f28389af11ac83c5d929d1157cde90fbe
---
 vp8/encoder/tokenize.c | 6 +++---
 vp8/encoder/tokenize.h | 8 +++++++-
 2 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/vp8/encoder/tokenize.c b/vp8/encoder/tokenize.c
index d9b8d36fd..0e86f28df 100644
--- a/vp8/encoder/tokenize.c
+++ b/vp8/encoder/tokenize.c
@@ -26,8 +26,8 @@ _int64 context_counters[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [vp8_coef
 void vp8_stuff_mb(VP8_COMP *cpi, MACROBLOCKD *x, TOKENEXTRA **t) ;
 void vp8_fix_contexts(MACROBLOCKD *x);
 
-TOKENEXTRA vp8_dct_value_tokens[DCT_MAX_VALUE*2];
-const TOKENEXTRA *vp8_dct_value_tokens_ptr;
+TOKENVALUE vp8_dct_value_tokens[DCT_MAX_VALUE*2];
+const TOKENVALUE *vp8_dct_value_tokens_ptr;
 int vp8_dct_value_cost[DCT_MAX_VALUE*2];
 const int *vp8_dct_value_cost_ptr;
 #if 0
@@ -37,7 +37,7 @@ int skip_false_count = 0;
 
 static void fill_value_tokens()
 {
-    TOKENEXTRA *const t = vp8_dct_value_tokens + DCT_MAX_VALUE;
+    TOKENVALUE *const t = vp8_dct_value_tokens + DCT_MAX_VALUE;
     vp8_extra_bit_struct *const e = vp8_extra_bits;
 
     int i = -DCT_MAX_VALUE;
diff --git a/vp8/encoder/tokenize.h b/vp8/encoder/tokenize.h
index 7b9fc9eaa..01e8ec6d7 100644
--- a/vp8/encoder/tokenize.h
+++ b/vp8/encoder/tokenize.h
@@ -17,6 +17,12 @@
 
 void vp8_tokenize_initialize();
 
+typedef struct
+{
+    short Token;
+    short Extra;
+} TOKENVALUE;
+
 typedef struct
 {
     int Token;
@@ -40,6 +46,6 @@ extern const int *vp8_dct_value_cost_ptr;
  * improve cache locality, since it's needed for costing when the rest of the
  * fields are not. */
-extern const TOKENEXTRA *vp8_dct_value_tokens_ptr;
+extern const TOKENVALUE *vp8_dct_value_tokens_ptr;
 
 #endif /* tokenize_h */
-- 
cgit v1.2.3
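
For reference, the size figures in the commit message can be checked with a
quick calculation. The sketch below is not part of the patch: TOKENVALUE is
copied from the diff, but OLD_TOKENEXTRA is a hypothetical reconstruction of
the pre-patch TOKENEXTRA layout (the field names beyond Token are guesses),
and DCT_MAX_VALUE is assumed to be 2048, its value in vp8/common/entropy.h.
With one entry per signed DCT value, a 4-byte entry yields the 16k table,
while a 20- or 24-byte entry (32- vs 64-bit pointers) yields the 80k-96k
range.

    /*
     * Standalone size check; not part of the patch. TOKENVALUE matches the
     * struct added in tokenize.h above. OLD_TOKENEXTRA is a hypothetical
     * reconstruction of the pre-patch TOKENEXTRA layout, and DCT_MAX_VALUE
     * is assumed to be 2048 (its value in vp8/common/entropy.h).
     */
    #include <stddef.h>
    #include <stdio.h>

    #define DCT_MAX_VALUE 2048

    /* New per-value record from the patch: two shorts, 4 bytes on
     * typical 32- and 64-bit targets. */
    typedef struct
    {
        short Token;
        short Extra;
    } TOKENVALUE;

    /* Hypothetical stand-in for the old TOKENEXTRA: 20 bytes with 32-bit
     * pointers, 24 bytes with 64-bit pointers. */
    typedef struct
    {
        const void *context_tree;
        int Token;
        int Extra;
        int skip_eob_node;
        int section;
    } OLD_TOKENEXTRA;

    int main(void)
    {
        /* One table entry per signed DCT value: 2 * 2048 = 4096 entries. */
        size_t entries = DCT_MAX_VALUE * 2;

        /* 4096 * 4 = 16384 bytes, the "16k" in the commit message. */
        printf("new vp8_dct_value_tokens: %zu bytes\n",
               entries * sizeof(TOKENVALUE));

        /* 4096 * 20 = 81920 or 4096 * 24 = 98304 bytes, the "80k-96k". */
        printf("old vp8_dct_value_tokens: %zu bytes\n",
               entries * sizeof(OLD_TOKENEXTRA));
        return 0;
    }

Two shorts suffice here because every coefficient magnitude is below
DCT_MAX_VALUE (2048), so both the token index and the extra-bits value fit
comfortably in 16 bits; the wider fields dropped from the per-value table
are only needed during tokenization itself, not for the value lookup.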