summaryrefslogtreecommitdiff
path: root/vpx_mem/memory_manager/include
diff options
context:
space:
mode:
authorJohn Koleszar <jkoleszar@google.com>2012-07-13 15:21:29 -0700
committerJohn Koleszar <jkoleszar@google.com>2012-07-17 11:46:03 -0700
commitc6b9039fd94aede59ac1086a379955137fc8e1b8 (patch)
treef9b20b2ca2114fe9303c8226bb3b368568fd5509 /vpx_mem/memory_manager/include
parent8697c6e454e02c6cf644daa9d29fabd07e846f18 (diff)
downloadlibvpx-c6b9039fd94aede59ac1086a379955137fc8e1b8.tar
libvpx-c6b9039fd94aede59ac1086a379955137fc8e1b8.tar.gz
libvpx-c6b9039fd94aede59ac1086a379955137fc8e1b8.tar.bz2
libvpx-c6b9039fd94aede59ac1086a379955137fc8e1b8.zip
Restyle code
Approximate the Google style guide[1] so that that there's a written document to follow and tools to check compliance[2]. [1]: http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml [2]: http://google-styleguide.googlecode.com/svn/trunk/cpplint/cpplint.py Change-Id: Idf40e3d8dddcc72150f6af127b13e5dab838685f
Diffstat (limited to 'vpx_mem/memory_manager/include')
-rw-r--r--vpx_mem/memory_manager/include/cavl_if.h57
-rw-r--r--vpx_mem/memory_manager/include/cavl_impl.h1579
-rw-r--r--vpx_mem/memory_manager/include/heapmm.h77
-rw-r--r--vpx_mem/memory_manager/include/hmm_cnfg.h10
-rw-r--r--vpx_mem/memory_manager/include/hmm_intrnl.h64
5 files changed, 831 insertions, 956 deletions
diff --git a/vpx_mem/memory_manager/include/cavl_if.h b/vpx_mem/memory_manager/include/cavl_if.h
index 1b2c9b738..ec6e525b7 100644
--- a/vpx_mem/memory_manager/include/cavl_if.h
+++ b/vpx_mem/memory_manager/include/cavl_if.h
@@ -32,13 +32,12 @@
#ifndef AVL_SEARCH_TYPE_DEFINED_
#define AVL_SEARCH_TYPE_DEFINED_
-typedef enum
-{
- AVL_EQUAL = 1,
- AVL_LESS = 2,
- AVL_GREATER = 4,
- AVL_LESS_EQUAL = AVL_EQUAL | AVL_LESS,
- AVL_GREATER_EQUAL = AVL_EQUAL | AVL_GREATER
+typedef enum {
+ AVL_EQUAL = 1,
+ AVL_LESS = 2,
+ AVL_GREATER = 4,
+ AVL_LESS_EQUAL = AVL_EQUAL | AVL_LESS,
+ AVL_GREATER_EQUAL = AVL_EQUAL | AVL_GREATER
}
avl_search_type;
@@ -75,15 +74,14 @@ avl_search_type;
#endif
-typedef struct
-{
+typedef struct {
#ifdef AVL_INSIDE_STRUCT
- AVL_INSIDE_STRUCT
+ AVL_INSIDE_STRUCT
#endif
- AVL_HANDLE root;
+ AVL_HANDLE root;
}
L_(avl);
@@ -108,7 +106,7 @@ L_SC AVL_HANDLE L_(subst)(L_(avl) *tree, AVL_HANDLE new_node);
#ifdef AVL_BUILD_ITER_TYPE
L_SC int L_(build)(
- L_(avl) *tree, AVL_BUILD_ITER_TYPE p, L_SIZE num_nodes);
+ L_(avl) *tree, AVL_BUILD_ITER_TYPE p, L_SIZE num_nodes);
#endif
@@ -153,7 +151,7 @@ L_SC int L_(build)(
/* Maximum depth may be more than number of bits in a long. */
#define L_BIT_ARR_DEFN(NAME) \
- unsigned long NAME[((AVL_MAX_DEPTH) + L_LONG_BIT - 1) / L_LONG_BIT];
+ unsigned long NAME[((AVL_MAX_DEPTH) + L_LONG_BIT - 1) / L_LONG_BIT];
#else
@@ -164,29 +162,28 @@ L_SC int L_(build)(
#endif
/* Iterator structure. */
-typedef struct
-{
- /* Tree being iterated over. */
- L_(avl) *tree_;
-
- /* Records a path into the tree. If bit n is true, indicates
- ** take greater branch from the nth node in the path, otherwise
- ** take the less branch. bit 0 gives branch from root, and
- ** so on. */
- L_BIT_ARR_DEFN(branch)
-
- /* Zero-based depth of path into tree. */
- unsigned depth;
-
- /* Handles of nodes in path from root to current node (returned by *). */
- AVL_HANDLE path_h[(AVL_MAX_DEPTH) - 1];
+typedef struct {
+ /* Tree being iterated over. */
+ L_(avl) *tree_;
+
+ /* Records a path into the tree. If bit n is true, indicates
+ ** take greater branch from the nth node in the path, otherwise
+ ** take the less branch. bit 0 gives branch from root, and
+ ** so on. */
+ L_BIT_ARR_DEFN(branch)
+
+ /* Zero-based depth of path into tree. */
+ unsigned depth;
+
+ /* Handles of nodes in path from root to current node (returned by *). */
+ AVL_HANDLE path_h[(AVL_MAX_DEPTH) - 1];
}
L_(iter);
/* Iterator function prototypes. */
L_SC void L_(start_iter)(
- L_(avl) *tree, L_(iter) *iter, AVL_KEY k, avl_search_type st);
+ L_(avl) *tree, L_(iter) *iter, AVL_KEY k, avl_search_type st);
L_SC void L_(start_iter_least)(L_(avl) *tree, L_(iter) *iter);
diff --git a/vpx_mem/memory_manager/include/cavl_impl.h b/vpx_mem/memory_manager/include/cavl_impl.h
index 5e165dd4d..cf7deb7fb 100644
--- a/vpx_mem/memory_manager/include/cavl_impl.h
+++ b/vpx_mem/memory_manager/include/cavl_impl.h
@@ -110,16 +110,16 @@
#define L_BIT_ARR_DEFN(NAME) unsigned long NAME[L_BIT_ARR_LONGS];
#define L_BIT_ARR_VAL(BIT_ARR, BIT_NUM) \
- ((BIT_ARR)[(BIT_NUM) / L_LONG_BIT] & (1L << ((BIT_NUM) % L_LONG_BIT)))
+ ((BIT_ARR)[(BIT_NUM) / L_LONG_BIT] & (1L << ((BIT_NUM) % L_LONG_BIT)))
#define L_BIT_ARR_0(BIT_ARR, BIT_NUM) \
- (BIT_ARR)[(BIT_NUM) / L_LONG_BIT] &= ~(1L << ((BIT_NUM) % L_LONG_BIT));
+ (BIT_ARR)[(BIT_NUM) / L_LONG_BIT] &= ~(1L << ((BIT_NUM) % L_LONG_BIT));
#define L_BIT_ARR_1(BIT_ARR, BIT_NUM) \
- (BIT_ARR)[(BIT_NUM) / L_LONG_BIT] |= 1L << ((BIT_NUM) % L_LONG_BIT);
+ (BIT_ARR)[(BIT_NUM) / L_LONG_BIT] |= 1L << ((BIT_NUM) % L_LONG_BIT);
#define L_BIT_ARR_ALL(BIT_ARR, BIT_VAL) \
- { int i = L_BIT_ARR_LONGS; do (BIT_ARR)[--i] = 0L - (BIT_VAL); while(i); }
+ { int i = L_BIT_ARR_LONGS; do (BIT_ARR)[--i] = 0L - (BIT_VAL); while(i); }
#else /* The bit array can definitely fit in one long */
@@ -138,7 +138,7 @@
#ifdef AVL_READ_ERRORS_HAPPEN
#define L_CHECK_READ_ERROR(ERROR_RETURN) \
- { if (AVL_READ_ERROR) return(ERROR_RETURN); }
+ { if (AVL_READ_ERROR) return(ERROR_RETURN); }
#else
@@ -179,18 +179,16 @@
#if (L_IMPL_MASK & AVL_IMPL_INIT)
-L_SC void L_(init)(L_(avl) *l_tree)
-{
- l_tree->root = AVL_NULL;
+L_SC void L_(init)(L_(avl) *l_tree) {
+ l_tree->root = AVL_NULL;
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_IS_EMPTY)
-L_SC int L_(is_empty)(L_(avl) *l_tree)
-{
- return(l_tree->root == AVL_NULL);
+L_SC int L_(is_empty)(L_(avl) *l_tree) {
+ return(l_tree->root == AVL_NULL);
}
#endif
@@ -201,358 +199,305 @@ L_SC int L_(is_empty)(L_(avl) *l_tree)
/* Balances subtree, returns handle of root node of subtree after balancing.
*/
-L_SC AVL_HANDLE L_(balance)(L_BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h)
-{
- AVL_HANDLE deep_h;
+L_SC AVL_HANDLE L_(balance)(L_BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h) {
+ AVL_HANDLE deep_h;
- /* Either the "greater than" or the "less than" subtree of
- ** this node has to be 2 levels deeper (or else it wouldn't
- ** need balancing).
- */
- if (AVL_GET_BALANCE_FACTOR(bal_h) > 0)
- {
- /* "Greater than" subtree is deeper. */
+ /* Either the "greater than" or the "less than" subtree of
+ ** this node has to be 2 levels deeper (or else it wouldn't
+ ** need balancing).
+ */
+ if (AVL_GET_BALANCE_FACTOR(bal_h) > 0) {
+ /* "Greater than" subtree is deeper. */
- deep_h = AVL_GET_GREATER(bal_h, 1);
+ deep_h = AVL_GET_GREATER(bal_h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
+ L_CHECK_READ_ERROR(AVL_NULL)
- if (AVL_GET_BALANCE_FACTOR(deep_h) < 0)
- {
- int bf;
-
- AVL_HANDLE old_h = bal_h;
- bal_h = AVL_GET_LESS(deep_h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
- AVL_SET_GREATER(old_h, AVL_GET_LESS(bal_h, 1))
- AVL_SET_LESS(deep_h, AVL_GET_GREATER(bal_h, 1))
- AVL_SET_LESS(bal_h, old_h)
- AVL_SET_GREATER(bal_h, deep_h)
-
- bf = AVL_GET_BALANCE_FACTOR(bal_h);
-
- if (bf != 0)
- {
- if (bf > 0)
- {
- AVL_SET_BALANCE_FACTOR(old_h, -1)
- AVL_SET_BALANCE_FACTOR(deep_h, 0)
- }
- else
- {
- AVL_SET_BALANCE_FACTOR(deep_h, 1)
- AVL_SET_BALANCE_FACTOR(old_h, 0)
- }
-
- AVL_SET_BALANCE_FACTOR(bal_h, 0)
- }
- else
- {
- AVL_SET_BALANCE_FACTOR(old_h, 0)
- AVL_SET_BALANCE_FACTOR(deep_h, 0)
- }
- }
- else
- {
- AVL_SET_GREATER(bal_h, AVL_GET_LESS(deep_h, 0))
- AVL_SET_LESS(deep_h, bal_h)
-
- if (AVL_GET_BALANCE_FACTOR(deep_h) == 0)
- {
- AVL_SET_BALANCE_FACTOR(deep_h, -1)
- AVL_SET_BALANCE_FACTOR(bal_h, 1)
- }
- else
- {
- AVL_SET_BALANCE_FACTOR(deep_h, 0)
- AVL_SET_BALANCE_FACTOR(bal_h, 0)
- }
-
- bal_h = deep_h;
+ if (AVL_GET_BALANCE_FACTOR(deep_h) < 0) {
+ int bf;
+
+ AVL_HANDLE old_h = bal_h;
+ bal_h = AVL_GET_LESS(deep_h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ AVL_SET_GREATER(old_h, AVL_GET_LESS(bal_h, 1))
+ AVL_SET_LESS(deep_h, AVL_GET_GREATER(bal_h, 1))
+ AVL_SET_LESS(bal_h, old_h)
+ AVL_SET_GREATER(bal_h, deep_h)
+
+ bf = AVL_GET_BALANCE_FACTOR(bal_h);
+
+ if (bf != 0) {
+ if (bf > 0) {
+ AVL_SET_BALANCE_FACTOR(old_h, -1)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ } else {
+ AVL_SET_BALANCE_FACTOR(deep_h, 1)
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
}
+
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ } else {
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ }
+ } else {
+ AVL_SET_GREATER(bal_h, AVL_GET_LESS(deep_h, 0))
+ AVL_SET_LESS(deep_h, bal_h)
+
+ if (AVL_GET_BALANCE_FACTOR(deep_h) == 0) {
+ AVL_SET_BALANCE_FACTOR(deep_h, -1)
+ AVL_SET_BALANCE_FACTOR(bal_h, 1)
+ } else {
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ }
+
+ bal_h = deep_h;
}
- else
- {
- /* "Less than" subtree is deeper. */
+ } else {
+ /* "Less than" subtree is deeper. */
- deep_h = AVL_GET_LESS(bal_h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
+ deep_h = AVL_GET_LESS(bal_h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
- if (AVL_GET_BALANCE_FACTOR(deep_h) > 0)
- {
- int bf;
- AVL_HANDLE old_h = bal_h;
- bal_h = AVL_GET_GREATER(deep_h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
- AVL_SET_LESS(old_h, AVL_GET_GREATER(bal_h, 0))
- AVL_SET_GREATER(deep_h, AVL_GET_LESS(bal_h, 0))
- AVL_SET_GREATER(bal_h, old_h)
- AVL_SET_LESS(bal_h, deep_h)
-
- bf = AVL_GET_BALANCE_FACTOR(bal_h);
-
- if (bf != 0)
- {
- if (bf < 0)
- {
- AVL_SET_BALANCE_FACTOR(old_h, 1)
- AVL_SET_BALANCE_FACTOR(deep_h, 0)
- }
- else
- {
- AVL_SET_BALANCE_FACTOR(deep_h, -1)
- AVL_SET_BALANCE_FACTOR(old_h, 0)
- }
-
- AVL_SET_BALANCE_FACTOR(bal_h, 0)
- }
- else
- {
- AVL_SET_BALANCE_FACTOR(old_h, 0)
- AVL_SET_BALANCE_FACTOR(deep_h, 0)
- }
- }
- else
- {
- AVL_SET_LESS(bal_h, AVL_GET_GREATER(deep_h, 0))
- AVL_SET_GREATER(deep_h, bal_h)
-
- if (AVL_GET_BALANCE_FACTOR(deep_h) == 0)
- {
- AVL_SET_BALANCE_FACTOR(deep_h, 1)
- AVL_SET_BALANCE_FACTOR(bal_h, -1)
- }
- else
- {
- AVL_SET_BALANCE_FACTOR(deep_h, 0)
- AVL_SET_BALANCE_FACTOR(bal_h, 0)
- }
-
- bal_h = deep_h;
+ if (AVL_GET_BALANCE_FACTOR(deep_h) > 0) {
+ int bf;
+ AVL_HANDLE old_h = bal_h;
+ bal_h = AVL_GET_GREATER(deep_h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ AVL_SET_LESS(old_h, AVL_GET_GREATER(bal_h, 0))
+ AVL_SET_GREATER(deep_h, AVL_GET_LESS(bal_h, 0))
+ AVL_SET_GREATER(bal_h, old_h)
+ AVL_SET_LESS(bal_h, deep_h)
+
+ bf = AVL_GET_BALANCE_FACTOR(bal_h);
+
+ if (bf != 0) {
+ if (bf < 0) {
+ AVL_SET_BALANCE_FACTOR(old_h, 1)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ } else {
+ AVL_SET_BALANCE_FACTOR(deep_h, -1)
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
}
+
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ } else {
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ }
+ } else {
+ AVL_SET_LESS(bal_h, AVL_GET_GREATER(deep_h, 0))
+ AVL_SET_GREATER(deep_h, bal_h)
+
+ if (AVL_GET_BALANCE_FACTOR(deep_h) == 0) {
+ AVL_SET_BALANCE_FACTOR(deep_h, 1)
+ AVL_SET_BALANCE_FACTOR(bal_h, -1)
+ } else {
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ }
+
+ bal_h = deep_h;
}
+ }
- return(bal_h);
+ return(bal_h);
}
-L_SC AVL_HANDLE L_(insert)(L_(avl) *l_tree, AVL_HANDLE h)
-{
- AVL_SET_LESS(h, AVL_NULL)
- AVL_SET_GREATER(h, AVL_NULL)
- AVL_SET_BALANCE_FACTOR(h, 0)
+L_SC AVL_HANDLE L_(insert)(L_(avl) *l_tree, AVL_HANDLE h) {
+ AVL_SET_LESS(h, AVL_NULL)
+ AVL_SET_GREATER(h, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(h, 0)
- if (l_tree->root == AVL_NULL)
- l_tree->root = h;
- else
- {
- /* Last unbalanced node encountered in search for insertion point. */
- AVL_HANDLE unbal = AVL_NULL;
- /* Parent of last unbalanced node. */
- AVL_HANDLE parent_unbal = AVL_NULL;
- /* Balance factor of last unbalanced node. */
- int unbal_bf;
-
- /* Zero-based depth in tree. */
- unsigned depth = 0, unbal_depth = 0;
-
- /* Records a path into the tree. If bit n is true, indicates
- ** take greater branch from the nth node in the path, otherwise
- ** take the less branch. bit 0 gives branch from root, and
- ** so on. */
- L_BIT_ARR_DEFN(branch)
-
- AVL_HANDLE hh = l_tree->root;
- AVL_HANDLE parent = AVL_NULL;
- int cmp;
-
- do
- {
- if (AVL_GET_BALANCE_FACTOR(hh) != 0)
- {
- unbal = hh;
- parent_unbal = parent;
- unbal_depth = depth;
- }
-
- cmp = AVL_COMPARE_NODE_NODE(h, hh);
-
- if (cmp == 0)
- /* Duplicate key. */
- return(hh);
-
- parent = hh;
-
- if (cmp > 0)
- {
- hh = AVL_GET_GREATER(hh, 1);
- L_BIT_ARR_1(branch, depth)
- }
- else
- {
- hh = AVL_GET_LESS(hh, 1);
- L_BIT_ARR_0(branch, depth)
- }
-
- L_CHECK_READ_ERROR(AVL_NULL)
- depth++;
- }
- while (hh != AVL_NULL);
+ if (l_tree->root == AVL_NULL)
+ l_tree->root = h;
+ else {
+ /* Last unbalanced node encountered in search for insertion point. */
+ AVL_HANDLE unbal = AVL_NULL;
+ /* Parent of last unbalanced node. */
+ AVL_HANDLE parent_unbal = AVL_NULL;
+ /* Balance factor of last unbalanced node. */
+ int unbal_bf;
- /* Add node to insert as leaf of tree. */
- if (cmp < 0)
- AVL_SET_LESS(parent, h)
- else
- AVL_SET_GREATER(parent, h)
+ /* Zero-based depth in tree. */
+ unsigned depth = 0, unbal_depth = 0;
- depth = unbal_depth;
+ /* Records a path into the tree. If bit n is true, indicates
+ ** take greater branch from the nth node in the path, otherwise
+ ** take the less branch. bit 0 gives branch from root, and
+ ** so on. */
+ L_BIT_ARR_DEFN(branch)
- if (unbal == AVL_NULL)
- hh = l_tree->root;
- else
- {
- cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
- depth++;
- unbal_bf = AVL_GET_BALANCE_FACTOR(unbal);
-
- if (cmp < 0)
- unbal_bf--;
- else /* cmp > 0 */
- unbal_bf++;
-
- hh = cmp < 0 ? AVL_GET_LESS(unbal, 1) : AVL_GET_GREATER(unbal, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
-
- if ((unbal_bf != -2) && (unbal_bf != 2))
- {
- /* No rebalancing of tree is necessary. */
- AVL_SET_BALANCE_FACTOR(unbal, unbal_bf)
- unbal = AVL_NULL;
- }
- }
+ AVL_HANDLE hh = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+ int cmp;
- if (hh != AVL_NULL)
- while (h != hh)
- {
- cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
- depth++;
-
- if (cmp < 0)
- {
- AVL_SET_BALANCE_FACTOR(hh, -1)
- hh = AVL_GET_LESS(hh, 1);
- }
- else /* cmp > 0 */
- {
- AVL_SET_BALANCE_FACTOR(hh, 1)
- hh = AVL_GET_GREATER(hh, 1);
- }
-
- L_CHECK_READ_ERROR(AVL_NULL)
- }
-
- if (unbal != AVL_NULL)
- {
- unbal = L_(balance)(L_BALANCE_PARAM_CALL_PREFIX unbal);
- L_CHECK_READ_ERROR(AVL_NULL)
-
- if (parent_unbal == AVL_NULL)
- l_tree->root = unbal;
- else
- {
- depth = unbal_depth - 1;
- cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
-
- if (cmp < 0)
- AVL_SET_LESS(parent_unbal, unbal)
- else /* cmp > 0 */
- AVL_SET_GREATER(parent_unbal, unbal)
- }
- }
+ do {
+ if (AVL_GET_BALANCE_FACTOR(hh) != 0) {
+ unbal = hh;
+ parent_unbal = parent;
+ unbal_depth = depth;
+ }
- }
+ cmp = AVL_COMPARE_NODE_NODE(h, hh);
- return(h);
-}
+ if (cmp == 0)
+ /* Duplicate key. */
+ return(hh);
-#endif
+ parent = hh;
-#if (L_IMPL_MASK & AVL_IMPL_SEARCH)
+ if (cmp > 0) {
+ hh = AVL_GET_GREATER(hh, 1);
+ L_BIT_ARR_1(branch, depth)
+ } else {
+ hh = AVL_GET_LESS(hh, 1);
+ L_BIT_ARR_0(branch, depth)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ } while (hh != AVL_NULL);
+
+ /* Add node to insert as leaf of tree. */
+ if (cmp < 0)
+ AVL_SET_LESS(parent, h)
+ else
+ AVL_SET_GREATER(parent, h)
+
+ depth = unbal_depth;
+
+ if (unbal == AVL_NULL)
+ hh = l_tree->root;
+ else {
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+ depth++;
+ unbal_bf = AVL_GET_BALANCE_FACTOR(unbal);
+
+ if (cmp < 0)
+ unbal_bf--;
+ else /* cmp > 0 */
+ unbal_bf++;
+
+ hh = cmp < 0 ? AVL_GET_LESS(unbal, 1) : AVL_GET_GREATER(unbal, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+
+ if ((unbal_bf != -2) && (unbal_bf != 2)) {
+ /* No rebalancing of tree is necessary. */
+ AVL_SET_BALANCE_FACTOR(unbal, unbal_bf)
+ unbal = AVL_NULL;
+ }
+ }
-L_SC AVL_HANDLE L_(search)(L_(avl) *l_tree, AVL_KEY k, avl_search_type st)
-{
- int cmp, target_cmp;
- AVL_HANDLE match_h = AVL_NULL;
- AVL_HANDLE h = l_tree->root;
+ if (hh != AVL_NULL)
+ while (h != hh) {
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+ depth++;
- if (st & AVL_LESS)
- target_cmp = 1;
- else if (st & AVL_GREATER)
- target_cmp = -1;
- else
- target_cmp = 0;
+ if (cmp < 0) {
+ AVL_SET_BALANCE_FACTOR(hh, -1)
+ hh = AVL_GET_LESS(hh, 1);
+ } else { /* cmp > 0 */
+ AVL_SET_BALANCE_FACTOR(hh, 1)
+ hh = AVL_GET_GREATER(hh, 1);
+ }
- while (h != AVL_NULL)
- {
- cmp = AVL_COMPARE_KEY_NODE(k, h);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
- if (cmp == 0)
- {
- if (st & AVL_EQUAL)
- {
- match_h = h;
- break;
- }
+ if (unbal != AVL_NULL) {
+ unbal = L_(balance)(L_BALANCE_PARAM_CALL_PREFIX unbal);
+ L_CHECK_READ_ERROR(AVL_NULL)
- cmp = -target_cmp;
- }
- else if (target_cmp != 0)
- if (!((cmp ^ target_cmp) & L_MASK_HIGH_BIT))
- /* cmp and target_cmp are both positive or both negative. */
- match_h = h;
+ if (parent_unbal == AVL_NULL)
+ l_tree->root = unbal;
+ else {
+ depth = unbal_depth - 1;
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
- h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
+ if (cmp < 0)
+ AVL_SET_LESS(parent_unbal, unbal)
+ else /* cmp > 0 */
+ AVL_SET_GREATER(parent_unbal, unbal)
+ }
}
- return(match_h);
+ }
+
+ return(h);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_SEARCH)
+
+L_SC AVL_HANDLE L_(search)(L_(avl) *l_tree, AVL_KEY k, avl_search_type st) {
+ int cmp, target_cmp;
+ AVL_HANDLE match_h = AVL_NULL;
+ AVL_HANDLE h = l_tree->root;
+
+ if (st & AVL_LESS)
+ target_cmp = 1;
+ else if (st & AVL_GREATER)
+ target_cmp = -1;
+ else
+ target_cmp = 0;
+
+ while (h != AVL_NULL) {
+ cmp = AVL_COMPARE_KEY_NODE(k, h);
+
+ if (cmp == 0) {
+ if (st & AVL_EQUAL) {
+ match_h = h;
+ break;
+ }
+
+ cmp = -target_cmp;
+ } else if (target_cmp != 0)
+ if (!((cmp ^ target_cmp) & L_MASK_HIGH_BIT))
+ /* cmp and target_cmp are both positive or both negative. */
+ match_h = h;
+
+ h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ return(match_h);
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_SEARCH_LEAST)
-L_SC AVL_HANDLE L_(search_least)(L_(avl) *l_tree)
-{
- AVL_HANDLE h = l_tree->root;
- AVL_HANDLE parent = AVL_NULL;
+L_SC AVL_HANDLE L_(search_least)(L_(avl) *l_tree) {
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
- while (h != AVL_NULL)
- {
- parent = h;
- h = AVL_GET_LESS(h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
- }
+ while (h != AVL_NULL) {
+ parent = h;
+ h = AVL_GET_LESS(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
- return(parent);
+ return(parent);
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_SEARCH_GREATEST)
-L_SC AVL_HANDLE L_(search_greatest)(L_(avl) *l_tree)
-{
- AVL_HANDLE h = l_tree->root;
- AVL_HANDLE parent = AVL_NULL;
+L_SC AVL_HANDLE L_(search_greatest)(L_(avl) *l_tree) {
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
- while (h != AVL_NULL)
- {
- parent = h;
- h = AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
- }
+ while (h != AVL_NULL) {
+ parent = h;
+ h = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
- return(parent);
+ return(parent);
}
#endif
@@ -564,284 +509,253 @@ L_SC AVL_HANDLE L_(search_greatest)(L_(avl) *l_tree)
*/
L_SC AVL_HANDLE L_(balance)(L_BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h);
-L_SC AVL_HANDLE L_(remove)(L_(avl) *l_tree, AVL_KEY k)
-{
- /* Zero-based depth in tree. */
- unsigned depth = 0, rm_depth;
-
- /* Records a path into the tree. If bit n is true, indicates
- ** take greater branch from the nth node in the path, otherwise
- ** take the less branch. bit 0 gives branch from root, and
- ** so on. */
- L_BIT_ARR_DEFN(branch)
-
- AVL_HANDLE h = l_tree->root;
- AVL_HANDLE parent = AVL_NULL;
- AVL_HANDLE child;
- AVL_HANDLE path;
- int cmp, cmp_shortened_sub_with_path;
- int reduced_depth;
- int bf;
- AVL_HANDLE rm;
- AVL_HANDLE parent_rm;
-
- for (; ;)
- {
- if (h == AVL_NULL)
- /* No node in tree with given key. */
- return(AVL_NULL);
-
- cmp = AVL_COMPARE_KEY_NODE(k, h);
+L_SC AVL_HANDLE L_(remove)(L_(avl) *l_tree, AVL_KEY k) {
+ /* Zero-based depth in tree. */
+ unsigned depth = 0, rm_depth;
+
+ /* Records a path into the tree. If bit n is true, indicates
+ ** take greater branch from the nth node in the path, otherwise
+ ** take the less branch. bit 0 gives branch from root, and
+ ** so on. */
+ L_BIT_ARR_DEFN(branch)
+
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+ AVL_HANDLE child;
+ AVL_HANDLE path;
+ int cmp, cmp_shortened_sub_with_path;
+ int reduced_depth;
+ int bf;
+ AVL_HANDLE rm;
+ AVL_HANDLE parent_rm;
+
+ for (;;) {
+ if (h == AVL_NULL)
+ /* No node in tree with given key. */
+ return(AVL_NULL);
- if (cmp == 0)
- /* Found node to remove. */
- break;
+ cmp = AVL_COMPARE_KEY_NODE(k, h);
- parent = h;
+ if (cmp == 0)
+ /* Found node to remove. */
+ break;
- if (cmp > 0)
- {
- h = AVL_GET_GREATER(h, 1);
- L_BIT_ARR_1(branch, depth)
- }
- else
- {
- h = AVL_GET_LESS(h, 1);
- L_BIT_ARR_0(branch, depth)
- }
+ parent = h;
- L_CHECK_READ_ERROR(AVL_NULL)
- depth++;
- cmp_shortened_sub_with_path = cmp;
+ if (cmp > 0) {
+ h = AVL_GET_GREATER(h, 1);
+ L_BIT_ARR_1(branch, depth)
+ } else {
+ h = AVL_GET_LESS(h, 1);
+ L_BIT_ARR_0(branch, depth)
}
- rm = h;
- parent_rm = parent;
- rm_depth = depth;
-
- /* If the node to remove is not a leaf node, we need to get a
- ** leaf node, or a node with a single leaf as its child, to put
- ** in the place of the node to remove. We will get the greatest
- ** node in the less subtree (of the node to remove), or the least
- ** node in the greater subtree. We take the leaf node from the
- ** deeper subtree, if there is one. */
-
- if (AVL_GET_BALANCE_FACTOR(h) < 0)
- {
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ cmp_shortened_sub_with_path = cmp;
+ }
+
+ rm = h;
+ parent_rm = parent;
+ rm_depth = depth;
+
+ /* If the node to remove is not a leaf node, we need to get a
+ ** leaf node, or a node with a single leaf as its child, to put
+ ** in the place of the node to remove. We will get the greatest
+ ** node in the less subtree (of the node to remove), or the least
+ ** node in the greater subtree. We take the leaf node from the
+ ** deeper subtree, if there is one. */
+
+ if (AVL_GET_BALANCE_FACTOR(h) < 0) {
+ child = AVL_GET_LESS(h, 1);
+ L_BIT_ARR_0(branch, depth)
+ cmp = -1;
+ } else {
+ child = AVL_GET_GREATER(h, 1);
+ L_BIT_ARR_1(branch, depth)
+ cmp = 1;
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+
+ if (child != AVL_NULL) {
+ cmp = -cmp;
+
+ do {
+ parent = h;
+ h = child;
+
+ if (cmp < 0) {
child = AVL_GET_LESS(h, 1);
L_BIT_ARR_0(branch, depth)
- cmp = -1;
- }
- else
- {
+ } else {
child = AVL_GET_GREATER(h, 1);
L_BIT_ARR_1(branch, depth)
- cmp = 1;
- }
+ }
- L_CHECK_READ_ERROR(AVL_NULL)
- depth++;
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ } while (child != AVL_NULL);
- if (child != AVL_NULL)
- {
- cmp = -cmp;
-
- do
- {
- parent = h;
- h = child;
-
- if (cmp < 0)
- {
- child = AVL_GET_LESS(h, 1);
- L_BIT_ARR_0(branch, depth)
- }
- else
- {
- child = AVL_GET_GREATER(h, 1);
- L_BIT_ARR_1(branch, depth)
- }
-
- L_CHECK_READ_ERROR(AVL_NULL)
- depth++;
- }
- while (child != AVL_NULL);
+ if (parent == rm)
+ /* Only went through do loop once. Deleted node will be replaced
+ ** in the tree structure by one of its immediate children. */
+ cmp_shortened_sub_with_path = -cmp;
+ else
+ cmp_shortened_sub_with_path = cmp;
+
+ /* Get the handle of the opposite child, which may not be null. */
+ child = cmp > 0 ? AVL_GET_LESS(h, 0) : AVL_GET_GREATER(h, 0);
+ }
- if (parent == rm)
- /* Only went through do loop once. Deleted node will be replaced
- ** in the tree structure by one of its immediate children. */
- cmp_shortened_sub_with_path = -cmp;
+ if (parent == AVL_NULL)
+ /* There were only 1 or 2 nodes in this tree. */
+ l_tree->root = child;
+ else if (cmp_shortened_sub_with_path < 0)
+ AVL_SET_LESS(parent, child)
+ else
+ AVL_SET_GREATER(parent, child)
+
+ /* "path" is the parent of the subtree being eliminated or reduced
+ ** from a depth of 2 to 1. If "path" is the node to be removed, we
+ ** set path to the node we're about to poke into the position of the
+ ** node to be removed. */
+ path = parent == rm ? h : parent;
+
+ if (h != rm) {
+ /* Poke in the replacement for the node to be removed. */
+ AVL_SET_LESS(h, AVL_GET_LESS(rm, 0))
+ AVL_SET_GREATER(h, AVL_GET_GREATER(rm, 0))
+ AVL_SET_BALANCE_FACTOR(h, AVL_GET_BALANCE_FACTOR(rm))
+
+ if (parent_rm == AVL_NULL)
+ l_tree->root = h;
+ else {
+ depth = rm_depth - 1;
+
+ if (L_BIT_ARR_VAL(branch, depth))
+ AVL_SET_GREATER(parent_rm, h)
else
- cmp_shortened_sub_with_path = cmp;
+ AVL_SET_LESS(parent_rm, h)
+ }
+ }
- /* Get the handle of the opposite child, which may not be null. */
- child = cmp > 0 ? AVL_GET_LESS(h, 0) : AVL_GET_GREATER(h, 0);
- }
+ if (path != AVL_NULL) {
+ /* Create a temporary linked list from the parent of the path node
+ ** to the root node. */
+ h = l_tree->root;
+ parent = AVL_NULL;
+ depth = 0;
- if (parent == AVL_NULL)
- /* There were only 1 or 2 nodes in this tree. */
- l_tree->root = child;
- else if (cmp_shortened_sub_with_path < 0)
- AVL_SET_LESS(parent, child)
- else
- AVL_SET_GREATER(parent, child)
-
- /* "path" is the parent of the subtree being eliminated or reduced
- ** from a depth of 2 to 1. If "path" is the node to be removed, we
- ** set path to the node we're about to poke into the position of the
- ** node to be removed. */
- path = parent == rm ? h : parent;
-
- if (h != rm)
- {
- /* Poke in the replacement for the node to be removed. */
- AVL_SET_LESS(h, AVL_GET_LESS(rm, 0))
- AVL_SET_GREATER(h, AVL_GET_GREATER(rm, 0))
- AVL_SET_BALANCE_FACTOR(h, AVL_GET_BALANCE_FACTOR(rm))
-
- if (parent_rm == AVL_NULL)
- l_tree->root = h;
- else
- {
- depth = rm_depth - 1;
-
- if (L_BIT_ARR_VAL(branch, depth))
- AVL_SET_GREATER(parent_rm, h)
- else
- AVL_SET_LESS(parent_rm, h)
- }
+ while (h != path) {
+ if (L_BIT_ARR_VAL(branch, depth)) {
+ child = AVL_GET_GREATER(h, 1);
+ AVL_SET_GREATER(h, parent)
+ } else {
+ child = AVL_GET_LESS(h, 1);
+ AVL_SET_LESS(h, parent)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ parent = h;
+ h = child;
}
- if (path != AVL_NULL)
- {
- /* Create a temporary linked list from the parent of the path node
- ** to the root node. */
- h = l_tree->root;
- parent = AVL_NULL;
- depth = 0;
-
- while (h != path)
- {
- if (L_BIT_ARR_VAL(branch, depth))
- {
- child = AVL_GET_GREATER(h, 1);
- AVL_SET_GREATER(h, parent)
- }
- else
- {
- child = AVL_GET_LESS(h, 1);
- AVL_SET_LESS(h, parent)
- }
-
- L_CHECK_READ_ERROR(AVL_NULL)
- depth++;
- parent = h;
- h = child;
- }
+ /* Climb from the path node to the root node using the linked
+ ** list, restoring the tree structure and rebalancing as necessary.
+ */
+ reduced_depth = 1;
+ cmp = cmp_shortened_sub_with_path;
- /* Climb from the path node to the root node using the linked
- ** list, restoring the tree structure and rebalancing as necessary.
- */
- reduced_depth = 1;
- cmp = cmp_shortened_sub_with_path;
-
- for (; ;)
- {
- if (reduced_depth)
- {
- bf = AVL_GET_BALANCE_FACTOR(h);
-
- if (cmp < 0)
- bf++;
- else /* cmp > 0 */
- bf--;
-
- if ((bf == -2) || (bf == 2))
- {
- h = L_(balance)(L_BALANCE_PARAM_CALL_PREFIX h);
- L_CHECK_READ_ERROR(AVL_NULL)
- bf = AVL_GET_BALANCE_FACTOR(h);
- }
- else
- AVL_SET_BALANCE_FACTOR(h, bf)
- reduced_depth = (bf == 0);
- }
-
- if (parent == AVL_NULL)
- break;
-
- child = h;
- h = parent;
- depth--;
- cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
-
- if (cmp < 0)
- {
- parent = AVL_GET_LESS(h, 1);
- AVL_SET_LESS(h, child)
- }
- else
- {
- parent = AVL_GET_GREATER(h, 1);
- AVL_SET_GREATER(h, child)
- }
-
- L_CHECK_READ_ERROR(AVL_NULL)
- }
+ for (;;) {
+ if (reduced_depth) {
+ bf = AVL_GET_BALANCE_FACTOR(h);
+
+ if (cmp < 0)
+ bf++;
+ else /* cmp > 0 */
+ bf--;
+
+ if ((bf == -2) || (bf == 2)) {
+ h = L_(balance)(L_BALANCE_PARAM_CALL_PREFIX h);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ bf = AVL_GET_BALANCE_FACTOR(h);
+ } else
+ AVL_SET_BALANCE_FACTOR(h, bf)
+ reduced_depth = (bf == 0);
+ }
+
+ if (parent == AVL_NULL)
+ break;
+
+ child = h;
+ h = parent;
+ depth--;
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+
+ if (cmp < 0) {
+ parent = AVL_GET_LESS(h, 1);
+ AVL_SET_LESS(h, child)
+ } else {
+ parent = AVL_GET_GREATER(h, 1);
+ AVL_SET_GREATER(h, child)
+ }
- l_tree->root = h;
+ L_CHECK_READ_ERROR(AVL_NULL)
}
- return(rm);
+ l_tree->root = h;
+ }
+
+ return(rm);
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_SUBST)
-L_SC AVL_HANDLE L_(subst)(L_(avl) *l_tree, AVL_HANDLE new_node)
-{
- AVL_HANDLE h = l_tree->root;
- AVL_HANDLE parent = AVL_NULL;
- int cmp, last_cmp;
-
- /* Search for node already in tree with same key. */
- for (; ;)
- {
- if (h == AVL_NULL)
- /* No node in tree with same key as new node. */
- return(AVL_NULL);
+L_SC AVL_HANDLE L_(subst)(L_(avl) *l_tree, AVL_HANDLE new_node) {
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+ int cmp, last_cmp;
- cmp = AVL_COMPARE_NODE_NODE(new_node, h);
-
- if (cmp == 0)
- /* Found the node to substitute new one for. */
- break;
+ /* Search for node already in tree with same key. */
+ for (;;) {
+ if (h == AVL_NULL)
+ /* No node in tree with same key as new node. */
+ return(AVL_NULL);
- last_cmp = cmp;
- parent = h;
- h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR(AVL_NULL)
- }
+ cmp = AVL_COMPARE_NODE_NODE(new_node, h);
- /* Copy tree housekeeping fields from node in tree to new node. */
- AVL_SET_LESS(new_node, AVL_GET_LESS(h, 0))
- AVL_SET_GREATER(new_node, AVL_GET_GREATER(h, 0))
- AVL_SET_BALANCE_FACTOR(new_node, AVL_GET_BALANCE_FACTOR(h))
+ if (cmp == 0)
+ /* Found the node to substitute new one for. */
+ break;
- if (parent == AVL_NULL)
- /* New node is also new root. */
- l_tree->root = new_node;
- else
- {
- /* Make parent point to new node. */
- if (last_cmp < 0)
- AVL_SET_LESS(parent, new_node)
- else
- AVL_SET_GREATER(parent, new_node)
- }
-
- return(h);
+ last_cmp = cmp;
+ parent = h;
+ h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ /* Copy tree housekeeping fields from node in tree to new node. */
+ AVL_SET_LESS(new_node, AVL_GET_LESS(h, 0))
+ AVL_SET_GREATER(new_node, AVL_GET_GREATER(h, 0))
+ AVL_SET_BALANCE_FACTOR(new_node, AVL_GET_BALANCE_FACTOR(h))
+
+ if (parent == AVL_NULL)
+ /* New node is also new root. */
+ l_tree->root = new_node;
+ else {
+ /* Make parent point to new node. */
+ if (last_cmp < 0)
+ AVL_SET_LESS(parent, new_node)
+ else
+ AVL_SET_GREATER(parent, new_node)
+ }
+
+ return(h);
}
#endif
@@ -851,144 +765,136 @@ L_SC AVL_HANDLE L_(subst)(L_(avl) *l_tree, AVL_HANDLE new_node)
#if (L_IMPL_MASK & AVL_IMPL_BUILD)
L_SC int L_(build)(
- L_(avl) *l_tree, AVL_BUILD_ITER_TYPE p, L_SIZE num_nodes)
-{
- /* Gives path to subtree being built. If bit n is false, branch
- ** less from the node at depth n, if true branch greater. */
- L_BIT_ARR_DEFN(branch)
-
- /* If bit n is true, then for the current subtree at depth n, its
- ** greater subtree has one more node than its less subtree. */
- L_BIT_ARR_DEFN(rem)
-
- /* Depth of root node of current subtree. */
- unsigned depth = 0;
+ L_(avl) *l_tree, AVL_BUILD_ITER_TYPE p, L_SIZE num_nodes) {
+ /* Gives path to subtree being built. If bit n is false, branch
+ ** less from the node at depth n, if true branch greater. */
+ L_BIT_ARR_DEFN(branch)
+
+ /* If bit n is true, then for the current subtree at depth n, its
+ ** greater subtree has one more node than its less subtree. */
+ L_BIT_ARR_DEFN(rem)
+
+ /* Depth of root node of current subtree. */
+ unsigned depth = 0;
+
+ /* Number of nodes in current subtree. */
+ L_SIZE num_sub = num_nodes;
+
+ /* The algorithm relies on a stack of nodes whose less subtree has
+ ** been built, but whose greater subtree has not yet been built.
+  ** The stack is implemented as a linked list.  The nodes are linked
+ ** together by having the "greater" handle of a node set to the
+ ** next node in the list. "less_parent" is the handle of the first
+ ** node in the list. */
+ AVL_HANDLE less_parent = AVL_NULL;
+
+ /* h is root of current subtree, child is one of its children. */
+ AVL_HANDLE h;
+ AVL_HANDLE child;
+
+ if (num_nodes == 0) {
+ l_tree->root = AVL_NULL;
+ return(1);
+ }
- /* Number of nodes in current subtree. */
- L_SIZE num_sub = num_nodes;
+ for (;;) {
+ while (num_sub > 2) {
+ /* Subtract one for root of subtree. */
+ num_sub--;
- /* The algorithm relies on a stack of nodes whose less subtree has
- ** been built, but whose greater subtree has not yet been built.
- ** The stack is implemented as linked list. The nodes are linked
- ** together by having the "greater" handle of a node set to the
- ** next node in the list. "less_parent" is the handle of the first
- ** node in the list. */
- AVL_HANDLE less_parent = AVL_NULL;
+ if (num_sub & 1)
+ L_BIT_ARR_1(rem, depth)
+ else
+ L_BIT_ARR_0(rem, depth)
+ L_BIT_ARR_0(branch, depth)
+ depth++;
- /* h is root of current subtree, child is one of its children. */
- AVL_HANDLE h;
- AVL_HANDLE child;
+ num_sub >>= 1;
+ }
- if (num_nodes == 0)
- {
- l_tree->root = AVL_NULL;
- return(1);
+ if (num_sub == 2) {
+ /* Build a subtree with two nodes, slanting to greater.
+ ** I arbitrarily chose to always have the extra node in the
+ ** greater subtree when there is an odd number of nodes to
+ ** split between the two subtrees. */
+
+ h = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ child = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ AVL_SET_LESS(child, AVL_NULL)
+ AVL_SET_GREATER(child, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(child, 0)
+ AVL_SET_GREATER(h, child)
+ AVL_SET_LESS(h, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(h, 1)
+ } else { /* num_sub == 1 */
+ /* Build a subtree with one node. */
+
+ h = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ AVL_SET_LESS(h, AVL_NULL)
+ AVL_SET_GREATER(h, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(h, 0)
}
- for (; ;)
- {
- while (num_sub > 2)
- {
- /* Subtract one for root of subtree. */
- num_sub--;
-
- if (num_sub & 1)
- L_BIT_ARR_1(rem, depth)
- else
- L_BIT_ARR_0(rem, depth)
- L_BIT_ARR_0(branch, depth)
- depth++;
-
- num_sub >>= 1;
+ while (depth) {
+ depth--;
+
+ if (!L_BIT_ARR_VAL(branch, depth))
+ /* We've completed a less subtree. */
+ break;
+
+ /* We've completed a greater subtree, so attach it to
+ ** its parent (that is less than it). We pop the parent
+ ** off the stack of less parents. */
+ child = h;
+ h = less_parent;
+ less_parent = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(0)
+ AVL_SET_GREATER(h, child)
+ /* num_sub = 2 * (num_sub - rem[depth]) + rem[depth] + 1 */
+ num_sub <<= 1;
+ num_sub += L_BIT_ARR_VAL(rem, depth) ? 0 : 1;
+
+ if (num_sub & (num_sub - 1))
+ /* num_sub is not a power of 2. */
+ AVL_SET_BALANCE_FACTOR(h, 0)
+ else
+ /* num_sub is a power of 2. */
+ AVL_SET_BALANCE_FACTOR(h, 1)
}
- if (num_sub == 2)
- {
- /* Build a subtree with two nodes, slanting to greater.
- ** I arbitrarily chose to always have the extra node in the
- ** greater subtree when there is an odd number of nodes to
- ** split between the two subtrees. */
-
- h = AVL_BUILD_ITER_VAL(p);
- L_CHECK_READ_ERROR(0)
- AVL_BUILD_ITER_INCR(p)
- child = AVL_BUILD_ITER_VAL(p);
- L_CHECK_READ_ERROR(0)
- AVL_BUILD_ITER_INCR(p)
- AVL_SET_LESS(child, AVL_NULL)
- AVL_SET_GREATER(child, AVL_NULL)
- AVL_SET_BALANCE_FACTOR(child, 0)
- AVL_SET_GREATER(h, child)
- AVL_SET_LESS(h, AVL_NULL)
- AVL_SET_BALANCE_FACTOR(h, 1)
- }
- else /* num_sub == 1 */
- {
- /* Build a subtree with one node. */
-
- h = AVL_BUILD_ITER_VAL(p);
- L_CHECK_READ_ERROR(0)
- AVL_BUILD_ITER_INCR(p)
- AVL_SET_LESS(h, AVL_NULL)
- AVL_SET_GREATER(h, AVL_NULL)
- AVL_SET_BALANCE_FACTOR(h, 0)
- }
+ if (num_sub == num_nodes)
+ /* We've completed the full tree. */
+ break;
- while (depth)
- {
- depth--;
-
- if (!L_BIT_ARR_VAL(branch, depth))
- /* We've completed a less subtree. */
- break;
-
- /* We've completed a greater subtree, so attach it to
- ** its parent (that is less than it). We pop the parent
- ** off the stack of less parents. */
- child = h;
- h = less_parent;
- less_parent = AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR(0)
- AVL_SET_GREATER(h, child)
- /* num_sub = 2 * (num_sub - rem[depth]) + rem[depth] + 1 */
- num_sub <<= 1;
- num_sub += L_BIT_ARR_VAL(rem, depth) ? 0 : 1;
-
- if (num_sub & (num_sub - 1))
- /* num_sub is not a power of 2. */
- AVL_SET_BALANCE_FACTOR(h, 0)
- else
- /* num_sub is a power of 2. */
- AVL_SET_BALANCE_FACTOR(h, 1)
- }
-
- if (num_sub == num_nodes)
- /* We've completed the full tree. */
- break;
-
- /* The subtree we've completed is the less subtree of the
- ** next node in the sequence. */
-
- child = h;
- h = AVL_BUILD_ITER_VAL(p);
- L_CHECK_READ_ERROR(0)
- AVL_BUILD_ITER_INCR(p)
- AVL_SET_LESS(h, child)
+ /* The subtree we've completed is the less subtree of the
+ ** next node in the sequence. */
- /* Put h into stack of less parents. */
- AVL_SET_GREATER(h, less_parent)
- less_parent = h;
+ child = h;
+ h = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ AVL_SET_LESS(h, child)
- /* Proceed to creating greater than subtree of h. */
- L_BIT_ARR_1(branch, depth)
- num_sub += L_BIT_ARR_VAL(rem, depth) ? 1 : 0;
- depth++;
+ /* Put h into stack of less parents. */
+ AVL_SET_GREATER(h, less_parent)
+ less_parent = h;
- } /* end for ( ; ; ) */
+ /* Proceed to creating greater than subtree of h. */
+ L_BIT_ARR_1(branch, depth)
+ num_sub += L_BIT_ARR_VAL(rem, depth) ? 1 : 0;
+ depth++;
- l_tree->root = h;
+  } /* end for (;;) */
- return(1);
+ l_tree->root = h;
+
+ return(1);
}
#endif
@@ -1001,9 +907,8 @@ L_SC int L_(build)(
** invalid. (Depth is zero-base.) It's not necessary to initialize
** iterators prior to passing them to the "start" function.
*/
-L_SC void L_(init_iter)(L_(iter) *iter)
-{
- iter->depth = ~0;
+L_SC void L_(init_iter)(L_(iter) *iter) {
+ iter->depth = ~0;
}
#endif
@@ -1011,7 +916,7 @@ L_SC void L_(init_iter)(L_(iter) *iter)
#ifdef AVL_READ_ERRORS_HAPPEN
#define L_CHECK_READ_ERROR_INV_DEPTH \
- { if (AVL_READ_ERROR) { iter->depth = ~0; return; } }
+ { if (AVL_READ_ERROR) { iter->depth = ~0; return; } }
#else
@@ -1022,174 +927,157 @@ L_SC void L_(init_iter)(L_(iter) *iter)
#if (L_IMPL_MASK & AVL_IMPL_START_ITER)
L_SC void L_(start_iter)(
- L_(avl) *l_tree, L_(iter) *iter, AVL_KEY k, avl_search_type st)
-{
- AVL_HANDLE h = l_tree->root;
- unsigned d = 0;
- int cmp, target_cmp;
-
- /* Save the tree that we're going to iterate through in a
- ** member variable. */
- iter->tree_ = l_tree;
-
- iter->depth = ~0;
+ L_(avl) *l_tree, L_(iter) *iter, AVL_KEY k, avl_search_type st) {
+ AVL_HANDLE h = l_tree->root;
+ unsigned d = 0;
+ int cmp, target_cmp;
+
+ /* Save the tree that we're going to iterate through in a
+ ** member variable. */
+ iter->tree_ = l_tree;
+
+ iter->depth = ~0;
+
+ if (h == AVL_NULL)
+ /* Tree is empty. */
+ return;
+
+ if (st & AVL_LESS)
+ /* Key can be greater than key of starting node. */
+ target_cmp = 1;
+ else if (st & AVL_GREATER)
+ /* Key can be less than key of starting node. */
+ target_cmp = -1;
+ else
+ /* Key must be same as key of starting node. */
+ target_cmp = 0;
+
+ for (;;) {
+ cmp = AVL_COMPARE_KEY_NODE(k, h);
+
+ if (cmp == 0) {
+ if (st & AVL_EQUAL) {
+ /* Equal node was sought and found as starting node. */
+ iter->depth = d;
+ break;
+ }
+
+ cmp = -target_cmp;
+ } else if (target_cmp != 0)
+ if (!((cmp ^ target_cmp) & L_MASK_HIGH_BIT))
+ /* cmp and target_cmp are both negative or both positive. */
+ iter->depth = d;
+
+ h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
if (h == AVL_NULL)
- /* Tree is empty. */
- return;
-
- if (st & AVL_LESS)
- /* Key can be greater than key of starting node. */
- target_cmp = 1;
- else if (st & AVL_GREATER)
- /* Key can be less than key of starting node. */
- target_cmp = -1;
- else
- /* Key must be same as key of starting node. */
- target_cmp = 0;
-
- for (; ;)
- {
- cmp = AVL_COMPARE_KEY_NODE(k, h);
-
- if (cmp == 0)
- {
- if (st & AVL_EQUAL)
- {
- /* Equal node was sought and found as starting node. */
- iter->depth = d;
- break;
- }
-
- cmp = -target_cmp;
- }
- else if (target_cmp != 0)
- if (!((cmp ^ target_cmp) & L_MASK_HIGH_BIT))
- /* cmp and target_cmp are both negative or both positive. */
- iter->depth = d;
-
- h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR_INV_DEPTH
-
- if (h == AVL_NULL)
- break;
-
- if (cmp > 0)
- L_BIT_ARR_1(iter->branch, d)
- else
- L_BIT_ARR_0(iter->branch, d)
- iter->path_h[d++] = h;
- }
+ break;
+
+ if (cmp > 0)
+ L_BIT_ARR_1(iter->branch, d)
+ else
+ L_BIT_ARR_0(iter->branch, d)
+ iter->path_h[d++] = h;
+ }
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_START_ITER_LEAST)
-L_SC void L_(start_iter_least)(L_(avl) *l_tree, L_(iter) *iter)
-{
- AVL_HANDLE h = l_tree->root;
+L_SC void L_(start_iter_least)(L_(avl) *l_tree, L_(iter) *iter) {
+ AVL_HANDLE h = l_tree->root;
- iter->tree_ = l_tree;
+ iter->tree_ = l_tree;
- iter->depth = ~0;
+ iter->depth = ~0;
- L_BIT_ARR_ALL(iter->branch, 0)
+ L_BIT_ARR_ALL(iter->branch, 0)
- while (h != AVL_NULL)
- {
- if (iter->depth != ~0)
- iter->path_h[iter->depth] = h;
+ while (h != AVL_NULL) {
+ if (iter->depth != ~0)
+ iter->path_h[iter->depth] = h;
- iter->depth++;
- h = AVL_GET_LESS(h, 1);
- L_CHECK_READ_ERROR_INV_DEPTH
- }
+ iter->depth++;
+ h = AVL_GET_LESS(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+ }
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_START_ITER_GREATEST)
-L_SC void L_(start_iter_greatest)(L_(avl) *l_tree, L_(iter) *iter)
-{
- AVL_HANDLE h = l_tree->root;
+L_SC void L_(start_iter_greatest)(L_(avl) *l_tree, L_(iter) *iter) {
+ AVL_HANDLE h = l_tree->root;
- iter->tree_ = l_tree;
+ iter->tree_ = l_tree;
- iter->depth = ~0;
+ iter->depth = ~0;
- L_BIT_ARR_ALL(iter->branch, 1)
+ L_BIT_ARR_ALL(iter->branch, 1)
- while (h != AVL_NULL)
- {
- if (iter->depth != ~0)
- iter->path_h[iter->depth] = h;
+ while (h != AVL_NULL) {
+ if (iter->depth != ~0)
+ iter->path_h[iter->depth] = h;
- iter->depth++;
- h = AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR_INV_DEPTH
- }
+ iter->depth++;
+ h = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+ }
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_GET_ITER)
-L_SC AVL_HANDLE L_(get_iter)(L_(iter) *iter)
-{
- if (iter->depth == ~0)
- return(AVL_NULL);
+L_SC AVL_HANDLE L_(get_iter)(L_(iter) *iter) {
+ if (iter->depth == ~0)
+ return(AVL_NULL);
- return(iter->depth == 0 ?
- iter->tree_->root : iter->path_h[iter->depth - 1]);
+ return(iter->depth == 0 ?
+ iter->tree_->root : iter->path_h[iter->depth - 1]);
}
#endif
#if (L_IMPL_MASK & AVL_IMPL_INCR_ITER)
-L_SC void L_(incr_iter)(L_(iter) *iter)
-{
+L_SC void L_(incr_iter)(L_(iter) *iter) {
#define l_tree (iter->tree_)
- if (iter->depth != ~0)
- {
- AVL_HANDLE h =
- AVL_GET_GREATER((iter->depth == 0 ?
- iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
- L_CHECK_READ_ERROR_INV_DEPTH
+ if (iter->depth != ~0) {
+ AVL_HANDLE h =
+ AVL_GET_GREATER((iter->depth == 0 ?
+ iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
- if (h == AVL_NULL)
- do
- {
- if (iter->depth == 0)
- {
- iter->depth = ~0;
- break;
- }
-
- iter->depth--;
- }
- while (L_BIT_ARR_VAL(iter->branch, iter->depth));
- else
- {
- L_BIT_ARR_1(iter->branch, iter->depth)
- iter->path_h[iter->depth++] = h;
+ if (h == AVL_NULL)
+ do {
+ if (iter->depth == 0) {
+ iter->depth = ~0;
+ break;
+ }
- for (; ;)
- {
- h = AVL_GET_LESS(h, 1);
- L_CHECK_READ_ERROR_INV_DEPTH
+ iter->depth--;
+ } while (L_BIT_ARR_VAL(iter->branch, iter->depth));
+ else {
+ L_BIT_ARR_1(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
- if (h == AVL_NULL)
- break;
+ for (;;) {
+ h = AVL_GET_LESS(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
- L_BIT_ARR_0(iter->branch, iter->depth)
- iter->path_h[iter->depth++] = h;
- }
- }
+ if (h == AVL_NULL)
+ break;
+
+ L_BIT_ARR_0(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
+ }
}
+ }
#undef l_tree
}
@@ -1198,47 +1086,40 @@ L_SC void L_(incr_iter)(L_(iter) *iter)
#if (L_IMPL_MASK & AVL_IMPL_DECR_ITER)
-L_SC void L_(decr_iter)(L_(iter) *iter)
-{
+L_SC void L_(decr_iter)(L_(iter) *iter) {
#define l_tree (iter->tree_)
- if (iter->depth != ~0)
- {
- AVL_HANDLE h =
- AVL_GET_LESS((iter->depth == 0 ?
- iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
- L_CHECK_READ_ERROR_INV_DEPTH
+ if (iter->depth != ~0) {
+ AVL_HANDLE h =
+ AVL_GET_LESS((iter->depth == 0 ?
+ iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
- if (h == AVL_NULL)
- do
- {
- if (iter->depth == 0)
- {
- iter->depth = ~0;
- break;
- }
-
- iter->depth--;
- }
- while (!L_BIT_ARR_VAL(iter->branch, iter->depth));
- else
- {
- L_BIT_ARR_0(iter->branch, iter->depth)
- iter->path_h[iter->depth++] = h;
+ if (h == AVL_NULL)
+ do {
+ if (iter->depth == 0) {
+ iter->depth = ~0;
+ break;
+ }
- for (; ;)
- {
- h = AVL_GET_GREATER(h, 1);
- L_CHECK_READ_ERROR_INV_DEPTH
+ iter->depth--;
+ } while (!L_BIT_ARR_VAL(iter->branch, iter->depth));
+ else {
+ L_BIT_ARR_0(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
- if (h == AVL_NULL)
- break;
+ for (;;) {
+ h = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
- L_BIT_ARR_1(iter->branch, iter->depth)
- iter->path_h[iter->depth++] = h;
- }
- }
+ if (h == AVL_NULL)
+ break;
+
+ L_BIT_ARR_1(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
+ }
}
+ }
#undef l_tree
}
diff --git a/vpx_mem/memory_manager/include/heapmm.h b/vpx_mem/memory_manager/include/heapmm.h
index 33004cadc..4934c2d8a 100644
--- a/vpx_mem/memory_manager/include/heapmm.h
+++ b/vpx_mem/memory_manager/include/heapmm.h
@@ -81,30 +81,29 @@
#include "hmm_cnfg.h"
/* Heap descriptor. */
-typedef struct HMM_UNIQUE(structure)
-{
- /* private: */
-
- /* Pointer to (payload of) root node in AVL tree. This field should
- ** really be the AVL tree descriptor (type avl_avl). But (in the
- ** instantiation of the AVL tree generic package used in package) the
- ** AVL tree descriptor simply contains a pointer to the root. So,
- ** whenever a pointer to the AVL tree descriptor is needed, I use the
- ** cast:
- **
- ** (avl_avl *) &(heap_desc->avl_tree_root)
- **
- ** (where heap_desc is a pointer to a heap descriptor). This trick
- ** allows me to avoid including cavl_if.h in this external header. */
- void *avl_tree_root;
-
- /* Pointer to first byte of last block freed, after any coalescing. */
- void *last_freed;
-
- /* public: */
-
- HMM_UNIQUE(size_bau) num_baus_can_shrink;
- void *end_of_shrinkable_chunk;
+typedef struct HMM_UNIQUE(structure) {
+ /* private: */
+
+ /* Pointer to (payload of) root node in AVL tree. This field should
+ ** really be the AVL tree descriptor (type avl_avl). But (in the
+  ** instantiation of the AVL tree generic package used in this package) the
+ ** AVL tree descriptor simply contains a pointer to the root. So,
+ ** whenever a pointer to the AVL tree descriptor is needed, I use the
+ ** cast:
+ **
+ ** (avl_avl *) &(heap_desc->avl_tree_root)
+ **
+ ** (where heap_desc is a pointer to a heap descriptor). This trick
+ ** allows me to avoid including cavl_if.h in this external header. */
+ void *avl_tree_root;
+
+ /* Pointer to first byte of last block freed, after any coalescing. */
+ void *last_freed;
+
+ /* public: */
+
+ HMM_UNIQUE(size_bau) num_baus_can_shrink;
+ void *end_of_shrinkable_chunk;
}
HMM_UNIQUE(descriptor);
@@ -113,41 +112,41 @@ HMM_UNIQUE(descriptor);
void HMM_UNIQUE(init)(HMM_UNIQUE(descriptor) *desc);
void *HMM_UNIQUE(alloc)(
- HMM_UNIQUE(descriptor) *desc, HMM_UNIQUE(size_aau) num_addr_align_units);
+ HMM_UNIQUE(descriptor) *desc, HMM_UNIQUE(size_aau) num_addr_align_units);
/* NOT YET IMPLEMENTED */
void *HMM_UNIQUE(greedy_alloc)(
- HMM_UNIQUE(descriptor) *desc, HMM_UNIQUE(size_aau) needed_addr_align_units,
- HMM_UNIQUE(size_aau) coveted_addr_align_units);
+ HMM_UNIQUE(descriptor) *desc, HMM_UNIQUE(size_aau) needed_addr_align_units,
+ HMM_UNIQUE(size_aau) coveted_addr_align_units);
int HMM_UNIQUE(resize)(
- HMM_UNIQUE(descriptor) *desc, void *mem,
- HMM_UNIQUE(size_aau) num_addr_align_units);
+ HMM_UNIQUE(descriptor) *desc, void *mem,
+ HMM_UNIQUE(size_aau) num_addr_align_units);
/* NOT YET IMPLEMENTED */
int HMM_UNIQUE(greedy_resize)(
- HMM_UNIQUE(descriptor) *desc, void *mem,
- HMM_UNIQUE(size_aau) needed_addr_align_units,
- HMM_UNIQUE(size_aau) coveted_addr_align_units);
+ HMM_UNIQUE(descriptor) *desc, void *mem,
+ HMM_UNIQUE(size_aau) needed_addr_align_units,
+ HMM_UNIQUE(size_aau) coveted_addr_align_units);
void HMM_UNIQUE(free)(HMM_UNIQUE(descriptor) *desc, void *mem);
HMM_UNIQUE(size_aau) HMM_UNIQUE(true_size)(void *mem);
HMM_UNIQUE(size_aau) HMM_UNIQUE(largest_available)(
- HMM_UNIQUE(descriptor) *desc);
+ HMM_UNIQUE(descriptor) *desc);
void HMM_UNIQUE(new_chunk)(
- HMM_UNIQUE(descriptor) *desc, void *start_of_chunk,
- HMM_UNIQUE(size_bau) num_block_align_units);
+ HMM_UNIQUE(descriptor) *desc, void *start_of_chunk,
+ HMM_UNIQUE(size_bau) num_block_align_units);
void HMM_UNIQUE(grow_chunk)(
- HMM_UNIQUE(descriptor) *desc, void *end_of_chunk,
- HMM_UNIQUE(size_bau) num_block_align_units);
+ HMM_UNIQUE(descriptor) *desc, void *end_of_chunk,
+ HMM_UNIQUE(size_bau) num_block_align_units);
/* NOT YET IMPLEMENTED */
void HMM_UNIQUE(shrink_chunk)(
- HMM_UNIQUE(descriptor) *desc,
- HMM_UNIQUE(size_bau) num_block_align_units);
+ HMM_UNIQUE(descriptor) *desc,
+ HMM_UNIQUE(size_bau) num_block_align_units);
#endif /* defined HMM_PROCESS */
diff --git a/vpx_mem/memory_manager/include/hmm_cnfg.h b/vpx_mem/memory_manager/include/hmm_cnfg.h
index 30b9f5045..2c3391d7b 100644
--- a/vpx_mem/memory_manager/include/hmm_cnfg.h
+++ b/vpx_mem/memory_manager/include/hmm_cnfg.h
@@ -45,8 +45,8 @@
#define HMM_UNIQUE(BASE) hmm_ ## BASE
/* Number of bytes in an Address Alignment Unit (AAU). */
-//fwg
-//#define HMM_ADDR_ALIGN_UNIT sizeof(int)
+// fwg
+// #define HMM_ADDR_ALIGN_UNIT sizeof(int)
#define HMM_ADDR_ALIGN_UNIT 32
/* Number of AAUs in a Block Alignment Unit (BAU). */
@@ -65,7 +65,7 @@ void hmm_dflt_abort(const char *, const char *);
** statement. If you remove the definition of this macro, no self-auditing
** will be performed. */
#define HMM_AUDIT_FAIL \
- hmm_dflt_abort(__FILE__, HMM_SYM_TO_STRING(__LINE__));
+ hmm_dflt_abort(__FILE__, HMM_SYM_TO_STRING(__LINE__));
#elif HMM_CNFG_NUM == 0
@@ -90,8 +90,8 @@ extern const char *HMM_UNIQUE(fail_file);
extern unsigned HMM_UNIQUE(fail_line);
#define HMM_AUDIT_FAIL \
- { HMM_UNIQUE(fail_file) = __FILE__; HMM_UNIQUE(fail_line) = __LINE__; \
- longjmp(HMM_UNIQUE(jmp_buf), 1); }
+ { HMM_UNIQUE(fail_file) = __FILE__; HMM_UNIQUE(fail_line) = __LINE__; \
+ longjmp(HMM_UNIQUE(jmp_buf), 1); }
#elif HMM_CNFG_NUM == 1
diff --git a/vpx_mem/memory_manager/include/hmm_intrnl.h b/vpx_mem/memory_manager/include/hmm_intrnl.h
index 5d62abc59..27cefe449 100644
--- a/vpx_mem/memory_manager/include/hmm_intrnl.h
+++ b/vpx_mem/memory_manager/include/hmm_intrnl.h
@@ -26,34 +26,32 @@
/* Mask of high bit of variable of size_bau type. */
#define HIGH_BIT_BAU_SIZE \
- ((U(size_bau)) ~ (((U(size_bau)) ~ (U(size_bau)) 0) >> 1))
+ ((U(size_bau)) ~ (((U(size_bau)) ~ (U(size_bau)) 0) >> 1))
/* Add a given number of AAUs to pointer. */
#define AAUS_FORWARD(PTR, AAU_OFFSET) \
- (((char *) (PTR)) + ((AAU_OFFSET) * ((U(size_aau)) HMM_ADDR_ALIGN_UNIT)))
+ (((char *) (PTR)) + ((AAU_OFFSET) * ((U(size_aau)) HMM_ADDR_ALIGN_UNIT)))
/* Subtract a given number of AAUs from pointer. */
#define AAUS_BACKWARD(PTR, AAU_OFFSET) \
- (((char *) (PTR)) - ((AAU_OFFSET) * ((U(size_aau)) HMM_ADDR_ALIGN_UNIT)))
+ (((char *) (PTR)) - ((AAU_OFFSET) * ((U(size_aau)) HMM_ADDR_ALIGN_UNIT)))
/* Add a given number of BAUs to a pointer. */
#define BAUS_FORWARD(PTR, BAU_OFFSET) \
- AAUS_FORWARD((PTR), (BAU_OFFSET) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT))
+ AAUS_FORWARD((PTR), (BAU_OFFSET) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT))
/* Subtract a given number of BAUs to a pointer. */
#define BAUS_BACKWARD(PTR, BAU_OFFSET) \
- AAUS_BACKWARD((PTR), (BAU_OFFSET) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT))
+ AAUS_BACKWARD((PTR), (BAU_OFFSET) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT))
-typedef struct head_struct
-{
- /* Sizes in Block Alignment Units. */
- HMM_UNIQUE(size_bau) previous_block_size, block_size;
+typedef struct head_struct {
+ /* Sizes in Block Alignment Units. */
+ HMM_UNIQUE(size_bau) previous_block_size, block_size;
}
head_record;
-typedef struct ptr_struct
-{
- struct ptr_struct *self, *prev, *next;
+typedef struct ptr_struct {
+ struct ptr_struct *self, *prev, *next;
}
ptr_record;
@@ -71,50 +69,50 @@ ptr_record;
/* Minimum number of BAUs in a block (allowing room for the pointer record. */
#define MIN_BLOCK_BAUS \
- DIV_ROUND_UP(HEAD_AAUS + PTR_RECORD_AAUS, HMM_BLOCK_ALIGN_UNIT)
+ DIV_ROUND_UP(HEAD_AAUS + PTR_RECORD_AAUS, HMM_BLOCK_ALIGN_UNIT)
/* Return number of BAUs in block (masking off high bit containing block
** status). */
#define BLOCK_BAUS(HEAD_PTR) \
- (((head_record *) (HEAD_PTR))->block_size & ~HIGH_BIT_BAU_SIZE)
+ (((head_record *) (HEAD_PTR))->block_size & ~HIGH_BIT_BAU_SIZE)
/* Return number of BAUs in previous block (masking off high bit containing
** block status). */
#define PREV_BLOCK_BAUS(HEAD_PTR) \
- (((head_record *) (HEAD_PTR))->previous_block_size & ~HIGH_BIT_BAU_SIZE)
+ (((head_record *) (HEAD_PTR))->previous_block_size & ~HIGH_BIT_BAU_SIZE)
/* Set number of BAUs in previous block, preserving high bit containing
** block status. */
#define SET_PREV_BLOCK_BAUS(HEAD_PTR, N_BAUS) \
- { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
- h_ptr->previous_block_size &= HIGH_BIT_BAU_SIZE; \
- h_ptr->previous_block_size |= (N_BAUS); }
+ { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
+ h_ptr->previous_block_size &= HIGH_BIT_BAU_SIZE; \
+ h_ptr->previous_block_size |= (N_BAUS); }
/* Convert pointer to pointer record of block to pointer to block's head
** record. */
#define PTR_REC_TO_HEAD(PTR_REC_PTR) \
- ((head_record *) AAUS_BACKWARD(PTR_REC_PTR, HEAD_AAUS))
+ ((head_record *) AAUS_BACKWARD(PTR_REC_PTR, HEAD_AAUS))
/* Convert pointer to block head to pointer to block's pointer record. */
#define HEAD_TO_PTR_REC(HEAD_PTR) \
- ((ptr_record *) AAUS_FORWARD(HEAD_PTR, HEAD_AAUS))
+ ((ptr_record *) AAUS_FORWARD(HEAD_PTR, HEAD_AAUS))
/* Returns non-zero if block is allocated. */
#define IS_BLOCK_ALLOCATED(HEAD_PTR) \
- (((((head_record *) (HEAD_PTR))->block_size | \
- ((head_record *) (HEAD_PTR))->previous_block_size) & \
- HIGH_BIT_BAU_SIZE) == 0)
+ (((((head_record *) (HEAD_PTR))->block_size | \
+ ((head_record *) (HEAD_PTR))->previous_block_size) & \
+ HIGH_BIT_BAU_SIZE) == 0)
#define MARK_BLOCK_ALLOCATED(HEAD_PTR) \
- { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
- h_ptr->block_size &= ~HIGH_BIT_BAU_SIZE; \
- h_ptr->previous_block_size &= ~HIGH_BIT_BAU_SIZE; }
+ { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
+ h_ptr->block_size &= ~HIGH_BIT_BAU_SIZE; \
+ h_ptr->previous_block_size &= ~HIGH_BIT_BAU_SIZE; }
/* Mark a block as free when it is not the first block in a bin (and
** therefore not a node in the AVL tree). */
#define MARK_SUCCESSIVE_BLOCK_IN_FREE_BIN(HEAD_PTR) \
- { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
- h_ptr->block_size |= HIGH_BIT_BAU_SIZE; }
+ { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
+ h_ptr->block_size |= HIGH_BIT_BAU_SIZE; }
/* Prototypes for internal functions implemented in one file and called in
** another.
@@ -125,7 +123,7 @@ void U(into_free_collection)(U(descriptor) *desc, head_record *head_ptr);
void U(out_of_free_collection)(U(descriptor) *desc, head_record *head_ptr);
void *U(alloc_from_bin)(
- U(descriptor) *desc, ptr_record *bin_front_ptr, U(size_bau) n_baus);
+ U(descriptor) *desc, ptr_record *bin_front_ptr, U(size_bau) n_baus);
#ifdef HMM_AUDIT_FAIL
@@ -137,12 +135,12 @@ int U(audit_block_fail_dummy_return)(void);
/* Auditing a block consists of checking that the size in its head
** matches the previous block size in the head of the next block. */
#define AUDIT_BLOCK_AS_EXPR(HEAD_PTR) \
- ((BLOCK_BAUS(HEAD_PTR) == \
- PREV_BLOCK_BAUS(BAUS_FORWARD(HEAD_PTR, BLOCK_BAUS(HEAD_PTR)))) ? \
- 0 : U(audit_block_fail_dummy_return)())
+ ((BLOCK_BAUS(HEAD_PTR) == \
+ PREV_BLOCK_BAUS(BAUS_FORWARD(HEAD_PTR, BLOCK_BAUS(HEAD_PTR)))) ? \
+ 0 : U(audit_block_fail_dummy_return)())
#define AUDIT_BLOCK(HEAD_PTR) \
- { void *h_ptr = (HEAD_PTR); AUDIT_BLOCK_AS_EXPR(h_ptr); }
+ { void *h_ptr = (HEAD_PTR); AUDIT_BLOCK_AS_EXPR(h_ptr); }
#endif