path: root/vpx_mem/memory_manager
author    John Koleszar <jkoleszar@google.com>  2010-05-18 11:58:33 -0400
committer John Koleszar <jkoleszar@google.com>  2010-05-18 11:58:33 -0400
commit 0ea50ce9cb4b65eee6afa1d041fe8beb5abda667 (patch)
tree   1f3b9019f28bc56fd3156f96e5a9653a983ee61b /vpx_mem/memory_manager
Initial WebM release
Diffstat (limited to 'vpx_mem/memory_manager')
-rw-r--r--  vpx_mem/memory_manager/hmm_alloc.c             59
-rw-r--r--  vpx_mem/memory_manager/hmm_base.c             433
-rw-r--r--  vpx_mem/memory_manager/hmm_dflt_abort.c        53
-rw-r--r--  vpx_mem/memory_manager/hmm_grow.c              49
-rw-r--r--  vpx_mem/memory_manager/hmm_largest.c           59
-rw-r--r--  vpx_mem/memory_manager/hmm_resize.c           118
-rw-r--r--  vpx_mem/memory_manager/hmm_shrink.c           110
-rw-r--r--  vpx_mem/memory_manager/hmm_true.c              31
-rw-r--r--  vpx_mem/memory_manager/include/cavl_if.h      226
-rw-r--r--  vpx_mem/memory_manager/include/cavl_impl.h   1266
-rw-r--r--  vpx_mem/memory_manager/include/heapmm.h       152
-rw-r--r--  vpx_mem/memory_manager/include/hmm_cnfg.h     115
-rw-r--r--  vpx_mem/memory_manager/include/hmm_intrnl.h   160
13 files changed, 2831 insertions, 0 deletions
diff --git a/vpx_mem/memory_manager/hmm_alloc.c b/vpx_mem/memory_manager/hmm_alloc.c
new file mode 100644
index 000000000..9abd81ee4
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_alloc.c
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+void *U(alloc)(U(descriptor) *desc, U(size_aau) n)
+{
+#ifdef HMM_AUDIT_FAIL
+
+ if (desc->avl_tree_root)
+ AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root))
+#endif
+
+ if (desc->last_freed)
+ {
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(desc->last_freed)
+#endif
+
+ U(into_free_collection)(desc, (head_record *)(desc->last_freed));
+
+ desc->last_freed = 0;
+ }
+
+ /* Add space for block header. */
+ n += HEAD_AAUS;
+
+ /* Convert n from number of address alignment units to block alignment
+ ** units. */
+ n = DIV_ROUND_UP(n, HMM_BLOCK_ALIGN_UNIT);
+
+ if (n < MIN_BLOCK_BAUS)
+ n = MIN_BLOCK_BAUS;
+
+ {
+ /* Search for the first node of the bin containing the smallest
+ ** block big enough to satisfy request. */
+ ptr_record *ptr_rec_ptr =
+ U(avl_search)(
+ (U(avl_avl) *) & (desc->avl_tree_root), (U(size_bau)) n,
+ AVL_GREATER_EQUAL);
+
+ /* If an appropriate bin is found, satisfy the allocation request,
+ ** otherwise return null pointer. */
+ return(ptr_rec_ptr ?
+ U(alloc_from_bin)(desc, ptr_rec_ptr, (U(size_bau)) n) : 0);
+ }
+}
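
The size arithmetic above is the heart of the request path: a size in address alignment units (AAUs) grows by the block header, is rounded up to whole block alignment units (BAUs), and is clamped to the minimum block size. A stand-alone sketch of that rounding, with assumed unit sizes (the real HEAD_AAUS, HMM_BLOCK_ALIGN_UNIT and MIN_BLOCK_BAUS come from hmm_intrnl.h and hmm_cnfg.h):

/* Hypothetical illustration of the request-size rounding in U(alloc).
** The unit sizes below are assumptions for the example only. */
#include <stdio.h>

#define DIV_ROUND_UP(N, D) (((N) + (D) - 1) / (D))

int main(void)
{
    unsigned head_aaus = 2;        /* assumed block header size in AAUs */
    unsigned aaus_per_bau = 4;     /* assumed AAUs per BAU */
    unsigned min_block_baus = 2;   /* assumed minimum block size in BAUs */

    unsigned n = 5;                /* requested payload size in AAUs */

    n += head_aaus;                          /* 7 AAUs with the header */
    n = DIV_ROUND_UP(n, aaus_per_bau);       /* rounds up to 2 BAUs */

    if (n < min_block_baus)
        n = min_block_baus;

    printf("%u BAUs\n", n);                  /* prints 2 */
    return 0;
}
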
diff --git a/vpx_mem/memory_manager/hmm_base.c b/vpx_mem/memory_manager/hmm_base.c
new file mode 100644
index 000000000..0cacc3f8f
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_base.c
@@ -0,0 +1,433 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+void U(init)(U(descriptor) *desc)
+{
+ desc->avl_tree_root = 0;
+ desc->last_freed = 0;
+}
+
+/* Remove a free block from a bin's doubly-linked list when it is not
+** the first block in the bin.
+*/
+void U(dll_remove)(
+ /* Pointer to pointer record in the block to be removed. */
+ ptr_record *to_remove)
+{
+ to_remove->prev->next = to_remove->next;
+
+ if (to_remove->next)
+ to_remove->next->prev = to_remove->prev;
+}
+
+/* Put a block into the free collection of a heap.
+*/
+void U(into_free_collection)(
+ /* Pointer to heap descriptor. */
+ U(descriptor) *desc,
+ /* Pointer to head record of block. */
+ head_record *head_ptr)
+{
+ ptr_record *ptr_rec_ptr = HEAD_TO_PTR_REC(head_ptr);
+
+ ptr_record *bin_front_ptr =
+ U(avl_insert)((U(avl_avl) *) & (desc->avl_tree_root), ptr_rec_ptr);
+
+ if (bin_front_ptr != ptr_rec_ptr)
+ {
+ /* The block was not inserted into the AVL tree because there is
+ ** already a bin for the size of the block. */
+
+ MARK_SUCCESSIVE_BLOCK_IN_FREE_BIN(head_ptr)
+ ptr_rec_ptr->self = ptr_rec_ptr;
+
+ /* Make the block the new second block in the bin's doubly-linked
+ ** list. */
+ ptr_rec_ptr->prev = bin_front_ptr;
+ ptr_rec_ptr->next = bin_front_ptr->next;
+ bin_front_ptr->next = ptr_rec_ptr;
+
+ if (ptr_rec_ptr->next)
+ ptr_rec_ptr->next->prev = ptr_rec_ptr;
+ }
+ else
+ /* Block is first block in new bin. */
+ ptr_rec_ptr->next = 0;
+}
+
+/* Allocate a block from a given bin. Returns a pointer to the payload
+** of the removed block. The "last freed" pointer must be null prior
+** to calling this function.
+*/
+void *U(alloc_from_bin)(
+ /* Pointer to heap descriptor. */
+ U(descriptor) *desc,
+ /* Pointer to pointer record of first block in bin. */
+ ptr_record *bin_front_ptr,
+ /* Number of BAUs needed in the allocated block. If the block taken
+ ** from the bin is significantly larger than the number of BAUs needed,
+ ** the "extra" BAUs are split off to form a new free block. */
+ U(size_bau) n_baus)
+{
+ head_record *head_ptr;
+ U(size_bau) rem_baus;
+
+ if (bin_front_ptr->next)
+ {
+ /* There are multiple blocks in this bin. Use the 2nd block in
+ ** the bin to avoid needless change to the AVL tree.
+ */
+
+ ptr_record *ptr_rec_ptr = bin_front_ptr->next;
+ head_ptr = PTR_REC_TO_HEAD(ptr_rec_ptr);
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(head_ptr)
+#endif
+
+ U(dll_remove)(ptr_rec_ptr);
+ }
+ else
+ {
+ /* There is only one block in the bin, so it has to be removed
+ ** from the AVL tree.
+ */
+
+ head_ptr = PTR_REC_TO_HEAD(bin_front_ptr);
+
+ U(avl_remove)(
+ (U(avl_avl) *) &(desc->avl_tree_root), BLOCK_BAUS(head_ptr));
+ }
+
+ MARK_BLOCK_ALLOCATED(head_ptr)
+
+ rem_baus = BLOCK_BAUS(head_ptr) - n_baus;
+
+ if (rem_baus >= MIN_BLOCK_BAUS)
+ {
+ /* Since there are enough "extra" BAUs, split them off to form
+ ** a new free block.
+ */
+
+ head_record *rem_head_ptr =
+ (head_record *) BAUS_FORWARD(head_ptr, n_baus);
+
+ /* Change the next block's header to reflect the fact that the
+ ** block preceding it is now smaller.
+ */
+ SET_PREV_BLOCK_BAUS(
+ BAUS_FORWARD(head_ptr, head_ptr->block_size), rem_baus)
+
+ head_ptr->block_size = n_baus;
+
+ rem_head_ptr->previous_block_size = n_baus;
+ rem_head_ptr->block_size = rem_baus;
+
+ desc->last_freed = rem_head_ptr;
+ }
+
+ return(HEAD_TO_PTR_REC(head_ptr));
+}
+
+/* Take a block out of the free collection.
+*/
+void U(out_of_free_collection)(
+ /* Descriptor of heap that block is in. */
+ U(descriptor) *desc,
+ /* Pointer to head of block to take out of free collection. */
+ head_record *head_ptr)
+{
+ ptr_record *ptr_rec_ptr = HEAD_TO_PTR_REC(head_ptr);
+
+ if (ptr_rec_ptr->self == ptr_rec_ptr)
+ /* Block is not the front block in its bin, so all we have to
+ ** do is take it out of the bin's doubly-linked list. */
+ U(dll_remove)(ptr_rec_ptr);
+ else
+ {
+ ptr_record *next = ptr_rec_ptr->next;
+
+ if (next)
+ /* Block is the front block in its bin, and there is at least
+ ** one other block in the bin. Substitute the next block for
+ ** the front block. */
+ U(avl_subst)((U(avl_avl) *) &(desc->avl_tree_root), next);
+ else
+ /* Block is the front block in its bin, but there is no other
+ ** block in the bin. Eliminate the bin. */
+ U(avl_remove)(
+ (U(avl_avl) *) &(desc->avl_tree_root), BLOCK_BAUS(head_ptr));
+ }
+}
+
+void U(free)(U(descriptor) *desc, void *payload_ptr)
+{
+ /* Flag: nonzero if we coalesce with an adjacent block. */
+ int coalesce;
+
+ head_record *fwd_head_ptr;
+ head_record *free_head_ptr = PTR_REC_TO_HEAD(payload_ptr);
+
+ desc->num_baus_can_shrink = 0;
+
+#ifdef HMM_AUDIT_FAIL
+
+ AUDIT_BLOCK(free_head_ptr)
+
+ /* Make sure not freeing an already free block. */
+ if (!IS_BLOCK_ALLOCATED(free_head_ptr))
+ HMM_AUDIT_FAIL
+
+ if (desc->avl_tree_root)
+ /* Audit root block in AVL tree. */
+ AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root))
+
+#endif
+
+ fwd_head_ptr =
+ (head_record *) BAUS_FORWARD(free_head_ptr, free_head_ptr->block_size);
+
+ if (free_head_ptr->previous_block_size)
+ {
+ /* Coalesce with backward block if possible. */
+
+ head_record *bkwd_head_ptr =
+ (head_record *) BAUS_BACKWARD(
+ free_head_ptr, free_head_ptr->previous_block_size);
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(bkwd_head_ptr)
+#endif
+
+ if (bkwd_head_ptr == (head_record *)(desc->last_freed))
+ {
+ desc->last_freed = 0;
+ coalesce = 1;
+ }
+ else if (IS_BLOCK_ALLOCATED(bkwd_head_ptr))
+ coalesce = 0;
+ else
+ {
+ U(out_of_free_collection)(desc, bkwd_head_ptr);
+ coalesce = 1;
+ }
+
+ if (coalesce)
+ {
+ bkwd_head_ptr->block_size += free_head_ptr->block_size;
+ SET_PREV_BLOCK_BAUS(fwd_head_ptr, BLOCK_BAUS(bkwd_head_ptr))
+ free_head_ptr = bkwd_head_ptr;
+ }
+ }
+
+ if (fwd_head_ptr->block_size == 0)
+ {
+ /* Block to be freed is last block before dummy end-of-chunk block. */
+ desc->end_of_shrinkable_chunk =
+ BAUS_FORWARD(fwd_head_ptr, DUMMY_END_BLOCK_BAUS);
+ desc->num_baus_can_shrink = BLOCK_BAUS(free_head_ptr);
+
+ if (PREV_BLOCK_BAUS(free_head_ptr) == 0)
+ /* Free block is the entire chunk, so shrinking can eliminate
+ ** entire chunk including dummy end block. */
+ desc->num_baus_can_shrink += DUMMY_END_BLOCK_BAUS;
+ }
+ else
+ {
+ /* Coalesce with forward block if possible. */
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(fwd_head_ptr)
+#endif
+
+ if (fwd_head_ptr == (head_record *)(desc->last_freed))
+ {
+ desc->last_freed = 0;
+ coalesce = 1;
+ }
+ else if (IS_BLOCK_ALLOCATED(fwd_head_ptr))
+ coalesce = 0;
+ else
+ {
+ U(out_of_free_collection)(desc, fwd_head_ptr);
+ coalesce = 1;
+ }
+
+ if (coalesce)
+ {
+ free_head_ptr->block_size += fwd_head_ptr->block_size;
+
+ fwd_head_ptr =
+ (head_record *) BAUS_FORWARD(
+ fwd_head_ptr, BLOCK_BAUS(fwd_head_ptr));
+
+ SET_PREV_BLOCK_BAUS(fwd_head_ptr, BLOCK_BAUS(free_head_ptr))
+
+ if (fwd_head_ptr->block_size == 0)
+ {
+ /* Coalesced block to be freed is last block before dummy
+ ** end-of-chunk block. */
+ desc->end_of_shrinkable_chunk =
+ BAUS_FORWARD(fwd_head_ptr, DUMMY_END_BLOCK_BAUS);
+ desc->num_baus_can_shrink = BLOCK_BAUS(free_head_ptr);
+
+ if (PREV_BLOCK_BAUS(free_head_ptr) == 0)
+ /* Free block is the entire chunk, so shrinking can
+ ** eliminate entire chunk including dummy end block. */
+ desc->num_baus_can_shrink += DUMMY_END_BLOCK_BAUS;
+ }
+ }
+ }
+
+ if (desc->last_freed)
+ {
+ /* There is a last freed block, but it is not adjacent to the
+ ** block being freed by this call to free, so put the last
+ ** freed block into the free collection.
+ */
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(desc->last_freed)
+#endif
+
+ U(into_free_collection)(desc, (head_record *)(desc->last_freed));
+ }
+
+ desc->last_freed = free_head_ptr;
+}
+
+void U(new_chunk)(U(descriptor) *desc, void *start, U(size_bau) n_baus)
+{
+#ifdef HMM_AUDIT_FAIL
+
+ if (desc->avl_tree_root)
+ /* Audit root block in AVL tree. */
+ AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root))
+#endif
+
+#undef HEAD_PTR
+#define HEAD_PTR ((head_record *) start)
+
+ /* Make the chunk one big free block followed by a dummy end block.
+ */
+
+ n_baus -= DUMMY_END_BLOCK_BAUS;
+
+ HEAD_PTR->previous_block_size = 0;
+ HEAD_PTR->block_size = n_baus;
+
+ U(into_free_collection)(desc, HEAD_PTR);
+
+ /* Set up the dummy end block. */
+ start = BAUS_FORWARD(start, n_baus);
+ HEAD_PTR->previous_block_size = n_baus;
+ HEAD_PTR->block_size = 0;
+
+#undef HEAD_PTR
+}
+
+#ifdef HMM_AUDIT_FAIL
+
+/* Function that does the audit-fail actions defined by the preprocessor
+** symbol HMM_AUDIT_FAIL, and returns a dummy integer value.
+*/
+int U(audit_block_fail_dummy_return)(void)
+{
+ HMM_AUDIT_FAIL
+
+ /* Dummy return. */
+ return(0);
+}
+
+#endif
+
+/* AVL Tree instantiation. */
+
+#ifdef HMM_AUDIT_FAIL
+
+/* The AVL tree generic package passes an ACCESS of 1 when it "touches"
+** a child node for the first time during a particular operation.  I use
+** this feature to audit the free blocks that are tree nodes only once
+** per operation.  Since the root node is not a child node, it has
+** to be audited directly.
+*/
+
+/* The pain you feel while reading these macros will not be in vain. It
+** will remove all doubt from your mind that C++ inline functions are
+** a very good thing.
+*/
+
+#define AVL_GET_LESS(H, ACCESS) \
+ (((ACCESS) ? AUDIT_BLOCK_AS_EXPR(PTR_REC_TO_HEAD(H)) : 0), (H)->self)
+#define AVL_GET_GREATER(H, ACCESS) \
+ (((ACCESS) ? AUDIT_BLOCK_AS_EXPR(PTR_REC_TO_HEAD(H)) : 0), (H)->prev)
+
+#else
+
+#define AVL_GET_LESS(H, ACCESS) ((H)->self)
+#define AVL_GET_GREATER(H, ACCESS) ((H)->prev)
+
+#endif
+
+#define AVL_SET_LESS(H, LH) (H)->self = (LH);
+#define AVL_SET_GREATER(H, GH) (H)->prev = (GH);
+
+/* high bit of high bit of
+** block_size previous_block_size balance factor
+** ----------- ------------------- --------------
+** 0 0 n/a (block allocated)
+** 0 1 1
+** 1 0 -1
+** 1 1 0
+*/
+
+#define AVL_GET_BALANCE_FACTOR(H) \
+ ((((head_record *) (PTR_REC_TO_HEAD(H)))->block_size & \
+ HIGH_BIT_BAU_SIZE) ? \
+ (((head_record *) (PTR_REC_TO_HEAD(H)))->previous_block_size & \
+ HIGH_BIT_BAU_SIZE ? 0 : -1) : 1)
+
+#define AVL_SET_BALANCE_FACTOR(H, BF) \
+ { \
+ register head_record *p = \
+ (head_record *) PTR_REC_TO_HEAD(H); \
+ register int bal_f = (BF); \
+ \
+ if (bal_f <= 0) \
+ p->block_size |= HIGH_BIT_BAU_SIZE; \
+ else \
+ p->block_size &= ~HIGH_BIT_BAU_SIZE; \
+ if (bal_f >= 0) \
+ p->previous_block_size |= HIGH_BIT_BAU_SIZE; \
+ else \
+ p->previous_block_size &= ~HIGH_BIT_BAU_SIZE; \
+ }
+
+#define COMPARE_KEY_KEY(K1, K2) ((K1) == (K2) ? 0 : ((K1) > (K2) ? 1 : -1))
+
+#define AVL_COMPARE_KEY_NODE(K, H) \
+ COMPARE_KEY_KEY(K, BLOCK_BAUS(PTR_REC_TO_HEAD(H)))
+
+#define AVL_COMPARE_NODE_NODE(H1, H2) \
+ COMPARE_KEY_KEY(BLOCK_BAUS(PTR_REC_TO_HEAD(H1)), \
+ BLOCK_BAUS(PTR_REC_TO_HEAD(H2)))
+
+#define AVL_NULL ((ptr_record *) 0)
+
+#define AVL_IMPL_MASK \
+ ( AVL_IMPL_INSERT | AVL_IMPL_SEARCH | AVL_IMPL_REMOVE | AVL_IMPL_SUBST )
+
+#include "cavl_impl.h"
diff --git a/vpx_mem/memory_manager/hmm_dflt_abort.c b/vpx_mem/memory_manager/hmm_dflt_abort.c
new file mode 100644
index 000000000..dc59f5507
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_dflt_abort.c
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+/* The function in this file performs default actions if self-auditing
+** finds heap corruption. Don't rely on this code to handle the
+** case where HMM is being used to implement the malloc and free standard
+** library functions. Rewrite the function if necessary to avoid using
+** I/O and execution termination functions that call malloc or free.
+** In Unix, for example, you would replace the fputs calls with calls
+** to the write system call using file handle number 2.
+*/
+#include "hmm_intrnl.h"
+#include <stdio.h>
+#include <stdlib.h>
+
+static int entered = 0;
+
+/* Print abort message, file and line. Terminate execution.
+*/
+void hmm_dflt_abort(const char *file, const char *line)
+{
+ /* Avoid use of printf(), which is more likely to use heap. */
+
+ if (entered)
+
+ /* The standard I/O functions called a heap function and caused
+ ** an indirect recursive call to this function, so we have to
+ ** give up without printing a message. */
+ while (1);
+
+ entered = 1;
+
+ fputs("\n_abort - Heap corruption\n" "File: ", stderr);
+ fputs(file, stderr);
+ fputs(" Line: ", stderr);
+ fputs(line, stderr);
+ fputs("\n\n", stderr);
+ fputs("hmm_dflt_abort: while(1)!!!\n", stderr);
+ fflush(stderr);
+
+ while (1);
+}
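
Note that hmm_dflt_abort() takes the line number as a string, so a configuration that routes audits here has to stringize __LINE__. A sketch of such a hook definition, offered as an assumption about how a configuration header might wire it up, not as a quote from hmm_cnfg.h:

/* Assumed wiring of the audit hook to hmm_dflt_abort().  A two-level
** macro is needed so __LINE__ expands before being stringized. */
#define HMM_STR_(x) #x
#define HMM_STR(x)  HMM_STR_(x)

#define HMM_AUDIT_FAIL  hmm_dflt_abort(__FILE__, HMM_STR(__LINE__));
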
diff --git a/vpx_mem/memory_manager/hmm_grow.c b/vpx_mem/memory_manager/hmm_grow.c
new file mode 100644
index 000000000..79d75a74b
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_grow.c
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+void U(grow_chunk)(U(descriptor) *desc, void *end, U(size_bau) n_baus)
+{
+#undef HEAD_PTR
+#define HEAD_PTR ((head_record *) end)
+
+ end = BAUS_BACKWARD(end, DUMMY_END_BLOCK_BAUS);
+
+#ifdef HMM_AUDIT_FAIL
+
+ if (HEAD_PTR->block_size != 0)
+ /* Chunk does not have valid dummy end block. */
+ HMM_AUDIT_FAIL
+
+#endif
+
+ /* Create a new block that absorbs the old dummy end block. */
+ HEAD_PTR->block_size = n_baus;
+
+ /* Set up the new dummy end block. */
+ {
+ head_record *dummy = (head_record *) BAUS_FORWARD(end, n_baus);
+ dummy->previous_block_size = n_baus;
+ dummy->block_size = 0;
+ }
+
+ /* Simply free the new block, allowing it to coalesce with any
+ ** free block that was the last block in the chunk prior to
+ ** growth.
+ */
+ U(free)(desc, HEAD_TO_PTR_REC(end));
+
+#undef HEAD_PTR
+}
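
Both U(new_chunk) and U(grow_chunk) maintain the same chunk invariant: a run of blocks terminated by a dummy end block whose block_size is zero and whose previous_block_size points back at the last real block. A stand-alone model of that layout (the head_record shape and the one-header-per-BAU unit are assumptions of this sketch):

/* Stand-alone model of the chunk layout the HMM chunk functions rely
** on.  Assume, for the example only, that 1 BAU = sizeof(head_record). */
#include <stdio.h>

typedef struct { unsigned short previous_block_size, block_size; } head_record;

int main(void)
{
    head_record chunk[16];          /* 16 BAUs of raw chunk memory */
    head_record *first = &chunk[0];
    head_record *dummy;

    /* One big free block followed by the dummy end block. */
    first->previous_block_size = 0;
    first->block_size = 15;
    dummy = first + 15;
    dummy->previous_block_size = 15;
    dummy->block_size = 0;          /* zero size marks the end of chunk */

    printf("block before dummy is %u BAUs\n",
           (unsigned) dummy->previous_block_size);
    return 0;
}
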
diff --git a/vpx_mem/memory_manager/hmm_largest.c b/vpx_mem/memory_manager/hmm_largest.c
new file mode 100644
index 000000000..5ebe398e0
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_largest.c
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+U(size_aau) U(largest_available)(U(descriptor) *desc)
+{
+ U(size_bau) largest;
+
+ if (!(desc->avl_tree_root))
+ largest = 0;
+ else
+ {
+#ifdef HMM_AUDIT_FAIL
+ /* Audit root block in AVL tree. */
+ AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root))
+#endif
+
+ largest =
+ BLOCK_BAUS(
+ PTR_REC_TO_HEAD(
+ U(avl_search)(
+ (U(avl_avl) *) & (desc->avl_tree_root),
+ (U(size_bau)) ~(U(size_bau)) 0, AVL_LESS)));
+ }
+
+ if (desc->last_freed)
+ {
+ /* Size of last freed block. */
+ register U(size_bau) lf_size;
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(desc->last_freed)
+#endif
+
+ lf_size = BLOCK_BAUS(desc->last_freed);
+
+ if (lf_size > largest)
+ largest = lf_size;
+ }
+
+ /* Convert the largest size to AAUs and subtract the head size,
+ ** leaving the payload size.
+ */
+ return(largest ?
+ ((largest * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT)) - HEAD_AAUS) :
+ 0);
+}
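
This conversion is the inverse of the rounding done in U(alloc): a block size in BAUs maps back to a usable payload in AAUs by multiplying out the BAU size and subtracting the header. A companion sketch using the same assumed unit sizes as the allocation example:

/* Hypothetical companion to the allocation rounding sketch. */
#include <stdio.h>

int main(void)
{
    unsigned head_aaus = 2, aaus_per_bau = 4;   /* assumed, as before */
    unsigned largest_baus = 2;    /* size of the largest free block */

    unsigned payload_aaus =
        largest_baus ? largest_baus * aaus_per_bau - head_aaus : 0;

    printf("largest payload: %u AAUs\n", payload_aaus);   /* prints 6 */
    return 0;
}
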
diff --git a/vpx_mem/memory_manager/hmm_resize.c b/vpx_mem/memory_manager/hmm_resize.c
new file mode 100644
index 000000000..6e3f2f041
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_resize.c
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+int U(resize)(U(descriptor) *desc, void *mem, U(size_aau) n)
+{
+ U(size_aau) i;
+ head_record *next_head_ptr;
+ head_record *head_ptr = PTR_REC_TO_HEAD(mem);
+
+ /* Flag. */
+ int next_block_free;
+
+ /* Convert n from desired block size in AAUs to BAUs. */
+ n += HEAD_AAUS;
+ n = DIV_ROUND_UP(n, HMM_BLOCK_ALIGN_UNIT);
+
+ if (n < MIN_BLOCK_BAUS)
+ n = MIN_BLOCK_BAUS;
+
+#ifdef HMM_AUDIT_FAIL
+
+ AUDIT_BLOCK(head_ptr)
+
+ if (!IS_BLOCK_ALLOCATED(head_ptr))
+ HMM_AUDIT_FAIL
+
+ if (desc->avl_tree_root)
+ AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root))
+
+#endif
+
+ i = head_ptr->block_size;
+
+ next_head_ptr =
+ (head_record *) BAUS_FORWARD(head_ptr, head_ptr->block_size);
+
+ next_block_free =
+ (next_head_ptr == desc->last_freed) ||
+ !IS_BLOCK_ALLOCATED(next_head_ptr);
+
+ if (next_block_free)
+ /* Block can expand into next free block. */
+ i += BLOCK_BAUS(next_head_ptr);
+
+ if (n > i)
+ /* Not enough room for block to expand. */
+ return(-1);
+
+ if (next_block_free)
+ {
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(next_head_ptr)
+#endif
+
+ if (next_head_ptr == desc->last_freed)
+ desc->last_freed = 0;
+ else
+ U(out_of_free_collection)(desc, next_head_ptr);
+
+ next_head_ptr =
+ (head_record *) BAUS_FORWARD(head_ptr, (U(size_bau)) i);
+ }
+
+ /* Set i to number of "extra" BAUs. */
+ i -= n;
+
+ if (i < MIN_BLOCK_BAUS)
+ /* Not enough extra BAUs to be a block on their own, so just keep them
+ ** in the block being resized.
+ */
+ {
+ n += i;
+ i = n;
+ }
+ else
+ {
+ /* There are enough "leftover" BAUs in the next block to
+ ** form a remainder block. */
+
+ head_record *rem_head_ptr;
+
+ rem_head_ptr = (head_record *) BAUS_FORWARD(head_ptr, n);
+
+ rem_head_ptr->previous_block_size = (U(size_bau)) n;
+ rem_head_ptr->block_size = (U(size_bau)) i;
+
+ if (desc->last_freed)
+ {
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(desc->last_freed)
+#endif
+
+ U(into_free_collection)(desc, (head_record *)(desc->last_freed));
+
+ desc->last_freed = 0;
+ }
+
+ desc->last_freed = rem_head_ptr;
+ }
+
+ head_ptr->block_size = (U(size_bau)) n;
+ next_head_ptr->previous_block_size = (U(size_bau)) i;
+
+ return(0);
+}
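
U(resize) only ever grows a block in place (into a free neighbor) or shrinks it; it never moves the payload. A realloc-style caller therefore needs an allocate-copy-free fallback. A hedged sketch, assuming the default configuration (heapmm.h with HMM_CNFG_NUM unset) exports hmm_-prefixed names, and that one AAU is one byte for the purposes of memcpy; both are assumptions of the example:

/* Hypothetical realloc-style wrapper over the resize function. */
#include <string.h>
#include "heapmm.h"

void *resize_or_move(hmm_descriptor *desc, void *mem, hmm_size_aau new_aaus)
{
    if (hmm_resize(desc, mem, new_aaus) == 0)
        return mem;                         /* resized in place */

    {
        void *fresh = hmm_alloc(desc, new_aaus);

        if (fresh)
        {
            /* hmm_true_size() is the old block's usable payload in
            ** AAUs; copying it whole is safe because resize only
            ** fails when the block could not grow to new_aaus. */
            memcpy(fresh, mem, hmm_true_size(mem));
            hmm_free(desc, mem);
        }

        return fresh;
    }
}
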
diff --git a/vpx_mem/memory_manager/hmm_shrink.c b/vpx_mem/memory_manager/hmm_shrink.c
new file mode 100644
index 000000000..5ef9b233f
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_shrink.c
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+void U(shrink_chunk)(U(descriptor) *desc, U(size_bau) n_baus_to_shrink)
+{
+ head_record *dummy_end_block = (head_record *)
+ BAUS_BACKWARD(desc->end_of_shrinkable_chunk, DUMMY_END_BLOCK_BAUS);
+
+#ifdef HMM_AUDIT_FAIL
+
+ if (dummy_end_block->block_size != 0)
+ /* Chunk does not have valid dummy end block. */
+ HMM_AUDIT_FAIL
+
+#endif
+
+ if (n_baus_to_shrink)
+ {
+ head_record *last_block = (head_record *)
+ BAUS_BACKWARD(
+ dummy_end_block, dummy_end_block->previous_block_size);
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(last_block)
+#endif
+
+ if (last_block == desc->last_freed)
+ {
+ U(size_bau) bs = BLOCK_BAUS(last_block);
+
+ /* Chunk will not be shrunk out of existence if
+ ** 1. There is at least one allocated block in the chunk
+ ** and the amount to shrink is exactly the size of the
+ ** last block, OR
+ ** 2. After the last block is shrunk, there will be enough
+ ** BAUs left in it to form a minimal size block. */
+ int chunk_will_survive =
+ (PREV_BLOCK_BAUS(last_block) && (n_baus_to_shrink == bs)) ||
+ (n_baus_to_shrink <= (U(size_bau))(bs - MIN_BLOCK_BAUS));
+
+ if (chunk_will_survive ||
+ (!PREV_BLOCK_BAUS(last_block) &&
+ (n_baus_to_shrink ==
+ (U(size_bau))(bs + DUMMY_END_BLOCK_BAUS))))
+ {
+ desc->last_freed = 0;
+
+ if (chunk_will_survive)
+ {
+ bs -= n_baus_to_shrink;
+
+ if (bs)
+ {
+ /* The last (non-dummy) block was not completely
+ ** eliminated by the shrink. */
+
+ last_block->block_size = bs;
+
+ /* Create new dummy end record.
+ */
+ dummy_end_block =
+ (head_record *) BAUS_FORWARD(last_block, bs);
+ dummy_end_block->previous_block_size = bs;
+ dummy_end_block->block_size = 0;
+
+#ifdef HMM_AUDIT_FAIL
+
+ if (desc->avl_tree_root)
+ AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root))
+#endif
+
+ U(into_free_collection)(desc, last_block);
+ }
+ else
+ {
+ /* The last (non-dummy) block was completely
+ ** eliminated by the shrink. Make its head
+ ** the new dummy end block.
+ */
+ last_block->block_size = 0;
+ last_block->previous_block_size &= ~HIGH_BIT_BAU_SIZE;
+ }
+ }
+ }
+
+#ifdef HMM_AUDIT_FAIL
+ else
+ HMM_AUDIT_FAIL
+#endif
+ }
+
+#ifdef HMM_AUDIT_FAIL
+ else
+ HMM_AUDIT_FAIL
+#endif
+ }
+}
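
The num_baus_can_shrink and end_of_shrinkable_chunk fields that U(free) fills in are exactly what this function consumes, and they are only meaningful while the freed block is still the descriptor's last_freed block. A hedged usage sketch with the assumed hmm_-prefixed names of the default configuration:

/* Hypothetical helper: free a block and immediately trim its chunk if
** the free exposed a shrinkable tail.  Returning the trimmed BAUs to
** the operating system (munmap, sbrk, pool) is outside the package. */
#include "heapmm.h"

void free_and_trim(hmm_descriptor *desc, void *payload)
{
    hmm_free(desc, payload);

    if (desc->num_baus_can_shrink)
        /* The BAUs given back end at desc->end_of_shrinkable_chunk. */
        hmm_shrink_chunk(desc, desc->num_baus_can_shrink);
}
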
diff --git a/vpx_mem/memory_manager/hmm_true.c b/vpx_mem/memory_manager/hmm_true.c
new file mode 100644
index 000000000..41103c89e
--- /dev/null
+++ b/vpx_mem/memory_manager/hmm_true.c
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#include "hmm_intrnl.h"
+
+U(size_aau) U(true_size)(void *payload_ptr)
+{
+ register head_record *head_ptr = PTR_REC_TO_HEAD(payload_ptr);
+
+#ifdef HMM_AUDIT_FAIL
+ AUDIT_BLOCK(head_ptr)
+#endif
+
+ /* Convert block size from BAUs to AAUs. Subtract head size, leaving
+ ** payload size.
+ */
+ return(
+ (BLOCK_BAUS(head_ptr) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT)) -
+ HEAD_AAUS);
+}
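
Because of the BAU rounding shown earlier, a block's usable payload can exceed what was requested, and U(true_size) is how a caller discovers that slack. A small hedged sketch (hmm_-prefixed names again assumed from the default configuration):

/* Hypothetical demonstration of allocation slack. */
#include <stdio.h>
#include "heapmm.h"

void show_slack(hmm_descriptor *desc)
{
    hmm_size_aau asked = 5;
    void *p = hmm_alloc(desc, asked);

    if (p)
    {
        /* true_size reports the full rounded payload, not the request. */
        printf("asked %lu AAUs, usable %lu AAUs\n",
               (unsigned long) asked, (unsigned long) hmm_true_size(p));
        hmm_free(desc, p);
    }
}
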
diff --git a/vpx_mem/memory_manager/include/cavl_if.h b/vpx_mem/memory_manager/include/cavl_if.h
new file mode 100644
index 000000000..e2733ef2f
--- /dev/null
+++ b/vpx_mem/memory_manager/include/cavl_if.h
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* Abstract AVL Tree Generic C Package.
+** Interface generation header file.
+**
+** This code is in the public domain. See cavl_tree.html for interface
+** documentation.
+**
+** Version: 1.5 Author: Walt Karas
+*/
+
+/* This header contains the definition of CHAR_BIT (number of bits in a
+** char). */
+#include <limits.h>
+
+#undef L_
+#undef L_EST_LONG_BIT
+#undef L_SIZE
+#undef L_SC
+#undef L_LONG_BIT
+#undef L_BIT_ARR_DEFN
+
+#ifndef AVL_SEARCH_TYPE_DEFINED_
+#define AVL_SEARCH_TYPE_DEFINED_
+
+typedef enum
+{
+ AVL_EQUAL = 1,
+ AVL_LESS = 2,
+ AVL_GREATER = 4,
+ AVL_LESS_EQUAL = AVL_EQUAL | AVL_LESS,
+ AVL_GREATER_EQUAL = AVL_EQUAL | AVL_GREATER
+}
+avl_search_type;
+
+#endif
+
+#ifdef AVL_UNIQUE
+
+#define L_ AVL_UNIQUE
+
+#else
+
+#define L_(X) X
+
+#endif
+
+/* Determine storage class for function prototypes. */
+#ifdef AVL_PRIVATE
+
+#define L_SC static
+
+#else
+
+#define L_SC extern
+
+#endif
+
+#ifdef AVL_SIZE
+
+#define L_SIZE AVL_SIZE
+
+#else
+
+#define L_SIZE unsigned long
+
+#endif
+
+typedef struct
+{
+#ifdef AVL_INSIDE_STRUCT
+
+ AVL_INSIDE_STRUCT
+
+#endif
+
+ AVL_HANDLE root;
+}
+L_(avl);
+
+/* Function prototypes. */
+
+L_SC void L_(init)(L_(avl) *tree);
+
+L_SC int L_(is_empty)(L_(avl) *tree);
+
+L_SC AVL_HANDLE L_(insert)(L_(avl) *tree, AVL_HANDLE h);
+
+L_SC AVL_HANDLE L_(search)(L_(avl) *tree, AVL_KEY k, avl_search_type st);
+
+L_SC AVL_HANDLE L_(search_least)(L_(avl) *tree);
+
+L_SC AVL_HANDLE L_(search_greatest)(L_(avl) *tree);
+
+L_SC AVL_HANDLE L_(remove)(L_(avl) *tree, AVL_KEY k);
+
+L_SC AVL_HANDLE L_(subst)(L_(avl) *tree, AVL_HANDLE new_node);
+
+#ifdef AVL_BUILD_ITER_TYPE
+
+L_SC int L_(build)(
+ L_(avl) *tree, AVL_BUILD_ITER_TYPE p, L_SIZE num_nodes);
+
+#endif
+
+/* ANSI C/ISO C++ require that a long have at least 32 bits. Set
+** L_EST_LONG_BIT to be the greatest multiple of 8 in the range
+** 32 - 64 (inclusive) that is less than or equal to the number of
+** bits in a long.
+*/
+
+#if (((LONG_MAX >> 31) >> 7) == 0)
+
+#define L_EST_LONG_BIT 32
+
+#elif (((LONG_MAX >> 31) >> 15) == 0)
+
+#define L_EST_LONG_BIT 40
+
+#elif (((LONG_MAX >> 31) >> 23) == 0)
+
+#define L_EST_LONG_BIT 48
+
+#elif (((LONG_MAX >> 31) >> 31) == 0)
+
+#define L_EST_LONG_BIT 56
+
+#else
+
+#define L_EST_LONG_BIT 64
+
+#endif
+
+/* Number of bits in a long. */
+#define L_LONG_BIT (sizeof(long) * CHAR_BIT)
+
+/* The macro L_BIT_ARR_DEFN defines a bit array whose index is a (0-based)
+** node depth. The definition depends on whether the maximum depth is more
+** or less than the number of bits in a single long.
+*/
+
+#if ((AVL_MAX_DEPTH) > L_EST_LONG_BIT)
+
+/* Maximum depth may be more than number of bits in a long. */
+
+#define L_BIT_ARR_DEFN(NAME) \
+ unsigned long NAME[((AVL_MAX_DEPTH) + L_LONG_BIT - 1) / L_LONG_BIT];
+
+#else
+
+/* Maximum depth is no more than the number of bits in a long. */
+
+#define L_BIT_ARR_DEFN(NAME) unsigned long NAME;
+
+#endif
+
+/* Iterator structure. */
+typedef struct
+{
+ /* Tree being iterated over. */
+ L_(avl) *tree_;
+
+ /* Records a path into the tree. If bit n is true, the greater
+ ** branch was taken from the nth node in the path; otherwise the
+ ** less branch was taken. Bit 0 gives the branch from the root,
+ ** and so on. */
+ L_BIT_ARR_DEFN(branch)
+
+ /* Zero-based depth of path into tree. */
+ unsigned depth;
+
+ /* Handles of nodes in path from root to current node (returned by get_iter). */
+ AVL_HANDLE path_h[(AVL_MAX_DEPTH) - 1];
+}
+L_(iter);
+
+/* Iterator function prototypes. */
+
+L_SC void L_(start_iter)(
+ L_(avl) *tree, L_(iter) *iter, AVL_KEY k, avl_search_type st);
+
+L_SC void L_(start_iter_least)(L_(avl) *tree, L_(iter) *iter);
+
+L_SC void L_(start_iter_greatest)(L_(avl) *tree, L_(iter) *iter);
+
+L_SC AVL_HANDLE L_(get_iter)(L_(iter) *iter);
+
+L_SC void L_(incr_iter)(L_(iter) *iter);
+
+L_SC void L_(decr_iter)(L_(iter) *iter);
+
+L_SC void L_(init_iter)(L_(iter) *iter);
+
+#define AVL_IMPL_INIT 1
+#define AVL_IMPL_IS_EMPTY (1 << 1)
+#define AVL_IMPL_INSERT (1 << 2)
+#define AVL_IMPL_SEARCH (1 << 3)
+#define AVL_IMPL_SEARCH_LEAST (1 << 4)
+#define AVL_IMPL_SEARCH_GREATEST (1 << 5)
+#define AVL_IMPL_REMOVE (1 << 6)
+#define AVL_IMPL_BUILD (1 << 7)
+#define AVL_IMPL_START_ITER (1 << 8)
+#define AVL_IMPL_START_ITER_LEAST (1 << 9)
+#define AVL_IMPL_START_ITER_GREATEST (1 << 10)
+#define AVL_IMPL_GET_ITER (1 << 11)
+#define AVL_IMPL_INCR_ITER (1 << 12)
+#define AVL_IMPL_DECR_ITER (1 << 13)
+#define AVL_IMPL_INIT_ITER (1 << 14)
+#define AVL_IMPL_SUBST (1 << 15)
+
+#define AVL_IMPL_ALL (~0)
+
+#undef L_
+#undef L_EST_LONG_BIT
+#undef L_SIZE
+#undef L_SC
+#undef L_LONG_BIT
+#undef L_BIT_ARR_DEFN
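
The L_BIT_ARR_DEFN machinery above exists so a traversal can record one branch decision per tree level in the cheapest structure that fits: a single long when AVL_MAX_DEPTH allows it, otherwise an array of longs. A stand-alone sketch of the multi-long variant (all names are local to the example):

/* Stand-alone illustration of a depth-indexed bit array: one bit per
** tree level, recording which branch a traversal took. */
#include <limits.h>
#include <stdio.h>

#define LONG_BIT_ (sizeof(long) * CHAR_BIT)
#define MAX_DEPTH 100   /* assume depth can exceed the bits in a long */

#define ARR_LONGS ((MAX_DEPTH + LONG_BIT_ - 1) / LONG_BIT_)
#define BIT_VAL(A, N) ((A)[(N) / LONG_BIT_] & (1L << ((N) % LONG_BIT_)))
#define BIT_1(A, N)   (A)[(N) / LONG_BIT_] |= 1L << ((N) % LONG_BIT_);
#define BIT_0(A, N)   (A)[(N) / LONG_BIT_] &= ~(1L << ((N) % LONG_BIT_));

int main(void)
{
    unsigned long branch[ARR_LONGS] = { 0 };

    BIT_1(branch, 70)   /* took the greater branch at depth 70 */
    BIT_0(branch, 3)    /* took the less branch at depth 3 */

    printf("depth 70: %s\n", BIT_VAL(branch, 70) ? "greater" : "less");
    return 0;
}
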
diff --git a/vpx_mem/memory_manager/include/cavl_impl.h b/vpx_mem/memory_manager/include/cavl_impl.h
new file mode 100644
index 000000000..267bc7312
--- /dev/null
+++ b/vpx_mem/memory_manager/include/cavl_impl.h
@@ -0,0 +1,1266 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* Abstract AVL Tree Generic C Package.
+** Implementation generation header file.
+**
+** This code is in the public domain. See cavl_tree.html for interface
+** documentation.
+**
+** Version: 1.5 Author: Walt Karas
+*/
+
+#undef L_
+#undef L_EST_LONG_BIT
+#undef L_SIZE
+#undef l_tree
+#undef L_MASK_HIGH_BIT
+#undef L_LONG_BIT
+#undef L_BIT_ARR_DEFN
+#undef L_BIT_ARR_VAL
+#undef L_BIT_ARR_0
+#undef L_BIT_ARR_1
+#undef L_BIT_ARR_ALL
+#undef L_BIT_ARR_LONGS
+#undef L_IMPL_MASK
+#undef L_CHECK_READ_ERROR
+#undef L_CHECK_READ_ERROR_INV_DEPTH
+#undef L_SC
+#undef L_BALANCE_PARAM_PREFIX
+
+#ifdef AVL_UNIQUE
+
+#define L_ AVL_UNIQUE
+
+#else
+
+#define L_(X) X
+
+#endif
+
+/* Determine correct storage class for functions */
+#ifdef AVL_PRIVATE
+
+#define L_SC static
+
+#else
+
+#define L_SC
+
+#endif
+
+#ifdef AVL_SIZE
+
+#define L_SIZE AVL_SIZE
+
+#else
+
+#define L_SIZE unsigned long
+
+#endif
+
+#define L_MASK_HIGH_BIT ((int) ~ ((~ (unsigned) 0) >> 1))
+
+/* ANSI C/ISO C++ require that a long have at least 32 bits. Set
+** L_EST_LONG_BIT to be the greatest multiple of 8 in the range
+** 32 - 64 (inclusive) that is less than or equal to the number of
+** bits in a long.
+*/
+
+#if (((LONG_MAX >> 31) >> 7) == 0)
+
+#define L_EST_LONG_BIT 32
+
+#elif (((LONG_MAX >> 31) >> 15) == 0)
+
+#define L_EST_LONG_BIT 40
+
+#elif (((LONG_MAX >> 31) >> 23) == 0)
+
+#define L_EST_LONG_BIT 48
+
+#elif (((LONG_MAX >> 31) >> 31) == 0)
+
+#define L_EST_LONG_BIT 56
+
+#else
+
+#define L_EST_LONG_BIT 64
+
+#endif
+
+#define L_LONG_BIT (sizeof(long) * CHAR_BIT)
+
+#if ((AVL_MAX_DEPTH) > L_EST_LONG_BIT)
+
+/* The maximum depth may be greater than the number of bits in a long,
+** so multiple longs are needed to hold a bit array indexed by node
+** depth. */
+
+#define L_BIT_ARR_LONGS (((AVL_MAX_DEPTH) + L_LONG_BIT - 1) / L_LONG_BIT)
+
+#define L_BIT_ARR_DEFN(NAME) unsigned long NAME[L_BIT_ARR_LONGS];
+
+#define L_BIT_ARR_VAL(BIT_ARR, BIT_NUM) \
+ ((BIT_ARR)[(BIT_NUM) / L_LONG_BIT] & (1L << ((BIT_NUM) % L_LONG_BIT)))
+
+#define L_BIT_ARR_0(BIT_ARR, BIT_NUM) \
+ (BIT_ARR)[(BIT_NUM) / L_LONG_BIT] &= ~(1L << ((BIT_NUM) % L_LONG_BIT));
+
+#define L_BIT_ARR_1(BIT_ARR, BIT_NUM) \
+ (BIT_ARR)[(BIT_NUM) / L_LONG_BIT] |= 1L << ((BIT_NUM) % L_LONG_BIT);
+
+#define L_BIT_ARR_ALL(BIT_ARR, BIT_VAL) \
+ { int i = L_BIT_ARR_LONGS; do (BIT_ARR)[--i] = 0L - (BIT_VAL); while(i); }
+
+#else /* The bit array can definitely fit in one long */
+
+#define L_BIT_ARR_DEFN(NAME) unsigned long NAME;
+
+#define L_BIT_ARR_VAL(BIT_ARR, BIT_NUM) ((BIT_ARR) & (1L << (BIT_NUM)))
+
+#define L_BIT_ARR_0(BIT_ARR, BIT_NUM) (BIT_ARR) &= ~(1L << (BIT_NUM));
+
+#define L_BIT_ARR_1(BIT_ARR, BIT_NUM) (BIT_ARR) |= 1L << (BIT_NUM);
+
+#define L_BIT_ARR_ALL(BIT_ARR, BIT_VAL) (BIT_ARR) = 0L - (BIT_VAL);
+
+#endif
+
+#ifdef AVL_READ_ERRORS_HAPPEN
+
+#define L_CHECK_READ_ERROR(ERROR_RETURN) \
+ { if (AVL_READ_ERROR) return(ERROR_RETURN); }
+
+#else
+
+#define L_CHECK_READ_ERROR(ERROR_RETURN)
+
+#endif
+
+/* The presumed reason that an instantiation places additional fields
+** inside the AVL tree structure is that the SET_ and GET_ macros
+** need these fields. The "balance" function does not explicitly use
+** any fields in the AVL tree structure, so only pass an AVL tree
+** structure pointer to "balance" if it has instantiation-specific
+** fields that are (presumably) needed by the SET_/GET_ calls within
+** "balance".
+*/
+#ifdef AVL_INSIDE_STRUCT
+
+#define L_BALANCE_PARAM_CALL_PREFIX l_tree,
+#define L_BALANCE_PARAM_DECL_PREFIX L_(avl) *l_tree,
+
+#else
+
+#define L_BALANCE_PARAM_CALL_PREFIX
+#define L_BALANCE_PARAM_DECL_PREFIX
+
+#endif
+
+#ifdef AVL_IMPL_MASK
+
+#define L_IMPL_MASK (AVL_IMPL_MASK)
+
+#else
+
+/* Define all functions. */
+#define L_IMPL_MASK AVL_IMPL_ALL
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_INIT)
+
+L_SC void L_(init)(L_(avl) *l_tree)
+{
+ l_tree->root = AVL_NULL;
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_IS_EMPTY)
+
+L_SC int L_(is_empty)(L_(avl) *l_tree)
+{
+ return(l_tree->root == AVL_NULL);
+}
+
+#endif
+
+/* Put the private balance function in the same compilation module as
+** the insert function. */
+#if (L_IMPL_MASK & AVL_IMPL_INSERT)
+
+/* Balances subtree, returns handle of root node of subtree after balancing.
+*/
+L_SC AVL_HANDLE L_(balance)(L_BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h)
+{
+ AVL_HANDLE deep_h;
+
+ /* Either the "greater than" or the "less than" subtree of
+ ** this node has to be 2 levels deeper (or else it wouldn't
+ ** need balancing).
+ */
+ if (AVL_GET_BALANCE_FACTOR(bal_h) > 0)
+ {
+ /* "Greater than" subtree is deeper. */
+
+ deep_h = AVL_GET_GREATER(bal_h, 1);
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+
+ if (AVL_GET_BALANCE_FACTOR(deep_h) < 0)
+ {
+ int bf;
+
+ AVL_HANDLE old_h = bal_h;
+ bal_h = AVL_GET_LESS(deep_h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ AVL_SET_GREATER(old_h, AVL_GET_LESS(bal_h, 1))
+ AVL_SET_LESS(deep_h, AVL_GET_GREATER(bal_h, 1))
+ AVL_SET_LESS(bal_h, old_h)
+ AVL_SET_GREATER(bal_h, deep_h)
+
+ bf = AVL_GET_BALANCE_FACTOR(bal_h);
+
+ if (bf != 0)
+ {
+ if (bf > 0)
+ {
+ AVL_SET_BALANCE_FACTOR(old_h, -1)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ }
+ else
+ {
+ AVL_SET_BALANCE_FACTOR(deep_h, 1)
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
+ }
+
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ }
+ else
+ {
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ }
+ }
+ else
+ {
+ AVL_SET_GREATER(bal_h, AVL_GET_LESS(deep_h, 0))
+ AVL_SET_LESS(deep_h, bal_h)
+
+ if (AVL_GET_BALANCE_FACTOR(deep_h) == 0)
+ {
+ AVL_SET_BALANCE_FACTOR(deep_h, -1)
+ AVL_SET_BALANCE_FACTOR(bal_h, 1)
+ }
+ else
+ {
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ }
+
+ bal_h = deep_h;
+ }
+ }
+ else
+ {
+ /* "Less than" subtree is deeper. */
+
+ deep_h = AVL_GET_LESS(bal_h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+
+ if (AVL_GET_BALANCE_FACTOR(deep_h) > 0)
+ {
+ int bf;
+ AVL_HANDLE old_h = bal_h;
+ bal_h = AVL_GET_GREATER(deep_h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ AVL_SET_LESS(old_h, AVL_GET_GREATER(bal_h, 0))
+ AVL_SET_GREATER(deep_h, AVL_GET_LESS(bal_h, 0))
+ AVL_SET_GREATER(bal_h, old_h)
+ AVL_SET_LESS(bal_h, deep_h)
+
+ bf = AVL_GET_BALANCE_FACTOR(bal_h);
+
+ if (bf != 0)
+ {
+ if (bf < 0)
+ {
+ AVL_SET_BALANCE_FACTOR(old_h, 1)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ }
+ else
+ {
+ AVL_SET_BALANCE_FACTOR(deep_h, -1)
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
+ }
+
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ }
+ else
+ {
+ AVL_SET_BALANCE_FACTOR(old_h, 0)
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ }
+ }
+ else
+ {
+ AVL_SET_LESS(bal_h, AVL_GET_GREATER(deep_h, 0))
+ AVL_SET_GREATER(deep_h, bal_h)
+
+ if (AVL_GET_BALANCE_FACTOR(deep_h) == 0)
+ {
+ AVL_SET_BALANCE_FACTOR(deep_h, 1)
+ AVL_SET_BALANCE_FACTOR(bal_h, -1)
+ }
+ else
+ {
+ AVL_SET_BALANCE_FACTOR(deep_h, 0)
+ AVL_SET_BALANCE_FACTOR(bal_h, 0)
+ }
+
+ bal_h = deep_h;
+ }
+ }
+
+ return(bal_h);
+}
+
+L_SC AVL_HANDLE L_(insert)(L_(avl) *l_tree, AVL_HANDLE h)
+{
+ AVL_SET_LESS(h, AVL_NULL)
+ AVL_SET_GREATER(h, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(h, 0)
+
+ if (l_tree->root == AVL_NULL)
+ l_tree->root = h;
+ else
+ {
+ /* Last unbalanced node encountered in search for insertion point. */
+ AVL_HANDLE unbal = AVL_NULL;
+ /* Parent of last unbalanced node. */
+ AVL_HANDLE parent_unbal = AVL_NULL;
+ /* Balance factor of last unbalanced node. */
+ int unbal_bf;
+
+ /* Zero-based depth in tree. */
+ unsigned depth = 0, unbal_depth = 0;
+
+ /* Records a path into the tree. If bit n is true, the greater
+ ** branch was taken from the nth node in the path; otherwise the
+ ** less branch was taken. Bit 0 gives the branch from the root,
+ ** and so on. */
+ L_BIT_ARR_DEFN(branch)
+
+ AVL_HANDLE hh = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+ int cmp;
+
+ do
+ {
+ if (AVL_GET_BALANCE_FACTOR(hh) != 0)
+ {
+ unbal = hh;
+ parent_unbal = parent;
+ unbal_depth = depth;
+ }
+
+ cmp = AVL_COMPARE_NODE_NODE(h, hh);
+
+ if (cmp == 0)
+ /* Duplicate key. */
+ return(hh);
+
+ parent = hh;
+
+ if (cmp > 0)
+ {
+ hh = AVL_GET_GREATER(hh, 1);
+ L_BIT_ARR_1(branch, depth)
+ }
+ else
+ {
+ hh = AVL_GET_LESS(hh, 1);
+ L_BIT_ARR_0(branch, depth)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ }
+ while (hh != AVL_NULL);
+
+ /* Add node to insert as leaf of tree. */
+ if (cmp < 0)
+ AVL_SET_LESS(parent, h)
+ else
+ AVL_SET_GREATER(parent, h)
+
+ depth = unbal_depth;
+
+ if (unbal == AVL_NULL)
+ hh = l_tree->root;
+ else
+ {
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+ depth++;
+ unbal_bf = AVL_GET_BALANCE_FACTOR(unbal);
+
+ if (cmp < 0)
+ unbal_bf--;
+ else /* cmp > 0 */
+ unbal_bf++;
+
+ hh = cmp < 0 ? AVL_GET_LESS(unbal, 1) : AVL_GET_GREATER(unbal, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+
+ if ((unbal_bf != -2) && (unbal_bf != 2))
+ {
+ /* No rebalancing of tree is necessary. */
+ AVL_SET_BALANCE_FACTOR(unbal, unbal_bf)
+ unbal = AVL_NULL;
+ }
+ }
+
+ if (hh != AVL_NULL)
+ while (h != hh)
+ {
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+ depth++;
+
+ if (cmp < 0)
+ {
+ AVL_SET_BALANCE_FACTOR(hh, -1)
+ hh = AVL_GET_LESS(hh, 1);
+ }
+ else /* cmp > 0 */
+ {
+ AVL_SET_BALANCE_FACTOR(hh, 1)
+ hh = AVL_GET_GREATER(hh, 1);
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ if (unbal != AVL_NULL)
+ {
+ unbal = L_(balance)(L_BALANCE_PARAM_CALL_PREFIX unbal);
+ L_CHECK_READ_ERROR(AVL_NULL)
+
+ if (parent_unbal == AVL_NULL)
+ l_tree->root = unbal;
+ else
+ {
+ depth = unbal_depth - 1;
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+
+ if (cmp < 0)
+ AVL_SET_LESS(parent_unbal, unbal)
+ else /* cmp > 0 */
+ AVL_SET_GREATER(parent_unbal, unbal)
+ }
+ }
+
+ }
+
+ return(h);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_SEARCH)
+
+L_SC AVL_HANDLE L_(search)(L_(avl) *l_tree, AVL_KEY k, avl_search_type st)
+{
+ int cmp, target_cmp;
+ AVL_HANDLE match_h = AVL_NULL;
+ AVL_HANDLE h = l_tree->root;
+
+ if (st & AVL_LESS)
+ target_cmp = 1;
+ else if (st & AVL_GREATER)
+ target_cmp = -1;
+ else
+ target_cmp = 0;
+
+ while (h != AVL_NULL)
+ {
+ cmp = AVL_COMPARE_KEY_NODE(k, h);
+
+ if (cmp == 0)
+ {
+ if (st & AVL_EQUAL)
+ {
+ match_h = h;
+ break;
+ }
+
+ cmp = -target_cmp;
+ }
+ else if (target_cmp != 0)
+ if (!((cmp ^ target_cmp) & L_MASK_HIGH_BIT))
+ /* cmp and target_cmp are both positive or both negative. */
+ match_h = h;
+
+ h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ return(match_h);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_SEARCH_LEAST)
+
+L_SC AVL_HANDLE L_(search_least)(L_(avl) *l_tree)
+{
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+
+ while (h != AVL_NULL)
+ {
+ parent = h;
+ h = AVL_GET_LESS(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ return(parent);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_SEARCH_GREATEST)
+
+L_SC AVL_HANDLE L_(search_greatest)(L_(avl) *l_tree)
+{
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+
+ while (h != AVL_NULL)
+ {
+ parent = h;
+ h = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ return(parent);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_REMOVE)
+
+/* Prototype of balance function (called by remove) in case not in
+** same compilation unit.
+*/
+L_SC AVL_HANDLE L_(balance)(L_BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h);
+
+L_SC AVL_HANDLE L_(remove)(L_(avl) *l_tree, AVL_KEY k)
+{
+ /* Zero-based depth in tree. */
+ unsigned depth = 0, rm_depth;
+
+ /* Records a path into the tree. If bit n is true, the greater
+ ** branch was taken from the nth node in the path; otherwise the
+ ** less branch was taken. Bit 0 gives the branch from the root,
+ ** and so on. */
+ L_BIT_ARR_DEFN(branch)
+
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+ AVL_HANDLE child;
+ AVL_HANDLE path;
+ int cmp, cmp_shortened_sub_with_path;
+ int reduced_depth;
+ int bf;
+ AVL_HANDLE rm;
+ AVL_HANDLE parent_rm;
+
+ for (; ;)
+ {
+ if (h == AVL_NULL)
+ /* No node in tree with given key. */
+ return(AVL_NULL);
+
+ cmp = AVL_COMPARE_KEY_NODE(k, h);
+
+ if (cmp == 0)
+ /* Found node to remove. */
+ break;
+
+ parent = h;
+
+ if (cmp > 0)
+ {
+ h = AVL_GET_GREATER(h, 1);
+ L_BIT_ARR_1(branch, depth)
+ }
+ else
+ {
+ h = AVL_GET_LESS(h, 1);
+ L_BIT_ARR_0(branch, depth)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ cmp_shortened_sub_with_path = cmp;
+ }
+
+ rm = h;
+ parent_rm = parent;
+ rm_depth = depth;
+
+ /* If the node to remove is not a leaf node, we need to get a
+ ** leaf node, or a node with a single leaf as its child, to put
+ ** in the place of the node to remove. We will get the greatest
+ ** node in the less subtree (of the node to remove), or the least
+ ** node in the greater subtree. We take the leaf node from the
+ ** deeper subtree, if there is one. */
+
+ if (AVL_GET_BALANCE_FACTOR(h) < 0)
+ {
+ child = AVL_GET_LESS(h, 1);
+ L_BIT_ARR_0(branch, depth)
+ cmp = -1;
+ }
+ else
+ {
+ child = AVL_GET_GREATER(h, 1);
+ L_BIT_ARR_1(branch, depth)
+ cmp = 1;
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+
+ if (child != AVL_NULL)
+ {
+ cmp = -cmp;
+
+ do
+ {
+ parent = h;
+ h = child;
+
+ if (cmp < 0)
+ {
+ child = AVL_GET_LESS(h, 1);
+ L_BIT_ARR_0(branch, depth)
+ }
+ else
+ {
+ child = AVL_GET_GREATER(h, 1);
+ L_BIT_ARR_1(branch, depth)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ }
+ while (child != AVL_NULL);
+
+ if (parent == rm)
+ /* Only went through the do loop once. Deleted node will be replaced
+ ** in the tree structure by one of its immediate children. */
+ cmp_shortened_sub_with_path = -cmp;
+ else
+ cmp_shortened_sub_with_path = cmp;
+
+ /* Get the handle of the opposite child, which may be non-null. */
+ child = cmp > 0 ? AVL_GET_LESS(h, 0) : AVL_GET_GREATER(h, 0);
+ }
+
+ if (parent == AVL_NULL)
+ /* There were only 1 or 2 nodes in this tree. */
+ l_tree->root = child;
+ else if (cmp_shortened_sub_with_path < 0)
+ AVL_SET_LESS(parent, child)
+ else
+ AVL_SET_GREATER(parent, child)
+
+ /* "path" is the parent of the subtree being eliminated or reduced
+ ** from a depth of 2 to 1. If "path" is the node to be removed, we
+ ** set path to the node we're about to poke into the position of the
+ ** node to be removed. */
+ path = parent == rm ? h : parent;
+
+ if (h != rm)
+ {
+ /* Poke in the replacement for the node to be removed. */
+ AVL_SET_LESS(h, AVL_GET_LESS(rm, 0))
+ AVL_SET_GREATER(h, AVL_GET_GREATER(rm, 0))
+ AVL_SET_BALANCE_FACTOR(h, AVL_GET_BALANCE_FACTOR(rm))
+
+ if (parent_rm == AVL_NULL)
+ l_tree->root = h;
+ else
+ {
+ depth = rm_depth - 1;
+
+ if (L_BIT_ARR_VAL(branch, depth))
+ AVL_SET_GREATER(parent_rm, h)
+ else
+ AVL_SET_LESS(parent_rm, h)
+ }
+ }
+
+ if (path != AVL_NULL)
+ {
+ /* Create a temporary linked list from the parent of the path node
+ ** to the root node. */
+ h = l_tree->root;
+ parent = AVL_NULL;
+ depth = 0;
+
+ while (h != path)
+ {
+ if (L_BIT_ARR_VAL(branch, depth))
+ {
+ child = AVL_GET_GREATER(h, 1);
+ AVL_SET_GREATER(h, parent)
+ }
+ else
+ {
+ child = AVL_GET_LESS(h, 1);
+ AVL_SET_LESS(h, parent)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ depth++;
+ parent = h;
+ h = child;
+ }
+
+ /* Climb from the path node to the root node using the linked
+ ** list, restoring the tree structure and rebalancing as necessary.
+ */
+ reduced_depth = 1;
+ cmp = cmp_shortened_sub_with_path;
+
+ for (; ;)
+ {
+ if (reduced_depth)
+ {
+ bf = AVL_GET_BALANCE_FACTOR(h);
+
+ if (cmp < 0)
+ bf++;
+ else /* cmp > 0 */
+ bf--;
+
+ if ((bf == -2) || (bf == 2))
+ {
+ h = L_(balance)(L_BALANCE_PARAM_CALL_PREFIX h);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ bf = AVL_GET_BALANCE_FACTOR(h);
+ }
+ else
+ AVL_SET_BALANCE_FACTOR(h, bf)
+ reduced_depth = (bf == 0);
+ }
+
+ if (parent == AVL_NULL)
+ break;
+
+ child = h;
+ h = parent;
+ depth--;
+ cmp = L_BIT_ARR_VAL(branch, depth) ? 1 : -1;
+
+ if (cmp < 0)
+ {
+ parent = AVL_GET_LESS(h, 1);
+ AVL_SET_LESS(h, child)
+ }
+ else
+ {
+ parent = AVL_GET_GREATER(h, 1);
+ AVL_SET_GREATER(h, child)
+ }
+
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ l_tree->root = h;
+ }
+
+ return(rm);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_SUBST)
+
+L_SC AVL_HANDLE L_(subst)(L_(avl) *l_tree, AVL_HANDLE new_node)
+{
+ AVL_HANDLE h = l_tree->root;
+ AVL_HANDLE parent = AVL_NULL;
+ int cmp, last_cmp;
+
+ /* Search for node already in tree with same key. */
+ for (; ;)
+ {
+ if (h == AVL_NULL)
+ /* No node in tree with same key as new node. */
+ return(AVL_NULL);
+
+ cmp = AVL_COMPARE_NODE_NODE(new_node, h);
+
+ if (cmp == 0)
+ /* Found the node to substitute new one for. */
+ break;
+
+ last_cmp = cmp;
+ parent = h;
+ h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(AVL_NULL)
+ }
+
+ /* Copy tree housekeeping fields from node in tree to new node. */
+ AVL_SET_LESS(new_node, AVL_GET_LESS(h, 0))
+ AVL_SET_GREATER(new_node, AVL_GET_GREATER(h, 0))
+ AVL_SET_BALANCE_FACTOR(new_node, AVL_GET_BALANCE_FACTOR(h))
+
+ if (parent == AVL_NULL)
+ /* New node is also new root. */
+ l_tree->root = new_node;
+ else
+ {
+ /* Make parent point to new node. */
+ if (last_cmp < 0)
+ AVL_SET_LESS(parent, new_node)
+ else
+ AVL_SET_GREATER(parent, new_node)
+ }
+
+ return(h);
+}
+
+#endif
+
+#ifdef AVL_BUILD_ITER_TYPE
+
+#if (L_IMPL_MASK & AVL_IMPL_BUILD)
+
+L_SC int L_(build)(
+ L_(avl) *l_tree, AVL_BUILD_ITER_TYPE p, L_SIZE num_nodes)
+{
+ /* Gives path to subtree being built. If bit n is false, branch
+ ** less from the node at depth n; if true, branch greater. */
+ L_BIT_ARR_DEFN(branch)
+
+ /* If bit n is true, then for the current subtree at depth n, its
+ ** greater subtree has one more node than its less subtree. */
+ L_BIT_ARR_DEFN(rem)
+
+ /* Depth of root node of current subtree. */
+ unsigned depth = 0;
+
+ /* Number of nodes in current subtree. */
+ L_SIZE num_sub = num_nodes;
+
+ /* The algorithm relies on a stack of nodes whose less subtree has
+ ** been built, but whose greater subtree has not yet been built.
+ ** The stack is implemented as linked list. The nodes are linked
+ ** together by having the "greater" handle of a node set to the
+ ** next node in the list. "less_parent" is the handle of the first
+ ** node in the list. */
+ AVL_HANDLE less_parent = AVL_NULL;
+
+ /* h is root of current subtree, child is one of its children. */
+ AVL_HANDLE h;
+ AVL_HANDLE child;
+
+ if (num_nodes == 0)
+ {
+ l_tree->root = AVL_NULL;
+ return(1);
+ }
+
+ for (; ;)
+ {
+ while (num_sub > 2)
+ {
+ /* Subtract one for root of subtree. */
+ num_sub--;
+
+ if (num_sub & 1)
+ L_BIT_ARR_1(rem, depth)
+ else
+ L_BIT_ARR_0(rem, depth)
+ L_BIT_ARR_0(branch, depth)
+ depth++;
+
+ num_sub >>= 1;
+ }
+
+ if (num_sub == 2)
+ {
+ /* Build a subtree with two nodes, slanting to greater.
+ ** I arbitrarily chose to always have the extra node in the
+ ** greater subtree when there is an odd number of nodes to
+ ** split between the two subtrees. */
+
+ h = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ child = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ AVL_SET_LESS(child, AVL_NULL)
+ AVL_SET_GREATER(child, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(child, 0)
+ AVL_SET_GREATER(h, child)
+ AVL_SET_LESS(h, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(h, 1)
+ }
+ else /* num_sub == 1 */
+ {
+ /* Build a subtree with one node. */
+
+ h = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ AVL_SET_LESS(h, AVL_NULL)
+ AVL_SET_GREATER(h, AVL_NULL)
+ AVL_SET_BALANCE_FACTOR(h, 0)
+ }
+
+ while (depth)
+ {
+ depth--;
+
+ if (!L_BIT_ARR_VAL(branch, depth))
+ /* We've completed a less subtree. */
+ break;
+
+ /* We've completed a greater subtree, so attach it to
+ ** its parent (that is less than it). We pop the parent
+ ** off the stack of less parents. */
+ child = h;
+ h = less_parent;
+ less_parent = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR(0)
+ AVL_SET_GREATER(h, child)
+ /* num_sub = 2 * (num_sub - rem[depth]) + rem[depth] + 1 */
+ num_sub <<= 1;
+ num_sub += L_BIT_ARR_VAL(rem, depth) ? 0 : 1;
+
+ if (num_sub & (num_sub - 1))
+ /* num_sub is not a power of 2. */
+ AVL_SET_BALANCE_FACTOR(h, 0)
+ else
+ /* num_sub is a power of 2. */
+ AVL_SET_BALANCE_FACTOR(h, 1)
+ }
+
+ if (num_sub == num_nodes)
+ /* We've completed the full tree. */
+ break;
+
+ /* The subtree we've completed is the less subtree of the
+ ** next node in the sequence. */
+
+ child = h;
+ h = AVL_BUILD_ITER_VAL(p);
+ L_CHECK_READ_ERROR(0)
+ AVL_BUILD_ITER_INCR(p)
+ AVL_SET_LESS(h, child)
+
+ /* Put h into stack of less parents. */
+ AVL_SET_GREATER(h, less_parent)
+ less_parent = h;
+
+ /* Proceed to creating greater than subtree of h. */
+ L_BIT_ARR_1(branch, depth)
+ num_sub += L_BIT_ARR_VAL(rem, depth) ? 1 : 0;
+ depth++;
+
+ } /* end for ( ; ; ) */
+
+ l_tree->root = h;
+
+ return(1);
+}
+
+#endif
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_INIT_ITER)
+
+/* Initialize depth to invalid value, to indicate iterator is
+** invalid. (Depth is zero-based.) It's not necessary to initialize
+** iterators prior to passing them to the "start" function.
+*/
+L_SC void L_(init_iter)(L_(iter) *iter)
+{
+ iter->depth = ~0;
+}
+
+#endif
+
+#ifdef AVL_READ_ERRORS_HAPPEN
+
+#define L_CHECK_READ_ERROR_INV_DEPTH \
+ { if (AVL_READ_ERROR) { iter->depth = ~0; return; } }
+
+#else
+
+#define L_CHECK_READ_ERROR_INV_DEPTH
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_START_ITER)
+
+L_SC void L_(start_iter)(
+ L_(avl) *l_tree, L_(iter) *iter, AVL_KEY k, avl_search_type st)
+{
+ AVL_HANDLE h = l_tree->root;
+ unsigned d = 0;
+ int cmp, target_cmp;
+
+ /* Save the tree that we're going to iterate through in a
+ ** member variable. */
+ iter->tree_ = l_tree;
+
+ iter->depth = ~0;
+
+ if (h == AVL_NULL)
+ /* Tree is empty. */
+ return;
+
+ if (st & AVL_LESS)
+ /* Key can be greater than key of starting node. */
+ target_cmp = 1;
+ else if (st & AVL_GREATER)
+ /* Key can be less than key of starting node. */
+ target_cmp = -1;
+ else
+ /* Key must be same as key of starting node. */
+ target_cmp = 0;
+
+ for (; ;)
+ {
+ cmp = AVL_COMPARE_KEY_NODE(k, h);
+
+ if (cmp == 0)
+ {
+ if (st & AVL_EQUAL)
+ {
+ /* Equal node was sought and found as starting node. */
+ iter->depth = d;
+ break;
+ }
+
+ cmp = -target_cmp;
+ }
+ else if (target_cmp != 0)
+ if (!((cmp ^ target_cmp) & L_MASK_HIGH_BIT))
+ /* cmp and target_cmp are both negative or both positive. */
+ iter->depth = d;
+
+ h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+
+ if (h == AVL_NULL)
+ break;
+
+ if (cmp > 0)
+ L_BIT_ARR_1(iter->branch, d)
+ else
+ L_BIT_ARR_0(iter->branch, d)
+ iter->path_h[d++] = h;
+ }
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_START_ITER_LEAST)
+
+L_SC void L_(start_iter_least)(L_(avl) *l_tree, L_(iter) *iter)
+{
+ AVL_HANDLE h = l_tree->root;
+
+ iter->tree_ = l_tree;
+
+ iter->depth = ~0;
+
+ L_BIT_ARR_ALL(iter->branch, 0)
+
+ while (h != AVL_NULL)
+ {
+ if (iter->depth != ~0)
+ iter->path_h[iter->depth] = h;
+
+ iter->depth++;
+ h = AVL_GET_LESS(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+ }
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_START_ITER_GREATEST)
+
+L_SC void L_(start_iter_greatest)(L_(avl) *l_tree, L_(iter) *iter)
+{
+ AVL_HANDLE h = l_tree->root;
+
+ iter->tree_ = l_tree;
+
+ iter->depth = ~0;
+
+ L_BIT_ARR_ALL(iter->branch, 1)
+
+ while (h != AVL_NULL)
+ {
+ if (iter->depth != ~0)
+ iter->path_h[iter->depth] = h;
+
+ iter->depth++;
+ h = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+ }
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_GET_ITER)
+
+L_SC AVL_HANDLE L_(get_iter)(L_(iter) *iter)
+{
+ if (iter->depth == ~0)
+ return(AVL_NULL);
+
+ return(iter->depth == 0 ?
+ iter->tree_->root : iter->path_h[iter->depth - 1]);
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_INCR_ITER)
+
+L_SC void L_(incr_iter)(L_(iter) *iter)
+{
+#define l_tree (iter->tree_)
+
+ if (iter->depth != ~0)
+ {
+ AVL_HANDLE h =
+ AVL_GET_GREATER((iter->depth == 0 ?
+ iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+
+ if (h == AVL_NULL)
+ do
+ {
+ if (iter->depth == 0)
+ {
+ iter->depth = ~0;
+ break;
+ }
+
+ iter->depth--;
+ }
+ while (L_BIT_ARR_VAL(iter->branch, iter->depth));
+ else
+ {
+ L_BIT_ARR_1(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
+
+ for (; ;)
+ {
+ h = AVL_GET_LESS(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+
+ if (h == AVL_NULL)
+ break;
+
+ L_BIT_ARR_0(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
+ }
+ }
+ }
+
+#undef l_tree
+}
+
+#endif
+
+#if (L_IMPL_MASK & AVL_IMPL_DECR_ITER)
+
+L_SC void L_(decr_iter)(L_(iter) *iter)
+{
+#define l_tree (iter->tree_)
+
+ if (iter->depth != ~0)
+ {
+ AVL_HANDLE h =
+ AVL_GET_LESS((iter->depth == 0 ?
+ iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+
+ if (h == AVL_NULL)
+ do
+ {
+ if (iter->depth == 0)
+ {
+ iter->depth = ~0;
+ break;
+ }
+
+ iter->depth--;
+ }
+ while (!L_BIT_ARR_VAL(iter->branch, iter->depth));
+ else
+ {
+ L_BIT_ARR_0(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
+
+ for (; ;)
+ {
+ h = AVL_GET_GREATER(h, 1);
+ L_CHECK_READ_ERROR_INV_DEPTH
+
+ if (h == AVL_NULL)
+ break;
+
+ L_BIT_ARR_1(iter->branch, iter->depth)
+ iter->path_h[iter->depth++] = h;
+ }
+ }
+ }
+
+#undef l_tree
+}
+
+#endif
+
+/* Tidy up the preprocessor symbol name space. */
+#undef L_
+#undef L_EST_LONG_BIT
+#undef L_SIZE
+#undef L_MASK_HIGH_BIT
+#undef L_LONG_BIT
+#undef L_BIT_ARR_DEFN
+#undef L_BIT_ARR_VAL
+#undef L_BIT_ARR_0
+#undef L_BIT_ARR_1
+#undef L_BIT_ARR_ALL
+#undef L_CHECK_READ_ERROR
+#undef L_CHECK_READ_ERROR_INV_DEPTH
+#undef L_BIT_ARR_LONGS
+#undef L_IMPL_MASK
+#undef L_SC
+#undef L_BALANCE_PARAM_CALL_PREFIX
+#undef L_BALANCE_PARAM_DECL_PREFIX
diff --git a/vpx_mem/memory_manager/include/heapmm.h b/vpx_mem/memory_manager/include/heapmm.h
new file mode 100644
index 000000000..933e30dd7
--- /dev/null
+++ b/vpx_mem/memory_manager/include/heapmm.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+/* External header file for Heap Memory Manager. See documentation in
+** heapmm.html.
+*/
+
+#undef HMM_PROCESS
+
+/* Include once per configuration in a particular translation unit. */
+
+#ifndef HMM_CNFG_NUM
+
+/* Default configuration. */
+
+#ifndef HMM_INC_CNFG_DFLT
+#define HMM_INC_CNFG_DFLT
+#define HMM_PROCESS
+#endif
+
+#elif HMM_CNFG_NUM == 0
+
+/* Test configuration. */
+
+#ifndef HMM_INC_CNFG_0
+#define HMM_INC_CNFG_0
+#define HMM_PROCESS
+#endif
+
+#elif HMM_CNFG_NUM == 1
+
+#ifndef HMM_INC_CNFG_1
+#define HMM_INC_CNFG_1
+#define HMM_PROCESS
+#endif
+
+#elif HMM_CNFG_NUM == 2
+
+#ifndef HMM_INC_CNFG_2
+#define HMM_INC_CNFG_2
+#define HMM_PROCESS
+#endif
+
+#elif HMM_CNFG_NUM == 3
+
+#ifndef HMM_INC_CNFG_3
+#define HMM_INC_CNFG_3
+#define HMM_PROCESS
+#endif
+
+#elif HMM_CNFG_NUM == 4
+
+#ifndef HMM_INC_CNFG_4
+#define HMM_INC_CNFG_4
+#define HMM_PROCESS
+#endif
+
+#elif HMM_CNFG_NUM == 5
+
+#ifndef HMM_INC_CNFG_5
+#define HMM_INC_CNFG_5
+#define HMM_PROCESS
+#endif
+
+#endif
+
+#ifdef HMM_PROCESS
+
+#include "hmm_cnfg.h"
+
+/* Heap descriptor. */
+typedef struct HMM_UNIQUE(structure)
+{
+ /* private: */
+
+ /* Pointer to (payload of) root node in AVL tree. This field should
+ ** really be the AVL tree descriptor (type avl_avl). But (in the
+ ** instantiation of the AVL tree generic package used in this package) the
+ ** AVL tree descriptor simply contains a pointer to the root. So,
+ ** whenever a pointer to the AVL tree descriptor is needed, I use the
+ ** cast:
+ **
+ ** (avl_avl *) &(heap_desc->avl_tree_root)
+ **
+ ** (where heap_desc is a pointer to a heap descriptor). This trick
+ ** allows me to avoid including cavl_if.h in this external header. */
+ void *avl_tree_root;
+
+ /* Pointer to first byte of last block freed, after any coalescing. */
+ void *last_freed;
+
+ /* public: */
+
+ HMM_UNIQUE(size_bau) num_baus_can_shrink;
+ void *end_of_shrinkable_chunk;
+}
+HMM_UNIQUE(descriptor);
+
+/* Prototypes for externally-callable functions. */
+
+void HMM_UNIQUE(init)(HMM_UNIQUE(descriptor) *desc);
+
+void *HMM_UNIQUE(alloc)(
+ HMM_UNIQUE(descriptor) *desc, HMM_UNIQUE(size_aau) num_addr_align_units);
+
+/* NOT YET IMPLEMENTED */
+void *HMM_UNIQUE(greedy_alloc)(
+ HMM_UNIQUE(descriptor) *desc, HMM_UNIQUE(size_aau) needed_addr_align_units,
+ HMM_UNIQUE(size_aau) coveted_addr_align_units);
+
+int HMM_UNIQUE(resize)(
+ HMM_UNIQUE(descriptor) *desc, void *mem,
+ HMM_UNIQUE(size_aau) num_addr_align_units);
+
+/* NOT YET IMPLEMENTED */
+int HMM_UNIQUE(greedy_resize)(
+ HMM_UNIQUE(descriptor) *desc, void *mem,
+ HMM_UNIQUE(size_aau) needed_addr_align_units,
+ HMM_UNIQUE(size_aau) coveted_addr_align_units);
+
+void HMM_UNIQUE(free)(HMM_UNIQUE(descriptor) *desc, void *mem);
+
+HMM_UNIQUE(size_aau) HMM_UNIQUE(true_size)(void *mem);
+
+HMM_UNIQUE(size_aau) HMM_UNIQUE(largest_available)(
+ HMM_UNIQUE(descriptor) *desc);
+
+void HMM_UNIQUE(new_chunk)(
+ HMM_UNIQUE(descriptor) *desc, void *start_of_chunk,
+ HMM_UNIQUE(size_bau) num_block_align_units);
+
+void HMM_UNIQUE(grow_chunk)(
+ HMM_UNIQUE(descriptor) *desc, void *end_of_chunk,
+ HMM_UNIQUE(size_bau) num_block_align_units);
+
+/* NOT YET IMPLEMENTED */
+void HMM_UNIQUE(shrink_chunk)(
+ HMM_UNIQUE(descriptor) *desc,
+ HMM_UNIQUE(size_bau) num_block_align_units);
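+
+/* A minimal usage sketch (hedged: the names assume the default "hmm_"
+** prefix from hmm_cnfg.h; the pool, its alignment, and the exact
+** AAU/BAU conversions are illustrative only):
+**
+**   hmm_descriptor desc;
+**   void *p;
+**   static char pool[4096];  (must be suitably aligned in real use)
+**
+**   hmm_init(&desc);
+**   hmm_new_chunk(&desc, pool, (size of pool in BAUs));
+**   p = hmm_alloc(&desc, (request size in AAUs));
+**   if (p)
+**       hmm_free(&desc, p);
+*/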
+
+#endif /* defined HMM_PROCESS */
diff --git a/vpx_mem/memory_manager/include/hmm_cnfg.h b/vpx_mem/memory_manager/include/hmm_cnfg.h
new file mode 100644
index 000000000..86e4e9fa8
--- /dev/null
+++ b/vpx_mem/memory_manager/include/hmm_cnfg.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+/* Configure Heap Memory Manager for processor architecture, compiler,
+** and desired performance characteristics. This file is included
+** by heapmm.h, so these definitions can be used by code external to
+** HMM. You can change the default configuration, and/or create alternate
+** configuration(s).
+*/
+
+/* To allow for multiple configurations of HMM to be used in the same
+** compilation unit, undefine all preprocessor symbols that will be
+** defined below.
+*/
+#undef HMM_ADDR_ALIGN_UNIT
+#undef HMM_BLOCK_ALIGN_UNIT
+#undef HMM_UNIQUE
+#undef HMM_DESC_PARAM
+#undef HMM_SYM_TO_STRING
+#undef HMM_SYM_TO_STRING_AUX
+#undef HMM_AUDIT_FAIL
+
+/* Turn X into a string literal after one macro expansion pass of X, so
+** that macros such as __LINE__ are expanded before being stringized.
+** This two-level trick works with both GCC and Visual C++. */
+#define HMM_SYM_TO_STRING(X) HMM_SYM_TO_STRING_AUX(X)
+#define HMM_SYM_TO_STRING_AUX(X) #X
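+
+/* For example, when invoked on line 123 of a source file,
+** HMM_SYM_TO_STRING(__LINE__) yields the string literal "123" rather
+** than "__LINE__". */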
+
+#ifndef HMM_CNFG_NUM
+
+/* Default configuration. */
+
+/* Use hmm_ prefix to avoid identifier conflicts. */
+#define HMM_UNIQUE(BASE) hmm_ ## BASE
+
+/* Number of bytes in an Address Alignment Unit (AAU). */
+/* Raised from the usual sizeof(int) so that every allocation is
+** 32-byte aligned. */
+#define HMM_ADDR_ALIGN_UNIT 32
+
+/* Number of AAUs in a Block Alignment Unit (BAU). */
+#define HMM_BLOCK_ALIGN_UNIT 1
+
+/* Type of unsigned integer big enough to hold the size of a Block in AAUs. */
+typedef unsigned long HMM_UNIQUE(size_aau);
+
+/* Type of unsigned integer big enough to hold the size of a Block/Chunk
+** in BAUs. The high bit is robbed for use as the block-status flag. */
+typedef unsigned long HMM_UNIQUE(size_bau);
+
+void hmm_dflt_abort(const char *, const char *);
+
+/* Actions upon a self-audit failure. Must expand to a single complete
+** statement. If you remove the definition of this macro, no self-auditing
+** will be performed. */
+#define HMM_AUDIT_FAIL \
+ hmm_dflt_abort(__FILE__, HMM_SYM_TO_STRING(__LINE__));
+
+#elif HMM_CNFG_NUM == 0
+
+/* Definitions for testing. */
+
+#define HMM_UNIQUE(BASE) thmm_ ## BASE
+
+#define HMM_ADDR_ALIGN_UNIT sizeof(int)
+
+#define HMM_BLOCK_ALIGN_UNIT 3
+
+typedef unsigned HMM_UNIQUE(size_aau);
+
+typedef unsigned short HMM_UNIQUE(size_bau);
+
+/* Under this test setup, a long jump is done if there is a self-audit
+** failure.
+*/
+
+#include <setjmp.h>
+
+extern jmp_buf HMM_UNIQUE(jmp_buf);
+extern const char *HMM_UNIQUE(fail_file);
+extern unsigned HMM_UNIQUE(fail_line);
+
+#define HMM_AUDIT_FAIL \
+ { HMM_UNIQUE(fail_file) = __FILE__; HMM_UNIQUE(fail_line) = __LINE__; \
+ longjmp(HMM_UNIQUE(jmp_buf), 1); }
+
+#elif HMM_CNFG_NUM == 1
+
+/* Put configuration 1 definitions here (if there is a configuration 1). */
+
+#elif HMM_CNFG_NUM == 2
+
+/* Put configuration 2 definitions here. */
+
+#elif HMM_CNFG_NUM == 3
+
+/* Put configuration 3 definitions here. */
+
+#elif HMM_CNFG_NUM == 4
+
+/* Put configuration 4 definitions here. */
+
+#elif HMM_CNFG_NUM == 5
+
+/* Put configuration 5 definitions here. */
+
+#endif
diff --git a/vpx_mem/memory_manager/include/hmm_intrnl.h b/vpx_mem/memory_manager/include/hmm_intrnl.h
new file mode 100644
index 000000000..6e2be08fc
--- /dev/null
+++ b/vpx_mem/memory_manager/include/hmm_intrnl.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license and patent
+ * grant that can be found in the LICENSE file in the root of the source
+ * tree. All contributing project authors may be found in the AUTHORS
+ * file in the root of the source tree.
+ */
+
+
+/* This code is in the public domain.
+** Version: 1.1 Author: Walt Karas
+*/
+
+#ifndef HMM_INTRNL_H_
+#define HMM_INTRNL_H_
+
+#ifdef __uClinux__
+# include <lddk.h>
+#endif
+
+#include "heapmm.h"
+
+#define U(BASE) HMM_UNIQUE(BASE)
+
+/* Mask of high bit of variable of size_bau type. */
+#define HIGH_BIT_BAU_SIZE \
+ ((U(size_bau)) ~ (((U(size_bau)) ~ (U(size_bau)) 0) >> 1))
+
+/* Add a given number of AAUs to pointer. */
+#define AAUS_FORWARD(PTR, AAU_OFFSET) \
+ (((char *) (PTR)) + ((AAU_OFFSET) * ((U(size_aau)) HMM_ADDR_ALIGN_UNIT)))
+
+/* Subtract a given number of AAUs from pointer. */
+#define AAUS_BACKWARD(PTR, AAU_OFFSET) \
+ (((char *) (PTR)) - ((AAU_OFFSET) * ((U(size_aau)) HMM_ADDR_ALIGN_UNIT)))
+
+/* Add a given number of BAUs to a pointer. */
+#define BAUS_FORWARD(PTR, BAU_OFFSET) \
+ AAUS_FORWARD((PTR), (BAU_OFFSET) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT))
+
+/* Subtract a given number of BAUs from a pointer. */
+#define BAUS_BACKWARD(PTR, BAU_OFFSET) \
+ AAUS_BACKWARD((PTR), (BAU_OFFSET) * ((U(size_aau)) HMM_BLOCK_ALIGN_UNIT))
+
+typedef struct head_struct
+{
+ /* Sizes in Block Alignment Units. */
+ HMM_UNIQUE(size_bau) previous_block_size, block_size;
+}
+head_record;
+
+typedef struct ptr_struct
+{
+ struct ptr_struct *self, *prev, *next;
+}
+ptr_record;
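+
+/* Resulting block layout (a sketch; the PTR_REC_TO_HEAD and
+** HEAD_TO_PTR_REC macros below convert between the two addresses):
+**
+**   head_record   <- block head: previous/current sizes plus the
+**                    robbed status bits
+**   payload       <- address handed back to the caller; while the
+**                    block is free, the ptr_record linking it into
+**                    its bin lives here instead
+*/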
+
+/* Divide and round up any fraction to the next whole number. */
+#define DIV_ROUND_UP(NUMER, DENOM) (((NUMER) + (DENOM) - 1) / (DENOM))
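+/* E.g. DIV_ROUND_UP(7, 4) expands to ((7) + (4) - 1) / (4), i.e. 2. */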
+
+/* Number of AAUs in a block head. */
+#define HEAD_AAUS DIV_ROUND_UP(sizeof(head_record), HMM_ADDR_ALIGN_UNIT)
+
+/* Number of AAUs in a block pointer record. */
+#define PTR_RECORD_AAUS DIV_ROUND_UP(sizeof(ptr_record), HMM_ADDR_ALIGN_UNIT)
+
+/* Number of BAUs in a dummy end record (at end of chunk). */
+#define DUMMY_END_BLOCK_BAUS DIV_ROUND_UP(HEAD_AAUS, HMM_BLOCK_ALIGN_UNIT)
+
+/* Minimum number of BAUs in a block (allowing room for the pointer record). */
+#define MIN_BLOCK_BAUS \
+ DIV_ROUND_UP(HEAD_AAUS + PTR_RECORD_AAUS, HMM_BLOCK_ALIGN_UNIT)
+
+/* Return number of BAUs in block (masking off high bit containing block
+** status). */
+#define BLOCK_BAUS(HEAD_PTR) \
+ (((head_record *) (HEAD_PTR))->block_size & ~HIGH_BIT_BAU_SIZE)
+
+/* Return number of BAUs in previous block (masking off high bit containing
+** block status). */
+#define PREV_BLOCK_BAUS(HEAD_PTR) \
+ (((head_record *) (HEAD_PTR))->previous_block_size & ~HIGH_BIT_BAU_SIZE)
+
+/* Set number of BAUs in previous block, preserving high bit containing
+** block status. */
+#define SET_PREV_BLOCK_BAUS(HEAD_PTR, N_BAUS) \
+ { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
+ h_ptr->previous_block_size &= HIGH_BIT_BAU_SIZE; \
+ h_ptr->previous_block_size |= (N_BAUS); }
+
+/* Convert pointer to pointer record of block to pointer to block's head
+** record. */
+#define PTR_REC_TO_HEAD(PTR_REC_PTR) \
+ ((head_record *) AAUS_BACKWARD(PTR_REC_PTR, HEAD_AAUS))
+
+/* Convert pointer to block head to pointer to block's pointer record. */
+#define HEAD_TO_PTR_REC(HEAD_PTR) \
+ ((ptr_record *) AAUS_FORWARD(HEAD_PTR, HEAD_AAUS))
+
+/* Returns non-zero if block is allocated. */
+#define IS_BLOCK_ALLOCATED(HEAD_PTR) \
+ (((((head_record *) (HEAD_PTR))->block_size | \
+ ((head_record *) (HEAD_PTR))->previous_block_size) & \
+ HIGH_BIT_BAU_SIZE) == 0)
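+
+/* (That is, a block counts as allocated only while the status bit is
+** clear in both of its size fields; the marking macros below manage
+** these bits.) */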
+
+#define MARK_BLOCK_ALLOCATED(HEAD_PTR) \
+ { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
+ h_ptr->block_size &= ~HIGH_BIT_BAU_SIZE; \
+ h_ptr->previous_block_size &= ~HIGH_BIT_BAU_SIZE; }
+
+/* Mark a block as free when it is not the first block in a bin (and
+** therefore not a node in the AVL tree). */
+#define MARK_SUCCESSIVE_BLOCK_IN_FREE_BIN(HEAD_PTR) \
+ { register head_record *h_ptr = (head_record *) (HEAD_PTR); \
+ h_ptr->block_size |= HIGH_BIT_BAU_SIZE; }
+
+/* Prototypes for internal functions implemented in one file and called in
+** another.
+*/
+
+void U(into_free_collection)(U(descriptor) *desc, head_record *head_ptr);
+
+void U(out_of_free_collection)(U(descriptor) *desc, head_record *head_ptr);
+
+void *U(alloc_from_bin)(
+ U(descriptor) *desc, ptr_record *bin_front_ptr, U(size_bau) n_baus);
+
+#ifdef HMM_AUDIT_FAIL
+
+/* Simply contains a reference to the HMM_AUDIT_FAIL macro and a
+** dummy return. */
+int U(audit_block_fail_dummy_return)(void);
+
+
+/* Auditing a block consists of checking that the size in its head
+** matches the previous block size in the head of the next block. */
+#define AUDIT_BLOCK_AS_EXPR(HEAD_PTR) \
+ ((BLOCK_BAUS(HEAD_PTR) == \
+ PREV_BLOCK_BAUS(BAUS_FORWARD(HEAD_PTR, BLOCK_BAUS(HEAD_PTR)))) ? \
+ 0 : U(audit_block_fail_dummy_return)())
+
+#define AUDIT_BLOCK(HEAD_PTR) \
+ { void *h_ptr = (HEAD_PTR); AUDIT_BLOCK_AS_EXPR(h_ptr); }
+
+#endif
+
+/* Interface to AVL tree generic package instantiation. */
+
+#define AVL_UNIQUE(BASE) U(avl_ ## BASE)
+
+#define AVL_HANDLE ptr_record *
+
+#define AVL_KEY U(size_bau)
+
+#define AVL_MAX_DEPTH 64
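+
+/* Under the default configuration, AVL_UNIQUE(search), for example,
+** expands to hmm_avl_search. */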
+
+#include "cavl_if.h"
+
+#endif /* Include once. */