aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--ChangeLog5
-rw-r--r--malloc/obstack.c14
-rw-r--r--malloc/tst-obstack.c54
3 files changed, 69 insertions, 4 deletions
diff --git a/ChangeLog b/ChangeLog
index 4c0a9057cb..84334310ea 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,10 @@
2000-10-24 Ulrich Drepper <drepper@redhat.com>
+ * malloc/obstack.c (_obstack_newchunk): Correctly align first returned
+ block.
+ * malloc/tst-obstack.c: New file.
+ Patch and test case by Alexandre Duret-Lutz <duret_g@epita.fr>.
+
Complete revamp of the reference counter handling.
* include/link.h (struct link_map): Add l_idx field.
* elf/dl-close.c: Handle decrementing of reference counters more
diff --git a/malloc/obstack.c b/malloc/obstack.c
index 38fc02faed..6b32678b56 100644
--- a/malloc/obstack.c
+++ b/malloc/obstack.c
@@ -282,9 +282,10 @@ _obstack_newchunk (h, length)
register long obj_size = h->next_free - h->object_base;
register long i;
long already;
+ char *object_base;
/* Compute size for new chunk. */
- new_size = (obj_size + length) + (obj_size >> 3) + 100;
+ new_size = (obj_size + length) + (obj_size >> 3) + h->alignment_mask + 100;
if (new_size < h->chunk_size)
new_size = h->chunk_size;
@@ -296,6 +297,11 @@ _obstack_newchunk (h, length)
new_chunk->prev = old_chunk;
new_chunk->limit = h->chunk_limit = (char *) new_chunk + new_size;
+ /* Compute an aligned object_base in the new chunk */
+ object_base =
+ __INT_TO_PTR ((__PTR_TO_INT (new_chunk->contents) + h->alignment_mask)
+ & ~ (h->alignment_mask));
+
/* Move the existing object to the new chunk.
Word at a time is fast and is safe if the object
is sufficiently aligned. */
@@ -303,7 +309,7 @@ _obstack_newchunk (h, length)
{
for (i = obj_size / sizeof (COPYING_UNIT) - 1;
i >= 0; i--)
- ((COPYING_UNIT *)new_chunk->contents)[i]
+ ((COPYING_UNIT *)object_base)[i]
= ((COPYING_UNIT *)h->object_base)[i];
/* We used to copy the odd few remaining bytes as one extra COPYING_UNIT,
but that can cross a page boundary on a machine
@@ -314,7 +320,7 @@ _obstack_newchunk (h, length)
already = 0;
/* Copy remaining bytes one by one. */
for (i = already; i < obj_size; i++)
- new_chunk->contents[i] = h->object_base[i];
+ object_base[i] = h->object_base[i];
/* If the object just copied was the only data in OLD_CHUNK,
free that chunk and remove it from the chain.
@@ -325,7 +331,7 @@ _obstack_newchunk (h, length)
CALL_FREEFUN (h, old_chunk);
}
- h->object_base = new_chunk->contents;
+ h->object_base = object_base;
h->next_free = h->object_base + obj_size;
/* The new chunk certainly contains no empty object yet. */
h->maybe_empty_object = 0;
diff --git a/malloc/tst-obstack.c b/malloc/tst-obstack.c
new file mode 100644
index 0000000000..bd00a0ab7b
--- /dev/null
+++ b/malloc/tst-obstack.c
@@ -0,0 +1,54 @@
+/* Test case by Alexandre Duret-Lutz <duret_g@epita.fr>. */
+#include <obstack.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define obstack_chunk_alloc verbose_malloc
+#define obstack_chunk_free verbose_free
+#define ALIGN_BOUNDARY 64
+#define ALIGN_MASK (ALIGN_BOUNDARY - 1)
+#define OBJECT_SIZE 1000
+
+static void *
+verbose_malloc (size_t size)
+{
+ void *buf = malloc (size);
+ printf ("malloc (%zu) => %p\n", size, buf);
+ return buf;
+}
+
+static void
+verbose_free (void *buf)
+{
+ free (buf);
+ printf ("free (%p)\n", buf);
+}
+
+int
+main (void)
+{
+ struct obstack obs;
+ int i;
+ int result = 0;
+
+ obstack_init (&obs);
+ obstack_alignment_mask (&obs) = ALIGN_MASK;
+ /* finish an empty object to take alignment into account */
+ obstack_finish (&obs);
+
+ /* let's allocate some objects and print their addresses */
+ for (i = 15; i > 0; --i)
+ {
+ void *obj = obstack_alloc (&obs, OBJECT_SIZE);
+
+ printf ("obstack_alloc (%d) => %p \t%s\n", OBJECT_SIZE, obj,
+ ((uintptr_t) obj & ALIGN_MASK) ? "(not aligned)" : "");
+ result |= ((uintptr_t) obj & ALIGN_MASK) != 0;
+ }
+
+ /* clean up */
+ obstack_free (&obs, 0);
+
+ return result;
+}