author     Wilco Dijkstra <wdijkstr@arm.com>    2017-10-24 12:39:24 +0100
committer  Wilco Dijkstra <wdijkstr@arm.com>    2017-10-24 12:39:24 +0100
commit     3f6bb8a32e5f5efd78ac08c41e623651cc242a89 (patch)
tree       268b445ef692deca0158cdd4832b0cc925538703 /malloc
parent     1d479c8c33bab8c47f66c2199a353b5459881be3 (diff)
Add single-threaded path to malloc/realloc/calloc/memalign

This patch adds a single-threaded fast path to malloc, realloc, calloc and
memalign.  When we're single-threaded, we can bypass arena_get (which always
locks the arena it returns) and just use the main arena.  Also avoid retrying
a different arena since there is just the main arena.

	* malloc/malloc.c (__libc_malloc): Add SINGLE_THREAD_P path.
	(__libc_realloc): Likewise.
	(_mid_memalign): Likewise.
	(__libc_calloc): Likewise.
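The pattern is the same in all four entry points: test SINGLE_THREAD_P first, and
if the process is single-threaded, allocate from main_arena directly, skipping
both the arena lock and the out-of-memory retry in another arena.  The sketch
below shows that shape in isolation.  It is not glibc code: the single_thread_p
flag, struct arena, int_malloc stand-in and sketch_malloc wrapper are invented
for illustration; the real identifiers are SINGLE_THREAD_P, main_arena,
_int_malloc and arena_get, as in the diff below.

/* Minimal sketch of the single-threaded fast path; all names are
   placeholders, not the real glibc internals.  */
#include <pthread.h>
#include <stdlib.h>

struct arena
{
  pthread_mutex_t mutex;
  /* ... allocator state ... */
};

static struct arena main_arena = { .mutex = PTHREAD_MUTEX_INITIALIZER };

/* In glibc the check is the SINGLE_THREAD_P macro; here it is a plain flag
   that would be cleared once a second thread is created.  */
static int single_thread_p = 1;

/* Stand-in for _int_malloc: carve a chunk out of the given arena.  */
static void *
int_malloc (struct arena *av, size_t bytes)
{
  (void) av;
  return malloc (bytes);   /* placeholder body */
}

void *
sketch_malloc (size_t bytes)
{
  void *victim;

  /* Single-threaded fast path: use the main arena without taking its lock
     and without retrying in a different arena on failure.  */
  if (single_thread_p)
    return int_malloc (&main_arena, bytes);

  /* Multi-threaded path: lock the arena for the duration of the call.  */
  pthread_mutex_lock (&main_arena.mutex);
  victim = int_malloc (&main_arena, bytes);
  pthread_mutex_unlock (&main_arena.mutex);
  return victim;
}

The saving is the lock/unlock pair (and, in calloc, the retry logic) on every
allocation while the process has a single thread; once a second thread exists,
the original locked path is taken unchanged.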
Diffstat (limited to 'malloc')
-rw-r--r--   malloc/malloc.c   50
1 file changed, 41 insertions(+), 9 deletions(-)
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 32b6e968fc..3718a4636a 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -3038,6 +3038,14 @@ __libc_malloc (size_t bytes)
   DIAG_POP_NEEDS_COMMENT;
 #endif
 
+  if (SINGLE_THREAD_P)
+    {
+      victim = _int_malloc (&main_arena, bytes);
+      assert (!victim || chunk_is_mmapped (mem2chunk (victim)) ||
+              &main_arena == arena_for_chunk (mem2chunk (victim)));
+      return victim;
+    }
+
   arena_get (ar_ptr, bytes);
 
   victim = _int_malloc (ar_ptr, bytes);
@@ -3194,6 +3202,15 @@ __libc_realloc (void *oldmem, size_t bytes)
       return newmem;
     }
 
+  if (SINGLE_THREAD_P)
+    {
+      newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
+      assert (!newp || chunk_is_mmapped (mem2chunk (newp)) ||
+              ar_ptr == arena_for_chunk (mem2chunk (newp)));
+
+      return newp;
+    }
+
   __libc_lock_lock (ar_ptr->mutex);
 
   newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
@@ -3269,6 +3286,15 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
       alignment = a;
     }
 
+  if (SINGLE_THREAD_P)
+    {
+      p = _int_memalign (&main_arena, alignment, bytes);
+      assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
+              &main_arena == arena_for_chunk (mem2chunk (p)));
+
+      return p;
+    }
+
   arena_get (ar_ptr, bytes + alignment + MINSIZE);
 
   p = _int_memalign (ar_ptr, alignment, bytes);
@@ -3361,7 +3387,11 @@ __libc_calloc (size_t n, size_t elem_size)
 
   MAYBE_INIT_TCACHE ();
 
-  arena_get (av, sz);
+  if (SINGLE_THREAD_P)
+    av = &main_arena;
+  else
+    arena_get (av, sz);
+
   if (av)
     {
       /* Check if we hand out the top chunk, in which case there may be no
@@ -3391,19 +3421,21 @@ __libc_calloc (size_t n, size_t elem_size)
     }
   mem = _int_malloc (av, sz);
-
 
   assert (!mem || chunk_is_mmapped (mem2chunk (mem)) ||
           av == arena_for_chunk (mem2chunk (mem)));
 
-  if (mem == 0 && av != NULL)
+  if (!SINGLE_THREAD_P)
     {
-      LIBC_PROBE (memory_calloc_retry, 1, sz);
-      av = arena_get_retry (av, sz);
-      mem = _int_malloc (av, sz);
-    }
+      if (mem == 0 && av != NULL)
+        {
+          LIBC_PROBE (memory_calloc_retry, 1, sz);
+          av = arena_get_retry (av, sz);
+          mem = _int_malloc (av, sz);
+        }
 
-  if (av != NULL)
-    __libc_lock_unlock (av->mutex);
+      if (av != NULL)
+        __libc_lock_unlock (av->mutex);
+    }
 
   /* Allocation failed even after a retry.  */
   if (mem == 0)