summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author Rich Felker <dalias@aerifal.cx> 2018-04-19 20:56:26 -0400
committer Rich Felker <dalias@aerifal.cx> 2018-04-19 20:56:26 -0400
commit 72141795d4edd17f88da192447395a48444afa10 (patch)
tree 483162056393cc2f4f380216781726531f76b94e /src
parent 3c2cbbe7ba8b4486299ae0d5336ae01ab520d116 (diff)
download musl-72141795d4edd17f88da192447395a48444afa10.tar.gz
return chunks split off by memalign using __bin_chunk instead of free
this change serves multiple purposes: 1. it ensures that static linking of memalign-family functions will pull in the system malloc implementation, thereby causing link errors if an attempt is made to link the system memalign functions with a replacement malloc (incomplete allocator replacement). 2. it eliminates calls to free that are unpaired with allocations, which are confusing when setting breakpoints or tracing execution. as a bonus, making __bin_chunk external may discourage aggressive and unnecessary inlining of it.
Diffstat (limited to 'src')
-rw-r--r-- src/internal/malloc_impl.h | 3
-rw-r--r-- src/malloc/malloc.c        | 10
-rw-r--r-- src/malloc/memalign.c      | 2
3 files changed, 8 insertions, 7 deletions
diff --git a/src/internal/malloc_impl.h b/src/internal/malloc_impl.h
index 1ea0407c..4c4a4b46 100644
--- a/src/internal/malloc_impl.h
+++ b/src/internal/malloc_impl.h
@@ -36,4 +36,7 @@ struct bin {
#define IS_MMAPPED(c) !((c)->csize & (C_INUSE))
+__attribute__((__visibility__("hidden")))
+void __bin_chunk(struct chunk *);
+
#endif
diff --git a/src/malloc/malloc.c b/src/malloc/malloc.c
index c8bc9227..239ab9c6 100644
--- a/src/malloc/malloc.c
+++ b/src/malloc/malloc.c
@@ -263,8 +263,6 @@ static int pretrim(struct chunk *self, size_t n, int i, int j)
return 1;
}
-static void bin_chunk(struct chunk *);
-
static void trim(struct chunk *self, size_t n)
{
size_t n1 = CHUNK_SIZE(self);
@@ -280,7 +278,7 @@ static void trim(struct chunk *self, size_t n)
next->psize = n1-n | C_INUSE;
self->csize = n | C_INUSE;
- bin_chunk(split);
+ __bin_chunk(split);
}
void *malloc(size_t n)
@@ -436,7 +434,7 @@ copy_free_ret:
return new;
}
-static void bin_chunk(struct chunk *self)
+void __bin_chunk(struct chunk *self)
{
struct chunk *next = NEXT_CHUNK(self);
size_t final_size, new_size, size;
@@ -524,7 +522,7 @@ void free(void *p)
if (IS_MMAPPED(self))
unmap_chunk(self);
else
- bin_chunk(self);
+ __bin_chunk(self);
}
void __malloc_donate(char *start, char *end)
@@ -543,5 +541,5 @@ void __malloc_donate(char *start, char *end)
struct chunk *c = MEM_TO_CHUNK(start), *n = MEM_TO_CHUNK(end);
c->psize = n->csize = C_INUSE;
c->csize = n->psize = C_INUSE | (end-start);
- bin_chunk(c);
+ __bin_chunk(c);
}
diff --git a/src/malloc/memalign.c b/src/malloc/memalign.c
index 9c420229..7246a99b 100644
--- a/src/malloc/memalign.c
+++ b/src/malloc/memalign.c
@@ -48,7 +48,7 @@ void *__memalign(size_t align, size_t len)
n->psize = c->csize = C_INUSE | (new-mem);
n->csize = t->psize -= new-mem;
- free(mem);
+ __bin_chunk(c);
return new;
}