@@ -22,7 +22,11 @@ libc_hidden_proto(sbrk)
 #include "heap.h"
 
 static void
+#ifdef HEAP_USE_LOCKING
 free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
+#else
+free_to_heap (void *mem, struct heap_free_area *heap)
+#endif
 {
   size_t size;
   struct heap_free_area *fa;
@@ -39,7 +43,7 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
   size = MALLOC_SIZE (mem);
   mem = MALLOC_BASE (mem);
 
-  __pthread_mutex_lock (heap_lock);
+  __heap_do_lock (heap_lock);
 
   fa = __heap_free (heap, mem, size);
@@ -48,7 +52,7 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
      unmapped. */
   if (HEAP_FREE_AREA_SIZE (fa) < MALLOC_UNMAP_THRESHOLD)
-    __pthread_mutex_unlock (heap_lock);
+    __heap_do_unlock (heap_lock);
   else
     {
@@ -81,7 +85,7 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
           MALLOC_DEBUG (-1, "not unmapping: 0x%lx - 0x%lx (%ld bytes)",
                         start, end, end - start);
           __malloc_unlock_sbrk ();
-          __pthread_mutex_unlock (heap_lock);
+          __heap_do_unlock (heap_lock);
           return;
         }
 #endif
@@ -108,7 +112,7 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
 #ifdef MALLOC_USE_SBRK
 
-      __pthread_mutex_unlock (heap_lock);
+      __heap_do_unlock (heap_lock);
 
       sbrk (start - end);
@@ -172,15 +176,20 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
                 descriptor, because we might be unmapping from the mmb
                 heap. */
-              __pthread_mutex_unlock (heap_lock);
+              __heap_do_unlock (heap_lock);
+#ifdef HEAP_USE_LOCKING
               free_to_heap (mmb, &__malloc_mmb_heap, &__malloc_mmb_heap_lock);
+#else
+
+              free_to_heap (mmb, &__malloc_mmb_heap);
+#endif
 
               munmap ((void *)mmb_start, mmb_end - mmb_start);
 
-              __pthread_mutex_lock (heap_lock);
+              __heap_do_lock (heap_lock);
 
 # ifdef __UCLIBC_HAS_THREADS__
@@ -213,7 +222,7 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
             }
 
-          __pthread_mutex_unlock (heap_lock);
+          __heap_do_unlock (heap_lock);
 
           MALLOC_MMB_DEBUG_INDENT (-1);
@@ -243,7 +252,7 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
         }
 
-      __pthread_mutex_unlock (heap_lock);
+      __heap_do_unlock (heap_lock);
 
       if (unmap_end > unmap_start)
@@ -260,5 +269,9 @@ free_to_heap (void *mem, struct heap_free_area *heap, malloc_mutex_t *heap_lock)
 void
 free (void *mem)
 {
+#ifdef HEAP_USE_LOCKING
   free_to_heap (mem, __malloc_heap, &__malloc_heap_lock);
+#else
+  free_to_heap (mem, __malloc_heap);
+#endif
 }
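
Note: the diff replaces the direct __pthread_mutex_lock/__pthread_mutex_unlock calls with __heap_do_lock/__heap_do_unlock, which it assumes are provided by heap.h; their definitions are not part of this patch. The fragment below is a minimal sketch, under that assumption, of how such wrappers could be defined so the lock calls compile away entirely when HEAP_USE_LOCKING is not set. The mapping onto __pthread_mutex_lock/__pthread_mutex_unlock is inferred from the calls this patch removes, not taken from the real heap.h.

/* Hypothetical heap.h fragment -- NOT part of this patch.  malloc_mutex_t
   and __pthread_mutex_lock/__pthread_mutex_unlock are assumed to come from
   uClibc's internal threading glue, as in the code above.  */
#ifdef HEAP_USE_LOCKING
/* Threaded build: the wrappers forward to the internal pthread mutex ops.  */
# define __heap_do_lock(heap_lock)    __pthread_mutex_lock (heap_lock)
# define __heap_do_unlock(heap_lock)  __pthread_mutex_unlock (heap_lock)
#else
/* Single-threaded build: the wrappers expand to nothing, and free_to_heap()
   is compiled with the two-argument signature used in the #else branches.  */
# define __heap_do_lock(heap_lock)    ((void) 0)
# define __heap_do_unlock(heap_lock)  ((void) 0)
#endif

With wrappers along these lines, only the conditional free_to_heap() signature and the #ifdef'd call sites in free() and the mmb path need to change, which is what the hunks above do.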