memalign.c

/*
  This is a version (aka dlmalloc) of malloc/free/realloc written by
  Doug Lea and released to the public domain.  Use, modify, and
  redistribute this code without permission or acknowledgement in any
  way you wish.  Send questions, comments, complaints, performance
  data, etc to dl@cs.oswego.edu

  VERSION 2.7.2 Sat Aug 17 09:07:30 2002  Doug Lea  (dl at gee)

  Note: There may be an updated version of this malloc obtainable at
        ftp://gee.cs.oswego.edu/pub/misc/malloc.c
  Check before installing!

  Hacked up for uClibc by Erik Andersen <andersen@codepoet.org>
*/

#include <features.h>
#include <stddef.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include "malloc.h"

/* ------------------------------ memalign ------------------------------ */

void* memalign(size_t alignment, size_t bytes)
{
    size_t nb;                    /* padded request size */
    char* m;                      /* memory returned by malloc call */
    mchunkptr p;                  /* corresponding chunk */
    char* _brk;                   /* alignment point within p */
    mchunkptr newp;               /* chunk to return */
    size_t newsize;               /* its size */
    size_t leadsize;              /* leading space before alignment point */
    mchunkptr remainder;          /* spare room at end to split off */
    unsigned long remainder_size; /* its size */
    size_t size;
    void *retval;

    /* If need less alignment than we give anyway, just relay to malloc */
    if (alignment <= MALLOC_ALIGNMENT) return malloc(bytes);

    /* Otherwise, ensure that it is at least a minimum chunk size */
    if (alignment < MINSIZE) alignment = MINSIZE;

    /* Make sure alignment is power of 2 (in case MINSIZE is not). */
    if ((alignment & (alignment - 1)) != 0) {
        size_t a = MALLOC_ALIGNMENT * 2;
        while ((unsigned long)a < (unsigned long)alignment) a <<= 1;
        alignment = a;
    }
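    /* Illustrative: assuming MALLOC_ALIGNMENT is 8, a requested alignment of
     * 24 is rounded up here to 32, the first power of two >= 24 reached by
     * doubling from MALLOC_ALIGNMENT * 2 (16 -> 32). */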
    checked_request2size(bytes, nb);

    __MALLOC_LOCK;
    /* Strategy: find a spot within that chunk that meets the alignment
     * request, and then possibly free the leading and trailing space. */

    /* Call malloc with worst case padding to hit alignment. */
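    /* The extra alignment + MINSIZE bytes guarantee that an aligned chunk
     * boundary exists inside the block even after skipping forward when the
     * leading gap would be smaller than MINSIZE, so the aligned piece still
     * holds at least nb bytes. */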
    m = (char*)(malloc(nb + alignment + MINSIZE));

    if (m == 0) {
        retval = 0; /* propagate failure */
        goto DONE;
    }

    p = mem2chunk(m);

    if ((((unsigned long)(m)) % alignment) != 0) { /* misaligned */

        /*
          Find an aligned spot inside chunk. Since we need to give back
          leading space in a chunk of at least MINSIZE, if the first
          calculation places us at a spot with less than MINSIZE leader,
          we can move to the next aligned spot -- we've allocated enough
          total room so that this is always possible.
        */
        _brk = (char*)mem2chunk((unsigned long)(((unsigned long)(m + alignment - 1)) &
                -((signed long) alignment)));
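        /* Illustrative: with alignment == 0x100, a payload pointer m == 0x4321
         * rounds up to 0x4400 ((m + 0xff) & ~0xff); mem2chunk then steps back
         * from that payload address to its chunk header. */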
        if ((unsigned long)(_brk - (char*)(p)) < MINSIZE)
            _brk += alignment;

        newp = (mchunkptr)_brk;
        leadsize = _brk - (char*)(p);
        newsize = chunksize(p) - leadsize;

        /* For mmapped chunks, just adjust offset */
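        /* (prev_size of an mmapped chunk records how far the chunk sits past
         * the start of its mapping, so bumping it by leadsize lets free()
         * still unmap the whole original region.) */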
        if (chunk_is_mmapped(p)) {
            newp->prev_size = p->prev_size + leadsize;
            set_head(newp, newsize|IS_MMAPPED);
            retval = chunk2mem(newp);
            goto DONE;
        }

        /* Otherwise, give back leader, use the rest */
        set_head(newp, newsize | PREV_INUSE);
        set_inuse_bit_at_offset(newp, newsize);
        set_head_size(p, leadsize);
        free(chunk2mem(p));
        p = newp;

        assert(newsize >= nb &&
               (((unsigned long)(chunk2mem(p))) % alignment) == 0);
    }

    /* Also give back spare room at the end */
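    /* (only when the spare is at least MINSIZE, so it can stand alone as a
     * free chunk) */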
    if (!chunk_is_mmapped(p)) {
        size = chunksize(p);
        if ((unsigned long)(size) > (unsigned long)(nb + MINSIZE)) {
            remainder_size = size - nb;
            remainder = chunk_at_offset(p, nb);
            set_head(remainder, remainder_size | PREV_INUSE);
            set_head_size(p, nb);
            free(chunk2mem(remainder));
        }
    }

    check_inuse_chunk(p);
    retval = chunk2mem(p);

DONE:
    __MALLOC_UNLOCK;
    return retval;
}
weak_alias(memalign, aligned_alloc)
libc_hidden_def(memalign)

/* glibc compatibility */
weak_alias(memalign, __libc_memalign)
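/* Usage sketch (illustrative): request a 64-byte-aligned buffer and release
 * it with free() when done, e.g.
 *
 *     void *buf = memalign(64, 1000);
 *     if (buf != NULL) {        // ((unsigned long)buf % 64) == 0
 *         free(buf);
 *     }
 */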