custom_malloc.c

#include "custom_malloc.h"
#ifdef HAVE_LOCKS
#include "posix_lock.h"
#endif /* HAVE_LOCKS */
#include "other.h"
#include "sys_alloc.h"
#include "block_header.h"
#include "dmm_adaptor.h"
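
/*
 * custom_malloc() serves an allocation request from a specific heap by
 * trying, in order:
 *   1. the fixed-size free list selected by map_size_to_list(), if any,
 *   2. a first-fit scan of the heap's general free list,
 *   3. sys_alloc(), which requests fresh memory for the heap.
 * Allocation statistics are updated on every call, and state_refresh()
 * is invoked after a certain number of allocations have been served.
 */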
void *custom_malloc(heap_t *heap, size_t size) {
    void *ptr;
    int fixed_list_id, i, found;
    maptable_node_t *current_maptable_node;
    void *current_block, *previous_block;

    ptr = NULL;
    previous_block = NULL;
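
    // Serialize access to this heap's lists and stats when lock support
    // (HAVE_LOCKS) is compiled in.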
#ifdef HAVE_LOCKS
    posix_lock(heap);
#endif /* HAVE_LOCKS */

    fixed_list_id = map_size_to_list(heap, size);

    // If a fixed list is found, do a first fit
    if(fixed_list_id != -1) {
        current_maptable_node = heap->maptable_head;
        // traverse through the maptable node list
        if(fixed_list_id != 0) {
            for(i = 1; i < fixed_list_id; i++) {
                current_maptable_node = current_maptable_node->next;
            }
        }
        if(current_maptable_node->fixed_list_head != NULL) {
            ptr = current_maptable_node->fixed_list_head;
            current_maptable_node->fixed_list_head = get_next(ptr);
            set_requested_size(ptr, size);
            set_next(ptr, heap->used_blocks_head);
            heap->used_blocks_head = ptr;
        }
    }
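
    // No block came from a fixed list (or no fixed list matches this size):
    // fall back to a first-fit search of the general free list.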
    if(ptr == NULL) {
        found = 0;
        // first fit from free list
        for(current_block = heap->free_list_head; current_block != NULL;
                current_block = get_next(current_block)) {
            if(get_size(current_block) >= size) {
                ptr = current_block;
                // Unlink the block from the free list ...
                if(current_block != heap->free_list_head) {
                    set_next(previous_block, get_next(ptr));
                } else {
                    heap->free_list_head = get_next(ptr);
                }
                set_requested_size(ptr, size);
                // ... and push it onto the used-blocks list.
                set_next(ptr, heap->used_blocks_head);
                heap->used_blocks_head = ptr;
                // Begin of Stats
                heap->dmm_stats.live_objects += 1;
                heap->dmm_stats.num_malloc += 1;
                // End of Stats
#ifdef HAVE_LOCKS
                posix_unlock(heap);
#endif /* HAVE_LOCKS */
                return ptr;
            }
            previous_block = current_block;
        }
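
        // The loop above returns as soon as a fitting block is found, so
        // reaching this point means the free list had nothing large enough:
        // request fresh memory through sys_alloc().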
        if(!found) {
            ptr = sys_alloc(heap, size);
            heap->dmm_stats.mem_allocated += req_padding(size);
            heap->dmm_stats.mem_requested += size;
        }
    }

    // Begin of Stats
    heap->dmm_stats.live_objects += 1;
    heap->dmm_stats.num_malloc += 1;
    // End of Stats

    // Refresh the state of the heap allocator if a certain number of
    // mallocs has been served already
    // TODO Define 50 as a constant
    if(heap->dmm_stats.num_malloc % 50 == 0) {
        state_refresh(heap);
    }

#ifdef HAVE_LOCKS
    posix_unlock(heap);
#endif /* HAVE_LOCKS */

    return ptr;
}
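
A minimal usage sketch follows, not taken from the file above. It assumes that heap_t and the custom_malloc() prototype are declared in custom_malloc.h, and that the caller has already created and initialized the heap through the allocator's setup routines, which are not shown in this file. The function name example_request and the 64-byte request size are illustrative only.

#include "custom_malloc.h"

// Sketch only: request a 64-byte block from an already-initialized heap.
// A NULL result means neither a fixed list, nor the general free list,
// nor sys_alloc() could satisfy the request.
void *example_request(heap_t *heap) {
    void *block = custom_malloc(heap, 64);
    return block;
}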