@@ -39,7 +39,8 @@ void * custom_malloc(heap_t* heap, size_t size) {
     found = 0;
 
     // first fit from free list
-    for(current_block = heap->free_list_head; current_block != NULL; current_block = get_next(current_block)) {
+    for(current_block = heap->free_list_head; current_block != NULL;
+            current_block = get_next(current_block)) {
         if(get_size(current_block) >= size) {
             ptr = current_block;
             heap->used_blocks_head = ptr;
@@ -50,6 +51,14 @@ void * custom_malloc(heap_t* heap, size_t size) {
             }
             set_requested_size(ptr, size);
             set_next(ptr, heap->used_blocks_head);
+
+            // Begin of Stats
+
+            heap->dmm_stats.live_objects += 1;
+            heap->dmm_stats.num_malloc += 1;
+
+            // End of Stats
+
             posix_unlock(heap);
             return ptr;
         }
@@ -58,10 +67,19 @@ void * custom_malloc(heap_t* heap, size_t size) {
 
     if(!found) {
         ptr = sys_alloc(heap, size);
+        heap->dmm_stats.mem_allocated += req_padding(size);
+        heap->dmm_stats.mem_requested += size;
     }
 
     }
 
+    // Begin of Stats
+
+    heap->dmm_stats.live_objects += 1;
+    heap->dmm_stats.num_malloc += 1;
+
+    // End of Stats
+
     posix_unlock(heap);
     return ptr;
 }
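
For context, the dmm_stats counters touched by this patch can be pictured as a small per-heap bookkeeping record along the lines of the sketch below. This is an assumed layout inferred from the increments in the diff: the field names are taken from the code above, while the struct name, typedef, and field types are illustrative guesses rather than the project's actual definition. Since every increment happens before posix_unlock(heap), the counters appear to be updated while the heap lock is still held, so they need no synchronization of their own.

    /* Sketch only: assumed grouping of the statistics fields used in the
     * patch. Field names match the diff; the struct/typedef names and the
     * size_t types are assumptions for illustration. */
    #include <stddef.h>

    typedef struct dmm_stats_s {
        size_t live_objects;   /* objects currently allocated; +1 here, presumably -1 on free */
        size_t num_malloc;     /* total custom_malloc() calls that handed out a block */
        size_t mem_allocated;  /* bytes actually reserved, padded via req_padding(size) */
        size_t mem_requested;  /* raw byte counts requested by callers */
    } dmm_stats_t;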