GCC Code Coverage Report


Directory: cvmfs/
File: cvmfs/malloc_heap.cc
Date: 2025-12-21 02:39:23
            Exec  Total  Coverage
Lines:        69     69    100.0%
Branches:     22     32     68.8%

Line  Branch      Exec  Source
   1                    /**
   2                     * This file is part of the CernVM File System.
   3                     */
   4
   5
   6                    #include "malloc_heap.h"
   7
   8                    #include <cassert>
   9                    #include <cstring>
  10                    #include <new>
  11
  12                    #include "util/smalloc.h"
  13
  14                    using namespace std; // NOLINT
  15
  16           4463586  void *MallocHeap::Allocate(uint64_t size, void *header, unsigned header_size) {
  17  1/2      4463586    assert(size > 0);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 4463586 times.
  18  1/2      4463586    assert(header_size <= size);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 4463586 times.
  19           4463586    const uint64_t rounded_size = RoundUp8(size);
  20           4463586    const int64_t real_size = rounded_size + sizeof(Tag);
  21  2/2      4463586    if (gauge_ + real_size > capacity_)
        ✓ Branch 0 taken 171 times.
        ✓ Branch 1 taken 4463415 times.
  22               171      return NULL;
  23
  24           4463415    unsigned char *new_block = heap_ + gauge_;
  25           4463415    new (new_block) Tag(rounded_size);
  26           4463415    new_block += sizeof(Tag);
  27           4463415    memcpy(new_block, header, header_size);
  28           4463415    gauge_ += real_size;
  29           4463415    stored_ += rounded_size;
  30           4463415    num_blocks_++;
  31           4463415    return new_block;
  32                    }
  33
  34
  35               257  void MallocHeap::Compact() {
  36  2/2           257    if (gauge_ == 0)
        ✓ Branch 0 taken 43 times.
        ✓ Branch 1 taken 214 times.
  37                43      return;
  38
  39                      // Not really a tag, just the top memory address
  40               214    Tag *heap_top = reinterpret_cast<Tag *>(heap_ + gauge_);
  41               214    Tag *current_tag = reinterpret_cast<Tag *>(heap_);
  42               214    Tag *next_tag = current_tag->JumpToNext();
  43                      // Move a sliding window of two blocks over the heap and compact where
  44                      // possible
  45  2/2      6685697    while (next_tag < heap_top) {
        ✓ Branch 0 taken 6685483 times.
        ✓ Branch 1 taken 214 times.
  46  2/2      6685483      if (current_tag->IsFree()) {
        ✓ Branch 1 taken 4418957 times.
        ✓ Branch 2 taken 2266526 times.
  47  2/2      4418957        if (next_tag->IsFree()) {
        ✓ Branch 1 taken 2195430 times.
        ✓ Branch 2 taken 2223527 times.
  48                              // Adjacent free blocks, merge and try again
  49           2195430          current_tag->size -= sizeof(Tag) + next_tag->GetSize();
  50           2195430          next_tag = next_tag->JumpToNext();
  51                          } else {
  52                            // Free block followed by a reserved block, move memory and create a
  53                            // new free tag at the end of the moved block
  54           2223527          const int64_t free_space = current_tag->size;
  55           2223527          current_tag->size = next_tag->size;
  56           2223527          memmove(current_tag->GetBlock(), next_tag->GetBlock(),
  57                                    next_tag->GetSize());
  58  1/2      2223527          (*callback_ptr_)(BlockPtr(current_tag->GetBlock()));
        ✓ Branch 3 taken 2223527 times.
        ✗ Branch 4 not taken.
  59           2223527          next_tag = current_tag->JumpToNext();
  60           2223527          next_tag->size = free_space;
  61                          }
  62                        } else {
  63                          // Current block allocated, move on
  64           2266526        current_tag = next_tag;
  65           2266526        next_tag = next_tag->JumpToNext();
  66                        }
  67                      }
  68
  69               214    gauge_ = (reinterpret_cast<unsigned char *>(current_tag) - heap_);
  70  2/2           214    if (!current_tag->IsFree())
        ✓ Branch 1 taken 43 times.
        ✓ Branch 2 taken 171 times.
  71                43      gauge_ += sizeof(Tag) + current_tag->GetSize();
  72                    }
  73
  74
  75                86  void *MallocHeap::Expand(void *block, uint64_t new_size) {
  76                86    const uint64_t old_size = GetSize(block);
  77  1/2            86    assert(old_size <= new_size);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 86 times.
  78                86    void *new_block = Allocate(new_size, block, old_size);
  79  1/2            86    if (new_block != NULL)
        ✓ Branch 0 taken 86 times.
        ✗ Branch 1 not taken.
  80                86      MarkFree(block);
  81                86    return new_block;
  82                    }
  83
  84
  85             88322  bool MallocHeap::HasSpaceFor(uint64_t nbytes) {
  86             88322    return RoundUp8(gauge_ + nbytes + sizeof(Tag)) <= capacity_;
  87                    }
  88
  89
  90           2195687  void MallocHeap::MarkFree(void *block) {
  91           2195687    Tag *tag = reinterpret_cast<Tag *>(block) - 1;
  92  1/2      2195687    assert(tag->size > 0);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 2195687 times.
  93           2195687    tag->size = -(tag->size);
  94           2195687    stored_ -= tag->GetSize();
  95           2195687    num_blocks_--;
  96                      // TODO(jblomer): if MarkFree() takes place at the top of the heap, one could
  97                      // move back the gauge_ pointer. If this is an optimization or unnecessary
  98                      // extra work depends on how the MallocHeap is used.
  99           2195687  }
 100
 101
 102           2091602  uint64_t MallocHeap::GetSize(void *block) {
 103           2091602    Tag *tag = reinterpret_cast<Tag *>(block) - 1;
 104  1/2      2091602    assert(tag->size > 0);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 2091602 times.
 105           2091602    return tag->size;
 106                    }
 107
 108
 109               912  MallocHeap::MallocHeap(uint64_t capacity, CallbackPtr callback_ptr)
 110               912      : callback_ptr_(callback_ptr)
 111               912      , capacity_(capacity)
 112               912      , gauge_(0)
 113               912      , stored_(0)
 114               912      , num_blocks_(0) {
 115  1/2           912    assert(capacity_ > kMinCapacity);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 912 times.
 116                      // Ensure 8-byte alignment
 117  1/2           912    assert((capacity_ % 8) == 0);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 912 times.
 118               912    heap_ = reinterpret_cast<unsigned char *>(sxmmap(capacity));
 119  1/2           912    assert(uintptr_t(heap_) % 8 == 0);
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 912 times.
 120               912  }
 121
 122
 123               911  MallocHeap::~MallocHeap() { sxunmap(heap_, capacity_); }
 124
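
The listing shows that Allocate advances the fill level gauge_ by RoundUp8(size) + sizeof(Tag) per block and returns NULL once gauge_ + real_size exceeds capacity_ (source line 21, the branch taken 171 times). The standalone sketch below models only that bookkeeping arithmetic to show when the NULL path is reached; the local RoundUp8 helper, the kTagSize constant, and the 1024-byte capacity are illustrative assumptions, not the real MallocHeap API.

// Standalone model of MallocHeap's fill-level arithmetic (illustration only).
#include <cstdint>
#include <cstdio>

// Local stand-in for the 8-byte rounding applied in MallocHeap::Allocate.
static uint64_t RoundUp8(uint64_t size) {
  return (size + 7) & ~static_cast<uint64_t>(7);
}

int main() {
  const uint64_t kTagSize = 8;     // assumed sizeof(Tag): one signed size field
  const uint64_t capacity = 1024;  // hypothetical heap capacity, multiple of 8
  uint64_t gauge = 0;              // fill level, mirrors MallocHeap::gauge_
  unsigned num_blocks = 0;

  // Emulate repeated Allocate(100, ...) calls until the capacity check
  // (gauge_ + real_size > capacity_) would make Allocate return NULL.
  const uint64_t real_size = RoundUp8(100) + kTagSize;  // 104 + 8 = 112 bytes
  while (gauge + real_size <= capacity) {
    gauge += real_size;
    ++num_blocks;
  }

  // Nine 112-byte blocks fit into 1024 bytes; the tenth request would fail.
  std::printf("blocks=%u gauge=%llu free=%llu\n", num_blocks,
              static_cast<unsigned long long>(gauge),
              static_cast<unsigned long long>(capacity - gauge));
  return 0;
}

The same arithmetic explains most of the untaken branches in the report: the assert branches (source lines 17, 18, 77, 92, 104, 115, 117, 119) and the NULL return inside Expand (source line 79) never fire as long as callers respect the capacity and size invariants shown above.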