Directory: | cvmfs/ |
---|---|
File: | cvmfs/malloc_arena.cc |
Date: | 2025-06-22 02:36:02 |
Exec | Total | Coverage | |
---|---|---|---|
Lines: | 122 | 122 | 100.0% |
Branches: | 39 | 54 | 72.2% |
Line | Branch | Exec | Source |
---|---|---|---|
1 | /** | ||
2 | * This file is part of the CernVM File System. | ||
3 | */ | ||
4 | |||
5 | #define __STDC_FORMAT_MACROS | ||
6 | |||
7 | |||
8 | #include "malloc_arena.h" | ||
9 | |||
10 | #include <cassert> | ||
11 | #include <cstddef> | ||
12 | #include <cstring> | ||
13 | #include <new> | ||
14 | |||
15 | #include "util/smalloc.h" | ||
16 | |||
17 | using namespace std; // NOLINT | ||
18 | |||
19 | |||
20 | /** | ||
21 | * Walks through the free list starting at rover_ and looks for the first block | ||
22 | * larger than block_size. Returns NULL if no such block exists. | ||
23 | */ | ||
24 | 127246100 | MallocArena::AvailBlockCtl *MallocArena::FindAvailBlock( | |
25 | const int32_t block_size) { | ||
26 | 127246100 | bool wrapped = false; | |
27 | // Generally: p = LINK(q) | ||
28 | 127246100 | AvailBlockCtl *q = rover_; | |
29 | AvailBlockCtl *p; | ||
30 | do { | ||
31 | 48139628541 | p = q->GetNextPtr(arena_); | |
32 |
2/2✓ Branch 0 taken 92949805 times.
✓ Branch 1 taken 48046678736 times.
|
48139628541 | if (p->size >= block_size) { |
33 | 92949805 | rover_ = p->GetNextPtr(arena_); | |
34 | 92949805 | return p; | |
35 | } | ||
36 |
2/2✓ Branch 0 taken 72716350 times.
✓ Branch 1 taken 47973962386 times.
|
48046678736 | if (p == head_avail_) { |
37 |
2/2✓ Branch 0 taken 34296295 times.
✓ Branch 1 taken 38420055 times.
|
72716350 | if (wrapped) |
38 | 34296295 | return NULL; | |
39 | 38420055 | wrapped = true; | |
40 | } | ||
41 | 48012382441 | q = p; | |
42 | } while (true); | ||
43 | } | ||
44 | |||
45 | |||
/**
 * Creates a free block at the place of the reserved block ptr points into.
 * The free block might need to be merged with adjacent lower and/or upper
 * blocks. In these cases, the corresponding blocks are removed from the list
 * of available blocks. Every allocated block has a predecessor and a
 * successor in the arena. The newly created free block is added to the end of
 * the list of available blocks.
 *
 * @param ptr  pointer previously returned by Malloc(); must lie in this arena
 */
void MallocArena::Free(void *ptr) {
  assert(Contains(ptr));

  no_reserved_--;

  // The reserved block's control word sits immediately before the payload.
  ReservedBlockCtl *block_ctl = reinterpret_cast<ReservedBlockCtl *>(
      reinterpret_cast<char *>(ptr) - sizeof(ReservedBlockCtl));
  // The byte just before the control word is the type tag of the block that
  // precedes this one in the arena (see AvailBlockTag / kTagReserved).
  const char prior_tag = *(reinterpret_cast<char *>(block_ctl) - 1);
  assert((prior_tag == kTagAvail) || (prior_tag == kTagReserved));

  int32_t new_size = block_ctl->size();
  assert(new_size > 0);
  // Reuse the reserved block's storage as the new free block's control area.
  AvailBlockCtl *new_avail = reinterpret_cast<AvailBlockCtl *>(block_ctl);

  if (prior_tag == kTagAvail) {
    // Merge with block before and remove the block from the list
    const int32_t prior_size =
        reinterpret_cast<AvailBlockTag *>(reinterpret_cast<char *>(block_ctl) -
                                          sizeof(AvailBlockTag))
            ->size;
    assert(prior_size > 0);
    new_size += prior_size;
    new_avail = reinterpret_cast<AvailBlockCtl *>(
        reinterpret_cast<char *>(block_ctl) - prior_size);
    // new_avail points now to the prior block
    UnlinkAvailBlock(new_avail);
    // The rover must never point at an unlinked block; reset it to the head.
    if (rover_ == new_avail)
      rover_ = head_avail_;
  }

  // A non-negative leading int32 marks the successor as a free block
  // (reserved blocks / the end tag carry a negative marker instead).
  const int32_t succ_size = *reinterpret_cast<int32_t *>(
      reinterpret_cast<char *>(new_avail) + new_size);
  if (succ_size >= 0) {
    // Merge with succeeding block and remove the block from the list
    AvailBlockCtl *succ_avail = reinterpret_cast<AvailBlockCtl *>(
        reinterpret_cast<char *>(new_avail) + new_size);
    UnlinkAvailBlock(succ_avail);
    new_size += succ_size;
    // Again, keep the rover away from the now-unlinked block.
    if (rover_ == succ_avail)
      rover_ = head_avail_;
  }

  // Set new free block's boundaries
  new_avail->size = new_size;
  new (AvailBlockTag::GetTagLocation(new_avail)) AvailBlockTag(new_size);

  EnqueueAvailBlock(new_avail);
}
102 | |||
103 | |||
104 | /** | ||
105 | * Inserts an available block at the end of the free list. | ||
106 | */ | ||
107 | 91299216 | void MallocArena::EnqueueAvailBlock(AvailBlockCtl *block) { | |
108 | 91299216 | AvailBlockCtl *next = head_avail_; | |
109 | 91299216 | AvailBlockCtl *prev = head_avail_->GetPrevPtr(arena_); | |
110 | 91299216 | next->link_prev = block->ConvertToLink(arena_); | |
111 | 91299216 | prev->link_next = block->ConvertToLink(arena_); | |
112 | 91299216 | block->link_next = head_avail_->ConvertToLink(arena_); | |
113 | 91299216 | block->link_prev = prev->ConvertToLink(arena_); | |
114 | 91299216 | } | |
115 | |||
116 | |||
117 | /** | ||
118 | * The ptr points to the result of Malloc(). The size of the area is stored | ||
119 | * a few bytes before ptr. | ||
120 | */ | ||
121 | 179288315 | uint32_t MallocArena::GetSize(void *ptr) const { | |
122 |
1/2✗ Branch 1 not taken.
✓ Branch 2 taken 179288315 times.
|
179288315 | assert(Contains(ptr)); |
123 | |||
124 | 179288315 | ReservedBlockCtl *block_ctl = reinterpret_cast<ReservedBlockCtl *>( | |
125 | reinterpret_cast<char *>(ptr) - sizeof(ReservedBlockCtl)); | ||
126 | 179288315 | const int32_t size = block_ctl->size(); | |
127 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 179288315 times.
|
179288315 | assert(size > 1); |
128 | 179288315 | return size - sizeof(ReservedBlockCtl) - 1; | |
129 | } | ||
130 | |||
131 | |||
132 | /** | ||
133 | * Walks the list of available blocks starting from rover and allocates the | ||
134 | * first available spot that's large enough. Puts the reserved block at the end | ||
135 | * of the available one and, if necessary, removes the available one from the | ||
136 | * list of free blocks. | ||
137 | */ | ||
138 | 127246100 | void *MallocArena::Malloc(const uint32_t size) { | |
139 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 127246100 times.
|
127246100 | assert(size > 0); |
140 | |||
141 | // Control word first, block type tag last | ||
142 | 127246100 | int32_t total_size = sizeof(ReservedBlockCtl) + size + 1; | |
143 | 127246100 | total_size = RoundUp8(total_size); | |
144 |
2/2✓ Branch 0 taken 683014 times.
✓ Branch 1 taken 126563086 times.
|
127246100 | if (total_size < kMinBlockSize) |
145 | 683014 | total_size = kMinBlockSize; | |
146 | |||
147 | 127246100 | AvailBlockCtl *p = FindAvailBlock(total_size); | |
148 |
2/2✓ Branch 0 taken 34296295 times.
✓ Branch 1 taken 92949805 times.
|
127246100 | if (p == NULL) |
149 | 34296295 | return NULL; | |
150 | |||
151 | 92949805 | no_reserved_++; | |
152 | 92949805 | return ReserveBlock(p, total_size); | |
153 | } | ||
154 | |||
155 | |||
/**
 * The arena starts with a pointer to this followed by the AvailBlockCtl of
 * head_avail_, followed by a reserved tag to prevent it from being merged,
 * followed by a free block spanning the arena until the end tag. The end tag
 * is a single negative int, which mimics another reserved block.
 *
 * @param arena_size  total arena size in bytes; must be a multiple of 2MB
 *                    and at most 512MB
 */
MallocArena::MallocArena(unsigned arena_size)
    : arena_(reinterpret_cast<char *>(sxmmap_align(arena_size)))
    , head_avail_(reinterpret_cast<AvailBlockCtl *>(arena_ + sizeof(uint64_t)))
    , rover_(head_avail_)
    , no_reserved_(0)
    , arena_size_(arena_size) {
  assert(arena_size_ > 0);
  assert((arena_size_ % (2 * 1024 * 1024)) == 0);  // Multiple of 2MB
  assert(arena_size_ <= (512 * 1024 * 1024));      // <= 512MB

  // 7 padding bytes; together with the 1 tag byte written below they keep
  // the initial free block 8-byte aligned (checked by the assert further
  // down).
  const unsigned char padding = 7;
  // Size of the initial free block: everything minus arena boundaries
  const int32_t usable_size =
      arena_size_ - (sizeof(uint64_t) + sizeof(AvailBlockCtl) + padding + 1 +
                     sizeof(int32_t));
  assert((usable_size % 8) == 0);

  // First 8 bytes of arena: this pointer (occupies only 4 bytes on 32bit
  // architectures, in which case the second 4 bytes are unused.)
  *reinterpret_cast<MallocArena **>(arena_) = this;

  // The initial large free block
  AvailBlockCtl *free_block = new (arena_ + sizeof(uint64_t)
                                   + sizeof(AvailBlockCtl) + padding + 1)
      AvailBlockCtl();
  free_block->size = usable_size;
  // The circular free list initially contains only head_avail_ and the big
  // free block, linking to each other in both directions.
  free_block->link_next = free_block->link_prev = head_avail_->ConvertToLink(
      arena_);
  new (AvailBlockTag::GetTagLocation(free_block)) AvailBlockTag(usable_size);

  // Sentinel: size 0 means it can never satisfy an allocation request.
  head_avail_->size = 0;
  head_avail_->link_next = head_avail_->link_prev = free_block->ConvertToLink(
      arena_);

  // Prevent succeeding blocks from merging
  *(reinterpret_cast<char *>(free_block) - 1) = kTagReserved;
  // Final tag: reserved block marker
  *reinterpret_cast<int32_t *>(arena_ + arena_size_ - sizeof(int32_t)) = -1;
}
201 | |||
202 | |||
203 | /** | ||
204 | * Initializes the arena with repeated copies of the given pattern. Used for | ||
205 | * testing. | ||
206 | */ | ||
207 | 48 | MallocArena *MallocArena::CreateInitialized(unsigned arena_size, | |
208 | unsigned char pattern) { | ||
209 | 48 | MallocArena *result = new MallocArena(arena_size); | |
210 | // At this point, there is one big free block linked to by head_avail_ | ||
211 | 48 | AvailBlockCtl *free_block = result->head_avail_->GetNextPtr(result->arena_); | |
212 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 48 times.
|
48 | assert(free_block != result->head_avail_); |
213 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 48 times.
|
48 | assert(free_block->size > 0); |
214 | // Strip control information at both ends of the block | ||
215 | 48 | const int usable_size = | |
216 | 48 | free_block->size - (sizeof(AvailBlockCtl) + sizeof(AvailBlockTag)); | |
217 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 48 times.
|
48 | assert(usable_size > 0); |
218 | 48 | memset(free_block + 1, pattern, usable_size); | |
219 | 48 | return result; | |
220 | } | ||
221 | |||
222 | |||
223 | 5368 | MallocArena::~MallocArena() { sxunmap(arena_, arena_size_); } | |
224 | |||
225 | |||
226 | /** | ||
227 | * Given the free block "block", cuts out a new reserved block of size | ||
228 | * block_size at the end of the free block. Returns a pointer usable by the | ||
229 | * application. | ||
230 | */ | ||
231 | 92949805 | void *MallocArena::ReserveBlock(AvailBlockCtl *block, int32_t block_size) { | |
232 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 92949805 times.
|
92949805 | assert(block->size >= block_size); |
233 | |||
234 | 92949805 | int32_t remaining_size = block->size - block_size; | |
235 | // Avoid creation of very small blocks | ||
236 |
2/2✓ Branch 0 taken 4177901 times.
✓ Branch 1 taken 88771904 times.
|
92949805 | if (remaining_size < kMinBlockSize) { |
237 | 4177901 | block_size += remaining_size; | |
238 | 4177901 | remaining_size = 0; | |
239 | } | ||
240 | |||
241 | // Update the list of available blocks | ||
242 |
2/2✓ Branch 0 taken 4177901 times.
✓ Branch 1 taken 88771904 times.
|
92949805 | if (remaining_size == 0) { |
243 | // Remove free block p from the list of available blocks | ||
244 | 4177901 | UnlinkAvailBlock(block); | |
245 | } else { | ||
246 | 88771904 | block->ShrinkTo(remaining_size); | |
247 | } | ||
248 | |||
249 | // Place the new allocation, which also sets the block type tag at the end | ||
250 | 92949805 | char *new_block = reinterpret_cast<char *>(block) + remaining_size; | |
251 | 92949805 | new (new_block) ReservedBlockCtl(block_size); | |
252 | 92949805 | return new_block + sizeof(ReservedBlockCtl); | |
253 | } | ||
254 | |||
255 | |||
256 | /** | ||
257 | * Removes the given block from the doubly linked free block list. This happens | ||
258 | * when two adjacent free blocks are created in Free() and then merged. Or if | ||
259 | * a block gets fully used in Malloc(). | ||
260 | */ | ||
261 | 90832301 | void MallocArena::UnlinkAvailBlock(AvailBlockCtl *block) { | |
262 | 90832301 | AvailBlockCtl *next = block->GetNextPtr(arena_); | |
263 | 90832301 | AvailBlockCtl *prev = block->GetPrevPtr(arena_); | |
264 | 90832301 | prev->link_next = block->link_next; | |
265 | 90832301 | next->link_prev = block->link_prev; | |
266 | 90832301 | } | |
267 |