Directory: | cvmfs/ |
---|---|
File: | cvmfs/malloc_arena.cc |
Date: | 2025-04-20 02:34:28 |
Exec | Total | Coverage | |
---|---|---|---|
Lines: | 123 | 123 | 100.0% |
Branches: | 39 | 54 | 72.2% |
Line | Branch | Exec | Source |
---|---|---|---|
1 | /** | ||
2 | * This file is part of the CernVM File System. | ||
3 | */ | ||
4 | |||
5 | #define __STDC_FORMAT_MACROS | ||
6 | |||
7 | |||
8 | #include "malloc_arena.h" | ||
9 | |||
10 | #include <cassert> | ||
11 | #include <cstddef> | ||
12 | #include <cstring> | ||
13 | #include <new> | ||
14 | |||
15 | #include "util/smalloc.h" | ||
16 | |||
17 | using namespace std; // NOLINT | ||
18 | |||
19 | |||
20 | /** | ||
21 | * Walks through the free list starting at rover_ and looks for the first block | ||
22 | * larger than block_size. Returns NULL if no such block exists. | ||
23 | */ | ||
24 | 6997471 | MallocArena::AvailBlockCtl *MallocArena::FindAvailBlock( | |
25 | const int32_t block_size) | ||
26 | { | ||
27 | 6997471 | bool wrapped = false; | |
28 | // Generally: p = LINK(q) | ||
29 | 6997471 | AvailBlockCtl *q = rover_; | |
30 | AvailBlockCtl *p; | ||
31 | do { | ||
32 | 2402953212 | p = q->GetNextPtr(arena_); | |
33 |
2/2✓ Branch 0 taken 5281679 times.
✓ Branch 1 taken 2397671533 times.
|
2402953212 | if (p->size >= block_size) { |
34 | 5281679 | rover_ = p->GetNextPtr(arena_); | |
35 | 5281679 | return p; | |
36 | } | ||
37 |
2/2✓ Branch 0 taken 3705807 times.
✓ Branch 1 taken 2393965726 times.
|
2397671533 | if (p == head_avail_) { |
38 |
2/2✓ Branch 0 taken 1715792 times.
✓ Branch 1 taken 1990015 times.
|
3705807 | if (wrapped) |
39 | 1715792 | return NULL; | |
40 | 1990015 | wrapped = true; | |
41 | } | ||
42 | 2395955741 | q = p; | |
43 | } while (true); | ||
44 | } | ||
45 | |||
46 | |||
/**
 * Creates a free block at the place of the reserved block ptr points into.
 * The free block might need to be merged with adjacent lower and/or upper
 * blocks. In these cases, the corresponding blocks are removed from the list
 * of available blocks. Every allocated block has a predecessor and a
 * successor in the arena. The newly created free block is added to the end of
 * the list of available blocks.
 */
void MallocArena::Free(void *ptr) {
  assert(Contains(ptr));

  no_reserved_--;

  // The control word sits immediately before the payload returned by Malloc()
  ReservedBlockCtl *block_ctl = reinterpret_cast<ReservedBlockCtl *>(
    reinterpret_cast<char *>(ptr) - sizeof(ReservedBlockCtl));
  // Boundary tag of the block physically preceding this one in the arena
  char prior_tag = *(reinterpret_cast<char *>(block_ctl) - 1);
  assert((prior_tag == kTagAvail) || (prior_tag == kTagReserved));

  int32_t new_size = block_ctl->size();
  assert(new_size > 0);
  AvailBlockCtl *new_avail = reinterpret_cast<AvailBlockCtl *>(block_ctl);

  if (prior_tag == kTagAvail) {
    // Merge with block before and remove the block from the list.
    // The prior block's AvailBlockTag (its trailing boundary tag) ends right
    // before our control word, so its size tells us how far back it starts.
    int32_t prior_size = reinterpret_cast<AvailBlockTag *>(
      reinterpret_cast<char *>(block_ctl) - sizeof(AvailBlockTag))->size;
    assert(prior_size > 0);
    new_size += prior_size;
    new_avail = reinterpret_cast<AvailBlockCtl *>(
      reinterpret_cast<char *>(block_ctl) - prior_size);
    // new_avail points now to the prior block
    UnlinkAvailBlock(new_avail);
    // The rover must never point at a block that left the free list
    if (rover_ == new_avail)
      rover_ = head_avail_;
  }

  // Size word of the block physically following the (possibly merged) block;
  // a non-negative value marks a free block (cf. the negative end tag written
  // by the constructor to mimic a reserved block)
  int32_t succ_size = *reinterpret_cast<int32_t *>(
    reinterpret_cast<char *>(new_avail) + new_size);
  if (succ_size >= 0) {
    // Merge with succeeding block and remove the block from the list
    AvailBlockCtl *succ_avail = reinterpret_cast<AvailBlockCtl *>(
      reinterpret_cast<char *>(new_avail) + new_size);
    UnlinkAvailBlock(succ_avail);
    new_size += succ_size;
    // Again, keep the rover on a block that is still linked
    if (rover_ == succ_avail)
      rover_ = head_avail_;
  }

  // Set new free block's boundaries: size word at the front, AvailBlockTag
  // at the back so the next Free() of the successor can merge downwards
  new_avail->size = new_size;
  new (AvailBlockTag::GetTagLocation(new_avail)) AvailBlockTag(new_size);

  EnqueueAvailBlock(new_avail);
}
101 | |||
102 | |||
103 | /** | ||
104 | * Inserts an available block at the end of the free list. | ||
105 | */ | ||
106 | 5227259 | void MallocArena::EnqueueAvailBlock(AvailBlockCtl *block) { | |
107 | 5227259 | AvailBlockCtl *next = head_avail_; | |
108 | 5227259 | AvailBlockCtl *prev = head_avail_->GetPrevPtr(arena_); | |
109 | 5227259 | next->link_prev = block->ConvertToLink(arena_); | |
110 | 5227259 | prev->link_next = block->ConvertToLink(arena_); | |
111 | 5227259 | block->link_next = head_avail_->ConvertToLink(arena_); | |
112 | 5227259 | block->link_prev = prev->ConvertToLink(arena_); | |
113 | 5227259 | } | |
114 | |||
115 | |||
116 | /** | ||
117 | * The ptr points to the result of Malloc(). The size of the area is stored | ||
118 | * a few bytes before ptr. | ||
119 | */ | ||
120 | 8860705 | uint32_t MallocArena::GetSize(void *ptr) const { | |
121 |
1/2✗ Branch 1 not taken.
✓ Branch 2 taken 8860705 times.
|
8860705 | assert(Contains(ptr)); |
122 | |||
123 | 8860705 | ReservedBlockCtl *block_ctl = reinterpret_cast<ReservedBlockCtl *>( | |
124 | reinterpret_cast<char *>(ptr) - sizeof(ReservedBlockCtl)); | ||
125 | 8860705 | int32_t size = block_ctl->size(); | |
126 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 8860705 times.
|
8860705 | assert(size > 1); |
127 | 8860705 | return size - sizeof(ReservedBlockCtl) - 1; | |
128 | } | ||
129 | |||
130 | |||
131 | /** | ||
132 | * Walks the list of available blocks starting from rover and allocates the | ||
133 | * first available spot that's large enough. Puts the reserved block at the end | ||
134 | * of the available one and, if necessary, removes the available one from the | ||
135 | * list of free blocks. | ||
136 | */ | ||
137 | 6997471 | void *MallocArena::Malloc(const uint32_t size) { | |
138 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 6997471 times.
|
6997471 | assert(size > 0); |
139 | |||
140 | // Control word first, block type tag last | ||
141 | 6997471 | int32_t total_size = sizeof(ReservedBlockCtl) + size + 1; | |
142 | 6997471 | total_size = RoundUp8(total_size); | |
143 |
2/2✓ Branch 0 taken 29966 times.
✓ Branch 1 taken 6967505 times.
|
6997471 | if (total_size < kMinBlockSize) |
144 | 29966 | total_size = kMinBlockSize; | |
145 | |||
146 | 6997471 | AvailBlockCtl *p = FindAvailBlock(total_size); | |
147 |
2/2✓ Branch 0 taken 1715792 times.
✓ Branch 1 taken 5281679 times.
|
6997471 | if (p == NULL) |
148 | 1715792 | return NULL; | |
149 | |||
150 | 5281679 | no_reserved_++; | |
151 | 5281679 | return ReserveBlock(p, total_size); | |
152 | } | ||
153 | |||
154 | |||
/**
 * The arena starts with a pointer to this followed by the AvailBlockCtl of
 * head_avail_, followed by a reserved tag to prevent it from being merged,
 * followed by a free block spanning the arena until the end tag.  The end tag
 * is a single negative int, which mimics another reserved block.
 *
 * Arena layout (low to high addresses):
 *   [this ptr (8B)] [head_avail_ sentinel] [7B padding] [kTagReserved]
 *   [initial free block .....................] [-1 end tag (int32)]
 */
MallocArena::MallocArena(unsigned arena_size)
  : arena_(reinterpret_cast<char *>(sxmmap_align(arena_size)))
  , head_avail_(reinterpret_cast<AvailBlockCtl *>(arena_ + sizeof(uint64_t)))
  , rover_(head_avail_)
  , no_reserved_(0)
  , arena_size_(arena_size)
{
  assert(arena_size_ > 0);
  assert((arena_size_ % (2 * 1024 * 1024)) == 0);  // Multiple of 2MB
  assert(arena_size_ <= (512 * 1024 * 1024));  // <= 512MB

  // 7 bytes of padding plus the 1-byte reserved tag keep the free block
  // 8-byte aligned while leaving room for the boundary tag before it
  const unsigned char padding = 7;
  // Size of the initial free block: everything minus arena boundaries
  int32_t usable_size = arena_size_ -
    (sizeof(uint64_t) + sizeof(AvailBlockCtl) + padding + 1 + sizeof(int32_t));
  assert((usable_size % 8) == 0);

  // First 8 bytes of arena: this pointer (occupies only 4 bytes on 32bit
  // architectures, in which case the second 4 bytes are unused.)
  *reinterpret_cast<MallocArena **>(arena_) = this;

  // The initial large free block
  AvailBlockCtl *free_block =
    new (arena_ + sizeof(uint64_t) + sizeof(AvailBlockCtl) + padding + 1)
    AvailBlockCtl();
  free_block->size = usable_size;
  // Circular free list: the single free block links to the sentinel in
  // both directions
  free_block->link_next = free_block->link_prev =
    head_avail_->ConvertToLink(arena_);
  new (AvailBlockTag::GetTagLocation(free_block)) AvailBlockTag(usable_size);

  // Sentinel size 0: never a fit for any allocation, so FindAvailBlock
  // skips it
  head_avail_->size = 0;
  head_avail_->link_next = head_avail_->link_prev =
    free_block->ConvertToLink(arena_);

  // Prevent succeeding blocks from merging
  *(reinterpret_cast<char *>(free_block) - 1) = kTagReserved;
  // Final tag: reserved block marker
  *reinterpret_cast<int32_t *>(arena_ + arena_size_ - sizeof(int32_t)) = -1;
}
200 | |||
201 | |||
202 | /** | ||
203 | * Initializes the arena with repeated copies of the given pattern. Used for | ||
204 | * testing. | ||
205 | */ | ||
206 | 1 | MallocArena *MallocArena::CreateInitialized( | |
207 | unsigned arena_size, | ||
208 | unsigned char pattern) | ||
209 | { | ||
210 | 1 | MallocArena *result = new MallocArena(arena_size); | |
211 | // At this point, there is one big free block linked to by head_avail_ | ||
212 | 1 | AvailBlockCtl *free_block = result->head_avail_->GetNextPtr(result->arena_); | |
213 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 1 times.
|
1 | assert(free_block != result->head_avail_); |
214 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 1 times.
|
1 | assert(free_block->size > 0); |
215 | // Strip control information at both ends of the block | ||
216 | 1 | int usable_size = free_block->size - | |
217 | (sizeof(AvailBlockCtl) + sizeof(AvailBlockTag)); | ||
218 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 1 times.
|
1 | assert(usable_size > 0); |
219 | 1 | memset(free_block + 1, pattern, usable_size); | |
220 | 1 | return result; | |
221 | } | ||
222 | |||
223 | |||
// Releases the whole memory-mapped arena; all blocks handed out by Malloc()
// become invalid at once.
MallocArena::~MallocArena() {
  sxunmap(arena_, arena_size_);
}
227 | |||
228 | |||
229 | /** | ||
230 | * Given the free block "block", cuts out a new reserved block of size | ||
231 | * block_size at the end of the free block. Returns a pointer usable by the | ||
232 | * application. | ||
233 | */ | ||
234 | 5281679 | void *MallocArena::ReserveBlock( | |
235 | AvailBlockCtl *block, | ||
236 | int32_t block_size) | ||
237 | { | ||
238 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 5281679 times.
|
5281679 | assert(block->size >= block_size); |
239 | |||
240 | 5281679 | int32_t remaining_size = block->size - block_size; | |
241 | // Avoid creation of very small blocks | ||
242 |
2/2✓ Branch 0 taken 427091 times.
✓ Branch 1 taken 4854588 times.
|
5281679 | if (remaining_size < kMinBlockSize) { |
243 | 427091 | block_size += remaining_size; | |
244 | 427091 | remaining_size = 0; | |
245 | } | ||
246 | |||
247 | // Update the list of available blocks | ||
248 |
2/2✓ Branch 0 taken 427091 times.
✓ Branch 1 taken 4854588 times.
|
5281679 | if (remaining_size == 0) { |
249 | // Remove free block p from the list of available blocks | ||
250 | 427091 | UnlinkAvailBlock(block); | |
251 | } else { | ||
252 | 4854588 | block->ShrinkTo(remaining_size); | |
253 | } | ||
254 | |||
255 | // Place the new allocation, which also sets the block type tag at the end | ||
256 | 5281679 | char *new_block = reinterpret_cast<char *>(block) + remaining_size; | |
257 | 5281679 | new (new_block) ReservedBlockCtl(block_size); | |
258 | 5281679 | return new_block + sizeof(ReservedBlockCtl); | |
259 | } | ||
260 | |||
261 | |||
262 | /** | ||
263 | * Removes the given block from the doubly linked free block list. This happens | ||
264 | * when two adjacent free blocks are created in Free() and then merged. Or if | ||
265 | * a block gets fully used in Malloc(). | ||
266 | */ | ||
267 | 5210561 | void MallocArena::UnlinkAvailBlock(AvailBlockCtl *block) { | |
268 | 5210561 | AvailBlockCtl *next = block->GetNextPtr(arena_); | |
269 | 5210561 | AvailBlockCtl *prev = block->GetPrevPtr(arena_); | |
270 | 5210561 | prev->link_next = block->link_next; | |
271 | 5210561 | next->link_prev = block->link_prev; | |
272 | 5210561 | } | |
273 |