GCC Code Coverage Report


Directory: cvmfs/
File: cvmfs/malloc_arena.h
Date: 2025-06-29 02:35:41
            Exec    Total    Coverage
Lines:        28       28      100.0%
Branches:      1        2       50.0%

Line Branch Exec Source
1 /**
2 * This file is part of the CernVM File System.
3 */
4
5 #ifndef CVMFS_MALLOC_ARENA_H_
6 #define CVMFS_MALLOC_ARENA_H_
7
8 #include <inttypes.h>
9 #include <stdint.h>
10
11 #include <cassert>
12 #include <new>
13
14 // TODO(jblomer): the arena size could be a template parameter. In order to
15 // reduce code duplication, all functions not requiring the arena size could be
16 // moved to a base class.
17
18 /**
19 * An mmap'd block of general purpose memory for malloc/free in sqlite. Uses
20 * the "boundary-tag system" as described in TAOCP vol 1 section 2.5.
21 * Not thread-safe.
22 *
23 * Returned pointers are 8-byte aligned. Since a reserved block has a 4-byte
24 * prefix, all reserved blocks must start at addresses that end with binary 100.
25 * We round up all allocation requests to a multiple of 8. Reserved blocks
26 * are always placed at the upper bound of a free block. The first block of
27 * the arena is appropriately padded, and because the upper bound of the
28 * initial large free block is at an address that ends with binary 100, all
29 * reserved blocks will be aligned correctly.
30 *
31 * The tag layouts are as follows. Size is a signed 4-byte integer; previous
32 * and next are 4-byte offsets into the arena, linking to the previous and
33 * next free blocks in the list of free blocks.
34 *
35 * Available (free) block: Reserved (used) block:
36 *
37 * +--------+--------+ +--------+--------+
38 * upper | size | 00| upper | | 01|
39 * +--------+--------+ +--------+--------+
40 * | ... | | ... |
41 * +-----------------+ +-----------------+
42 * |previous| | | ... |
43 * +-----------------+ +-----------------+
44 * lower | size | next | lower | -size | |
45 * +--------+--------+ +--------+--------+
46 * B0 B7 B0 B7
47 *
48 *
49 * First block of arena_: Last "block" of arena_:
50 * in the free list but blocked for merging
51 *
52 * +--------+#########
53 * upper | 01| arena #
54 * +--------+--------+
55 * |previous| |
56 * +-----------------+ multiple of 8
57 * | int(0) | next | <- head_avail_ |
58 * +-----------------+ +--------+########
59 * lower | this [+ pad@32] | lower |int(-1) |outside#
60 * +--------+--------+ +--------+########
61 * B0 B7 B0 B4
62 *
63 * The arena size has to be a power of 2 megabytes. It also has to be a
64 * multiple of 8 for alignment reasons. It needs to be <= 512 MB.
65 */
66 class MallocArena {
67 public:
68 /**
69 * Avoid very small free blocks. This must be larger than
70 * sizeof(AvailBlockCtl) + sizeof(AvailBlockTag) and a multiple of 8.
71 */
72 static const int kMinBlockSize = 24;
73
74 /**
75 * Returns the MallocArena that contains the memory pointed to by ptr.
76 */
77 674462832 static inline MallocArena *GetMallocArena(void *ptr, unsigned arena_size) {
78 // NOLINTNEXTLINE(performance-no-int-to-ptr)
79 674462832 void *arena = reinterpret_cast<void *>(
80 674462832 uintptr_t(ptr) & ~(uintptr_t(arena_size) - uintptr_t(1)));
81 674462832 return *reinterpret_cast<MallocArena **>(arena);
82 }
83
84 explicit MallocArena(unsigned arena_size);
85 static MallocArena *CreateInitialized(unsigned arena_size,
86 unsigned char pattern);
87 ~MallocArena();
88
89 void *Malloc(const uint32_t size);
90 void Free(void *ptr);
91 639599375 inline bool Contains(void *ptr) const {
92 639599375 return GetMallocArena(ptr, arena_size_) == this;
93 }
94 uint32_t GetSize(void *ptr) const;
95 5226211 bool IsEmpty() const { return no_reserved_ == 0; }
96
97 private:
98 static const char kTagAvail = 0;
99 static const char kTagReserved = 1;
100
101 /**
102 * Lower boundary of a free block. Note that free blocks are not necessarily
103 * linked in ascending address order; the order is arbitrary.
104 */
105 struct AvailBlockCtl {
106 110019136780 AvailBlockCtl *GetNextPtr(char *base) {
107 110019136780 return reinterpret_cast<AvailBlockCtl *>(base + link_next);
108 }
109 467404147 AvailBlockCtl *GetPrevPtr(char *base) {
110 467404147 return reinterpret_cast<AvailBlockCtl *>(base + link_prev);
111 }
112 936113312 int32_t ConvertToLink(char *base) {
113 936113312 return static_cast<int>(reinterpret_cast<char *>(this) - base);
114 }
115 218250243 void ShrinkTo(int32_t smaller_size) {
116 218250243 size = smaller_size;
117 218250243 new (AvailBlockTag::GetTagLocation(this)) AvailBlockTag(smaller_size);
118 218250243 }
119 int32_t size; // always positive
120 int32_t link_next; // offset in the arena; saves 4 bytes on 64bit archs
121 int32_t link_prev; // offset in the arena; saves 4 bytes on 64bit archs
122 };
123
124 /**
125 * The 8-byte upper boundary of a free block.
126 */
127 struct AvailBlockTag {
128 452281175 explicit AvailBlockTag(int32_t s) : size(s), tag(kTagAvail) { }
129 452281175 static void *GetTagLocation(AvailBlockCtl *block) {
130 452281175 return reinterpret_cast<char *>(block) + block->size
131 452281175 - sizeof(AvailBlockTag);
132 }
133 int32_t size;
134 char padding[3];
135 char tag;
136 };
137
138 /**
139 * Lower boundary of a reserved block: the size is stored negated to
140 * distinguish it from the lower boundary of a free block.
141 */
142 struct ReservedBlockCtl {
143 public:
144 236026281 explicit ReservedBlockCtl(int32_t s) : size_(-s) {
145 236026281 char *base = reinterpret_cast<char *>(this);
146 236026281 *(base + s - 1) = kTagReserved;
147 236026281 }
148 639599375 int32_t size() const {
149 1/2 639599375 assert(size_ <= 0);
✗ Branch 0 not taken.
✓ Branch 1 taken 639599375 times.
150 639599375 return -size_;
151 }
152
153 private:
154 int32_t size_; // always negative
155 };
156
157 void UnlinkAvailBlock(AvailBlockCtl *block);
158 void EnqueueAvailBlock(AvailBlockCtl *block);
159 AvailBlockCtl *FindAvailBlock(const int32_t block_size);
160 void *ReserveBlock(AvailBlockCtl *block, int32_t block_size);
161
162 /**
163 * Starts with the address of the MallocArena object followed by a
164 * head_avail_ block and ends with a -1 guard integer to mimic a reserved
165 * end block. The arena is aligned at a multiple of arena_size. Therefore,
166 * the MallocArena object that corresponds to any pointer into the arena
167 * can be found in O(1).
168 */
169 char *arena_; ///< The memory block
170 /**
171 * Head of the list of available blocks. Located at the beginning of the
172 * arena_. It is the only "available" block with size 0; its upper tag
173 * field is set to "reserved" so that it is not merged with a neighboring
174 * freed block.
175 */
176 AvailBlockCtl *head_avail_;
177 AvailBlockCtl *rover_; ///< The free block where the next search starts.
178 uint32_t no_reserved_;
179 unsigned arena_size_;
180 }; // class MallocArena
181
182 #endif // CVMFS_MALLOC_ARENA_H_
183
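
The class comment in the listing above rests on two pieces of pointer arithmetic: allocation requests are rounded up to a multiple of 8, and a pointer is mapped back to the start of its arena by masking with the power-of-two arena size. The following standalone sketch illustrates that arithmetic under those stated assumptions; the helper names round_up8 and arena_base are illustrative only and not part of the cvmfs API.

  #include <cassert>
  #include <cstdint>

  // Round an allocation request up to the next multiple of 8, as described
  // in the class comment for MallocArena::Malloc().
  static uint32_t round_up8(uint32_t size) {
    return (size + 7) & ~static_cast<uint32_t>(7);
  }

  // Mask a pointer down to the base of its arena.  This only works because
  // the arena is aligned at a multiple of arena_size and arena_size is a
  // power of two -- the same trick used by MallocArena::GetMallocArena().
  static void *arena_base(void *ptr, uintptr_t arena_size) {
    return reinterpret_cast<void *>(
        reinterpret_cast<uintptr_t>(ptr) & ~(arena_size - 1));
  }

  int main() {
    assert(round_up8(1) == 8);
    assert(round_up8(24) == 24);
    assert(round_up8(25) == 32);

    // A pointer 100 bytes into a hypothetical 2 MB-aligned arena at
    // 0x40000000 masks back to the arena start.
    const uintptr_t kArenaSize = 2 * 1024 * 1024;
    void *p = reinterpret_cast<void *>(static_cast<uintptr_t>(0x40000000) + 100);
    assert(arena_base(p, kArenaSize) ==
           reinterpret_cast<void *>(static_cast<uintptr_t>(0x40000000)));
    return 0;
  }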
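
For context, a hedged usage sketch of the public interface declared in this header (CreateInitialized, Malloc, Contains, GetSize, Free, IsEmpty). It assumes the header is reachable on the include path and that the program links against the cvmfs object that implements MallocArena; the 2 MB arena size and the printed output are illustrative only and not taken from this report.

  #include <cassert>
  #include <cstdio>

  #include "malloc_arena.h"  // include path assumed; adjust to the cvmfs layout

  int main() {
    const unsigned kArenaSize = 2 * 1024 * 1024;  // 2 MB, a power of two

    // CreateInitialized() is declared above to return an arena whose memory
    // is filled with the given byte pattern (assumption based on the
    // declaration; see malloc_arena.cc for the actual semantics).
    MallocArena *arena = MallocArena::CreateInitialized(kArenaSize, 0x2A);
    assert(arena->IsEmpty());

    void *p = arena->Malloc(100);  // request is rounded up internally
    assert(p != NULL);
    assert(arena->Contains(p));    // O(1) lookup via pointer masking
    printf("reserved block size: %u\n", arena->GetSize(p));

    arena->Free(p);
    assert(arena->IsEmpty());

    delete arena;  // assumed to be the matching cleanup for CreateInitialized()
    return 0;
  }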