GCC Code Coverage Report

Directory: cvmfs/
File:      cvmfs/malloc_heap.cc
Date:      2025-06-22 02:36:02

            Exec   Total   Coverage
Lines:        69      69     100.0%
Branches:     22      32      68.8%

Line Branch Exec Source
1 /**
2 * This file is part of the CernVM File System.
3 */
4
5
6 #include "malloc_heap.h"
7
8 #include <cassert>
9 #include <cstring>
10 #include <new>
11
12 #include "util/smalloc.h"
13
14 using namespace std; // NOLINT
15
16 212257 void *MallocHeap::Allocate(uint64_t size, void *header, unsigned header_size) {
17 1/2 212257 assert(size > 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 212257 times.
18 1/2 212257 assert(header_size <= size);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 212257 times.
19 212257 const uint64_t rounded_size = RoundUp8(size);
20 212257 const int64_t real_size = rounded_size + sizeof(Tag);
21 2/2 212257 if (gauge_ + real_size > capacity_)
    ✓ Branch 0 taken 7 times.
    ✓ Branch 1 taken 212250 times.
22 7 return NULL;
23
24 212250 unsigned char *new_block = heap_ + gauge_;
25 212250 new (new_block) Tag(rounded_size);
26 212250 new_block += sizeof(Tag);
27 212250 memcpy(new_block, header, header_size);
28 212250 gauge_ += real_size;
29 212250 stored_ += rounded_size;
30 212250 num_blocks_++;
31 212250 return new_block;
32 }
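
Allocate() works as a bump allocator: every payload is preceded by an 8-byte Tag that sign-encodes the (rounded) payload size, so the heap can later be walked tag by tag in Compact(). Tag and RoundUp8() are defined in malloc_heap.h; the following is only a sketch of the interface the code above relies on, reconstructed from its call sites rather than copied from the header.

// Sketch only, reconstructed from the call sites above -- not the actual
// definitions in malloc_heap.h.
#include <stdint.h>

struct Tag {
  explicit Tag(int64_t s) : size(s) { }

  // A negative size marks a free block, a positive size a reserved one.
  bool IsFree() const { return size < 0; }
  uint64_t GetSize() const { return (size < 0) ? -size : size; }

  // The payload starts right behind the tag.
  unsigned char *GetBlock() {
    return reinterpret_cast<unsigned char *>(this) + sizeof(Tag);
  }

  // The next tag follows this block's payload.
  Tag *JumpToNext() {
    return reinterpret_cast<Tag *>(GetBlock() + GetSize());
  }

  int64_t size;
};

// Keeps payloads (and hence tags) 8-byte aligned.
static uint64_t RoundUp8(const uint64_t size) {
  return (size + 7) & ~static_cast<uint64_t>(7);
}
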
33
34
35 12 void MallocHeap::Compact() {
36 2/2 12 if (gauge_ == 0)
    ✓ Branch 0 taken 2 times.
    ✓ Branch 1 taken 10 times.
37 2 return;
38
39 // Not really a tag, just the top memory address
40 10 Tag *heap_top = reinterpret_cast<Tag *>(heap_ + gauge_);
41 10 Tag *current_tag = reinterpret_cast<Tag *>(heap_);
42 10 Tag *next_tag = current_tag->JumpToNext();
43 // Move a sliding window of two blocks over the heap and compact where
44 // possible
45 2/2 318426 while (next_tag < heap_top) {
    ✓ Branch 0 taken 318416 times.
    ✓ Branch 1 taken 10 times.
46 2/2 318416 if (current_tag->IsFree()) {
    ✓ Branch 1 taken 210184 times.
    ✓ Branch 2 taken 108232 times.
47 2/2 210184 if (next_tag->IsFree()) {
    ✓ Branch 1 taken 103952 times.
    ✓ Branch 2 taken 106232 times.
48 // Adjacent free blocks, merge and try again
49 103952 current_tag->size -= sizeof(Tag) + next_tag->GetSize();
50 103952 next_tag = next_tag->JumpToNext();
51 } else {
52 // Free block followed by a reserved block, move memory and create a
53 // new free tag at the end of the moved block
54 106232 const int64_t free_space = current_tag->size;
55 106232 current_tag->size = next_tag->size;
56 106232 memmove(current_tag->GetBlock(), next_tag->GetBlock(),
57 next_tag->GetSize());
58 1/2 106232 (*callback_ptr_)(BlockPtr(current_tag->GetBlock()));
    ✓ Branch 3 taken 106232 times.
    ✗ Branch 4 not taken.
59 106232 next_tag = current_tag->JumpToNext();
60 106232 next_tag->size = free_space;
61 }
62 } else {
63 // Current block allocated, move on
64 108232 current_tag = next_tag;
65 108232 next_tag = next_tag->JumpToNext();
66 }
67 }
68
69 10 gauge_ = (reinterpret_cast<unsigned char *>(current_tag) - heap_);
70 2/2 10 if (!current_tag->IsFree())
    ✓ Branch 1 taken 2 times.
    ✓ Branch 2 taken 8 times.
71 2 gauge_ += sizeof(Tag) + current_tag->GetSize();
72 }
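
Compact() slides reserved blocks towards the bottom of the heap, so pointers handed out by Allocate() can change; every move is reported through callback_ptr_ with the block's new position, and the header that Allocate() copied in front of the payload lets the owner recognize which block moved. A minimal usage sketch: OwnerOfBlocks, its id-as-header convention, and the method names New/Delete/Defragment are made up for illustration, and it assumes the Callbackable<> machinery behind CallbackPtr exposes MakeCallback() and a BlockPtr with a pointer member, as used elsewhere in CVMFS.

// Illustrative sketch, not part of CVMFS.
#include <stdint.h>
#include <cassert>
#include <cstring>
#include <map>

#include "malloc_heap.h"

// Hypothetical owner that stores a 4-byte id as every block's header so it
// can re-link blocks after Compact() has moved them.
class OwnerOfBlocks {
 public:
  OwnerOfBlocks()
      : heap_(1024 * 1024,  // capacity in bytes, must be a multiple of 8
              MallocHeap::MakeCallback(&OwnerOfBlocks::OnBlockMove, this)) { }

  void *New(uint32_t id, uint64_t nbytes) {
    assert(nbytes >= sizeof(id));  // the header must fit into the block
    void *block = heap_.Allocate(nbytes, &id, sizeof(id));
    if (block != NULL)
      index_[id] = block;
    return block;
  }

  void Delete(uint32_t id) {
    heap_.MarkFree(index_[id]);
    index_.erase(id);
  }

  // Squeeze out the holes left by MarkFree(); every block that moves is
  // reported through OnBlockMove() so the index stays consistent.
  void Defragment() { heap_.Compact(); }

 private:
  void OnBlockMove(const MallocHeap::BlockPtr &ptr) {
    uint32_t id;  // Allocate() copied the id to the front of the block
    std::memcpy(&id, ptr.pointer, sizeof(id));
    index_[id] = ptr.pointer;
  }

  MallocHeap heap_;
  std::map<uint32_t, void *> index_;
};
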
73
74
75 4 void *MallocHeap::Expand(void *block, uint64_t new_size) {
76 4 const uint64_t old_size = GetSize(block);
77 1/2 4 assert(old_size <= new_size);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 4 times.
78 4 void *new_block = Allocate(new_size, block, old_size);
79 1/2 4 if (new_block != NULL)
    ✓ Branch 0 taken 4 times.
    ✗ Branch 1 not taken.
80 4 MarkFree(block);
81 4 return new_block;
82 }
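
Expand() behaves like a realloc() restricted to growth within the heap: the block always moves (Allocate() copies the old content as the header of the new block), and on failure the old block remains valid and reserved. A hedged usage sketch; GrowOrKeep and grow_to are placeholder names, not part of malloc_heap.h:

// Illustrative only: grow a block, keeping the old one on failure.
#include <stdint.h>

#include "malloc_heap.h"

void *GrowOrKeep(MallocHeap *heap, void *block, uint64_t grow_to) {
  void *bigger = heap->Expand(block, grow_to);
  if (bigger != NULL)
    return bigger;  // Expand() already marked the old block free
  // Heap exhausted: the original block is still reserved and usable; the
  // caller may Compact() (after re-linking moved blocks) and retry.
  return block;
}
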
83
84
85 4108 bool MallocHeap::HasSpaceFor(uint64_t nbytes) {
86 4108 return RoundUp8(gauge_ + nbytes + sizeof(Tag)) <= capacity_;
87 }
88
89
90 103964 void MallocHeap::MarkFree(void *block) {
91 103964 Tag *tag = reinterpret_cast<Tag *>(block) - 1;
92 1/2 103964 assert(tag->size > 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 103964 times.
93 103964 tag->size = -(tag->size);
94 103964 stored_ -= tag->GetSize();
95 103964 num_blocks_--;
96 // TODO(jblomer): if MarkFree() takes place at the top of the heap, one could
97 // move back the gauge_ pointer. Whether this is an optimization or unnecessary
98 // extra work depends on how the MallocHeap is used.
99 103964 }
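
A hypothetical sketch of the optimization hinted at in the TODO above: when the freed block is the topmost one, the bump pointer gauge_ can be moved back immediately instead of leaving a free tag for Compact(). MarkFreeAndTrim() is not part of malloc_heap.h; it only reuses the members of the class above.

// Hypothetical, not in malloc_heap.h: MarkFree() variant that trims the heap
// top directly when the freed block happens to be the last one.
void MallocHeap::MarkFreeAndTrim(void *block) {
  Tag *tag = reinterpret_cast<Tag *>(block) - 1;
  assert(tag->size > 0);
  const int64_t block_size = tag->size;
  stored_ -= block_size;
  num_blocks_--;
  unsigned char *block_end =
      reinterpret_cast<unsigned char *>(block) + block_size;
  if (block_end == heap_ + gauge_) {
    // Topmost block: hand tag and payload back to the bump allocator now
    gauge_ -= sizeof(Tag) + block_size;
  } else {
    // Interior block: flag it free and let Compact() reclaim the space later
    tag->size = -(tag->size);
  }
}
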
100
101
102 100096 uint64_t MallocHeap::GetSize(void *block) {
103 100096 Tag *tag = reinterpret_cast<Tag *>(block) - 1;
104 1/2 100096 assert(tag->size > 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 100096 times.
105 100096 return tag->size;
106 }
107
108
109 795 MallocHeap::MallocHeap(uint64_t capacity, CallbackPtr callback_ptr)
110 795 : callback_ptr_(callback_ptr)
111 795 , capacity_(capacity)
112 795 , gauge_(0)
113 795 , stored_(0)
114 795 , num_blocks_(0) {
115 1/2 795 assert(capacity_ > kMinCapacity);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 795 times.
116 // Ensure 8-byte alignment
117 1/2 795 assert((capacity_ % 8) == 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 795 times.
118 795 heap_ = reinterpret_cast<unsigned char *>(sxmmap(capacity));
119 1/2 795 assert(uintptr_t(heap_) % 8 == 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 795 times.
120 795 }
121
122
123 794 MallocHeap::~MallocHeap() { sxunmap(heap_, capacity_); }
124