GCC Code Coverage Report


Directory: cvmfs/
File: cvmfs/malloc_heap.cc
Date: 2025-11-30 02:35:17
            Exec   Total   Coverage
Lines:        69      69     100.0%
Branches:     22      32      68.8%
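(Branch coverage is the ratio of taken branch outcomes to all instrumented outcomes: 22 / 32 = 68.75 %, shown here rounded to 68.8 %.)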

Line Branch Exec Source
1 /**
2 * This file is part of the CernVM File System.
3 */
4
5
6 #include "malloc_heap.h"
7
8 #include <cassert>
9 #include <cstring>
10 #include <new>
11
12 #include "util/smalloc.h"
13
14 using namespace std; // NOLINT
15
16 2977766 void *MallocHeap::Allocate(uint64_t size, void *header, unsigned header_size) {
17 1/2 2977766 assert(size > 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 2977766 times.
18 1/2 2977766 assert(header_size <= size);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 2977766 times.
19 2977766 const uint64_t rounded_size = RoundUp8(size);
20 2977766 const int64_t real_size = rounded_size + sizeof(Tag);
21 2/2 2977766 if (gauge_ + real_size > capacity_)
    ✓ Branch 0 taken 115 times.
    ✓ Branch 1 taken 2977651 times.
22 115 return NULL;
23
24 2977651 unsigned char *new_block = heap_ + gauge_;
25 2977651 new (new_block) Tag(rounded_size);
26 2977651 new_block += sizeof(Tag);
27 2977651 memcpy(new_block, header, header_size);
28 2977651 gauge_ += real_size;
29 2977651 stored_ += rounded_size;
30 2977651 num_blocks_++;
31 2977651 return new_block;
32 }
33
34
35 173 void MallocHeap::Compact() {
36 2/2 173 if (gauge_ == 0)
    ✓ Branch 0 taken 29 times.
    ✓ Branch 1 taken 144 times.
37 29 return;
38
39 // Not really a tag, just the top memory address
40 144 Tag *heap_top = reinterpret_cast<Tag *>(heap_ + gauge_);
41 144 Tag *current_tag = reinterpret_cast<Tag *>(heap_);
42 144 Tag *next_tag = current_tag->JumpToNext();
43 // Move a sliding window of two blocks over the heap and compact where
44 // possible
45 2/2 4463827 while (next_tag < heap_top) {
    ✓ Branch 0 taken 4463683 times.
    ✓ Branch 1 taken 144 times.
46 2/2 4463683 if (current_tag->IsFree()) {
    ✓ Branch 1 taken 2947669 times.
    ✓ Branch 2 taken 1516014 times.
47 2/2 2947669 if (next_tag->IsFree()) {
    ✓ Branch 1 taken 1460654 times.
    ✓ Branch 2 taken 1487015 times.
48 // Adjacent free blocks, merge and try again
49 1460654 current_tag->size -= sizeof(Tag) + next_tag->GetSize();
50 1460654 next_tag = next_tag->JumpToNext();
51 } else {
52 // Free block followed by a reserved block, move memory and create a
53 // new free tag at the end of the moved block
54 1487015 const int64_t free_space = current_tag->size;
55 1487015 current_tag->size = next_tag->size;
56 1487015 memmove(current_tag->GetBlock(), next_tag->GetBlock(),
57 next_tag->GetSize());
58 1/2 1487015 (*callback_ptr_)(BlockPtr(current_tag->GetBlock()));
    ✓ Branch 3 taken 1487015 times.
    ✗ Branch 4 not taken.
59 1487015 next_tag = current_tag->JumpToNext();
60 1487015 next_tag->size = free_space;
61 }
62 } else {
63 // Current block allocated, move on
64 1516014 current_tag = next_tag;
65 1516014 next_tag = next_tag->JumpToNext();
66 }
67 }
68
69 144 gauge_ = (reinterpret_cast<unsigned char *>(current_tag) - heap_);
70 2/2 144 if (!current_tag->IsFree())
    ✓ Branch 1 taken 29 times.
    ✓ Branch 2 taken 115 times.
71 29 gauge_ += sizeof(Tag) + current_tag->GetSize();
72 }
73
74
75 58 void *MallocHeap::Expand(void *block, uint64_t new_size) {
76 58 const uint64_t old_size = GetSize(block);
77 1/2 58 assert(old_size <= new_size);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 58 times.
78 58 void *new_block = Allocate(new_size, block, old_size);
79 1/2 58 if (new_block != NULL)
    ✓ Branch 0 taken 58 times.
    ✗ Branch 1 not taken.
80 58 MarkFree(block);
81 58 return new_block;
82 }
83
84
85 59566 bool MallocHeap::HasSpaceFor(uint64_t nbytes) {
86 59566 return RoundUp8(gauge_ + nbytes + sizeof(Tag)) <= capacity_;
87 }
88
89
90 1460827 void MallocHeap::MarkFree(void *block) {
91 1460827 Tag *tag = reinterpret_cast<Tag *>(block) - 1;
92 1/2 1460827 assert(tag->size > 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 1460827 times.
93 1460827 tag->size = -(tag->size);
94 1460827 stored_ -= tag->GetSize();
95 1460827 num_blocks_--;
96 // TODO(jblomer): if MarkFree() takes place at the top of the heap, one could
97 // move back the gauge_ pointer. Whether this is an optimization or unnecessary
98 // extra work depends on how the MallocHeap is used.
99 1460827 }
100
101
102 1398042 uint64_t MallocHeap::GetSize(void *block) {
103 1398042 Tag *tag = reinterpret_cast<Tag *>(block) - 1;
104 1/2 1398042 assert(tag->size > 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 1398042 times.
105 1398042 return tag->size;
106 }
107
108
109 412 MallocHeap::MallocHeap(uint64_t capacity, CallbackPtr callback_ptr)
110 412 : callback_ptr_(callback_ptr)
111 412 , capacity_(capacity)
112 412 , gauge_(0)
113 412 , stored_(0)
114 412 , num_blocks_(0) {
115 1/2 412 assert(capacity_ > kMinCapacity);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 412 times.
116 // Ensure 8-byte alignment
117 1/2 412 assert((capacity_ % 8) == 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 412 times.
118 412 heap_ = reinterpret_cast<unsigned char *>(sxmmap(capacity));
119 1/2 412 assert(uintptr_t(heap_) % 8 == 0);
    ✗ Branch 0 not taken.
    ✓ Branch 1 taken 412 times.
120 412 }
121
122
123 411 MallocHeap::~MallocHeap() { sxunmap(heap_, capacity_); }
124
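
The listing above covers the complete allocator: Allocate() places each block behind a size tag and bumps gauge_, MarkFree() merely negates the tag's size, and Compact() slides live blocks down over freed ones, reporting each moved block through callback_ptr_ so the owner can update its pointers. As a reading aid, here is a minimal, self-contained sketch of that block layout. It is illustrative only: the names (ToyHeap, arena_) are invented here, and the real class depends on malloc_heap.h and util/smalloc.h, which are not part of this report.

// Simplified, standalone model of the block layout used above (hypothetical
// names, not the cvmfs class): every payload is preceded by an 8-byte tag
// holding a signed size; a negative size marks the block as free.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <new>
#include <vector>

namespace {

struct Tag {
  int64_t size;  // > 0: allocated payload bytes, < 0: free block of -size bytes
};

uint64_t RoundUp8(uint64_t size) { return (size + 7) & ~static_cast<uint64_t>(7); }

class ToyHeap {
 public:
  explicit ToyHeap(uint64_t capacity) : arena_(capacity), gauge_(0) {}

  // Bump-pointer allocation: construct a tag in place, hand out the payload.
  void *Allocate(uint64_t size) {
    const uint64_t rounded = RoundUp8(size);
    if (gauge_ + rounded + sizeof(Tag) > arena_.size())
      return NULL;  // arena exhausted, like the capacity_ check in the report
    unsigned char *block = &arena_[gauge_];
    new (block) Tag{static_cast<int64_t>(rounded)};
    gauge_ += rounded + sizeof(Tag);
    return block + sizeof(Tag);
  }

  // Freeing only flips the sign of the tag, as MallocHeap::MarkFree() does;
  // compaction is not modeled in this sketch.
  void MarkFree(void *block) {
    Tag *tag = reinterpret_cast<Tag *>(block) - 1;
    assert(tag->size > 0);
    tag->size = -(tag->size);
  }

 private:
  std::vector<unsigned char> arena_;  // stands in for the sxmmap'd heap_
  uint64_t gauge_;                    // offset of the first unused byte
};

}  // namespace

int main() {
  ToyHeap heap(1024);
  void *a = heap.Allocate(100);
  void *b = heap.Allocate(200);
  heap.MarkFree(a);
  // In the real MallocHeap, Compact() would now slide the block at b down over
  // the freed space and report its new address through callback_ptr_.
  printf("a=%p b=%p\n", a, b);
  return 0;
}

As in the report, Allocate() returns NULL once the remaining capacity cannot hold the rounded payload plus its tag (the branch taken 115 times on source line 21).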