Directory: | cvmfs/ |
---|---|
File: | cvmfs/malloc_heap.cc |
Date: | 2025-02-09 02:34:19 |
Exec | Total | Coverage | |
---|---|---|---|
Lines: | 72 | 72 | 100.0% |
Branches: | 22 | 32 | 68.8% |
Line | Branch | Exec | Source |
---|---|---|---|
1 | /** | ||
2 | * This file is part of the CernVM File System. | ||
3 | */ | ||
4 | |||
5 | |||
6 | #include "malloc_heap.h" | ||
7 | |||
8 | #include <cassert> | ||
9 | #include <cstring> | ||
10 | #include <new> | ||
11 | |||
12 | #include "util/smalloc.h" | ||
13 | |||
14 | using namespace std; // NOLINT | ||
15 | |||
16 | 106130 | void *MallocHeap::Allocate( | |
17 | uint64_t size, | ||
18 | void *header, | ||
19 | unsigned header_size) | ||
20 | { | ||
21 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 106130 times.
|
106130 | assert(size > 0); |
22 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 106130 times.
|
106130 | assert(header_size <= size); |
23 | 106130 | uint64_t rounded_size = RoundUp8(size); | |
24 | 106130 | int64_t real_size = rounded_size + sizeof(Tag); | |
25 |
2/2✓ Branch 0 taken 4 times.
✓ Branch 1 taken 106126 times.
|
106130 | if (gauge_ + real_size > capacity_) |
26 | 4 | return NULL; | |
27 | |||
28 | 106126 | unsigned char *new_block = heap_ + gauge_; | |
29 | 106126 | new (new_block) Tag(rounded_size); | |
30 | 106126 | new_block += sizeof(Tag); | |
31 | 106126 | memcpy(new_block, header, header_size); | |
32 | 106126 | gauge_ += real_size; | |
33 | 106126 | stored_ += rounded_size; | |
34 | 106126 | num_blocks_++; | |
35 | 106126 | return new_block; | |
36 | } | ||
37 | |||
38 | |||
/**
 * Defragments the heap in a single linear sweep: reserved blocks are moved
 * towards the bottom of the heap and all free space collects at the top,
 * after which gauge_ is reset to the new top of used memory.  For every
 * block that is relocated, callback_ptr_ is invoked with the block's new
 * address so the owner can fix up its pointers.
 *
 * Free blocks appear to be marked by a negative Tag::size (see MarkFree),
 * with GetSize() presumably returning the absolute value — confirm against
 * malloc_heap.h.
 */
void MallocHeap::Compact() {
  if (gauge_ == 0)
    return;

  // Not really a tag, just the top memory address
  Tag *heap_top = reinterpret_cast<Tag *>(heap_ + gauge_);
  Tag *current_tag = reinterpret_cast<Tag *>(heap_);
  Tag *next_tag = current_tag->JumpToNext();
  // Move a sliding window of two blocks over the heap and compact where
  // possible
  while (next_tag < heap_top) {
    if (current_tag->IsFree()) {
      if (next_tag->IsFree()) {
        // Adjacent free blocks, merge and try again.  Subtracting from the
        // (negative) free size grows the merged free block by the absorbed
        // block plus its tag.
        current_tag->size -= sizeof(Tag) + next_tag->GetSize();
        next_tag = next_tag->JumpToNext();
      } else {
        // Free block followed by a reserved block, move memory and create a
        // new free tag at the end of the moved block
        int64_t free_space = current_tag->size;
        current_tag->size = next_tag->size;
        memmove(current_tag->GetBlock(),
                next_tag->GetBlock(), next_tag->GetSize());
        // Notify the owner of the block's new location
        (*callback_ptr_)(BlockPtr(current_tag->GetBlock()));
        // The gap left behind becomes the new free block (negative size
        // carried over from the old free tag)
        next_tag = current_tag->JumpToNext();
        next_tag->size = free_space;
      }
    } else {
      // Current block allocated, move on
      current_tag = next_tag;
      next_tag = next_tag->JumpToNext();
    }
  }

  // current_tag now addresses the last block; if it is free, the heap ends
  // right at its tag, otherwise just past its payload
  gauge_ = (reinterpret_cast<unsigned char *>(current_tag) - heap_);
  if (!current_tag->IsFree())
    gauge_ += sizeof(Tag) + current_tag->GetSize();
}
77 | |||
78 | |||
79 | 2 | void *MallocHeap::Expand(void *block, uint64_t new_size) { | |
80 | 2 | uint64_t old_size = GetSize(block); | |
81 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 2 times.
|
2 | assert(old_size <= new_size); |
82 | 2 | void *new_block = Allocate(new_size, block, old_size); | |
83 |
1/2✓ Branch 0 taken 2 times.
✗ Branch 1 not taken.
|
2 | if (new_block != NULL) |
84 | 2 | MarkFree(block); | |
85 | 2 | return new_block; | |
86 | } | ||
87 | |||
88 | |||
89 | 2054 | bool MallocHeap::HasSpaceFor(uint64_t nbytes) { | |
90 | 2054 | return RoundUp8(gauge_ + nbytes + sizeof(Tag)) <= capacity_; | |
91 | } | ||
92 | |||
93 | |||
94 | 52120 | void MallocHeap::MarkFree(void *block) { | |
95 | 52120 | Tag *tag = reinterpret_cast<Tag *>(block) - 1; | |
96 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 52120 times.
|
52120 | assert(tag->size > 0); |
97 | 52120 | tag->size = -(tag->size); | |
98 | 52120 | stored_ -= tag->GetSize(); | |
99 | 52120 | num_blocks_--; | |
100 | // TODO(jblomer): if MarkFree() takes place at the top of the heap, one could | ||
101 | // move back the gauge_ pointer. If this is an optimization or unnecessary | ||
102 | // extra work depends on how the MallocHeap is used. | ||
103 | 52120 | } | |
104 | |||
105 | |||
106 | 49910 | uint64_t MallocHeap::GetSize(void *block) { | |
107 | 49910 | Tag *tag = reinterpret_cast<Tag *>(block) - 1; | |
108 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 49910 times.
|
49910 | assert(tag->size > 0); |
109 | 49910 | return tag->size; | |
110 | } | ||
111 | |||
112 | |||
113 | 22 | MallocHeap::MallocHeap(uint64_t capacity, CallbackPtr callback_ptr) | |
114 | 22 | : callback_ptr_(callback_ptr) | |
115 | 22 | , capacity_(capacity) | |
116 | 22 | , gauge_(0) | |
117 | 22 | , stored_(0) | |
118 | 22 | , num_blocks_(0) | |
119 | { | ||
120 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 22 times.
|
22 | assert(capacity_ > kMinCapacity); |
121 | // Ensure 8-byte alignment | ||
122 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 22 times.
|
22 | assert((capacity_ % 8) == 0); |
123 | 22 | heap_ = reinterpret_cast<unsigned char *>(sxmmap(capacity)); | |
124 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 22 times.
|
22 | assert(uintptr_t(heap_) % 8 == 0); |
125 | 22 | } | |
126 | |||
127 | |||
128 | 22 | MallocHeap::~MallocHeap() { | |
129 | 22 | sxunmap(heap_, capacity_); | |
130 | 22 | } | |
131 |