GCC Code Coverage Report


Directory: cvmfs/
File: cvmfs/ingestion/item_mem.cc
Date: 2025-12-28 02:35:52
            Exec  Total  Coverage
Lines:        45     46     97.8%
Branches:     34     50     68.0%

Line Branch Exec Source
1 /**
2 * This file is part of the CernVM File System.
3 */
4
5 #include "item_mem.h"
6
7 #include <cassert>
8 #include <cstdlib>
9
10 #include "util/concurrency.h"
11 #include "util/exception.h"
12
13 atomic_int64 ItemAllocator::total_allocated_ = 0;
14
15
16 9106967 void ItemAllocator::Free(void *ptr) {
17 9106967 const MutexLockGuard guard(lock_);
18
19 9131569 MallocArena *M = MallocArena::GetMallocArena(ptr, kArenaSize);
20
1/2
✓ Branch 1 taken 9131569 times.
✗ Branch 2 not taken.
9131569 M->Free(ptr);
21 9131569 const unsigned N = malloc_arenas_.size();
22
6/6
✓ Branch 0 taken 1696730 times.
✓ Branch 1 taken 7434839 times.
✓ Branch 3 taken 48 times.
✓ Branch 4 taken 1696682 times.
✓ Branch 5 taken 48 times.
✓ Branch 6 taken 9131521 times.
9131569 if ((N > 1) && M->IsEmpty()) {
23
1/2
✓ Branch 0 taken 59 times.
✗ Branch 1 not taken.
59 for (unsigned i = 0; i < N; ++i) {
24
2/2
✓ Branch 1 taken 48 times.
✓ Branch 2 taken 11 times.
59 if (malloc_arenas_[i] == M) {
25
1/2
✓ Branch 1 taken 48 times.
✗ Branch 2 not taken.
48 delete malloc_arenas_[i];
26 48 atomic_xadd64(&total_allocated_, -static_cast<int>(kArenaSize));
27
1/2
✓ Branch 4 taken 48 times.
✗ Branch 5 not taken.
48 malloc_arenas_.erase(malloc_arenas_.begin() + i);
28 48 idx_last_arena_ = 0;
29 48 return;
30 }
31 }
32 PANIC(NULL);
33 }
34
2/2
✓ Branch 1 taken 9124028 times.
✓ Branch 2 taken 15 times.
9131569 }
35
36
37 1090 ItemAllocator::ItemAllocator() : idx_last_arena_(0) {
38 1090 const int retval = pthread_mutex_init(&lock_, NULL);
39
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 1090 times.
1090 assert(retval == 0);
40
41
3/6
✓ Branch 1 taken 1090 times.
✗ Branch 2 not taken.
✓ Branch 4 taken 1090 times.
✗ Branch 5 not taken.
✓ Branch 7 taken 1090 times.
✗ Branch 8 not taken.
1090 malloc_arenas_.push_back(new MallocArena(kArenaSize));
42 1090 atomic_xadd64(&total_allocated_, kArenaSize);
43 1090 }
44
45
46 1089 ItemAllocator::~ItemAllocator() {
47
2/2
✓ Branch 1 taken 1089 times.
✓ Branch 2 taken 1089 times.
2178 for (unsigned i = 0; i < malloc_arenas_.size(); ++i) {
48 1089 atomic_xadd64(&total_allocated_, -static_cast<int>(kArenaSize));
49
1/2
✓ Branch 1 taken 1089 times.
✗ Branch 2 not taken.
1089 delete malloc_arenas_[i];
50 }
51 1089 pthread_mutex_destroy(&lock_);
52 1089 }
53
54
55 9087674 void *ItemAllocator::Malloc(unsigned size) {
56 9087674 const MutexLockGuard guard(lock_);
57
58
1/2
✓ Branch 2 taken 9131569 times.
✗ Branch 3 not taken.
9131569 void *p = malloc_arenas_[idx_last_arena_]->Malloc(size);
59
2/2
✓ Branch 0 taken 9131488 times.
✓ Branch 1 taken 81 times.
9131569 if (p != NULL)
60 9131488 return p;
61 81 const unsigned N = malloc_arenas_.size();
62
2/2
✓ Branch 0 taken 103 times.
✓ Branch 1 taken 48 times.
151 for (unsigned i = 0; i < N; ++i) {
63
1/2
✓ Branch 2 taken 103 times.
✗ Branch 3 not taken.
103 p = malloc_arenas_[i]->Malloc(size);
64
2/2
✓ Branch 0 taken 33 times.
✓ Branch 1 taken 70 times.
103 if (p != NULL) {
65 33 idx_last_arena_ = i;
66 33 return p;
67 }
68 }
69 48 idx_last_arena_ = N;
70
2/4
✓ Branch 1 taken 48 times.
✗ Branch 2 not taken.
✓ Branch 4 taken 48 times.
✗ Branch 5 not taken.
48 MallocArena *M = new MallocArena(kArenaSize);
71 48 atomic_xadd64(&total_allocated_, kArenaSize);
72
1/2
✓ Branch 1 taken 48 times.
✗ Branch 2 not taken.
48 malloc_arenas_.push_back(M);
73
1/2
✓ Branch 1 taken 48 times.
✗ Branch 2 not taken.
48 p = M->Malloc(size);
74
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 48 times.
48 assert(p != NULL);
75 48 return p;
76 9131569 }
77
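
Note on the arena lookup exercised above: ItemAllocator::Free() recovers the owning arena directly from the freed pointer via MallocArena::GetMallocArena(ptr, kArenaSize), and ItemAllocator::Malloc() falls back from the last-used arena to a scan of all arenas and finally to a freshly allocated one. The stand-alone sketch below only illustrates the pointer-masking idea behind that lookup, assuming a power-of-two arena size and arenas placed at addresses aligned to that size; Arena, NewArena, ArenaMalloc, GetArena, and kSketchArenaSize are hypothetical stand-ins, not the CernVM-FS MallocArena API.

// Minimal sketch of the aligned-arena lookup trick; all names here are
// illustrative stand-ins, not the CernVM-FS implementation.
#include <cassert>
#include <stdint.h>
#include <stdlib.h>

namespace {

const size_t kSketchArenaSize = 1 << 20;  // 1 MiB, chosen for the example only

struct Arena {
  unsigned char *next;  // bump pointer; the arena header sits at the block start
};

Arena *NewArena() {
  void *block = NULL;
  // Alignment equal to the arena size is what makes the lookup below work.
  const int retval = posix_memalign(&block, kSketchArenaSize, kSketchArenaSize);
  assert(retval == 0);
  Arena *arena = static_cast<Arena *>(block);
  arena->next = static_cast<unsigned char *>(block) + sizeof(Arena);
  return arena;
}

void *ArenaMalloc(Arena *arena, size_t size) {
  // Naive bump allocation, no bounds checking or free list.
  void *p = arena->next;
  arena->next += size;
  return p;
}

Arena *GetArena(void *ptr) {
  // Clearing the low bits of any pointer handed out by an arena yields the
  // arena's base address, i.e. the arena object itself.
  const uintptr_t mask = ~(static_cast<uintptr_t>(kSketchArenaSize) - 1);
  return reinterpret_cast<Arena *>(reinterpret_cast<uintptr_t>(ptr) & mask);
}

}  // anonymous namespace

int main() {
  Arena *arena = NewArena();
  void *p = ArenaMalloc(arena, 64);
  assert(GetArena(p) == arena);  // owning arena recovered from the pointer alone
  free(arena);
  return 0;
}

Under that aligned-allocation assumption, Free() never has to search for the owning arena; the vector scan in the report only runs when an arena has become empty and is being retired, which is why its branches are hit so rarely compared to the main Free() path.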