GCC Code Coverage Report
Directory:  cvmfs/
File:       cvmfs/ingestion/item_mem.cc
Date:       2019-02-03 02:48:13

                  Exec   Total   Coverage
Lines:              42       43     97.7 %
Branches:           21       26     80.8 %

Line  Branch     Exec  Source
   1                   /**
   2                    * This file is part of the CernVM File System.
   3                    */
   4
   5                   #include "item_mem.h"
   6
   7                   #include <cassert>
   8                   #include <cstdlib>
   9
  10                   #include "util_concurrency.h"
  11
  12
  13                   atomic_int64 ItemAllocator::total_allocated_ = 0;
  14
  15
  16          1039914  void ItemAllocator::Free(void *ptr) {
  17          1039914    MutexLockGuard guard(lock_);
  18
  19          1039962    MallocArena *M = MallocArena::GetMallocArena(ptr, kArenaSize);
  20          1039962    M->Free(ptr);
  21          1039962    unsigned N = malloc_arenas_.size();
  22          1039962    if ((N > 1) && M->IsEmpty()) {
  23               41      for (unsigned i = 0; i < N; ++i) {
  24               41        if (malloc_arenas_[i] == M) {
  25               21          delete malloc_arenas_[i];
  26               21          atomic_xadd64(&total_allocated_, -static_cast<int>(kArenaSize));
  27               21          malloc_arenas_.erase(malloc_arenas_.begin() + i);
  28               21          idx_last_arena_ = 0;
  29                           return;
  30                         }
  31                       }
  32                       assert(false);
  33                     }
  34                   }
  35
  36
  37              107  ItemAllocator::ItemAllocator() : idx_last_arena_(0) {
  38              107    int retval = pthread_mutex_init(&lock_, NULL);
  39              107    assert(retval == 0);
  40
  41              107    malloc_arenas_.push_back(new MallocArena(kArenaSize));
  42              107    atomic_xadd64(&total_allocated_, kArenaSize);
  43              107  }
  44
  45
  46              107  ItemAllocator::~ItemAllocator() {
  47              214    for (unsigned i = 0; i < malloc_arenas_.size(); ++i) {
  48              107      atomic_xadd64(&total_allocated_, -static_cast<int>(kArenaSize));
  49              107      delete malloc_arenas_[i];
  50                     }
  51              107    pthread_mutex_destroy(&lock_);
  52              107  }
  53
  54
  55          1039924  void *ItemAllocator::Malloc(unsigned size) {
  56          1039924    MutexLockGuard guard(lock_);
  57
  58          1039962    void *p = malloc_arenas_[idx_last_arena_]->Malloc(size);
  59          1039962    if (p != NULL)
  60          1039733      return p;
  61              229    unsigned N = malloc_arenas_.size();
  62              378    for (unsigned i = 0; i < N; ++i) {
  63              357      p = malloc_arenas_[i]->Malloc(size);
  64              357      if (p != NULL) {
  65              208        idx_last_arena_ = i;
  66              208        return p;
  67                       }
  68                     }
  69               21    idx_last_arena_ = N;
  70               21    MallocArena *M = new MallocArena(kArenaSize);
  71               21    atomic_xadd64(&total_allocated_, kArenaSize);
  72               21    malloc_arenas_.push_back(M);
  73               21    p = M->Malloc(size);
  74               21    assert(p != NULL);
  75               21    return p;
  76                   }
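
For context, the code covered above is a small arena-backed allocator: Malloc() first tries the most recently used MallocArena (the hot path that accounts for nearly all of the ~1.04 million calls), then scans the remaining arenas, and only creates a new arena once every existing one is full; Free() hands the block back to its arena and deletes the arena once it becomes empty, as long as at least one other arena remains. The fragment below is a minimal usage sketch, not part of the coverage report; the include path and the public visibility of the constructor, Malloc(), and Free() are assumptions based on the listing.

// Minimal usage sketch (assumed include path and public interface).
#include "item_mem.h"   // as included by item_mem.cc; build include paths may differ

#include <cstring>

int main() {
  ItemAllocator allocator;             // constructor creates the first MallocArena
                                       // and adds kArenaSize to total_allocated_

  void *buf = allocator.Malloc(4096);  // fast path: most recently used arena;
                                       // falls back to scanning, then to a new arena
  std::memset(buf, 0, 4096);           // use the block

  allocator.Free(buf);                 // block returns to its arena; an arena that
                                       // becomes empty is deleted if another remains
  return 0;
}                                      // destructor releases all remaining arenas

Caching idx_last_arena_ is what keeps the fast path cheap: line 58 runs on every call, and line 60 satisfies the vast majority of them without scanning the arena list.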