GCC Code Coverage Report


Directory: cvmfs/
File: cvmfs/lru.h
Date: 2025-08-31 02:39:21
Exec Total Coverage
Lines: 320 324 98.8%
Branches: 135 246 54.9%

Line Branch Exec Source
1 /**
2 * This file is part of the CernVM File System.
3 *
4 * This class provides a Least Recently Used (LRU) cache for arbitrary data.
5 * It stores key-value pairs of arbitrary data types in a hash table and
6 * automatically evicts the entries that were used least recently, in order
7 * to prevent the structure from growing beyond a given maximal cache size.
8 * The cache uses a hand-crafted memory allocator to use memory efficiently.
9 *
10 * Hash functions have to be provided. They should map keys to uniformly
11 * distributed uint32_t values. In addition, a special key has to be provided
12 * that is used to mark "empty" elements in the hash table.
13 *
14 * The cache size has to be a multiple of 64 and at least 128.
15 *
16 * usage:
17 * // 128 entries (multiple of 64), -1 empty key, hasher, statistics template
18 * LruCache<int, string> cache(128, -1, hasher_int, statistics);
19 *
20 * // Inserting some stuff
21 * cache.Insert(42, "fortytwo");
22 * cache.Insert(2, "small prime number");
23 * cache.Insert(1337, "leet");
24 *
25 * // Trying to retrieve a value
26 * int result;
27 * if (cache.Lookup(21, &result)) {
28 * cout << "cache hit: " << result << endl;
29 * } else {
30 * cout << "cache miss" << endl;
31 * }
32 *
33 * cache.Drop();  // Empty the cache
34 */
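Editor's note: as the comment above says, the caller supplies the hash function and the special empty key. A minimal sketch of what such a hasher could look like for int keys; the name hasher_int and the mixing constants are illustrative assumptions, not part of this header:

    static uint32_t hasher_int(const int &key) {
      // Any function with a reasonably uniform distribution over uint32_t works.
      uint32_t h = static_cast<uint32_t>(key);
      h ^= h >> 16;
      h *= 0x45d9f3bU;
      h ^= h >> 16;
      return h;
    }
    // The special key (-1 in the usage example) marks empty hash table slots
    // and therefore must never be inserted as a regular key.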
35
36 #ifndef CVMFS_LRU_H_
37 #define CVMFS_LRU_H_
38
39 // If defined, the cache is protected by a POSIX mutex
40 #define LRU_CACHE_THREAD_SAFE
41
42 #include <stdint.h>
43
44 #include <algorithm>
45 #include <cassert>
46 #include <cstring>
47 #include <functional>
48 #include <map>
49 #include <string>
50
51 #include "smallhash.h"
52 #include "statistics.h"
53 #include "util/atomic.h"
54 #include "util/platform.h"
55 #include "util/single_copy.h"
56 #include "util/smalloc.h"
57
58 namespace lru {
59
60 /**
61 * Counting of cache operations.
62 */
63 struct Counters {
64 perf::Counter *sz_size;
65 perf::Counter *n_hit;
66 perf::Counter *n_miss;
67 perf::Counter *n_insert;
68 perf::Counter *n_insert_negative;
69 uint64_t num_collisions;
70 uint32_t max_collisions;
71 perf::Counter *n_update;
72 perf::Counter *n_update_value;
73 perf::Counter *n_replace;
74 perf::Counter *n_forget;
75 perf::Counter *n_drop;
76 perf::Counter *sz_allocated;
77
78 1971 explicit Counters(perf::StatisticsTemplate statistics) {
79
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 sz_size = statistics.RegisterTemplated("sz_size", "Total size");
80 1971 num_collisions = 0;
81 1971 max_collisions = 0;
82
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_hit = statistics.RegisterTemplated("n_hit", "Number of hits");
83
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_miss = statistics.RegisterTemplated("n_miss", "Number of misses");
84
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_insert = statistics.RegisterTemplated("n_insert", "Number of inserts");
85
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_insert_negative = statistics.RegisterTemplated(
86 "n_insert_negative", "Number of negative inserts");
87
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_update = statistics.RegisterTemplated("n_update", "Number of updates");
88
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_update_value = statistics.RegisterTemplated("n_update_value",
89 "Number of value changes");
90
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_replace = statistics.RegisterTemplated("n_replace", "Number of replaces");
91
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_forget = statistics.RegisterTemplated("n_forget", "Number of forgets");
92
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 n_drop = statistics.RegisterTemplated("n_drop", "Number of drops");
93
3/6
✓ Branch 2 taken 1971 times.
✗ Branch 3 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
✓ Branch 9 taken 1971 times.
✗ Branch 10 not taken.
1971 sz_allocated = statistics.RegisterTemplated("sz_allocated",
94 "Number of allocated bytes ");
95 1971 }
96 };
97
98
99 /**
100 * Template class to create a LRU cache
101 * @param Key type of the key values
102 * @param Value type of the value values
103 */
104 template<class Key, class Value>
105 class LruCache : SingleCopy {
106 private:
107 // Forward declarations of private internal data structures
108 template<class T>
109 class ListEntry;
110 template<class T>
111 class ListEntryHead;
112 template<class T>
113 class ListEntryContent;
114 template<class M>
115 class MemoryAllocator;
116
117 // Helpers to get the template magic right
118 typedef ListEntryContent<Key> ConcreteListEntryContent;
119 typedef MemoryAllocator<ConcreteListEntryContent> ConcreteMemoryAllocator;
120
121 /**
122 * This structure wraps the user data and relates it to the LRU list entry
123 */
124 typedef struct {
125 ListEntryContent<Key> *list_entry;
126 Value value;
127 } CacheEntry;
128
129 // static uint64_t GetEntrySize() { return sizeof(Key) + sizeof(Value); }
130
131 /**
132 * A special purpose memory allocator for the cache entries.
133 * It allocates enough memory for the maximal number of cache entries at
134 * startup, and assigns new ListEntryContent objects to a free spot in this
135 * memory pool (by overriding the 'new' and 'delete' operators of
136 * ListEntryContent).
137 *
138 * @param T the type of object to be allocated by this MemoryAllocator
139 */
140 template<class T>
141 class MemoryAllocator : SingleCopy {
142 public:
143 /**
144 * Creates a MemoryAllocator to handle a memory pool for objects of type T
145 * @param num_slots the number of slots to be allocated for the given
146 * datatype T
147 */
148 2635 explicit MemoryAllocator(const unsigned int num_slots) {
149 // How many bytes does the allocation bitmap need (one bit per slot)?
150 2635 const unsigned int num_bytes_bitmap = num_slots / 8;
151 2635 bits_per_block_ = 8 * sizeof(bitmap_[0]);
152
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 1971 times.
2635 assert((num_slots % bits_per_block_) == 0);
153
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 1971 times.
2635 assert(num_slots >= 2 * bits_per_block_);
154
155 // How much actual memory do we need?
156 2635 const unsigned int num_bytes_memory = sizeof(T) * num_slots;
157
158 // Allocate zero'd memory
159 2635 bitmap_ = reinterpret_cast<uint64_t *>(scalloc(num_bytes_bitmap, 1));
160 2635 memory_ = reinterpret_cast<T *>(scalloc(num_bytes_memory, 1));
161
162 // Create initial state
163 2635 num_slots_ = num_slots;
164 2635 num_free_slots_ = num_slots;
165 2635 next_free_slot_ = 0;
166 2635 bytes_allocated_ = num_bytes_bitmap + num_bytes_memory;
167 2635 }
168
169 /**
170 * Number of bytes for a single entry: sizeof(T) plus 1/8 byte for its bitmap bit
171 */
172 882 static double GetEntrySize() {
173 882 return static_cast<double>(sizeof(T)) + 1.0 / 8.0;
174 }
175
176 /**
177 * The memory allocator also frees all allocated data
178 */
179 3938 virtual ~MemoryAllocator() {
180 3938 free(bitmap_);
181 3938 free(memory_);
182 }
183
184 /**
185 * Check if the memory pool is full.
186 * @return true if all slots are occupied, otherwise false
187 */
188 161516 inline bool IsFull() const { return num_free_slots_ == 0; }
189
190 80758 T *Construct(const T object) {
191 80758 T *mem = Allocate();
192
1/2
✓ Branch 0 taken 80129 times.
✗ Branch 1 not taken.
80758 if (mem != NULL) {
193
1/2
✓ Branch 2 taken 80129 times.
✗ Branch 3 not taken.
80758 new (static_cast<void *>(mem)) T(object);
194 }
195 80758 return mem;
196 }
197
198 80754 void Destruct(T *object) {
199 80754 object->~T();
200 80754 Deallocate(object);
201 80754 }
202
203 /**
204 * Allocates a slot and returns a pointer to its memory.
205 * @return a pointer to a chunk of the memory pool
206 */
207 80758 T *Allocate() {
208
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 80129 times.
80758 if (this->IsFull())
209 return NULL;
210
211 // Allocate a slot
212 80758 this->SetBit(next_free_slot_);
213 80758 --num_free_slots_;
214 80758 T *slot = memory_ + next_free_slot_;
215
216 // Find a new free slot if there are some left
217
2/2
✓ Branch 1 taken 61733 times.
✓ Branch 2 taken 18396 times.
80758 if (!this->IsFull()) {
218 62362 unsigned bitmap_block = next_free_slot_ / bits_per_block_;
219
2/2
✓ Branch 0 taken 853 times.
✓ Branch 1 taken 61733 times.
63215 while (~bitmap_[bitmap_block] == 0)
220 853 bitmap_block = (bitmap_block + 1) % (num_slots_ / bits_per_block_);
221 // TODO(jblomer): faster search inside the int
222 62362 next_free_slot_ = bitmap_block * bits_per_block_;
223
2/2
✓ Branch 1 taken 1863850 times.
✓ Branch 2 taken 61733 times.
1927581 while (this->GetBit(next_free_slot_))
224 1865219 next_free_slot_++;
225 }
226
227 80758 return slot;
228 }
229
230 /**
231 * Free a given slot in the memory pool
232 * @param slot a pointer to the slot to be freed
233 */
234 80754 void Deallocate(T *slot) {
235 // Check if given slot is in bounds
236
2/4
✓ Branch 0 taken 80125 times.
✗ Branch 1 not taken.
✓ Branch 2 taken 80125 times.
✗ Branch 3 not taken.
80754 assert((slot >= memory_) && (slot <= memory_ + num_slots_));
237
238 // Get position of slot
239 80754 const unsigned int position = slot - memory_;
240
241 // Check if slot was already freed
242
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 80125 times.
80754 assert(this->GetBit(position));
243
244 // Free slot, save the position of this slot as free (faster reallocation)
245 80754 this->UnsetBit(position);
246 80754 next_free_slot_ = position;
247 80754 ++num_free_slots_;
248 80754 }
249
250 2671 uint64_t bytes_allocated() { return bytes_allocated_; }
251
252 private:
253 /**
254 * Check a bit in the internal allocation bitmap.
255 * @param position the position to check
256 * @return true if bit is set, otherwise false
257 */
258 2008335 inline bool GetBit(const unsigned position) {
259
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 2005708 times.
2008335 assert(position < num_slots_);
260 2008335 return ((bitmap_[position / bits_per_block_]
261 2008335 & (uint64_t(1) << (position % bits_per_block_)))
262 2008335 != 0);
263 }
264
265 /**
266 * Set a bit in the internal allocation bitmap.
267 * @param position the number of the bit to be set
268 */
269 80758 inline void SetBit(const unsigned position) {
270
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 80129 times.
80758 assert(position < num_slots_);
271 80758 bitmap_[position / bits_per_block_] |= uint64_t(1)
272 80758 << (position % bits_per_block_);
273 80758 }
274
275 /**
276 * Clear a bit in the internal allocation bitmap
277 * @param position the number of the bit to be cleared
278 */
279 80754 inline void UnsetBit(const unsigned position) {
280
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 80125 times.
80754 assert(position < num_slots_);
281 80754 bitmap_[position / bits_per_block_] &= ~(uint64_t(1)
282 80754 << (position % bits_per_block_));
283 80754 }
284
285 unsigned int num_slots_; /**< Overall number of slots in memory pool. */
286 unsigned int num_free_slots_; /**< Current number of free slots left. */
287 unsigned int next_free_slot_; /**< Position of next free slot in pool. */
288 uint64_t bytes_allocated_;
289 uint64_t *bitmap_; /**< A bitmap to mark slots as allocated. */
290 unsigned bits_per_block_;
291 T *memory_; /**< The memory pool, an array of Ts. */
292 };
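Editor's note: Construct() and Destruct() above rely on placement new and an explicit destructor call, so that object lifetime is decoupled from the pool memory. A standalone sketch of that mechanism, illustrative only and not taken from this file:

    #include <new>  // placement new

    struct Payload {
      int value;
      explicit Payload(int v) : value(v) { }
    };

    void PlacementNewSketch() {
      // Raw storage standing in for one slot of the memory pool
      alignas(Payload) unsigned char slot[sizeof(Payload)];
      // "Construct": build the object inside the pre-allocated slot
      Payload *p = new (static_cast<void *>(slot)) Payload(42);
      // "Destruct": run the destructor without releasing the slot itself
      p->~Payload();
    }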
293
294
295 /**
296 * Internal LRU list entry, to maintain the doubly linked list.
297 * The list keeps track of the least recently used keys in the cache.
298 */
299 template<class T>
300 class ListEntry {
301 friend class LruCache;
302
303 public:
304 /**
305 * Create a new list entry as lonely, both next and prev pointing to this.
306 */
307 83393 ListEntry() {
308 83393 this->next = this;
309 83393 this->prev = this;
310 83393 }
311
312 80758 ListEntry(const ListEntry<T> &other) {
313
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 80129 times.
80758 next = (other.next == &other) ? this : other.next;
314
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 80129 times.
80758 prev = (other.prev == &other) ? this : other.prev;
315 80758 }
316
317 324446 virtual ~ListEntry() { }
318
319 /**
320 * Checks if the ListEntry is the list head
321 * @return true if ListEntry is list head otherwise false
322 */
323 virtual bool IsListHead() const = 0;
324
325 /**
326 * A lonely ListEntry has no connection to other elements.
327 * @return true if ListEntry is lonely otherwise false
328 */
329
3/4
✓ Branch 0 taken 253595 times.
✓ Branch 1 taken 641853 times.
✓ Branch 2 taken 253595 times.
✗ Branch 3 not taken.
898482 bool IsLonely() const { return (this->next == this && this->prev == this); }
330
331 ListEntry<T> *next; /**< Pointer to next element in the list. */
332 ListEntry<T> *prev; /**< Pointer to previous element in the list. */
333
334 protected:
335 /**
336 * Insert a given ListEntry after this one.
337 * @param entry the ListEntry to insert after this one
338 */
339 inline void InsertAsSuccessor(ListEntryContent<T> *entry) {
340 assert(entry->IsLonely());
341
342 // Mount the new element between this and this->next
343 entry->next = this->next;
344 entry->prev = this;
345
346 // Fix pointers of existing list elements
347 this->next->prev = entry;
348 this->next = entry;
349 assert(!entry->IsLonely());
350 }
351
352 /**
353 * Insert a given ListEntry in front of this one
354 * @param entry the ListEntry to insert in front of this one
355 */
356 254668 inline void InsertAsPredecessor(ListEntryContent<T> *entry) {
357
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 253595 times.
254668 assert(entry->IsLonely());
358
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 253595 times.
254668 assert(!entry->IsListHead());
359
360 // Mount the new element between this and this->prev
361 254668 entry->next = this;
362 254668 entry->prev = this->prev;
363
364 // Fix pointers of existing list elements
365 254668 this->prev->next = entry;
366 254668 this->prev = entry;
367
368
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 253595 times.
254668 assert(!entry->IsLonely());
369 254668 }
370
371 /**
372 * Remove this element from its list.
373 * The function connects this->next with this->prev leaving the list
374 * in a consistent state. The ListEntry itself is lonely afterwards,
375 * but not deleted.
376 */
377 virtual void RemoveFromList() = 0;
378
379 private:
380 // No assignment operator (enforced by linker error)
381 ListEntry<T> &operator=(const ListEntry<T> &other);
382 }; // template<class T> class ListEntry
383
384 /**
385 * Specialized ListEntry to contain a data entry of type T
386 */
387 template<class T>
388 class ListEntryContent : public ListEntry<T> {
389 public:
390
1/2
✓ Branch 2 taken 5519 times.
✗ Branch 3 not taken.
80758 explicit ListEntryContent(T content) { content_ = content; }
391
392 344183 inline bool IsListHead() const { return false; }
393 27112 inline T content() const { return content_; }
394
395 /**
396 * See ListEntry base class.
397 */
398 196894 inline void RemoveFromList() {
399
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 196450 times.
196894 assert(!this->IsLonely());
400
401 // Remove this from list
402 196894 this->prev->next = this->next;
403 196894 this->next->prev = this->prev;
404
405 // Make this lonely
406 196894 this->next = this;
407 196894 this->prev = this;
408 196894 }
409
410 private:
411 T content_; /**< The data content of this ListEntry */
412 };
413
414 /**
415 * Specialized ListEntry to form a list head.
416 * Every list has exactly one list head which is also the entry point
417 * in the list. It is used to manipulate the list.
418 */
419 template<class T>
420 class ListEntryHead : public ListEntry<T> {
421 public:
422 2635 explicit ListEntryHead(ConcreteMemoryAllocator *allocator)
423 2635 : allocator_(allocator) { }
424
425 3938 virtual ~ListEntryHead() { this->clear(); }
426
427 /**
428 * Remove all entries from the list.
429 * ListEntry objects are deleted, but the data they refer to stays available.
430 */
431 2669 void clear() {
432 // Delete all list entries
433 2669 ListEntry<T> *entry = this->next;
434 ListEntry<T> *delete_me;
435
2/2
✓ Branch 1 taken 57141 times.
✓ Branch 2 taken 2005 times.
60439 while (!entry->IsListHead()) {
436 57770 delete_me = entry;
437 57770 entry = entry->next;
438 57770 allocator_->Destruct(
439 static_cast<ConcreteListEntryContent *>(delete_me));
440 }
441
442 // Reset the list to lonely
443 2669 this->next = this;
444 2669 this->prev = this;
445 2669 }
446
447 2724 inline bool IsListHead() const { return true; }
448 18342 inline bool IsEmpty() const { return this->IsLonely(); }
449
450 /**
451 * Push a new data object to the end of the list.
452 * @param content the data object to insert
453 * @return the ListEntryContent structure wrapped around the data object
454 */
455 80758 inline ListEntryContent<T> *PushBack(T content) {
456
1/2
✓ Branch 1 taken 80129 times.
✗ Branch 2 not taken.
80758 ListEntryContent<T> *new_entry = allocator_->Construct(
457 161516 ListEntryContent<T>(content));
458 80758 this->InsertAsPredecessor(new_entry);
459 80758 return new_entry;
460 }
461
462 /**
463 * Pop the first object of the list.
464 * The object is returned and removed from the list
465 * @return the data object which resided in the first list entry
466 */
467 18342 inline T PopFront() {
468
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 18342 times.
18342 assert(!this->IsEmpty());
469 18342 return Pop(this->next);
470 }
471
472 /**
473 * Take a list entry out of its list and reinsert it at the end of this list.
474 * @param entry the ListEntry to be moved to the end of this list
475 */
476 173910 inline void MoveToBack(ListEntryContent<T> *entry) {
477
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 173466 times.
173910 assert(!entry->IsLonely());
478
479 173910 entry->RemoveFromList();
480 173910 this->InsertAsPredecessor(entry);
481 173910 }
482
483 /**
484 * See ListEntry base class
485 */
486 inline void RemoveFromList() { assert(false); }
487
488 private:
489 /**
490 * Pop a ListEntry from the list (arbitrary position).
491 * The given ListEntry is removed from the list, deleted, and its
492 * data content is returned
493 * @param popped_entry the entry to be popped
494 * @return the data object of the popped ListEntry
495 */
496 18342 inline T Pop(ListEntry<T> *popped_entry) {
497
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 18342 times.
18342 assert(!popped_entry->IsListHead());
498
499 18342 ListEntryContent<T> *popped = (ListEntryContent<T> *)popped_entry;
500 18342 popped->RemoveFromList();
501 18342 T result = popped->content();
502 18342 allocator_->Destruct(
503 static_cast<ConcreteListEntryContent *>(popped_entry));
504 18342 return result;
505 }
506
507 private:
508 ConcreteMemoryAllocator *allocator_;
509 };
510
511 public: // LruCache
512 /**
513 * Create a new LRU cache object
514 * @param cache_size the maximal size of the cache
515 */
516 2635 LruCache(const unsigned cache_size,
517 const Key &empty_key,
518 uint32_t (*hasher)(const Key &key),
519 perf::StatisticsTemplate statistics)
520
1/2
✓ Branch 1 taken 1971 times.
✗ Branch 2 not taken.
2635 : counters_(statistics)
521 2635 , pause_(false)
522 2635 , cache_gauge_(0)
523 2635 , cache_size_(cache_size)
524 2635 , allocator_(cache_size)
525
2/4
✓ Branch 3 taken 1971 times.
✗ Branch 4 not taken.
✓ Branch 6 taken 1971 times.
✗ Branch 7 not taken.
5270 , lru_list_(&allocator_) {
526
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 1971 times.
2635 assert(cache_size > 0);
527
528 2635 counters_.sz_size->Set(cache_size_);
529 2635 filter_entry_ = NULL;
530 // cache_ = Cache(cache_size_);
531
1/2
✓ Branch 1 taken 1971 times.
✗ Branch 2 not taken.
2635 cache_.Init(cache_size_, empty_key, hasher);
532 2635 perf::Xadd(counters_.sz_allocated,
533 2635 allocator_.bytes_allocated() + cache_.bytes_allocated());
534
535 #ifdef LRU_CACHE_THREAD_SAFE
536 2635 const int retval = pthread_mutex_init(&lock_, NULL);
537
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 1971 times.
2635 assert(retval == 0);
538 #endif
539 2635 }
540
541 882 static double GetEntrySize() {
542 882 return SmallHashFixed<Key, CacheEntry>::GetEntrySize()
543 882 + ConcreteMemoryAllocator::GetEntrySize();
544 }
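Editor's note: GetEntrySize() reports the approximate per-entry memory footprint (hash table entry plus allocator slot plus bitmap bit). One plausible way a caller could derive a cache size from a byte budget, rounding down to the multiple of 64 the class requires; byte_budget and num_entries are placeholder names:

    uint64_t byte_budget = 32 * 1024 * 1024;  // e.g. 32 MB for this cache
    unsigned num_entries = static_cast<unsigned>(
        byte_budget / lru::LruCache<int, std::string>::GetEntrySize());
    num_entries = (num_entries / 64) * 64;  // cache size must be a multiple of 64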
545
546 3938 virtual ~LruCache() {
547 #ifdef LRU_CACHE_THREAD_SAFE
548 3938 pthread_mutex_destroy(&lock_);
549 #endif
550 }
551
552 /**
553 * Insert a new key-value pair into the cache.
554 * If the cache is already full, the least recently used object is removed;
555 * afterwards the new object is inserted.
556 * If the object is already present it is updated and moved back to the end
557 * of the list
558 * @param key the key where the value is saved
559 * @param value the value of the cache entry
560 * @return true on insert, false on update
561 */
562 148172 virtual bool Insert(const Key &key, const Value &value) {
563 148172 this->Lock();
564
2/2
✓ Branch 0 taken 36 times.
✓ Branch 1 taken 147507 times.
148172 if (pause_) {
565 36 Unlock();
566 36 return false;
567 }
568
569
1/2
✓ Branch 1 taken 72735 times.
✗ Branch 2 not taken.
148136 CacheEntry entry;
570
571 // Check if we have to update an existent entry
572
3/4
✓ Branch 1 taken 147507 times.
✗ Branch 2 not taken.
✓ Branch 3 taken 67378 times.
✓ Branch 4 taken 80129 times.
148136 if (this->DoLookup(key, &entry)) {
573 67378 perf::Inc(counters_.n_update);
574
1/2
✓ Branch 1 taken 162 times.
✗ Branch 2 not taken.
67378 entry.value = value;
575
1/2
✓ Branch 1 taken 67378 times.
✗ Branch 2 not taken.
67378 cache_.Insert(key, entry);
576
1/2
✓ Branch 1 taken 67378 times.
✗ Branch 2 not taken.
67378 this->Touch(entry);
577 67378 this->Unlock();
578 67378 return false;
579 }
580
581 80758 perf::Inc(counters_.n_insert);
582 // Check if we have to make some space in the cache
583
2/2
✓ Branch 1 taken 18342 times.
✓ Branch 2 taken 61787 times.
80758 if (this->IsFull())
584
1/2
✓ Branch 1 taken 18342 times.
✗ Branch 2 not taken.
18342 this->DeleteOldest();
585
586
1/2
✓ Branch 1 taken 80129 times.
✗ Branch 2 not taken.
80758 entry.list_entry = lru_list_.PushBack(key);
587
1/2
✓ Branch 1 taken 75239 times.
✗ Branch 2 not taken.
80758 entry.value = value;
588
589
1/2
✓ Branch 1 taken 80129 times.
✗ Branch 2 not taken.
80758 cache_.Insert(key, entry);
590 80758 cache_gauge_++;
591
592 80758 Unlock();
593 80758 return true;
594 76030 }
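Editor's note: as the comment above describes, the return value distinguishes a fresh insert from an update of an existing key. A hedged sketch, assuming a cache constructed as in the usage example at the top of the file:

    bool was_new = cache.Insert(42, "fortytwo");  // true: key 42 was not present yet
    was_new = cache.Insert(42, "the answer");     // false: existing entry updated and
                                                  // moved to the back of the LRU list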
595
596
597 /**
598 * Updates an object and moves it to the back of the list. The object must be
599 * present.
600 */
601 18 virtual void Update(const Key &key) {
602 18 Lock();
603 // Is not called from the client, only from the cache plugin
604
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 18 times.
18 assert(!pause_);
605
0/2
✗ Branch 1 not taken.
✗ Branch 2 not taken.
18 CacheEntry entry;
606
1/2
✓ Branch 1 taken 18 times.
✗ Branch 2 not taken.
18 const bool retval = DoLookup(key, &entry);
607
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 18 times.
18 assert(retval);
608 18 perf::Inc(counters_.n_update);
609
1/2
✓ Branch 1 taken 18 times.
✗ Branch 2 not taken.
18 Touch(entry);
610 18 Unlock();
611 18 }
612
613
614 /**
615 * Changes the value of an entry in the LRU cache without updating the LRU
616 * order.
617 */
618 54 virtual bool UpdateValue(const Key &key, const Value &value) {
619 54 this->Lock();
620
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 54 times.
54 if (pause_) {
621 Unlock();
622 return false;
623 }
624
625
0/2
✗ Branch 1 not taken.
✗ Branch 2 not taken.
54 CacheEntry entry;
626
3/4
✓ Branch 1 taken 54 times.
✗ Branch 2 not taken.
✓ Branch 3 taken 18 times.
✓ Branch 4 taken 36 times.
54 if (!this->DoLookup(key, &entry)) {
627 18 this->Unlock();
628 18 return false;
629 }
630
631 36 perf::Inc(counters_.n_update_value);
632
1/2
✓ Branch 1 taken 36 times.
✗ Branch 2 not taken.
36 entry.value = value;
633
1/2
✓ Branch 1 taken 36 times.
✗ Branch 2 not taken.
36 cache_.Insert(key, entry);
634 36 this->Unlock();
635 36 return true;
636 54 }
637
638
639 /**
640 * Retrieve an element from the cache.
641 * If the element was found, it will be marked as 'recently used' and returned
642 * @param key the key to perform a lookup on
643 * @param value (out) here the result is saved (not touched in case of a miss)
644 * @return true on successful lookup, false if key was not found
645 */
646 115889 virtual bool Lookup(const Key &key, Value *value, bool update_lru = true) {
647 115889 bool found = false;
648 115889 Lock();
649
2/2
✓ Branch 0 taken 360 times.
✓ Branch 1 taken 114456 times.
115889 if (pause_) {
650 360 Unlock();
651 360 return false;
652 }
653
654
1/2
✓ Branch 1 taken 75702 times.
✗ Branch 2 not taken.
115529 CacheEntry entry;
655
3/4
✓ Branch 1 taken 114456 times.
✗ Branch 2 not taken.
✓ Branch 3 taken 106868 times.
✓ Branch 4 taken 7588 times.
115529 if (DoLookup(key, &entry)) {
656 // Hit
657 107312 perf::Inc(counters_.n_hit);
658
2/2
✓ Branch 0 taken 106070 times.
✓ Branch 1 taken 798 times.
107312 if (update_lru)
659
1/2
✓ Branch 1 taken 106070 times.
✗ Branch 2 not taken.
106514 Touch(entry);
660
1/2
✓ Branch 1 taken 38424 times.
✗ Branch 2 not taken.
107312 *value = entry.value;
661 107312 found = true;
662 } else {
663 8217 perf::Inc(counters_.n_miss);
664 }
665
666 115529 Unlock();
667 115529 return found;
668 40900 }
669
670 /**
671 * Forgets about a specific cache entry
672 * @param key the key to delete from the cache
673 * @return true if key was deleted, false if key was not in the cache
674 */
675 532 virtual bool Forget(const Key &key) {
676 532 bool found = false;
677 532 this->Lock();
678
2/2
✓ Branch 0 taken 36 times.
✓ Branch 1 taken 496 times.
532 if (pause_) {
679 36 Unlock();
680 36 return false;
681 }
682
683
1/2
✓ Branch 1 taken 172 times.
✗ Branch 2 not taken.
496 CacheEntry entry;
684
3/4
✓ Branch 1 taken 496 times.
✗ Branch 2 not taken.
✓ Branch 3 taken 370 times.
✓ Branch 4 taken 126 times.
496 if (this->DoLookup(key, &entry)) {
685 370 found = true;
686 370 perf::Inc(counters_.n_forget);
687
688
1/2
✓ Branch 1 taken 370 times.
✗ Branch 2 not taken.
370 entry.list_entry->RemoveFromList();
689
1/2
✓ Branch 1 taken 370 times.
✗ Branch 2 not taken.
370 allocator_.Destruct(entry.list_entry);
690
1/2
✓ Branch 1 taken 370 times.
✗ Branch 2 not taken.
370 cache_.Erase(key);
691 370 --cache_gauge_;
692 }
693
694 496 this->Unlock();
695 496 return found;
696 324 }
697
698 /**
699 * Clears all elements from the cache.
700 * All memory of internal data structures will be freed but data of
701 * cache entries may stay in use; we do not call delete on any user data.
702 */
703 36 virtual void Drop() {
704 36 this->Lock();
705
706 36 cache_gauge_ = 0;
707 36 lru_list_.clear();
708 36 cache_.Clear();
709 36 perf::Inc(counters_.n_drop);
710 36 counters_.sz_allocated->Set(0);
711 36 perf::Xadd(counters_.sz_allocated,
712 36 allocator_.bytes_allocated() + cache_.bytes_allocated());
713
714 36 this->Unlock();
715 36 }
716
717 107 void Pause() {
718 107 Lock();
719 107 pause_ = true;
720 107 Unlock();
721 107 }
722
723 54 void Resume() {
724 54 Lock();
725 54 pause_ = false;
726 54 Unlock();
727 54 }
728
729 173530 inline bool IsFull() const { return cache_gauge_ >= cache_size_; }
730 19062 inline bool IsEmpty() const { return cache_gauge_ == 0; }
731
732 Counters counters() {
733 Lock();
734 cache_.GetCollisionStats(&counters_.num_collisions,
735 &counters_.max_collisions);
736 Unlock();
737 return counters_;
738 }
739
740 /**
741 * Prepares for in-order iteration of the cache entries to perform a filter
742 * operation. To ensure consistency, the LruCache must be locked for the
743 * duration of the filter operation.
744 */
745 226 virtual void FilterBegin() {
746
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 226 times.
226 assert(!filter_entry_);
747 226 Lock();
748 226 filter_entry_ = &lru_list_;
749 226 }
750
751 /**
752 * Get the current key and value for the filter operation
753 * @param key Address to write the key
754 * @param value Address to write the value
755 */
756 4498 virtual void FilterGet(Key *key, Value *value) {
757
1/2
✓ Branch 1 taken 4462 times.
✗ Branch 2 not taken.
4498 CacheEntry entry;
758
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 4498 times.
4498 assert(filter_entry_);
759
2/4
✓ Branch 1 taken 4498 times.
✗ Branch 2 not taken.
✗ Branch 3 not taken.
✓ Branch 4 taken 4498 times.
4498 assert(!filter_entry_->IsListHead());
760 4498 *key = static_cast<ConcreteListEntryContent *>(filter_entry_)->content();
761
1/2
✓ Branch 1 taken 4498 times.
✗ Branch 2 not taken.
4498 const bool rc = this->DoLookup(*key, &entry);
762
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 4498 times.
4498 assert(rc);
763
1/2
✓ Branch 1 taken 36 times.
✗ Branch 2 not taken.
4498 *value = entry.value;
764 4498 }
765
766 /**
767 * Advance to the next entry in the list
768 * @returns false upon reaching the end of the cache list
769 */
770 4688 virtual bool FilterNext() {
771
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 4688 times.
4688 assert(filter_entry_);
772 4688 filter_entry_ = filter_entry_->next;
773 4688 return !filter_entry_->IsListHead();
774 }
775
776 /**
777 * Delete the current cache list entry
778 */
779 4272 virtual void FilterDelete() {
780
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 4272 times.
4272 assert(filter_entry_);
781
2/4
✓ Branch 1 taken 4272 times.
✗ Branch 2 not taken.
✗ Branch 3 not taken.
✓ Branch 4 taken 4272 times.
4272 assert(!filter_entry_->IsListHead());
782 4272 ListEntry<Key> *new_current = filter_entry_->prev;
783 4272 perf::Inc(counters_.n_forget);
784 4272 Key k = static_cast<ConcreteListEntryContent *>(filter_entry_)->content();
785
1/2
✓ Branch 1 taken 4272 times.
✗ Branch 2 not taken.
4272 filter_entry_->RemoveFromList();
786
1/2
✓ Branch 1 taken 4272 times.
✗ Branch 2 not taken.
4272 allocator_.Destruct(static_cast<ConcreteListEntryContent *>(filter_entry_));
787
1/2
✓ Branch 1 taken 4272 times.
✗ Branch 2 not taken.
4272 cache_.Erase(k);
788 4272 --cache_gauge_;
789 4272 filter_entry_ = new_current;
790 4272 }
791
792 /**
793 * Finish filtering the entries and unlock the cache
794 */
795 226 virtual void FilterEnd() {
796
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 226 times.
226 assert(filter_entry_);
797 226 filter_entry_ = NULL;
798 226 Unlock();
799 226 }
800
801 protected:
802 Counters counters_;
803
804 private:
805 /**
806 * Performs a lookup in the cache
807 * WITHOUT changing the LRU order.
808 * @param key the key to perform a lookup on
809 * @param entry a pointer to the entry structure
810 * @return true on successful lookup, false otherwise
811 */
812 268731 inline bool DoLookup(const Key &key, CacheEntry *entry) {
813 268731 return cache_.Lookup(key, entry);
814 }
815
816 /**
817 * Touch an entry.
818 * The entry will be moved to the back of the LRU list to mark it
819 * as 'recently used'; this saves the entry from being evicted next.
820 * @param entry the CacheEntry to be touched (CacheEntry is the internal
821 * wrapper data structure)
822 */
823 173910 inline void Touch(const CacheEntry &entry) {
824 173910 lru_list_.MoveToBack(entry.list_entry);
825 173910 }
826
827 /**
828 * Deletes the least recently used entry from the cache.
829 */
830 18342 inline void DeleteOldest() {
831
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 18342 times.
18342 assert(!this->IsEmpty());
832
833 18342 perf::Inc(counters_.n_replace);
834
1/2
✓ Branch 1 taken 18342 times.
✗ Branch 2 not taken.
18342 Key delete_me = lru_list_.PopFront();
835
1/2
✓ Branch 1 taken 18342 times.
✗ Branch 2 not taken.
18342 cache_.Erase(delete_me);
836
837 18342 --cache_gauge_;
838 18342 }
839
840 /**
841 * Locks the cache (thread safety).
842 */
843 265088 inline void Lock() {
844 #ifdef LRU_CACHE_THREAD_SAFE
845 265088 pthread_mutex_lock(&lock_);
846 #endif
847 265088 }
848
849 /**
850 * Unlocks the cache (thread safety).
851 */
852 265088 inline void Unlock() {
853 #ifdef LRU_CACHE_THREAD_SAFE
854 265088 pthread_mutex_unlock(&lock_);
855 #endif
856 265088 }
857
858 bool pause_; /**< Temporarily stops the cache in order to avoid poisoning */
859
860 // Internal data fields
861 unsigned int cache_gauge_;
862 const unsigned int cache_size_;
863 ConcreteMemoryAllocator allocator_;
864
865 /**
866 * A doubly linked list to keep track of the least recently used data entries.
867 * New entries get pushed to the back of the list. If an entry is touched,
868 * it is moved to the back of the list again.
869 * If the cache gets too long, the first element (the oldest) gets
870 * deleted to obtain some space.
871 */
872 ListEntryHead<Key> lru_list_;
873 SmallHashFixed<Key, CacheEntry> cache_;
874
875 ListEntry<Key> *filter_entry_;
876 #ifdef LRU_CACHE_THREAD_SAFE
877 pthread_mutex_t lock_; /**< Mutex to make cache thread safe. */
878 #endif
879 }; // class LruCache
880
881 } // namespace lru
882
883 #endif // CVMFS_LRU_H_
884