GCC Code Coverage Report
                                          Exec   Total   Coverage
Directory: cvmfs/
File:      cvmfs/lru.h           Lines:     311     315     98.7 %
Date:      2019-02-03 02:48:13   Branches:  121     344     35.2 %

/**
 * This file is part of the CernVM File System.
 *
 * This class provides a Least Recently Used (LRU) cache for arbitrary data.
 * It stores key-value pairs of arbitrary data types in a hash table and
 * automatically deletes the least recently used entries to prevent the
 * structure from growing beyond a given maximal cache size.
 * The cache uses a hand-crafted memory allocator to use memory efficiently.
 *
 * Hash functions have to be provided.  They should return an equal
 * distribution of keys in uint32_t.  In addition, a special key has to be
 * provided that is used to mark "empty" elements in the hash table.
 *
 * The cache size has to be a multiple of 64.
 *
 * usage:
 *   // 128 entries, -1 special key
 *   LruCache<int, string> cache(128, -1, hasher_int, statistics);
 *
 *   // Insert some entries
 *   cache.Insert(42, "forty-two");
 *   cache.Insert(2, "small prime number");
 *   cache.Insert(1337, "leet");
 *
 *   // Try to retrieve a value
 *   string result;
 *   if (cache.Lookup(21, &result)) {
 *      cout << "cache hit: " << result << endl;
 *   } else {
 *      cout << "cache miss" << endl;
 *   }
 *
 *   cache.Drop();  // Empty the cache
 */

#ifndef CVMFS_LRU_H_
#define CVMFS_LRU_H_

// If defined, the cache is protected by a POSIX mutex
#define LRU_CACHE_THREAD_SAFE

#include <stdint.h>

#include <algorithm>
#include <cassert>
#include <cstring>
#include <functional>
#include <map>
#include <string>

#include "atomic.h"
#include "platform.h"
#include "smallhash.h"
#include "smalloc.h"
#include "statistics.h"
#include "util/single_copy.h"

namespace lru {

/**
 * Counting of cache operations.
 */
struct Counters {
  perf::Counter *sz_size;
  perf::Counter *n_hit;
  perf::Counter *n_miss;
  perf::Counter *n_insert;
  perf::Counter *n_insert_negative;
  uint64_t num_collisions;
  uint32_t max_collisions;
  perf::Counter *n_update;
  perf::Counter *n_update_value;
  perf::Counter *n_replace;
  perf::Counter *n_forget;
  perf::Counter *n_drop;
  perf::Counter *sz_allocated;

  explicit Counters(perf::StatisticsTemplate statistics) {
    sz_size = statistics.RegisterTemplated("sz_size", "Total size");
    num_collisions = 0;
    max_collisions = 0;
    n_hit = statistics.RegisterTemplated("n_hit", "Number of hits");
    n_miss = statistics.RegisterTemplated("n_miss", "Number of misses");
    n_insert = statistics.RegisterTemplated("n_insert", "Number of inserts");
    n_insert_negative = statistics.RegisterTemplated("n_insert_negative",
        "Number of negative inserts");
    n_update = statistics.RegisterTemplated("n_update",
        "Number of updates");
    n_update_value = statistics.RegisterTemplated("n_update_value",
        "Number of value changes");
    n_replace = statistics.RegisterTemplated("n_replace", "Number of replaces");
    n_forget = statistics.RegisterTemplated("n_forget", "Number of forgets");
    n_drop = statistics.RegisterTemplated("n_drop", "Number of drops");
    sz_allocated = statistics.RegisterTemplated("sz_allocated",
        "Number of allocated bytes");
  }
};


/**
 * Template class implementing an LRU cache.
 * @param Key the type of the keys
 * @param Value the type of the values
 */
template<class Key, class Value>
class LruCache : SingleCopy {
 private:
  // Forward declarations of private internal data structures
  template<class T> class ListEntry;
  template<class T> class ListEntryHead;
  template<class T> class ListEntryContent;
  template<class M> class MemoryAllocator;

  // Helpers to get the template magic right
  typedef ListEntryContent<Key> ConcreteListEntryContent;
  typedef MemoryAllocator<ConcreteListEntryContent> ConcreteMemoryAllocator;

  /**
   * This structure wraps the user data and relates it to the LRU list entry
   */
  typedef struct {
    ListEntryContent<Key> *list_entry;
    Value value;
  } CacheEntry;

  // static uint64_t GetEntrySize() { return sizeof(Key) + sizeof(Value); }

  /**
   * A special-purpose memory allocator for the cache entries.
   * It allocates enough memory for the maximal number of cache entries at
   * startup and then placement-constructs new ListEntryContent objects in
   * free slots of this memory pool (see Construct() and Destruct()).
   *
   * @param T the type of object to be allocated by this MemoryAllocator
   */
  template<class T> class MemoryAllocator : SingleCopy {
   public:
    /**
     * Creates a MemoryAllocator to handle a memory pool for objects of type T
     * @param num_slots the number of slots to be allocated for the given
     *        datatype T
     */
    explicit MemoryAllocator(const unsigned int num_slots) {
      // How many bytes do we need for the bitmap (one bit per slot)?
      unsigned int num_bytes_bitmap = num_slots / 8;
      bits_per_block_ = 8 * sizeof(bitmap_[0]);
      assert((num_slots % bits_per_block_) == 0);
      assert(num_slots >= 2*bits_per_block_);

      // How much actual memory do we need?
      const unsigned int num_bytes_memory = sizeof(T) * num_slots;

      // Allocate zero'd memory
      bitmap_ = reinterpret_cast<uint64_t *>(scalloc(num_bytes_bitmap, 1));
      memory_ = reinterpret_cast<T *>(scalloc(num_bytes_memory, 1));

      // Create initial state
      num_slots_ = num_slots;
      num_free_slots_ = num_slots;
      next_free_slot_ = 0;
      bytes_allocated_ = num_bytes_bitmap + num_bytes_memory;
    }

    /**
     * Number of bytes for a single entry: the slot itself plus its one bit
     * in the allocation bitmap (1/8 byte).
     */
    static double GetEntrySize() {
      return static_cast<double>(sizeof(T)) + 1.0/8.0;
    }

    /**
     * The memory allocator also frees all allocated data
     */
    virtual ~MemoryAllocator() {
      free(bitmap_);
      free(memory_);
    }

    /**
     * Check if the memory pool is full.
     * @return true if all slots are occupied, otherwise false
     */
    inline bool IsFull() const { return num_free_slots_ == 0; }

    T* Construct(const T object) {
      T* mem = Allocate();
      if (mem != NULL) {
        new (static_cast<void*>(mem)) T(object);
      }
      return mem;
    }

    void Destruct(T *object) {
      object->~T();
      Deallocate(object);
    }

    /**
     * Allocate a slot and return a pointer to the memory.
     * @return a pointer to a chunk of the memory pool
     */
    T* Allocate() {
      if (this->IsFull())
        return NULL;

      // Allocate a slot
      this->SetBit(next_free_slot_);
      --num_free_slots_;
      T *slot = memory_ + next_free_slot_;

      // Find a new free slot if there are some left
      if (!this->IsFull()) {
        unsigned bitmap_block = next_free_slot_ / bits_per_block_;
        while (~bitmap_[bitmap_block] == 0)
          bitmap_block = (bitmap_block + 1) % (num_slots_ / bits_per_block_);
        // TODO(jblomer): faster search inside the int
        next_free_slot_ = bitmap_block * bits_per_block_;
        while (this->GetBit(next_free_slot_))
          next_free_slot_++;
      }

      return slot;
    }
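
    // A worked example of the free-slot search above (illustrative only):
    // with num_slots_ = 128 there are two 64-bit bitmap blocks.  Suppose
    // slots 0..63 are taken and slot 64 was just allocated.  The search
    // starts in block 1 (64 / 64); a block is skipped only while it is
    // completely full (~bitmap_[block] == 0).  next_free_slot_ is reset to
    // 64, the first slot of the block, and the inner loop walks forward bit
    // by bit until it finds the first unset bit, i.e. slot 65.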

    /**
     * Free a given slot in the memory pool
     * @param slot a pointer to the slot to be freed
     */
    void Deallocate(T* slot) {
      // Check if the given slot is in bounds
      assert((slot >= memory_) && (slot < memory_ + num_slots_));

      // Get position of slot
      const unsigned int position = slot - memory_;

      // Check if slot was already freed
      assert(this->GetBit(position));

      // Free slot, save the position of this slot as free (faster reallocation)
      this->UnsetBit(position);
      next_free_slot_ = position;
      ++num_free_slots_;
    }

    uint64_t bytes_allocated() { return bytes_allocated_; }

   private:
    /**
     * Check a bit in the internal allocation bitmap.
     * @param position the position to check
     * @return true if bit is set, otherwise false
     */
    inline bool GetBit(const unsigned position) {
      assert(position < num_slots_);
      return ((bitmap_[position / bits_per_block_] &
               (uint64_t(1) << (position % bits_per_block_))) != 0);
    }

    /**
     * Set a bit in the internal allocation bitmap
     * @param position the number of the bit to be set
     */
    inline void SetBit(const unsigned position) {
      assert(position < num_slots_);
      bitmap_[position / bits_per_block_] |=
        uint64_t(1) << (position % bits_per_block_);
    }

    /**
     * Clear a bit in the internal allocation bitmap
     * @param position the number of the bit to be cleared
     */
    inline void UnsetBit(const unsigned position) {
      assert(position < num_slots_);
      bitmap_[position / bits_per_block_] &=
        ~(uint64_t(1) << (position % bits_per_block_));
    }

    unsigned int num_slots_;  /**< Overall number of slots in memory pool. */
    unsigned int num_free_slots_;  /**< Current number of free slots left. */
    unsigned int next_free_slot_;  /**< Position of next free slot in pool. */
    uint64_t bytes_allocated_;
    uint64_t *bitmap_;  /**< A bitmap to mark slots as allocated. */
    unsigned bits_per_block_;
    T *memory_;  /**< The memory pool, an array of Ts. */
  };
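
  // A minimal usage sketch for the allocator above (illustrative only;
  // MemoryAllocator is private to LruCache).  The slot count must be a
  // multiple of 64 and at least 128 to satisfy the constructor asserts;
  // `key` stands for some Key value:
  //
  //   ConcreteMemoryAllocator allocator(128);
  //   ConcreteListEntryContent *entry =
  //     allocator.Construct(ConcreteListEntryContent(key));  // placement-new
  //   ...
  //   allocator.Destruct(entry);  // runs the destructor and frees the slot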


  /**
   * Internal LRU list entry, to maintain the doubly linked list.
   * The list keeps track of the least recently used keys in the cache.
   */
  template<class T> class ListEntry {
  friend class LruCache;
   public:
    /**
     * Create a new list entry as lonely, both next and prev pointing to this.
     */
    ListEntry() {
      this->next = this;
      this->prev = this;
    }

    ListEntry(const ListEntry<T> &other) {
      next = (other.next == &other) ? this : other.next;
      prev = (other.prev == &other) ? this : other.prev;
    }

    virtual ~ListEntry() {}

    /**
     * Checks if the ListEntry is the list head
     * @return true if ListEntry is the list head, otherwise false
     */
    virtual bool IsListHead() const = 0;

    /**
     * A lonely ListEntry has no connection to other elements.
     * @return true if ListEntry is lonely, otherwise false
     */
    bool IsLonely() const { return (this->next == this && this->prev == this); }

    ListEntry<T> *next;  /**< Pointer to next element in the list. */
    ListEntry<T> *prev;  /**< Pointer to previous element in the list. */

   protected:
    /**
     * Insert a given ListEntry after this one.
     * @param entry the ListEntry to insert after this one
     */
    inline void InsertAsSuccessor(ListEntryContent<T> *entry) {
      assert(entry->IsLonely());

      // Mount the new element between this and this->next
      entry->next = this->next;
      entry->prev = this;

      // Fix pointers of existing list elements
      this->next->prev = entry;
      this->next = entry;
      assert(!entry->IsLonely());
    }

    /**
     * Insert a given ListEntry in front of this one
     * @param entry the ListEntry to insert in front of this one
     */
    inline void InsertAsPredecessor(ListEntryContent<T> *entry) {
      assert(entry->IsLonely());
      assert(!entry->IsListHead());

      // Mount the new element between this and this->prev
      entry->next = this;
      entry->prev = this->prev;

      // Fix pointers of existing list elements
      this->prev->next = entry;
      this->prev = entry;

      assert(!entry->IsLonely());
    }

    /**
     * Remove this element from its list.
     * The function connects this->next with this->prev, leaving the list
     * in a consistent state.  The ListEntry itself is lonely afterwards,
     * but not deleted.
     */
    virtual void RemoveFromList() = 0;

   private:
    // No assignment operator (enforced by linker error)
    ListEntry<T>& operator=(const ListEntry<T> &other);
  };  // template<class T> class ListEntry

  /**
   * Specialized ListEntry to contain a data entry of type T
   */
  template<class T> class ListEntryContent : public ListEntry<T> {
   public:
    explicit ListEntryContent(T content) {
      content_ = content;
    }

    inline bool IsListHead() const { return false; }
    inline T content() const { return content_; }

    /**
     * See ListEntry base class.
     */
    inline void RemoveFromList() {
      assert(!this->IsLonely());

      // Remove this from list
      this->prev->next = this->next;
      this->next->prev = this->prev;

      // Make this lonely
      this->next = this;
      this->prev = this;
    }

   private:
    T content_;  /**< The data content of this ListEntry */
  };

  /**
   * Specialized ListEntry to form a list head.
   * Every list has exactly one list head, which is also the entry point
   * of the list.  It is used to manipulate the list.
   */
  template<class T> class ListEntryHead : public ListEntry<T> {
   public:
    explicit ListEntryHead(ConcreteMemoryAllocator *allocator) :
      allocator_(allocator) {}

    virtual ~ListEntryHead() {
      this->clear();
    }

    /**
     * Remove all entries from the list.
     * ListEntry objects are deleted, but the contained data stays available
     */
    void clear() {
      // Delete all list entries
      ListEntry<T> *entry = this->next;
      ListEntry<T> *delete_me;
      while (!entry->IsListHead()) {
        delete_me = entry;
        entry = entry->next;
        allocator_->Destruct(static_cast<ConcreteListEntryContent*>(delete_me));
      }

      // Reset the list to lonely
      this->next = this;
      this->prev = this;
    }

    inline bool IsListHead() const { return true; }
    inline bool IsEmpty() const { return this->IsLonely(); }

    /**
     * Push a new data object to the end of the list.
     * @param content the data object to insert
     * @return the ListEntryContent structure wrapped around the data object
     */
    inline ListEntryContent<T>* PushBack(T content) {
      ListEntryContent<T> *new_entry =
        allocator_->Construct(ListEntryContent<T>(content));
      this->InsertAsPredecessor(new_entry);
      return new_entry;
    }

    /**
     * Pop the first object of the list.
     * The object is returned and removed from the list
     * @return the data object which resided in the first list entry
     */
    inline T PopFront() {
      assert(!this->IsEmpty());
      return Pop(this->next);
    }

    /**
     * Take a list entry out of its list and reinsert it at the end of this
     * list.
     * @param entry the ListEntry to be moved to the end of this list
     */
    inline void MoveToBack(ListEntryContent<T> *entry) {
      assert(!entry->IsLonely());

      entry->RemoveFromList();
      this->InsertAsPredecessor(entry);
    }

    /**
     * See ListEntry base class
     */
    inline void RemoveFromList() { assert(false); }

   private:
    /**
     * Pop a ListEntry from the list (arbitrary position).
     * The given ListEntry is removed from the list and deleted, and its
     * data content is returned
     * @param popped_entry the entry to be popped
     * @return the data object of the popped ListEntry
     */
    inline T Pop(ListEntry<T> *popped_entry) {
      assert(!popped_entry->IsListHead());

      ListEntryContent<T> *popped = (ListEntryContent<T> *)popped_entry;
      popped->RemoveFromList();
      T result = popped->content();
      allocator_->Destruct(static_cast<ConcreteListEntryContent*>(
        popped_entry));
      return result;
    }

   private:
    ConcreteMemoryAllocator *allocator_;
  };
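
  // How the list head implements the LRU order (a sketch, assuming a
  // ListEntryHead<Key> lru_list as used by LruCache below):
  //
  //   lru_list.PushBack(key);            // new key becomes most recently used
  //   lru_list.MoveToBack(entry);        // a touched key moves to the back
  //   Key oldest = lru_list.PopFront();  // eviction takes the front element
  //
  // The front of the list is therefore always the least recently used key.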

 public:  // LruCache
  /**
   * Create a new LRU cache object
   * @param cache_size the maximal size of the cache
   * @param empty_key a special key that marks empty hash table elements
   * @param hasher the hash function for keys
   * @param statistics the statistics template used to register the counters
   */
  LruCache(const unsigned   cache_size,
           const Key       &empty_key,
           uint32_t (*hasher)(const Key &key),
           perf::StatisticsTemplate statistics) :
    counters_(statistics),
    pause_(false),
    cache_gauge_(0),
    cache_size_(cache_size),
    allocator_(cache_size),
    lru_list_(&allocator_)
  {
    assert(cache_size > 0);

    counters_.sz_size->Set(cache_size_);
    filter_entry_ = NULL;
    // cache_ = Cache(cache_size_);
    cache_.Init(cache_size_, empty_key, hasher);
    perf::Xadd(counters_.sz_allocated, allocator_.bytes_allocated() +
                  cache_.bytes_allocated());

#ifdef LRU_CACHE_THREAD_SAFE
    int retval = pthread_mutex_init(&lock_, NULL);
    assert(retval == 0);
#endif
  }

  static double GetEntrySize() {
    return SmallHashFixed<Key, CacheEntry>::GetEntrySize() +
           ConcreteMemoryAllocator::GetEntrySize();
  }

  virtual ~LruCache() {
#ifdef LRU_CACHE_THREAD_SAFE
    pthread_mutex_destroy(&lock_);
#endif
  }

  /**
   * Insert a new key-value pair into the cache.
   * If the cache is already full, the least recently used object is removed;
   * afterwards the new object is inserted.
   * If the object is already present, it is updated and moved back to the
   * end of the list.
   * @param key the key under which the value is stored
   * @param value the value of the cache entry
   * @return true on insert, false on update
   */
  virtual bool Insert(const Key &key, const Value &value) {
    this->Lock();
    if (pause_) {
      Unlock();
      return false;
    }

    CacheEntry entry;

    // Check if we have to update an existing entry
    if (this->DoLookup(key, &entry)) {
      perf::Inc(counters_.n_update);
      entry.value = value;
      cache_.Insert(key, entry);
      this->Touch(entry);
      this->Unlock();
      return false;
    }

    perf::Inc(counters_.n_insert);
    // Check if we have to make room in the cache
    if (this->IsFull())
      this->DeleteOldest();

    entry.list_entry = lru_list_.PushBack(key);
    entry.value = value;

    cache_.Insert(key, entry);
    cache_gauge_++;

    Unlock();
    return true;
  }
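
  // Behavior sketch for Insert() (illustrative only; assumes a cache created
  // as LruCache<int, string> cache(128, -1, hasher_int, stats), where stats
  // is a perf::StatisticsTemplate):
  //
  //   cache.Insert(1, "a");  // returns true: 1 is now most recently used
  //   cache.Insert(1, "b");  // returns false: value updated, LRU refreshed
  //   // After 128 distinct keys the cache is full; the next insert of a new
  //   // key evicts the least recently used key via DeleteOldest().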


  /**
   * Updates an object and moves it back to the end of the list.  The object
   * must be present.
   */
  virtual void Update(const Key &key) {
    Lock();
    // Is not called from the client, only from the cache plugin
    assert(!pause_);
    CacheEntry entry;
    bool retval = DoLookup(key, &entry);
    assert(retval);
    perf::Inc(counters_.n_update);
    Touch(entry);
    Unlock();
  }


  /**
   * Changes the value of an entry in the LRU cache without updating the LRU
   * order.
   */
  virtual bool UpdateValue(const Key &key, const Value &value) {
    this->Lock();
    if (pause_) {
      Unlock();
      return false;
    }

    CacheEntry entry;
    if (!this->DoLookup(key, &entry)) {
      this->Unlock();
      return false;
    }

    perf::Inc(counters_.n_update_value);
    entry.value = value;
    cache_.Insert(key, entry);
    this->Unlock();
    return true;
  }


  /**
   * Retrieve an element from the cache.
   * If the element is found, it is marked as 'recently used' and returned.
   * @param key the key to perform a lookup on
   * @param value (out) here the result is saved (not touched in case of a
   *        miss)
   * @return true on successful lookup, false if the key was not found
   */
  virtual bool Lookup(const Key &key, Value *value, bool update_lru = true) {
    bool found = false;
    Lock();
    if (pause_) {
      Unlock();
      return false;
    }

    CacheEntry entry;
    if (DoLookup(key, &entry)) {
      // Hit
      perf::Inc(counters_.n_hit);
      if (update_lru)
        Touch(entry);
      *value = entry.value;
      found = true;
    } else {
      perf::Inc(counters_.n_miss);
    }

    Unlock();
    return found;
  }

  /**
   * Forgets about a specific cache entry
   * @param key the key to delete from the cache
   * @return true if the key was deleted, false if it was not in the cache
   */
  virtual bool Forget(const Key &key) {
    bool found = false;
    this->Lock();
    if (pause_) {
      Unlock();
      return false;
    }

    CacheEntry entry;
    if (this->DoLookup(key, &entry)) {
      found = true;
      perf::Inc(counters_.n_forget);

      entry.list_entry->RemoveFromList();
      allocator_.Destruct(entry.list_entry);
      cache_.Erase(key);
      --cache_gauge_;
    }

    this->Unlock();
    return found;
  }

  /**
   * Clears all elements from the cache.
   * All memory of the internal data structures is freed, but the data of the
   * cache entries may stay in use; we do not call delete on any user data.
   */
  virtual void Drop() {
    this->Lock();

    cache_gauge_ = 0;
    lru_list_.clear();
    cache_.Clear();
    perf::Inc(counters_.n_drop);
    counters_.sz_allocated->Set(0);
    perf::Xadd(counters_.sz_allocated, allocator_.bytes_allocated() +
                  cache_.bytes_allocated());

    this->Unlock();
  }

  void Pause() {
    Lock();
    pause_ = true;
    Unlock();
  }

  void Resume() {
    Lock();
    pause_ = false;
    Unlock();
  }

  inline bool IsFull() const { return cache_gauge_ >= cache_size_; }
  inline bool IsEmpty() const { return cache_gauge_ == 0; }

  Counters counters() {
    Lock();
    cache_.GetCollisionStats(&counters_.num_collisions,
                             &counters_.max_collisions);
    Unlock();
    return counters_;
  }

  /**
   * Prepares for in-order iteration of the cache entries to perform a filter
   * operation.  To ensure consistency, the LruCache stays locked for the
   * duration of the filter operation.
   */
  virtual void FilterBegin() {
    assert(!filter_entry_);
    Lock();
    filter_entry_ = &lru_list_;
  }

  /**
   * Get the current key and value for the filter operation
   * @param key Address to write the key
   * @param value Address to write the value
   */
  virtual void FilterGet(Key *key, Value *value) {
    CacheEntry entry;
    assert(filter_entry_);
    assert(!filter_entry_->IsListHead());
    *key = static_cast<ConcreteListEntryContent *>(filter_entry_)->content();
    bool rc = this->DoLookup(*key, &entry);
    assert(rc);
    *value = entry.value;
  }

  /**
   * Advance to the next entry in the list
   * @return false upon reaching the end of the cache list
   */
  virtual bool FilterNext() {
    assert(filter_entry_);
    filter_entry_ = filter_entry_->next;
    return !filter_entry_->IsListHead();
  }

  /**
   * Delete the current cache list entry
   */
  virtual void FilterDelete() {
    assert(filter_entry_);
    assert(!filter_entry_->IsListHead());
    ListEntry<Key> *new_current = filter_entry_->prev;
    perf::Inc(counters_.n_forget);
    Key k = static_cast<ConcreteListEntryContent *>(filter_entry_)->content();
    filter_entry_->RemoveFromList();
    allocator_.Destruct(static_cast<ConcreteListEntryContent *>(filter_entry_));
    cache_.Erase(k);
    --cache_gauge_;
    filter_entry_ = new_current;
  }

  /**
   * Finish filtering the entries and unlock the cache
   */
  virtual void FilterEnd() {
    assert(filter_entry_);
    filter_entry_ = NULL;
    Unlock();
  }
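
  // A typical filter pass (a sketch; `cache` is an LruCache instance and
  // ShouldEvict is a hypothetical predicate supplied by the caller):
  //
  //   cache.FilterBegin();           // locks the cache
  //   while (cache.FilterNext()) {   // returns false at the list head
  //     Key key;
  //     Value value;
  //     cache.FilterGet(&key, &value);
  //     if (ShouldEvict(key, value))
  //       cache.FilterDelete();      // removes the current entry
  //   }
  //   cache.FilterEnd();             // unlocks the cache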

 protected:
  Counters counters_;

 private:
  /**
   * This just performs a lookup in the cache WITHOUT changing the LRU order.
   * @param key the key to perform a lookup on
   * @param entry a pointer to the entry structure
   * @return true on successful lookup, false otherwise
   */
  inline bool DoLookup(const Key &key, CacheEntry *entry) {
    return cache_.Lookup(key, entry);
  }

  /**
   * Touch an entry.
   * The entry is moved to the back of the LRU list to mark it as 'recently
   * used'; this saves the entry from being deleted.
   * @param entry the CacheEntry to be touched (CacheEntry is the internal
   *        wrapper data structure)
   */
  inline void Touch(const CacheEntry &entry) {
    lru_list_.MoveToBack(entry.list_entry);
  }

  /**
   * Deletes the least recently used entry from the cache.
   */
  inline void DeleteOldest() {
    assert(!this->IsEmpty());

    perf::Inc(counters_.n_replace);
    Key delete_me = lru_list_.PopFront();
    cache_.Erase(delete_me);

    --cache_gauge_;
  }

  /**
   * Locks the cache (thread safety).
   */
  inline void Lock() {
#ifdef LRU_CACHE_THREAD_SAFE
    pthread_mutex_lock(&lock_);
#endif
  }

  /**
   * Unlocks the cache (thread safety).
   */
  inline void Unlock() {
#ifdef LRU_CACHE_THREAD_SAFE
    pthread_mutex_unlock(&lock_);
#endif
  }

  bool pause_;  /**< Temporarily stops the cache in order to avoid poisoning */

  // Internal data fields
  unsigned int            cache_gauge_;
  const unsigned int      cache_size_;
  ConcreteMemoryAllocator allocator_;

  /**
   * A doubly linked list to keep track of the least recently used entries.
   * New entries are pushed to the back of the list.  If an entry is touched,
   * it is moved to the back of the list again.
   * If the cache is full, the first element (the oldest) is deleted to make
   * room.
   */
  ListEntryHead<Key>              lru_list_;
  SmallHashFixed<Key, CacheEntry> cache_;

  ListEntry<Key> *filter_entry_;
#ifdef LRU_CACHE_THREAD_SAFE
  pthread_mutex_t lock_;  /**< Mutex to make cache thread safe. */
#endif
};  // class LruCache
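
// A complete setup sketch (illustrative; hasher_int, the MurmurHash2 call,
// and the statistics wiring are assumptions of this example, not part of
// this header):
//
//   static uint32_t hasher_int(const int &key) {
//     return MurmurHash2(&key, sizeof(key), 0x07387a4f);  // hypothetical hash
//   }
//   ...
//   perf::Statistics statistics;
//   lru::LruCache<int, std::string> cache(
//     128, -1, hasher_int, perf::StatisticsTemplate("lru", &statistics));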

}  // namespace lru

#endif  // CVMFS_LRU_H_