GCC Code Coverage Report
Directory: cvmfs/ Exec Total Coverage
File: cvmfs/atomic.h Lines: 39 41 95.1 %
Date: 2019-02-03 02:48:13 Branches: 8 8 100.0 %

Line Branch Exec Source
1
/**
2
 * This file is part of the CernVM File System.
3
 *
4
 * Defines wrapper functions for atomic integer operations.  Atomic operations
5
 * are handled by GCC.
6
 */
7
8
#ifndef CVMFS_ATOMIC_H_
9
#define CVMFS_ATOMIC_H_
10
11
#include <stdint.h>
12
13
#ifdef CVMFS_NAMESPACE_GUARD
14
namespace CVMFS_NAMESPACE_GUARD {
15
#endif
16
17
typedef int32_t atomic_int32;
18
typedef int64_t atomic_int64;
19
20
/**
 * Resets a 32 bit atomic counter to zero.  A plain store suffices because
 * initialization is expected to happen before the counter is shared.
 */
static inline void __attribute__((used)) atomic_init32(atomic_int32 *atom) {
  *atom = 0;
}
23
24
/**
 * Resets a 64 bit atomic counter to zero.  A plain store suffices because
 * initialization is expected to happen before the counter is shared.
 */
static inline void __attribute__((used)) atomic_init64(atomic_int64 *atom) {
  *atom = 0;
}
27
28
/**
 * Atomically reads a 32 bit counter.  Implemented as an add of zero so that
 * the GCC builtin provides the required memory ordering.
 */
static inline int32_t __attribute__((used)) atomic_read32(atomic_int32 *atom) {
  return __sync_fetch_and_add(atom, 0);
}
31
32
/**
 * Atomically reads a 64 bit counter.  Implemented as an add of zero so that
 * the GCC builtin provides the required memory ordering.
 */
static inline int64_t __attribute__((used)) atomic_read64(atomic_int64 *atom) {
  return __sync_fetch_and_add(atom, 0);
}
35
36
/**
 * Atomically stores value into a 32 bit counter.  The store is expressed as
 * a compare-and-swap loop: read the current value, then swap it for the new
 * one, retrying if another thread modified the counter in between.
 */
static inline void __attribute__((used))
atomic_write32(atomic_int32 *atom, int32_t value) {
  int32_t observed;
  do {
    observed = atomic_read32(atom);
  } while (!__sync_bool_compare_and_swap(atom, observed, value));
}
41
42
/**
 * Atomically stores value into a 64 bit counter.  The store is expressed as
 * a compare-and-swap loop: read the current value, then swap it for the new
 * one, retrying if another thread modified the counter in between.
 */
static inline void __attribute__((used))
atomic_write64(atomic_int64 *atom, int64_t value) {
  int64_t observed;
  do {
    observed = atomic_read64(atom);
  } while (!__sync_bool_compare_and_swap(atom, observed, value));
}
47
48
/**
 * Atomically increments a 32 bit counter by one; the previous value returned
 * by the builtin is deliberately discarded.
 */
static inline void __attribute__((used)) atomic_inc32(atomic_int32 *atom) {
  (void)__sync_fetch_and_add(atom, 1);
}
51
52
/**
 * Atomically increments a 64 bit counter by one; the previous value returned
 * by the builtin is deliberately discarded.
 */
static inline void __attribute__((used)) atomic_inc64(atomic_int64 *atom) {
  (void)__sync_fetch_and_add(atom, 1);
}
55
56
/**
 * Atomically decrements a 32 bit counter by one; the previous value returned
 * by the builtin is deliberately discarded.
 */
static inline void __attribute__((used)) atomic_dec32(atomic_int32 *atom) {
  (void)__sync_fetch_and_sub(atom, 1);
}
59
60
/**
 * Atomically decrements a 64 bit counter by one; the previous value returned
 * by the builtin is deliberately discarded.
 */
static inline void __attribute__((used)) atomic_dec64(atomic_int64 *atom) {
  (void)__sync_fetch_and_sub(atom, 1);
}
63
64
/**
 * Atomically adds offset (which may be negative) to the 32 bit counter and
 * returns the value the counter held before the addition.
 *
 * Fix: the previous implementation dispatched negative offsets to
 * __sync_fetch_and_sub(a, -offset).  Negating INT32_MIN overflows a signed
 * 32 bit integer, which is undefined behavior.  __sync_fetch_and_add
 * handles negative offsets directly, so the branch is dropped.
 */
static int32_t inline __attribute__((used))
atomic_xadd32(atomic_int32 *a, int32_t offset) {
  return __sync_fetch_and_add(a, offset);
}
69
70
/**
 * Atomically adds offset (which may be negative) to the 64 bit counter and
 * returns the value the counter held before the addition.
 *
 * Fix: the previous implementation dispatched negative offsets to
 * __sync_fetch_and_sub(a, -offset).  Negating INT64_MIN overflows a signed
 * 64 bit integer, which is undefined behavior.  __sync_fetch_and_add
 * handles negative offsets directly, so the branch is dropped.
 */
static int64_t inline __attribute__((used))
atomic_xadd64(atomic_int64 *a, int64_t offset) {
  return __sync_fetch_and_add(a, offset);
}
75
76
/**
 * Atomic 32 bit compare-and-swap: if *atom equals expected, replace it with
 * desired and return true; otherwise leave it unchanged and return false.
 */
static inline bool __attribute__((used))
atomic_cas32(atomic_int32 *atom, int32_t expected, int32_t desired) {
  return __sync_bool_compare_and_swap(atom, expected, desired);
}
80
81
/**
 * Atomic 64 bit compare-and-swap: if *atom equals expected, replace it with
 * desired and return true; otherwise leave it unchanged and return false.
 *
 * Historical note kept from the original: Clang 3.5 miscompiled an optimized
 * __sync_bool_compare_and_swap here, see
 * https://bugs.llvm.org//show_bug.cgi?format=multiple&id=21499
 */
static inline bool __attribute__((used))
atomic_cas64(atomic_int64 *atom, int64_t expected, int64_t desired) {
  return __sync_bool_compare_and_swap(atom, expected, desired);
}
87
88
/**
 * Compiler-level memory barrier: the empty asm statement with a "memory"
 * clobber forbids the compiler from moving memory accesses across this
 * point.  Note that no CPU fence instruction is emitted (the asm body is
 * empty), so this does not order memory with respect to other processors.
 *
 * Fix: use __asm__/__volatile__ instead of the plain `asm` spelling so the
 * header also compiles in strict ISO modes (e.g. -std=c11), where `asm` is
 * not a reserved word.  Also declare the empty parameter list as (void).
 */
static void inline __attribute__((used)) MemoryFence(void) {
  __asm__ __volatile__("" : : : "memory");
}
91
92
#ifdef CVMFS_NAMESPACE_GUARD
93
}  // namespace CVMFS_NAMESPACE_GUARD
94
#endif
95
96
#endif  // CVMFS_ATOMIC_H_