GCC Code Coverage Report


Directory: cvmfs/
File: cvmfs/util/atomic.h
Date: 2024-04-21 02:33:16
            Exec   Total   Coverage
Lines:        39      41      95.1%
Branches:      8       8     100.0%

Line Branch Exec Source
1 /**
2 * This file is part of the CernVM File System.
3 *
4 * Defines wrapper functions for atomic integer operations. Atomic operations
5 * are handled by GCC.
6 */
7
8 #ifndef CVMFS_UTIL_ATOMIC_H_
9 #define CVMFS_UTIL_ATOMIC_H_
10
11 #include <stdint.h>
12
13 #ifdef CVMFS_NAMESPACE_GUARD
14 namespace CVMFS_NAMESPACE_GUARD {
15 #endif
16
17 typedef int32_t atomic_int32;
18 typedef int64_t atomic_int64;
19
20 635530 static void inline __attribute__((used)) atomic_init32(atomic_int32 *a) {
21 635530 *a = 0;
22 635530 }
23
24 264611 static void inline __attribute__((used)) atomic_init64(atomic_int64 *a) {
25 264611 *a = 0;
26 264611 }
27
28 89806252 static int32_t inline __attribute__((used)) atomic_read32(atomic_int32 *a) {
29 89806252 return __sync_fetch_and_add(a, 0);
30 }
31
32 43293230 static int64_t inline __attribute__((used)) atomic_read64(atomic_int64 *a) {
33 43293230 return __sync_fetch_and_add(a, 0);
34 }
35
36 static void inline __attribute__((used))
37 13552382 atomic_write32(atomic_int32 *a, int32_t value) {
38 2/2 51133113 while (!__sync_bool_compare_and_swap(a, atomic_read32(a), value)) {
✓ Branch 1 taken 37580731 times.
✓ Branch 2 taken 17122301 times.
39 }
40 17122301 }
41
42 static void inline __attribute__((used))
43 14421401 atomic_write64(atomic_int64 *a, int64_t value) {
44 2/2 45970218 while (!__sync_bool_compare_and_swap(a, atomic_read64(a), value)) {
✓ Branch 1 taken 31548817 times.
✓ Branch 2 taken 18184879 times.
45 }
46 18184879 }
47
48 7704842 static void inline __attribute__((used)) atomic_inc32(atomic_int32 *a) {
49 7704842 (void)__sync_fetch_and_add(a, 1);
50 7704842 }
51
52 11829941 static void inline __attribute__((used)) atomic_inc64(atomic_int64 *a) {
53 11829941 (void)__sync_fetch_and_add(a, 1);
54 11829941 }
55
56 4894439 static void inline __attribute__((used)) atomic_dec32(atomic_int32 *a) {
57 4894439 (void)__sync_fetch_and_sub(a, 1);
58 4894439 }
59
60 4088840 static void inline __attribute__((used)) atomic_dec64(atomic_int64 *a) {
61 4088840 (void)__sync_fetch_and_sub(a, 1);
62 4088840 }
63
64 static int32_t inline __attribute__((used))
65 914842 atomic_xadd32(atomic_int32 *a, int32_t offset) {
66 2/2 914842 if (offset < 0) return __sync_fetch_and_sub(a, -offset);
✓ Branch 0 taken 193335 times.
✓ Branch 1 taken 721507 times.
67 721507 return __sync_fetch_and_add(a, offset);
68 }
69
70 static int64_t inline __attribute__((used))
71 3399005 atomic_xadd64(atomic_int64 *a, int64_t offset) {
72 2/2 3399005 if (offset < 0) return __sync_fetch_and_sub(a, -offset);
✓ Branch 0 taken 804129 times.
✓ Branch 1 taken 2594876 times.
73 2594876 return __sync_fetch_and_add(a, offset);
74 }
75
76 static bool inline __attribute__((used))
77 103297 atomic_cas32(atomic_int32 *a, int32_t cmp, int32_t newval) {
78 103297 return __sync_bool_compare_and_swap(a, cmp, newval);
79 }
80
81 static bool inline __attribute__((used))
82 atomic_cas64(atomic_int64 *a, int64_t cmp, int64_t newval) {
83 // Clang 3.5 has a bug in optimized __sync_bool_compare_and_swap:
84 // https://bugs.llvm.org//show_bug.cgi?format=multiple&id=21499
85 return __sync_bool_compare_and_swap(a, cmp, newval);
86 }
87
88 42 static void inline __attribute__((used)) MemoryFence() {
89 42 asm __volatile__("" : : : "memory");
90 42 }
91
92 #ifdef CVMFS_NAMESPACE_GUARD
93 } // namespace CVMFS_NAMESPACE_GUARD
94 #endif
95
96 #endif // CVMFS_UTIL_ATOMIC_H_
97
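
Note: the functions listed above are thin wrappers over GCC's __sync_* builtins, so exercising them only takes a driver that initializes, increments, reads, and compare-and-swaps the typedef'd counters from several threads. A minimal usage sketch follows; it is illustrative only, and the thread count, counter names, and include path are assumptions, not taken from this report or from the CernVM-FS test suite.

// Hypothetical driver for the wrappers listed above (names and include path assumed).
#include <pthread.h>
#include <cstdio>

#include "util/atomic.h"

atomic_int64 g_counter;      // shared counter touched by all threads
atomic_int32 g_initialized;  // one-shot flag guarded by CAS

static void *Worker(void *) {
  // Only the thread that wins the CAS would perform one-time setup.
  if (atomic_cas32(&g_initialized, 0, 1)) {
    // ... one-time initialization would go here ...
  }
  for (int i = 0; i < 1000; ++i)
    atomic_inc64(&g_counter);  // __sync_fetch_and_add under the hood
  return NULL;
}

int main() {
  atomic_init32(&g_initialized);
  atomic_init64(&g_counter);

  pthread_t threads[4];
  for (unsigned i = 0; i < 4; ++i)
    pthread_create(&threads[i], NULL, Worker, NULL);
  for (unsigned i = 0; i < 4; ++i)
    pthread_join(threads[i], NULL);

  // Expect 4 * 1000 regardless of thread interleaving.
  printf("counter = %ld\n", (long)atomic_read64(&g_counter));
  return 0;
}

Building such a driver with -pthread and --coverage and running it is the kind of exercise that yields per-line execution and branch counts like those shown in the listing above.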