diff --git a/include/refcount.h b/include/refcount.h
index 5bb9660..68fb8fc 100644
--- a/include/refcount.h
+++ b/include/refcount.h
@@ -50,7 +50,7 @@ static inline unsigned int
refcount_unsafe_ref (refcount_t *ref)
{
unsigned int r;
- r = __atomic_add_fetch (ref, 1, __ATOMIC_RELAXED);
+ r = __atomic_add_fetch (ref, 1, __ATOMIC_SEQ_CST);
assert (r != UINT_MAX || !"refcount overflowed!");
return r;
}
@@ -74,7 +74,7 @@ static inline unsigned int
refcount_deref (refcount_t *ref)
{
unsigned int r;
- r = __atomic_sub_fetch (ref, 1, __ATOMIC_RELAXED);
+ r = __atomic_sub_fetch (ref, 1, __ATOMIC_SEQ_CST);
assert (r != UINT_MAX || !"refcount underflowed!");
return r;
}
@@ -84,7 +84,7 @@ refcount_deref (refcount_t *ref)
static inline unsigned int
refcount_references (refcount_t *ref)
{
- return __atomic_load_n (ref, __ATOMIC_RELAXED);
+ return __atomic_load_n (ref, __ATOMIC_SEQ_CST);
}

/* Reference counting with weak references. */
@@ -140,7 +140,7 @@ refcounts_unsafe_ref (refcounts_t *ref, struct references *result)
{
const union _references op = { .references = { .hard = 1 } };
union _references r;
- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
+ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
assert (r.references.hard != UINT32_MAX || !"refcount overflowed!");
if (result)
*result = r.references;
@@ -170,7 +170,7 @@ refcounts_deref (refcounts_t *ref, struct references *result)
{
const union _references op = { .references = { .hard = 1 } };
union _references r;
- r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
+ r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
assert (r.references.hard != UINT32_MAX || !"refcount underflowed!");
if (result)
*result = r.references;
@@ -200,7 +200,7 @@ refcounts_promote (refcounts_t *ref, struct references *result)
const union _references op =
{ .references = { .weak = ~0U, .hard = 1} };
union _references r;
- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
+ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
assert (r.references.hard != UINT32_MAX || !"refcount overflowed!");
assert (r.references.weak != UINT32_MAX || !"refcount underflowed!");
if (result)
@@ -228,7 +228,7 @@ refcounts_demote (refcounts_t *ref, struct references *result)
operation. */
const union _references op = { .references = { .hard = ~0U } };
union _references r;
- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
+ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
assert (r.references.hard != UINT32_MAX || !"refcount underflowed!");
assert (r.references.weak != UINT32_MAX || !"refcount overflowed!");
if (result)
@@ -248,7 +248,7 @@ refcounts_unsafe_ref_weak (refcounts_t *ref, struct references *result)
{
const union _references op = { .references = { .weak = 1 } };
union _references r;
- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
+ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
assert (r.references.weak != UINT32_MAX || !"refcount overflowed!");
if (result)
*result = r.references;
@@ -278,7 +278,7 @@ refcounts_deref_weak (refcounts_t *ref, struct references *result)
{
const union _references op = { .references = { .weak = 1 } };
union _references r;
- r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
+ r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
assert (r.references.weak != UINT32_MAX || !"refcount underflowed!");
if (result)
*result = r.references;
@@ -291,7 +291,7 @@ static inline void
refcounts_references (refcounts_t *ref, struct references *result)
{
union _references r;
- r.value =__atomic_load_n (&ref->value, __ATOMIC_RELAXED);
+ r.value =__atomic_load_n (&ref->value, __ATOMIC_SEQ_CST);
*result = r.references;
}
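Not part of the patch, but as context: below is a minimal, self-contained sketch of the usage pattern these counters serve, written against the same GCC __atomic builtins and the __ATOMIC_SEQ_CST ordering the patch switches to. The struct and function names (obj, obj_ref, obj_unref) are illustrative only and do not come from refcount.h. The point where the ordering matters is the final obj_unref: the thread that frees the object must observe every write made by other holders before they dropped their references, which purely relaxed decrements do not by themselves guarantee.

/* Illustrative sketch only -- not part of refcount.h or this patch.  */
#include <assert.h>
#include <limits.h>
#include <stdlib.h>

struct obj
{
  unsigned int refs;            /* plays the role of refcount_t */
  /* ... payload ... */
};

static struct obj *
obj_new (void)
{
  struct obj *o = calloc (1, sizeof *o);
  if (o)
    o->refs = 1;                /* the creator holds the first reference */
  return o;
}

static void
obj_ref (struct obj *o)
{
  unsigned int r = __atomic_add_fetch (&o->refs, 1, __ATOMIC_SEQ_CST);
  assert (r != UINT_MAX || !"refcount overflowed!");
}

static void
obj_unref (struct obj *o)
{
  unsigned int r = __atomic_sub_fetch (&o->refs, 1, __ATOMIC_SEQ_CST);
  assert (r != UINT_MAX || !"refcount underflowed!");
  if (r == 0)
    free (o);                   /* last reference dropped: destroy the object */
}

int
main (void)
{
  struct obj *o = obj_new ();
  obj_ref (o);                  /* hand a second reference to another user */
  obj_unref (o);                /* that user is done */
  obj_unref (o);                /* creator is done: the object is freed here */
  return 0;
}

A weaker but still correct alternative to sequential consistency would be release ordering on the decrement paired with an acquire fence before destruction; the sketch simply mirrors the conservative choice the patch makes.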