Diffstat (limited to 'debian')
-rw-r--r--  debian/patches/series                      |  1
-rw-r--r--  debian/patches/use__ATOMIC_SEQ_CST.patch   | 94
2 files changed, 95 insertions, 0 deletions
diff --git a/debian/patches/series b/debian/patches/series
index a4d2fe8d..f34f2f22 100644
--- a/debian/patches/series
+++ b/debian/patches/series
@@ -61,3 +61,4 @@ libpager-fixthreads.patch
0001-libshouldbeinlibc-add-assert.h-variant-that-prints-b.patch
0002-include-use-assert-backtrace.h-in-refcount.h.patch
0003-hack_why_doesnt_backtrace_symbols_fd_work.patch
+use__ATOMIC_SEQ_CST.patch
diff --git a/debian/patches/use__ATOMIC_SEQ_CST.patch b/debian/patches/use__ATOMIC_SEQ_CST.patch
new file mode 100644
index 00000000..59f1f9bc
--- /dev/null
+++ b/debian/patches/use__ATOMIC_SEQ_CST.patch
@@ -0,0 +1,94 @@
+diff --git a/include/refcount.h b/include/refcount.h
+index 5bb9660..68fb8fc 100644
+--- a/include/refcount.h
++++ b/include/refcount.h
+@@ -50,7 +50,7 @@ static inline unsigned int
+ refcount_unsafe_ref (refcount_t *ref)
+ {
+ unsigned int r;
+- r = __atomic_add_fetch (ref, 1, __ATOMIC_RELAXED);
++ r = __atomic_add_fetch (ref, 1, __ATOMIC_SEQ_CST);
+ assert (r != UINT_MAX || !"refcount overflowed!");
+ return r;
+ }
+@@ -74,7 +74,7 @@ static inline unsigned int
+ refcount_deref (refcount_t *ref)
+ {
+ unsigned int r;
+- r = __atomic_sub_fetch (ref, 1, __ATOMIC_RELAXED);
++ r = __atomic_sub_fetch (ref, 1, __ATOMIC_SEQ_CST);
+ assert (r != UINT_MAX || !"refcount underflowed!");
+ return r;
+ }
+@@ -84,7 +84,7 @@ refcount_deref (refcount_t *ref)
+ static inline unsigned int
+ refcount_references (refcount_t *ref)
+ {
+- return __atomic_load_n (ref, __ATOMIC_RELAXED);
++ return __atomic_load_n (ref, __ATOMIC_SEQ_CST);
+ }
+
+ /* Reference counting with weak references. */
+@@ -140,7 +140,7 @@ refcounts_unsafe_ref (refcounts_t *ref, struct references *result)
+ {
+ const union _references op = { .references = { .hard = 1 } };
+ union _references r;
+- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
++ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
+ assert (r.references.hard != UINT32_MAX || !"refcount overflowed!");
+ if (result)
+ *result = r.references;
+@@ -170,7 +170,7 @@ refcounts_deref (refcounts_t *ref, struct references *result)
+ {
+ const union _references op = { .references = { .hard = 1 } };
+ union _references r;
+- r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
++ r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
+ assert (r.references.hard != UINT32_MAX || !"refcount underflowed!");
+ if (result)
+ *result = r.references;
+@@ -200,7 +200,7 @@ refcounts_promote (refcounts_t *ref, struct references *result)
+ const union _references op =
+ { .references = { .weak = ~0U, .hard = 1} };
+ union _references r;
+- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
++ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
+ assert (r.references.hard != UINT32_MAX || !"refcount overflowed!");
+ assert (r.references.weak != UINT32_MAX || !"refcount underflowed!");
+ if (result)
+@@ -228,7 +228,7 @@ refcounts_demote (refcounts_t *ref, struct references *result)
+ operation. */
+ const union _references op = { .references = { .hard = ~0U } };
+ union _references r;
+- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
++ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
+ assert (r.references.hard != UINT32_MAX || !"refcount underflowed!");
+ assert (r.references.weak != UINT32_MAX || !"refcount overflowed!");
+ if (result)
+@@ -248,7 +248,7 @@ refcounts_unsafe_ref_weak (refcounts_t *ref, struct references *result)
+ {
+ const union _references op = { .references = { .weak = 1 } };
+ union _references r;
+- r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
++ r.value = __atomic_add_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
+ assert (r.references.weak != UINT32_MAX || !"refcount overflowed!");
+ if (result)
+ *result = r.references;
+@@ -278,7 +278,7 @@ refcounts_deref_weak (refcounts_t *ref, struct references *result)
+ {
+ const union _references op = { .references = { .weak = 1 } };
+ union _references r;
+- r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_RELAXED);
++ r.value = __atomic_sub_fetch (&ref->value, op.value, __ATOMIC_SEQ_CST);
+ assert (r.references.weak != UINT32_MAX || !"refcount underflowed!");
+ if (result)
+ *result = r.references;
+@@ -291,7 +291,7 @@ static inline void
+ refcounts_references (refcounts_t *ref, struct references *result)
+ {
+ union _references r;
+- r.value =__atomic_load_n (&ref->value, __ATOMIC_RELAXED);
++ r.value =__atomic_load_n (&ref->value, __ATOMIC_SEQ_CST);
+ *result = r.references;
+ }
+
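For context: the patch above switches every GCC __atomic builtin in include/refcount.h from __ATOMIC_RELAXED (atomicity only, no inter-thread ordering guarantees) to __ATOMIC_SEQ_CST (full sequential consistency). The sketch below shows the same builtin-and-ordering pattern in isolation; the counter type, function names, and main() driver are illustrative assumptions, not taken from the Hurd sources.

/* Minimal sketch of the pattern the patch applies: a shared counter
   updated with GCC __atomic builtins using __ATOMIC_SEQ_CST.
   Names are hypothetical; only the builtins and memory order mirror
   the patched refcount.h.  Build with: gcc -O2 sketch.c */
#include <assert.h>
#include <limits.h>
#include <stdio.h>

typedef unsigned int counter_t;

static inline unsigned int
counter_ref (counter_t *c)
{
  /* Sequentially consistent increment; returns the new value.  */
  unsigned int r = __atomic_add_fetch (c, 1, __ATOMIC_SEQ_CST);
  assert (r != UINT_MAX || !"counter overflowed!");
  return r;
}

static inline unsigned int
counter_deref (counter_t *c)
{
  /* Sequentially consistent decrement; returns the new value.  */
  unsigned int r = __atomic_sub_fetch (c, 1, __ATOMIC_SEQ_CST);
  assert (r != UINT_MAX || !"counter underflowed!");
  return r;
}

int
main (void)
{
  counter_t c = 0;
  counter_ref (&c);
  counter_ref (&c);
  /* Two increments followed by one decrement leave the count at 1.  */
  printf ("count after deref: %u\n", counter_deref (&c));
  printf ("current: %u\n", __atomic_load_n (&c, __ATOMIC_SEQ_CST));
  return 0;
}

With __ATOMIC_RELAXED each operation is still atomic, but other loads and stores around it may be reordered; __ATOMIC_SEQ_CST additionally places every such operation in a single total order observed by all threads, which is the stronger guarantee this patch opts into at some cost on weakly ordered architectures.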