author    Danny Lin <danny@kdrag0n.dev>    2021-10-11 20:49:26 -0700
committer alk3pInjection <5e147612@kscope.ink>    2022-05-08 12:40:50 +0800
commit    e36e52fb6ec54bc47e17e849ca5a3a301eaa6d05 (patch)
tree      854d309be91bcdd38fe46ea8ad0f906bef39d017 /include/jemalloc/internal/atomic_gcc_sync.h
parent    6d4d27fd2651ed114386b89c2d251b816a849460 (diff)
parent    ea6b3e973b477b8061e0076bb257dbd7f3faa756 (diff)
Merge tag '5.2.1' into HEAD
Release

Change-Id: I269b861cb81499b78f13dc2e88827f13ef5a207d
Diffstat (limited to 'include/jemalloc/internal/atomic_gcc_sync.h')
-rw-r--r--  include/jemalloc/internal/atomic_gcc_sync.h | 18
1 file changed, 11 insertions(+), 7 deletions(-)
diff --git a/include/jemalloc/internal/atomic_gcc_sync.h b/include/jemalloc/internal/atomic_gcc_sync.h
index 30846e4d..e02b7cbe 100644
--- a/include/jemalloc/internal/atomic_gcc_sync.h
+++ b/include/jemalloc/internal/atomic_gcc_sync.h
@@ -27,8 +27,10 @@ atomic_fence(atomic_memory_order_t mo) {
asm volatile("" ::: "memory");
# if defined(__i386__) || defined(__x86_64__)
/* This is implicit on x86. */
-# elif defined(__ppc__)
+# elif defined(__ppc64__)
asm volatile("lwsync");
+# elif defined(__ppc__)
+ asm volatile("sync");
# elif defined(__sparc__) && defined(__arch64__)
if (mo == atomic_memory_order_acquire) {
asm volatile("membar #LoadLoad | #LoadStore");
@@ -113,8 +115,8 @@ atomic_store_##short_type(atomic_##short_type##_t *a, \
} \
\
ATOMIC_INLINE type \
-atomic_exchange_##short_type(atomic_##short_type##_t *a, type val, \
- atomic_memory_order_t mo) { \
+atomic_exchange_##short_type(atomic_##short_type##_t *a, type val, \
+ atomic_memory_order_t mo) { \
/* \
* Because of FreeBSD, we care about gcc 4.2, which doesn't have\
* an atomic exchange builtin. We fake it with a CAS loop. \
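
The comment in this hunk carries the design rationale: FreeBSD's base compiler was still gcc 4.2, which has the __sync_* builtins but no atomic exchange, so the macro fakes exchange with a compare-and-swap loop. A hand-expanded sketch of that loop for a concrete uint32_t (exchange_u32 is an illustrative name; the builtin is real):

#include <stdbool.h>
#include <stdint.h>

static inline uint32_t
exchange_u32(volatile uint32_t *p, uint32_t val) {
	while (true) {
		uint32_t old = *p;
		/* gcc 4.2-era full-barrier CAS builtin: returns true
		 * iff *p still held old and was replaced by val. */
		if (__sync_bool_compare_and_swap(p, old, val)) {
			return old;	/* previous value, as exchange requires */
		}
		/* Lost a race with another writer; reread and retry. */
	}
}
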
@@ -129,8 +131,9 @@ atomic_exchange_##short_type(atomic_##short_type##_t *a, type val, \
\
ATOMIC_INLINE bool \
atomic_compare_exchange_weak_##short_type(atomic_##short_type##_t *a, \
- type *expected, type desired, atomic_memory_order_t success_mo, \
- atomic_memory_order_t failure_mo) { \
+ type *expected, type desired, \
+ atomic_memory_order_t success_mo, \
+ atomic_memory_order_t failure_mo) { \
type prev = __sync_val_compare_and_swap(&a->repr, *expected, \
desired); \
if (prev == *expected) { \
@@ -142,8 +145,9 @@ atomic_compare_exchange_weak_##short_type(atomic_##short_type##_t *a, \
} \
ATOMIC_INLINE bool \
atomic_compare_exchange_strong_##short_type(atomic_##short_type##_t *a, \
- type *expected, type desired, atomic_memory_order_t success_mo, \
- atomic_memory_order_t failure_mo) { \
+ type *expected, type desired, \
+ atomic_memory_order_t success_mo, \
+ atomic_memory_order_t failure_mo) { \
type prev = __sync_val_compare_and_swap(&a->repr, *expected, \
desired); \
if (prev == *expected) { \
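
The last two hunks only re-wrap the parameter lists; the bodies are untouched. Worth noting: under this backend the weak and strong variants behave identically, because __sync_val_compare_and_swap is a full-barrier CAS that cannot fail spuriously. A hypothetical caller sketch, assuming the u32 names these macros generate elsewhere in jemalloc (fetch_mul_u32 itself is invented for illustration):

#include "jemalloc/internal/atomic.h"

static inline uint32_t
fetch_mul_u32(atomic_u32_t *a, uint32_t factor) {
	uint32_t expected = atomic_load_u32(a, atomic_memory_order_relaxed);
	while (!atomic_compare_exchange_weak_u32(a, &expected,
	    expected * factor, atomic_memory_order_acq_rel,
	    atomic_memory_order_relaxed)) {
		/* CAS failed; expected was refreshed with the current value. */
	}
	return expected;	/* value observed before the successful multiply */
}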