@@ -13,6 +13,8 @@
* ODP RW Locks
*/
+#include <odp_atomic.h>
+
#ifdef __cplusplus
extern "C" {
#endif
@@ -28,7 +30,7 @@ extern "C" {
* read lock count > 0
*/
typedef struct {
- volatile int32_t cnt; /**< -1 Write lock,
+ odp_atomic_u32_t cnt; /**< (uint32_t)-1 for Write lock,
> 0 for Read lock. */
} odp_rwlock_t;
@@ -31,7 +31,7 @@ extern "C" {
*/
typedef struct odp_ticketlock_t {
odp_atomic_u32_t next_ticket; /**< @private Next ticket */
- volatile uint32_t cur_ticket; /**< @private Current ticket */
+ odp_atomic_u32_t cur_ticket; /**< @private Current ticket */
} odp_ticketlock_t;
new file mode 100644
@@ -0,0 +1,523 @@
+/* Copyright (c) 2013, Linaro Limited
+ * All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+/**
+ * @file
+ *
+ * ODP atomic types and operations, semantically a subset of C11 atomics.
+ * Reuse the 32-bit and 64-bit type definitions from odp_atomic.h.
+ * Atomic functions must be used to operate on atomic variables!
+ */
+
+#ifndef ODP_ATOMIC_INTERNAL_H_
+#define ODP_ATOMIC_INTERNAL_H_
+
+#include <stdint.h>
+#include <stdbool.h>
+#include <odp_align.h>
+#include <odp_hints.h>
+#include <odp_atomic.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** @addtogroup odp_synchronizers
+ * Atomic operations.
+ * @{
+ */
+
+/**
+ * Pointer atomic type
+ */
+typedef struct {
+ void *v; /**< Actual storage for the atomic variable */
+} odp_atomic_ptr_t
+ODP_ALIGNED(sizeof(void *)); /* Enforce alignment! */
+
+/**
+ * Memory orderings (memory models) supported by the atomic operations
+ */
+typedef enum {
+/** Relaxed memory ordering, no ordering of other accesses enforced */
+ ODP_MEMMODEL_RLX = __ATOMIC_RELAXED,
+/** Acquire memory ordering, synchronize with release stores from another
+ * thread (later accesses cannot move before acquire operation) */
+ ODP_MEMMODEL_ACQ = __ATOMIC_ACQUIRE,
+/** Release memory ordering, synchronize with acquire loads from another
+ * thread (earlier accesses cannot move after release operation) */
+ ODP_MEMMODEL_RLS = __ATOMIC_RELEASE,
+/** Acquire&release memory ordering, synchronize with acquire loads and release
+ * stores in another (one other) thread */
+ ODP_MEMMODEL_ACQ_RLS = __ATOMIC_ACQ_REL,
+/** Sequential consistent memory ordering, synchronize with acquire loads and
+ * release stores in all other threads */
+ ODP_MEMMODEL_SC = __ATOMIC_SEQ_CST
+} odp_memmodel_t;
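+
+/* Illustrative use of the orderings above (sketch only; 'flag', 'shared_data'
+ * and the thread roles are hypothetical, odp_spin() comes from
+ * odp_spin_internal.h):
+ *
+ *   Producer: publish data, then set the flag with a release store
+ *       shared_data = 42;
+ *       odp_atomic_u32_store_mm(&flag, 1, ODP_MEMMODEL_RLS);
+ *
+ *   Consumer: spin with acquire loads until the flag is observed; the write
+ *   to shared_data made before the matching release store is then visible
+ *       while (odp_atomic_u32_load_mm(&flag, ODP_MEMMODEL_ACQ) == 0)
+ *               odp_spin();
+ *       use(shared_data);
+ */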
+
+/**
+ * Insert a full memory barrier (fence) that orders both compiler and
+ * hardware memory accesses (sequentially consistent)
+ */
+#define ODP_COMPILER_HW_BARRIER() __atomic_thread_fence(__ATOMIC_SEQ_CST)
+
+/*****************************************************************************
+ * Operations on 32-bit atomics
+ * odp_atomic_u32_load_mm - return current value
+ * odp_atomic_u32_store_mm - no return value
+ * odp_atomic_u32_xchg_mm - return old value
+ * odp_atomic_u32_cmp_xchg_strong_mm - return bool
+ * odp_atomic_u32_fetch_add_mm - return old value
+ * odp_atomic_u32_add_mm - no return value
+ * odp_atomic_u32_fetch_sub_mm - return old value
+ * odp_atomic_u32_sub_mm - no return value
+ *****************************************************************************/
+
+/**
+ * Atomic load of 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param mmodel Memory model associated with the load
+ *
+ * @return Value of the variable
+ */
+static inline uint32_t odp_atomic_u32_load_mm(const odp_atomic_u32_t *ptr,
+ odp_memmodel_t mmodel)
+{
+ return __atomic_load_n(&ptr->v, mmodel);
+}
+
+/**
+ * Atomic store to 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param val Value to write to the atomic variable
+ * @param mmodel Memory model associated with the store
+ */
+static inline void odp_atomic_u32_store_mm(odp_atomic_u32_t *ptr,
+ uint32_t val,
+ odp_memmodel_t mmodel)
+{
+ __atomic_store_n(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic exchange (swap) of 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param val New value to write
+ * @param mmodel Memory model associated with the exchange operation
+ *
+ * @return Old value of variable
+ */
+static inline uint32_t odp_atomic_u32_xchg_mm(odp_atomic_u32_t *ptr,
+ uint32_t val,
+ odp_memmodel_t mmodel)
+
+{
+ return __atomic_exchange_n(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic compare and exchange (swap) of 32-bit atomic variable
+ * "Strong" semantics, will not fail spuriously.
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param exp_p Pointer to expected value (updated on failure)
+ * @param val New value to write
+ * @param succ Memory model associated with a successful compare-and-swap
+ * operation
+ * @param fail Memory model associated with a failed compare-and-swap
+ * operation
+ *
+ * @return 1 (true) if exchange successful, 0 (false) if not successful (and
+ * '*exp_p' updated with current value)
+ */
+static inline int odp_atomic_u32_cmp_xchg_strong_mm(odp_atomic_u32_t *ptr,
+ uint32_t *exp_p,
+ uint32_t val,
+ odp_memmodel_t succ,
+ odp_memmodel_t fail)
+{
+ return __atomic_compare_exchange_n(&ptr->v, exp_p, val,
+ false/*strong*/, succ, fail);
+}
+
+/**
+ * Atomic fetch and add of 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param val Value to be added to the atomic variable
+ * @param mmodel Memory model associated with the add operation.
+ *
+ * @return Value of the atomic variable before the addition
+ */
+static inline uint32_t odp_atomic_u32_fetch_add_mm(odp_atomic_u32_t *ptr,
+ uint32_t val,
+ odp_memmodel_t mmodel)
+{
+ return __atomic_fetch_add(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic add of 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param val Value to be added to the atomic variable
+ * @param mmodel Memory model associated with the add operation.
+ */
+static inline void odp_atomic_u32_add_mm(odp_atomic_u32_t *ptr,
+ uint32_t val,
+ odp_memmodel_t mmodel)
+
+{
+ (void)__atomic_fetch_add(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic fetch and subtract of 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param val Value to be subtracted from the atomic variable
+ * @param mmodel Memory model associated with the subtract operation.
+ *
+ * @return Value of the atomic variable before the subtraction
+ */
+static inline uint32_t odp_atomic_u32_fetch_sub_mm(odp_atomic_u32_t *ptr,
+ uint32_t val,
+ odp_memmodel_t mmodel)
+{
+ return __atomic_fetch_sub(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic subtract of 32-bit atomic variable
+ *
+ * @param ptr Pointer to a 32-bit atomic variable
+ * @param val Value to be subtracted from the atomic variable
+ * @param mmodel Memory model associated with the subtract operation.
+ */
+static inline void odp_atomic_u32_sub_mm(odp_atomic_u32_t *ptr,
+ uint32_t val,
+ odp_memmodel_t mmodel)
+
+{
+ (void)__atomic_fetch_sub(&ptr->v, val, mmodel);
+}
+
+/*****************************************************************************
+ * Operations on 64-bit atomics
+ * odp_atomic_u64_load_mm - return current value
+ * odp_atomic_u64_store_mm - no return value
+ * odp_atomic_u64_xchg_mm - return old value
+ * odp_atomic_u64_cmp_xchg_strong_mm - return bool
+ * odp_atomic_u64_fetch_add_mm - return old value
+ * odp_atomic_u64_add_mm - no return value
+ * odp_atomic_u64_fetch_sub_mm - return old value
+ * odp_atomic_u64_sub_mm - no return value
+ *****************************************************************************/
+
+/* Check if the compiler supports lock-free atomic operations on 64-bit types */
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+/**
+ * Helper macro for lock-based atomic operations on 64-bit integers
+ * @param ptr Pointer to the 64-bit atomic variable
+ * @param expr Expression used to update the variable (e.g. "ptr->v += val").
+ * @param mm Memory model to use.
+ * @return The old value of the variable.
+ */
+#define ATOMIC_OP_MM(ptr, expr, mm) \
+({ \
+ uint64_t old_val; \
+ /* Loop while lock is already taken, stop when lock becomes clear */ \
+ while (__atomic_test_and_set(&(ptr)->lock, \
+ (mm) == ODP_MEMMODEL_SC ? \
+ __ATOMIC_SEQ_CST : __ATOMIC_ACQUIRE)) \
+ (void)0; \
+ old_val = (ptr)->v; \
+ expr; /* Perform whatever update is desired */ \
+ __atomic_clear(&(ptr)->lock, \
+ (mm) == ODP_MEMMODEL_SC ? \
+ __ATOMIC_SEQ_CST : __ATOMIC_RELEASE); \
+ old_val; /* Return old value */ \
+})
+#endif
+
+/**
+ * Atomic load of 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param mmodel Memory model associated with the load
+ *
+ * @return Value of the variable
+ */
+static inline uint64_t odp_atomic_u64_load_mm(odp_atomic_u64_t *ptr,
+ odp_memmodel_t mmodel)
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ return ATOMIC_OP_MM(ptr, (void)0, mmodel);
+#else
+ return __atomic_load_n(&ptr->v, mmodel);
+#endif
+}
+
+/**
+ * Atomic store to 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param val Value to write to the atomic variable
+ * @param mmodel Memory model associated with the store
+ */
+static inline void odp_atomic_u64_store_mm(odp_atomic_u64_t *ptr,
+ uint64_t val,
+ odp_memmodel_t mmodel)
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ (void)ATOMIC_OP_MM(ptr, ptr->v = val, mmodel);
+#else
+ __atomic_store_n(&ptr->v, val, mmodel);
+#endif
+}
+
+/**
+ * Atomic exchange (swap) of 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param val New value to write
+ * @param mmodel Memory model associated with the exchange operation
+ *
+ * @return Old value of variable
+ */
+static inline uint64_t odp_atomic_u64_xchg_mm(odp_atomic_u64_t *ptr,
+ uint64_t val,
+ odp_memmodel_t mmodel)
+
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ return ATOMIC_OP_MM(ptr, ptr->v = val, mmodel);
+#else
+ return __atomic_exchange_n(&ptr->v, val, mmodel);
+#endif
+}
+
+/**
+ * Atomic compare and exchange (swap) of 64-bit atomic variable
+ * "Strong" semantics, will not fail spuriously.
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param exp_p Pointer to expected value (updated on failure)
+ * @param val New value to write
+ * @param succ Memory model associated with a successful compare-and-swap
+ * operation
+ * @param fail Memory model associated with a failed compare-and-swap
+ * operation
+ *
+ * @return 1 (true) if exchange successful, 0 (false) if not successful (and
+ * '*exp_p' updated with current value)
+ */
+static inline int odp_atomic_u64_cmp_xchg_strong_mm(odp_atomic_u64_t *ptr,
+ uint64_t *exp_p,
+ uint64_t val,
+ odp_memmodel_t succ,
+ odp_memmodel_t fail)
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ /* Possibly we are a bit pessimistic with the memory models */
+ int success;
+ /* Loop while lock is already taken, stop when lock becomes clear */
+ while (__atomic_test_and_set(&(ptr)->lock,
+ (succ) == ODP_MEMMODEL_SC ?
+ __ATOMIC_SEQ_CST : __ATOMIC_ACQUIRE))
+ (void)0;
+ if (ptr->v == *exp_p) {
+ ptr->v = val;
+ success = 1;
+ } else {
+ *exp_p = ptr->v;
+ success = 0;
+ succ = fail;
+ }
+ __atomic_clear(&(ptr)->lock,
+ (succ) == ODP_MEMMODEL_SC ?
+ __ATOMIC_SEQ_CST : __ATOMIC_RELEASE);
+ return success;
+#else
+ return __atomic_compare_exchange_n(&ptr->v, exp_p, val,
+ false, succ, fail);
+#endif
+}
+
+/**
+ * Atomic fetch and add of 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param val Value to be added to the atomic variable
+ * @param mmodel Memory model associated with the add operation.
+ *
+ * @return Value of the atomic variable before the addition
+ */
+static inline uint64_t odp_atomic_u64_fetch_add_mm(odp_atomic_u64_t *ptr,
+ uint64_t val,
+ odp_memmodel_t mmodel)
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ return ATOMIC_OP_MM(ptr, ptr->v += val, mmodel);
+#else
+ return __atomic_fetch_add(&ptr->v, val, mmodel);
+#endif
+}
+
+/**
+ * Atomic add of 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param val Value to be added to the atomic variable
+ * @param mmodel Memory model associated with the add operation.
+ */
+static inline void odp_atomic_u64_add_mm(odp_atomic_u64_t *ptr,
+ uint64_t val,
+ odp_memmodel_t mmodel)
+
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ (void)ATOMIC_OP_MM(ptr, ptr->v += val, mmodel);
+#else
+ (void)__atomic_fetch_add(&ptr->v, val, mmodel);
+#endif
+}
+
+/**
+ * Atomic fetch and subtract of 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param val Value to be subtracted from the atomic variable
+ * @param mmodel Memory model associated with the subtract operation.
+ *
+ * @return Value of the atomic variable before the subtraction
+ */
+static inline uint64_t odp_atomic_u64_fetch_sub_mm(odp_atomic_u64_t *ptr,
+ uint64_t val,
+ odp_memmodel_t mmodel)
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ return ATOMIC_OP_MM(ptr, ptr->v -= val, mmodel);
+#else
+ return __atomic_fetch_sub(&ptr->v, val, mmodel);
+#endif
+}
+
+/**
+ * Atomic subtract of 64-bit atomic variable
+ *
+ * @param ptr Pointer to a 64-bit atomic variable
+ * @param val Value to be subtracted from the atomic variable
+ * @param mmodel Memory model associated with the subtract operation.
+ */
+static inline void odp_atomic_u64_sub_mm(odp_atomic_u64_t *ptr,
+ uint64_t val,
+ odp_memmodel_t mmodel)
+
+{
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+ (void)ATOMIC_OP_MM(ptr, ptr->v -= val, mmodel);
+#else
+ (void)__atomic_fetch_sub(&ptr->v, val, mmodel);
+#endif
+}
+
+#if defined __GCC_ATOMIC_LLONG_LOCK_FREE && __GCC_ATOMIC_LLONG_LOCK_FREE < 2
+#undef ATOMIC_OP_MM
+#endif
+
+/*****************************************************************************
+ * Operations on pointer atomics
+ * odp_atomic_ptr_init - no return value
+ * odp_atomic_ptr_load - return current value
+ * odp_atomic_ptr_store - no return value
+ * odp_atomic_ptr_xchg - return old value
+ *****************************************************************************/
+
+/**
+ * Initialization of pointer atomic variable
+ *
+ * @param ptr Pointer to a pointer atomic variable
+ * @param val Value to initialize the variable with
+ */
+static inline void odp_atomic_ptr_init(odp_atomic_ptr_t *ptr, void *val)
+{
+ __atomic_store_n(&ptr->v, val, __ATOMIC_RELAXED);
+}
+
+/**
+ * Atomic load of pointer atomic variable
+ *
+ * @param ptr Pointer to a pointer atomic variable
+ * @param mmodel Memory model associated with the load
+ *
+ * @return Value of the variable
+ */
+static inline void *odp_atomic_ptr_load(const odp_atomic_ptr_t *ptr,
+ odp_memmodel_t mmodel)
+{
+ return __atomic_load_n(&ptr->v, mmodel);
+}
+
+/**
+ * Atomic store to pointer atomic variable
+ *
+ * @param ptr Pointer to a pointer atomic variable
+ * @param val Value to write to the atomic variable
+ * @param mmodel Memory model associated with the store
+ */
+static inline void odp_atomic_ptr_store(odp_atomic_ptr_t *ptr,
+ void *val,
+ odp_memmodel_t mmodel)
+{
+ __atomic_store_n(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic exchange (swap) of pointer atomic variable
+ *
+ * @param ptr Pointer to a pointer atomic variable
+ * @param val New value to write
+ * @param mmodel Memory model associated with the exchange operation
+ *
+ * @return Old value of variable
+ */
+static inline void *odp_atomic_ptr_xchg(odp_atomic_ptr_t *ptr,
+ void *val,
+ odp_memmodel_t mmodel)
+
+{
+ return __atomic_exchange_n(&ptr->v, val, mmodel);
+}
+
+/**
+ * Atomic compare and exchange (swap) of pointer atomic variable
+ * "Strong" semantics, will not fail spuriously.
+ *
+ * @param ptr Pointer to a pointer atomic variable
+ * @param exp_p Pointer to expected value (updated on failure)
+ * @param val New value to write
+ * @param succ Memory model associated with a successful compare-and-swap
+ * operation
+ * @param fail Memory model associated with a failed compare-and-swap
+ * operation
+ *
+ * @return 1 (true) if exchange successful, 0 (false) if not successful (and
+ * '*exp_p' updated with current value)
+ */
+static inline int odp_atomic_ptr_cmp_xchg_strong(odp_atomic_ptr_t *ptr,
+ void **exp_p,
+ void *val,
+ odp_memmodel_t succ,
+ odp_memmodel_t fail)
+{
+ return __atomic_compare_exchange_n(&ptr->v, exp_p, val,
+ false/*strong*/, succ, fail);
+}
+
+/**
+ * @}
+ */
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
@@ -7,6 +7,7 @@
#include <odp_barrier.h>
#include <odp_sync.h>
#include <odp_spin_internal.h>
+#include <odp_atomic_internal.h>
void odp_barrier_init_count(odp_barrier_t *barrier, uint32_t count)
{
@@ -32,7 +33,7 @@ void odp_barrier_sync(odp_barrier_t *barrier)
uint32_t count;
int wasless;
- __atomic_thread_fence(__ATOMIC_SEQ_CST);
+ ODP_COMPILER_HW_BARRIER();
count = odp_atomic_fetch_inc_u32(&barrier->bar);
wasless = count < barrier->count;
@@ -45,5 +46,5 @@ void odp_barrier_sync(odp_barrier_t *barrier)
odp_spin();
}
- __atomic_thread_fence(__ATOMIC_SEQ_CST);
+ ODP_COMPILER_HW_BARRIER();
}
@@ -6,64 +6,63 @@
#include <stdbool.h>
#include <odp_atomic.h>
+#include <odp_atomic_internal.h>
#include <odp_rwlock.h>
#include <odp_spin_internal.h>
void odp_rwlock_init(odp_rwlock_t *rwlock)
{
- rwlock->cnt = 0;
+ odp_atomic_init_u32(&rwlock->cnt, 0);
}
void odp_rwlock_read_lock(odp_rwlock_t *rwlock)
{
- int32_t cnt;
+ uint32_t cnt;
int is_locked = 0;
while (is_locked == 0) {
- cnt = rwlock->cnt;
+ cnt = odp_atomic_u32_load_mm(&rwlock->cnt, ODP_MEMMODEL_RLX);
/* waiting for read lock */
- if (cnt < 0) {
+ if ((int32_t)cnt < 0) {
odp_spin();
continue;
}
- is_locked = __atomic_compare_exchange_n(&rwlock->cnt,
+ is_locked = odp_atomic_u32_cmp_xchg_strong_mm(&rwlock->cnt,
&cnt,
cnt + 1,
- false/*strong*/,
- __ATOMIC_ACQUIRE,
- __ATOMIC_RELAXED);
+ ODP_MEMMODEL_ACQ,
+ ODP_MEMMODEL_RLX);
}
}
void odp_rwlock_read_unlock(odp_rwlock_t *rwlock)
{
- (void)__atomic_sub_fetch(&rwlock->cnt, 1, __ATOMIC_RELEASE);
+ odp_atomic_u32_sub_mm(&rwlock->cnt, 1, ODP_MEMMODEL_RLS);
}
void odp_rwlock_write_lock(odp_rwlock_t *rwlock)
{
- int32_t cnt;
+ uint32_t cnt;
int is_locked = 0;
while (is_locked == 0) {
- int32_t zero = 0;
- cnt = rwlock->cnt;
+ uint32_t zero = 0;
+ cnt = odp_atomic_u32_load_mm(&rwlock->cnt, ODP_MEMMODEL_RLX);
/* lock aquired, wait */
if (cnt != 0) {
odp_spin();
continue;
}
- is_locked = __atomic_compare_exchange_n(&rwlock->cnt,
+ is_locked = odp_atomic_u32_cmp_xchg_strong_mm(&rwlock->cnt,
&zero,
- -1,
- false/*strong*/,
- __ATOMIC_ACQUIRE,
- __ATOMIC_RELAXED);
+ (uint32_t)-1,
+ ODP_MEMMODEL_ACQ,
+ ODP_MEMMODEL_RLX);
}
}
void odp_rwlock_write_unlock(odp_rwlock_t *rwlock)
{
- (void)__atomic_add_fetch(&rwlock->cnt, 1, __ATOMIC_RELEASE);
+ odp_atomic_u32_store_mm(&rwlock->cnt, 0, ODP_MEMMODEL_RLS);
}
@@ -5,7 +5,7 @@
*/
#include <odp_ticketlock.h>
-#include <odp_atomic.h>
+#include <odp_atomic_internal.h>
#include <odp_sync.h>
#include <odp_spin_internal.h>
@@ -13,7 +13,7 @@
void odp_ticketlock_init(odp_ticketlock_t *ticketlock)
{
odp_atomic_init_u32(&ticketlock->next_ticket, 0);
- ticketlock->cur_ticket = 0;
+ odp_atomic_init_u32(&ticketlock->cur_ticket, 0);
}
@@ -23,29 +23,24 @@ void odp_ticketlock_lock(odp_ticketlock_t *ticketlock)
ticket = odp_atomic_fetch_inc_u32(&ticketlock->next_ticket);
- while (ticket != ticketlock->cur_ticket)
+ while (ticket != odp_atomic_u32_load_mm(&ticketlock->cur_ticket,
+ ODP_MEMMODEL_ACQ))
odp_spin();
-
- __atomic_thread_fence(__ATOMIC_ACQUIRE);
}
void odp_ticketlock_unlock(odp_ticketlock_t *ticketlock)
{
- odp_sync_stores();
-
- ticketlock->cur_ticket++;
-
+ odp_atomic_u32_add_mm(&ticketlock->cur_ticket, 1, ODP_MEMMODEL_RLS);
#if defined __OCTEON__
- odp_sync_stores(); /* Possibly SYNCW instead of SYNC */
-#else
- __atomic_thread_fence(__ATOMIC_RELEASE);
+ /* Flush OCTEON write buffer (syncw) */
+ odp_sync_stores();
#endif
}
int odp_ticketlock_is_locked(odp_ticketlock_t *ticketlock)
{
- return ticketlock->cur_ticket !=
+ return odp_atomic_load_u32(&ticketlock->cur_ticket) !=
odp_atomic_load_u32(&ticketlock->next_ticket);
}
Signed-off-by: Ola Liljedahl <ola.liljedahl@linaro.org>
---
This patch introduces odp_atomic_internal.h and changes the usage from
GCC __atomic builtins to the odp_atomic_internal.h API where possible.
Only odp_ring.c (which is not a proper ODP module) still uses GCC
__atomic builtins.

odp_atomic_internal.h: reuses the types from odp_atomic.h but also adds
odp_atomic_ptr_t. Implements the following operations on 32- and 64-bit
atomics: load, store, xchg, cmp_xchg_strong, fetch_add, add, fetch_sub,
sub. And init, load, store and xchg for atomic pointers. All functions
take a C11-style memory model parameter. Emulated 64-bit support for
functionally challenged 32-bit architectures (e.g. PPC32). Defines the
ODP_COMPILER_HW_BARRIER macro, a full sequentially consistent memory
barrier.

odp_rwlock.h: updated to use odp_atomic.h types.
odp_ticketlock.h: updated to use odp_atomic.h types.
odp_barrier.c: updated to use odp_atomic_internal.h operations.
odp_rwlock.c: updated to use odp_atomic_internal.h operations.
odp_ticketlock.c: updated to use odp_atomic_internal.h operations.

 platform/linux-generic/include/api/odp_rwlock.h  |   4 +-
 .../linux-generic/include/api/odp_ticketlock.h   |   2 +-
 .../linux-generic/include/odp_atomic_internal.h  | 523 +++++++++++++++++++++
 platform/linux-generic/odp_barrier.c             |   5 +-
 platform/linux-generic/odp_rwlock.c              |  35 +-
 platform/linux-generic/odp_ticketlock.c          |  21 +-
 6 files changed, 555 insertions(+), 35 deletions(-)
 create mode 100644 platform/linux-generic/include/odp_atomic_internal.h
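
For reference, a minimal usage sketch of the new API: per-port statistics
counters updated concurrently by worker threads with relaxed ordering (the
counters carry no synchronization, so stronger orderings are not needed).
The port_stats_t type and the function names are illustrative, and
odp_atomic_init_u64() is assumed to be provided by odp_atomic.h analogous
to the odp_atomic_init_u32() used elsewhere in this patch.

#include <stdint.h>
#include <odp_atomic.h>
#include <odp_atomic_internal.h>

/* Illustrative per-port packet statistics */
typedef struct {
	odp_atomic_u64_t rx_packets;
	odp_atomic_u64_t rx_bytes;
} port_stats_t;

static void port_stats_init(port_stats_t *stats)
{
	/* Assumed to mirror odp_atomic_init_u32() used elsewhere in this patch */
	odp_atomic_init_u64(&stats->rx_packets, 0);
	odp_atomic_init_u64(&stats->rx_bytes, 0);
}

/* Called concurrently from multiple worker threads */
static void port_stats_update(port_stats_t *stats, uint32_t len)
{
	/* Plain counters: relaxed ordering is sufficient since the values
	 * are not used to order accesses to any other data */
	odp_atomic_u64_add_mm(&stats->rx_packets, 1, ODP_MEMMODEL_RLX);
	odp_atomic_u64_add_mm(&stats->rx_bytes, len, ODP_MEMMODEL_RLX);
}

static uint64_t port_stats_packets(port_stats_t *stats)
{
	/* Atomic also on the lock-based 64-bit emulation (e.g. PPC32) */
	return odp_atomic_u64_load_mm(&stats->rx_packets, ODP_MEMMODEL_RLX);
}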