#ifndef _RTE_SPINLOCK_H_
#define _RTE_SPINLOCK_H_
#ifdef RTE_FORCE_INTRINSICS
#define RTE_SPINLOCK_INITIALIZER { 0 }
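/*
 * Sketch, not from the header as extracted: the { 0 } initializer and the
 * sl->locked accesses below suggest rte_spinlock_t is a struct with a single
 * lock word. The exact field layout here is an assumption consistent with
 * those fragments; the variable name in the usage line is illustrative.
 * Locks can alternatively be initialized at run time with rte_spinlock_init().
 */
typedef struct {
        volatile int locked; /* lock status: 0 = unlocked, nonzero = locked */
} rte_spinlock_t;

static rte_spinlock_t lock = RTE_SPINLOCK_INITIALIZER;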
#ifdef RTE_FORCE_INTRINSICS
        while (!__atomic_compare_exchange_n(&sl->locked, &exp, 1, 0,
                        __ATOMIC_ACQUIRE, __ATOMIC_RELAXED)) {
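/*
 * A sketch of the full rte_spinlock_lock() built around the fragment above,
 * assuming the rte_spinlock_t layout sketched earlier. On CAS failure the
 * caller waits for the lock word to read 0 before retrying, using
 * rte_wait_until_equal_32() from the function list at the end of this
 * section; exp must be reset to 0 because a failed CAS overwrites it with
 * the currently observed value.
 */
static inline void
rte_spinlock_lock(rte_spinlock_t *sl)
{
        int exp = 0;

        while (!__atomic_compare_exchange_n(&sl->locked, &exp, 1, 0,
                        __ATOMIC_ACQUIRE, __ATOMIC_RELAXED)) {
                rte_wait_until_equal_32((volatile uint32_t *)&sl->locked,
                                0, __ATOMIC_RELAXED);
                exp = 0;
        }
}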
#ifdef RTE_FORCE_INTRINSICS
        __atomic_store_n(&sl->locked, 0, __ATOMIC_RELEASE);
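/*
 * Usage sketch: the release store above is the entire unlock path, so a
 * critical section pairs the two calls as follows. The counter and lock
 * names are illustrative, not from the header.
 */
static rte_spinlock_t cnt_lock = RTE_SPINLOCK_INITIALIZER;
static uint64_t cnt;

static void
cnt_bump(void)
{
        rte_spinlock_lock(&cnt_lock);   /* acquire: spins until free */
        cnt++;                          /* protected update */
        rte_spinlock_unlock(&cnt_lock); /* release store sets locked = 0 */
}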
#ifdef RTE_FORCE_INTRINSICS
        return __atomic_compare_exchange_n(&sl->locked, &exp, 1,
                        0, /* weak = 0: disallow spurious failure */
                        __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
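/*
 * Usage sketch: the CAS above makes rte_spinlock_trylock() return 1 on
 * success and 0 if the lock is already held, and the prototype is marked
 * __rte_warn_unused_result, so the return value must be checked.
 * do_fallback_work() is hypothetical.
 */
if (rte_spinlock_trylock(&cnt_lock)) {
        cnt++;
        rte_spinlock_unlock(&cnt_lock);
} else {
        do_fallback_work(); /* hypothetical: lock was contended */
}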
        return __atomic_load_n(&sl->locked, __ATOMIC_ACQUIRE);
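/*
 * Sketch of the enclosing rte_spinlock_is_locked(), per the prototype in
 * the function list below: a plain acquire load of the lock word, returning
 * nonzero while some thread holds the lock. Suitable for assertions; the
 * state can change immediately after the load.
 */
static inline int
rte_spinlock_is_locked(rte_spinlock_t *sl)
{
        return __atomic_load_n(&sl->locked, __ATOMIC_ACQUIRE);
}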
#define RTE_SPINLOCK_RECURSIVE_INITIALIZER {RTE_SPINLOCK_INITIALIZER, -1, 0}
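/*
 * The recursive initializer implies a three-field structure: an inner
 * spinlock, an owner id initialized to -1 (no owner), and a nesting count
 * initialized to 0. The field names user and count match the slr->user and
 * slr->count accesses in the fragments below; the inner field name sl is an
 * assumption.
 */
typedef struct {
        rte_spinlock_t sl;  /* underlying non-recursive lock */
        volatile int user;  /* owning thread id, -1 when unowned */
        volatile int count; /* nesting depth for the current owner */
} rte_spinlock_recursive_t;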
        if (slr->user != id) {
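/*
 * A sketch of rte_spinlock_recursive_lock() around the ownership check
 * above, using rte_gettid() from the function list: only a thread that does
 * not already own the lock takes the inner spinlock; every call, nested or
 * not, bumps the nesting count.
 */
static inline void
rte_spinlock_recursive_lock(rte_spinlock_recursive_t *slr)
{
        int id = rte_gettid();

        if (slr->user != id) {
                rte_spinlock_lock(&slr->sl);
                slr->user = id;
        }
        slr->count++;
}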
        if (--(slr->count) == 0) {
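/*
 * The matching unlock sketch: the inner lock is released only when the
 * outermost unlock drops the nesting count back to zero, restoring the -1
 * "unowned" marker from the initializer.
 */
static inline void
rte_spinlock_recursive_unlock(rte_spinlock_recursive_t *slr)
{
        if (--(slr->count) == 0) {
                slr->user = -1;
                rte_spinlock_unlock(&slr->sl);
        }
}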
        if (slr->user != id) {
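/*
 * Recursive trylock sketch, assuming the same ownership test as the lock
 * path: a contended inner lock makes the call fail with 0 instead of
 * spinning, while recursion by the current owner always succeeds.
 */
static inline __rte_warn_unused_result int
rte_spinlock_recursive_trylock(rte_spinlock_recursive_t *slr)
{
        int id = rte_gettid();

        if (slr->user != id) {
                if (rte_spinlock_trylock(&slr->sl) == 0)
                        return 0;
                slr->user = id;
        }
        slr->count++;
        return 1;
}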
/* Empty fallback definition for the warn-unused-result attribute macro. */
#define __rte_warn_unused_result

/* Helpers used by the lock implementations */
static int rte_gettid(void);
static int rte_tm_supported(void);
static __rte_always_inline void rte_wait_until_equal_32(volatile uint32_t *addr,
                uint32_t expected, int memorder);

/* Plain spinlock API */
static void rte_spinlock_init(rte_spinlock_t *sl);
static void rte_spinlock_lock(rte_spinlock_t *sl);
static __rte_warn_unused_result int rte_spinlock_trylock(rte_spinlock_t *sl);
static void rte_spinlock_unlock(rte_spinlock_t *sl);
static int rte_spinlock_is_locked(rte_spinlock_t *sl);

/* Transactional-memory (TM) variants */
static void rte_spinlock_lock_tm(rte_spinlock_t *sl);
static __rte_warn_unused_result int rte_spinlock_trylock_tm(rte_spinlock_t *sl);
static void rte_spinlock_unlock_tm(rte_spinlock_t *sl);

/* Recursive spinlock API */
static void rte_spinlock_recursive_init(rte_spinlock_recursive_t *slr);
static void rte_spinlock_recursive_lock(rte_spinlock_recursive_t *slr);
static __rte_warn_unused_result int rte_spinlock_recursive_trylock(rte_spinlock_recursive_t *slr);
static void rte_spinlock_recursive_unlock(rte_spinlock_recursive_t *slr);

/* Recursive TM variants */
static void rte_spinlock_recursive_lock_tm(rte_spinlock_recursive_t *slr);
static __rte_warn_unused_result int rte_spinlock_recursive_trylock_tm(rte_spinlock_recursive_t *slr);
static void rte_spinlock_recursive_unlock_tm(rte_spinlock_recursive_t *slr);
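/*
 * Usage sketch for the _tm variants: they share the plain signatures and,
 * where rte_tm_supported() reports hardware transactional memory, can elide
 * the lock, falling back to normal locking otherwise. Call sites therefore
 * look identical to the plain API; cnt and cnt_lock are the illustrative
 * names from the earlier example.
 */
rte_spinlock_lock_tm(&cnt_lock);
cnt++;
rte_spinlock_unlock_tm(&cnt_lock);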