/*
 * Layout of the rwlock state word (rwl->cnt):
 *  - bit 0 (RTE_RWLOCK_WAIT):  a writer is waiting for the lock
 *  - bit 1 (RTE_RWLOCK_WRITE): a writer holds the lock
 *  - bits 2.. : number of readers holding the lock, counted in
 *    units of RTE_RWLOCK_READ
 *
 * NOTE(review): extraction fused the upstream line numbers (52..65)
 * into these directives; they are removed below.
 */
#define RTE_RWLOCK_WAIT	 0x1	/* Writer is waiting */
#define RTE_RWLOCK_WRITE 0x2	/* Writer has the lock */
#define RTE_RWLOCK_MASK  (RTE_RWLOCK_WAIT | RTE_RWLOCK_WRITE)
				/* Writer is waiting or has the lock */
#define RTE_RWLOCK_READ	 0x4	/* Each reader adds this to cnt */

/* Static initializer: unlocked, no readers, no waiting writer. */
#define RTE_RWLOCK_INITIALIZER { 0 }
96 while (__atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED)
101 x = __atomic_add_fetch(&rwl->cnt, RTE_RWLOCK_READ,
105 if (
likely(!(x & RTE_RWLOCK_MASK)))
109 __atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_READ,
129 x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);
132 if (x & RTE_RWLOCK_MASK)
136 x = __atomic_add_fetch(&rwl->cnt, RTE_RWLOCK_READ,
140 if (
unlikely(x & RTE_RWLOCK_MASK)) {
141 __atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_READ,
158 __atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_READ, __ATOMIC_RELEASE);
176 x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);
177 if (x < RTE_RWLOCK_WRITE &&
178 __atomic_compare_exchange_n(&rwl->cnt, &x, x + RTE_RWLOCK_WRITE,
179 1, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
197 x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);
200 if (
likely(x < RTE_RWLOCK_WRITE)) {
202 if (__atomic_compare_exchange_n(&rwl->cnt, &x, RTE_RWLOCK_WRITE, 1,
203 __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
227 __atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_WRITE, __ATOMIC_RELEASE);
/*
 * NOTE(review): the lines below appear to be function definition
 * headers whose bodies were lost in extraction (upstream DPDK defines
 * most of them as static inline functions in generic/rte_rwlock.h and
 * per-architecture headers). Emitted as forward declarations with
 * terminating semicolons so the text is syntactically coherent;
 * restore the bodies from the canonical source.
 */

/* CPU pause hint for spin loops — presumably per-arch; verify. */
static void rte_pause(void);

/* _tm variants look like hardware-transactional-memory fast paths
 * (per-arch in upstream DPDK) — TODO confirm semantics. */
static void rte_rwlock_read_unlock_tm(rte_rwlock_t *rwl);

/* Release the write lock (clears RTE_RWLOCK_WRITE). */
static void rte_rwlock_write_unlock(rte_rwlock_t *rwl);

static void rte_rwlock_write_unlock_tm(rte_rwlock_t *rwl);

/* Try to take the write lock; returns 0 on success. */
static int rte_rwlock_write_trylock(rte_rwlock_t *rwl);

/* Take the write lock, spinning until acquired. */
static void rte_rwlock_write_lock(rte_rwlock_t *rwl);

static void rte_rwlock_read_lock_tm(rte_rwlock_t *rwl);

/* Take a read lock, spinning until acquired. */
static void rte_rwlock_read_lock(rte_rwlock_t *rwl);

/* Initialize the lock — presumably to the unlocked state described
 * by RTE_RWLOCK_INITIALIZER; verify. */
static void rte_rwlock_init(rte_rwlock_t *rwl);

static void rte_rwlock_write_lock_tm(rte_rwlock_t *rwl);

/* Try to take a read lock; returns 0 on success. */
static int rte_rwlock_read_trylock(rte_rwlock_t *rwl);

/* Release a read lock (drops one RTE_RWLOCK_READ from the count). */
static void rte_rwlock_read_unlock(rte_rwlock_t *rwl);