#ifndef _RTE_SPINLOCK_H_
#define _RTE_SPINLOCK_H_

#define RTE_SPINLOCK_INITIALIZER { 0 }

#ifdef RTE_FORCE_INTRINSICS
/* rte_spinlock_lock(): spin on a compare-and-swap until the lock is taken */
while (!__atomic_compare_exchange_n(&sl->locked, &exp, 1, 0,
			__ATOMIC_ACQUIRE, __ATOMIC_RELAXED)) {
	while (__atomic_load_n(&sl->locked, __ATOMIC_RELAXED))
		rte_pause();
	exp = 0;
}

/* rte_spinlock_unlock(): release the lock with a store-release */
__atomic_store_n(&sl->locked, 0, __ATOMIC_RELEASE);

/* rte_spinlock_trylock(): a single compare-and-swap attempt */
return __atomic_compare_exchange_n(&sl->locked, &exp, 1,
			0, /* disallow spurious failure */
			__ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
#endif /* RTE_FORCE_INTRINSICS */

/* rte_spinlock_is_locked(): read the lock word with load-acquire */
return __atomic_load_n(&sl->locked, __ATOMIC_ACQUIRE);

#define RTE_SPINLOCK_RECURSIVE_INITIALIZER {RTE_SPINLOCK_INITIALIZER, -1, 0}

/* rte_spinlock_recursive_lock() / rte_spinlock_recursive_trylock():
 * take the inner lock only when the calling thread does not already own it */
if (slr->user != id) {
	...

/* rte_spinlock_recursive_unlock(): release the inner lock once the
 * nesting count drops back to zero */
if (--(slr->count) == 0) {
	...
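To make the API above concrete, here is a minimal usage sketch. It is not part of the header: the lock and counter names are hypothetical, and only the rte_spinlock_lock/unlock/trylock calls shown above are used.

#include <stdint.h>
#include <rte_spinlock.h>

/* Hypothetical shared state, protected by a statically initialized lock. */
static rte_spinlock_t counter_lock = RTE_SPINLOCK_INITIALIZER;
static uint64_t counter;

static void
counter_bump(void)
{
	/* Blocking acquire: spins (calling rte_pause()) until the lock is free. */
	rte_spinlock_lock(&counter_lock);
	counter++;
	rte_spinlock_unlock(&counter_lock);
}

static int
counter_try_bump(void)
{
	/* Non-blocking attempt: rte_spinlock_trylock() returns 1 on success,
	 * 0 if the lock is already held by another thread. */
	if (rte_spinlock_trylock(&counter_lock) == 0)
		return 0;
	counter++;
	rte_spinlock_unlock(&counter_lock);
	return 1;
}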
 
static void rte_spinlock_init(rte_spinlock_t *sl)
static void rte_spinlock_lock(rte_spinlock_t *sl)
static void rte_spinlock_unlock(rte_spinlock_t *sl)
static int rte_spinlock_trylock(rte_spinlock_t *sl)
static int rte_spinlock_is_locked(rte_spinlock_t *sl)

static int rte_tm_supported(void)
static void rte_spinlock_lock_tm(rte_spinlock_t *sl)
static void rte_spinlock_unlock_tm(rte_spinlock_t *sl)
static int rte_spinlock_trylock_tm(rte_spinlock_t *sl)

static void rte_spinlock_recursive_init(rte_spinlock_recursive_t *slr)
static void rte_spinlock_recursive_lock(rte_spinlock_recursive_t *slr)
static void rte_spinlock_recursive_unlock(rte_spinlock_recursive_t *slr)
static int rte_spinlock_recursive_trylock(rte_spinlock_recursive_t *slr)

static void rte_spinlock_recursive_lock_tm(rte_spinlock_recursive_t *slr)
static void rte_spinlock_recursive_unlock_tm(rte_spinlock_recursive_t *slr)
static int rte_spinlock_recursive_trylock_tm(rte_spinlock_recursive_t *slr)

static void rte_pause(void)
static int rte_gettid(void)
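The recursive and transactional-memory variants listed above follow the same call pattern as the plain lock. A short sketch, assuming a hypothetical stats counter and lock (the names here are illustrative, not from the header):

#include <stdint.h>
#include <rte_spinlock.h>

/* Hypothetical shared counter protected by a recursive spinlock. */
static rte_spinlock_recursive_t stats_lock = RTE_SPINLOCK_RECURSIVE_INITIALIZER;
static uint64_t stats_pkts;

static void
stats_add(uint64_t n)
{
	/* The same thread (identified via rte_gettid()) may nest these calls;
	 * only the outermost unlock releases the underlying rte_spinlock_t. */
	rte_spinlock_recursive_lock(&stats_lock);
	stats_pkts += n;
	rte_spinlock_recursive_unlock(&stats_lock);
}

static void
stats_add_tm(uint64_t n)
{
	/* The *_tm variants attempt a hardware memory transaction first
	 * (when rte_tm_supported() reports support) and fall back to taking
	 * the lock, so the call pattern is identical to the plain variants. */
	rte_spinlock_recursive_lock_tm(&stats_lock);
	stats_pkts += n;
	rte_spinlock_recursive_unlock_tm(&stats_lock);
}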