#ifndef _RTE_STACK_LF_GENERIC_H_
#define _RTE_STACK_LF_GENERIC_H_

#include <rte_branch_prediction.h>
#include <rte_prefetch.h>

static __rte_always_inline unsigned int
__rte_stack_lf_count(struct rte_stack *s)
{
	/* list->len is not updated atomically with the list contents, so
	 * the count is approximate: it may under-report the number of
	 * elements, but never over-reports it.
	 */
	return (unsigned int)rte_atomic64_read(&s->stack_lf.used.len);
}
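/*
 * For context, a sketch of the types the functions below operate on. In
 * DPDK these live in rte_stack.h (reproduced here from that header as an
 * aid, not as part of this file); the {top, cnt} pair is 16-byte aligned
 * so the 128-bit compare-exchange can swap it atomically:
 *
 *	struct rte_stack_lf_elem {
 *		void *data;			// element payload
 *		struct rte_stack_lf_elem *next;	// element below this one
 *	};
 *
 *	struct rte_stack_lf_head {
 *		struct rte_stack_lf_elem *top;	// stack top
 *		uint64_t cnt;			// modification counter (ABA)
 *	};
 */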
static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
			  struct rte_stack_lf_elem *first,
			  struct rte_stack_lf_elem *last,
			  unsigned int num)
{
#ifndef RTE_ARCH_X86_64
	RTE_SET_USED(first);
	RTE_SET_USED(last);
	RTE_SET_USED(list);
	RTE_SET_USED(num);
#else
	struct rte_stack_lf_head old_head;
	int success;

	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;

		/* Fence for weak memory models: order the list->head load
		 * against the release CAS below.
		 */
		rte_smp_mb();

		/* Swing the top pointer to the first new element and make
		 * the last new element point to the old top.
		 */
		new_head.top = first;
		new_head.cnt = old_head.cnt + 1;

		last->next = old_head.top;

		/* old_head is refreshed on failure. */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
	} while (success == 0);

	rte_atomic64_add(&list->len, num);
#endif
}
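/*
 * On x86-64, rte_atomic128_cmp_exchange() compiles down to cmpxchg16b,
 * swapping the {top, cnt} pair in one shot; bumping cnt on every change
 * makes a head that was popped and re-pushed in between (the ABA case)
 * fail the compare. As an illustration only, a rough equivalent using
 * the GCC/Clang builtin (assumes -mcx16 and a 16-byte-aligned head):
 *
 *	success = __atomic_compare_exchange(
 *			(__int128 *)&list->head,
 *			(__int128 *)&old_head,
 *			(__int128 *)&new_head,
 *			0, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
 */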
static __rte_always_inline void *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
			 unsigned int num,
			 void **obj_table,
			 struct rte_stack_lf_elem **last)
{
#ifndef RTE_ARCH_X86_64
	RTE_SET_USED(obj_table);
	RTE_SET_USED(last);
	RTE_SET_USED(list);
	RTE_SET_USED(num);

	return NULL;
#else
	struct rte_stack_lf_head old_head;
	int success;

	/* Reserve num elements, if available. */
	while (1) {
		uint64_t len = rte_atomic64_read(&list->len);

		if (unlikely(len < num))
			return NULL;

		if (rte_atomic64_cmpset((volatile uint64_t *)&list->len,
					len, len - num))
			break;
	}

	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;
		struct rte_stack_lf_elem *tmp;
		unsigned int i;

		/* Fence for weak memory models: order the element reads
		 * against the list->head load.
		 */
		rte_smp_mb();

		/* Walk num elements from the old top; a racing pop can cut
		 * the chain short, in which case we retry.
		 */
		tmp = old_head.top;
		for (i = 0; i < num && tmp != NULL; i++) {
			rte_prefetch0(tmp->next);
			if (obj_table)
				obj_table[i] = tmp->data;
			if (last)
				*last = tmp;
			tmp = tmp->next;
		}
		if (i != num)
			continue;

		new_head.top = tmp;
		new_head.cnt = old_head.cnt + 1;

		/* old_head is refreshed on failure. */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
	} while (success == 0);

	return old_head.top;
#endif
}
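/*
 * These helpers back the public rte_stack API rather than being called
 * directly. A usage sketch against that API (assumes rte_stack.h and a
 * DPDK release with the lock-free flag RTE_STACK_F_LF):
 *
 *	struct rte_stack *s;
 *	void *obj = some_ptr;
 *
 *	s = rte_stack_create("lf_stack", 1024, rte_socket_id(),
 *			     RTE_STACK_F_LF);
 *	if (s != NULL) {
 *		rte_stack_push(s, &obj, 1);	// push one pointer
 *		rte_stack_pop(s, &obj, 1);	// pop it back
 *		rte_stack_free(s);
 *	}
 */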
/*
 * EAL primitives referenced above (declared in rte_atomic.h and
 * rte_prefetch.h):
 *   rte_atomic64_read(), rte_atomic64_add(), rte_atomic64_cmpset(),
 *   rte_atomic128_cmp_exchange() (experimental), rte_smp_mb(),
 *   rte_prefetch0()
 */

#endif /* _RTE_STACK_LF_GENERIC_H_ */