DPDK 21.11.0
rte_atomic.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
3 */
4
5#ifndef _RTE_ATOMIC_H_
6#define _RTE_ATOMIC_H_
7
15#include <stdint.h>
16#include <rte_common.h>
17
#ifdef __DOXYGEN__

/** General memory barrier: orders both loads and stores issued before it
 *  against those issued after it. */
static inline void rte_mb(void);

/** Write memory barrier: orders stores issued before it against stores
 *  issued after it. */
static inline void rte_wmb(void);

/** Read memory barrier: orders loads issued before it against loads
 *  issued after it. */
static inline void rte_rmb(void);

/** General memory barrier between lcores (SMP scope). */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores (SMP scope). */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores (SMP scope). */
static inline void rte_smp_rmb(void);

/** General memory barrier for I/O (device) memory accesses. */
static inline void rte_io_mb(void);

/** Write memory barrier for I/O (device) memory accesses. */
static inline void rte_io_wmb(void);

/** Read memory barrier for I/O (device) memory accesses. */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
111
/**
 * Compiler barrier.
 *
 * Prevents the compiler from reordering memory accesses across this
 * point; it emits no CPU instruction (the empty asm with a "memory"
 * clobber only constrains the optimizer).
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
121
125static inline void rte_atomic_thread_fence(int memorder);
126
127/*------------------------- 16 bit atomic operations -------------------------*/
128
/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
155
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	/* clang has no __atomic_exchange_2; use the type-generic builtin. */
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
185
/**
 * The atomic counter structure (16-bit).
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }
197
204static inline void
206{
207 v->cnt = 0;
208}
209
218static inline int16_t
220{
221 return v->cnt;
222}
223
232static inline void
233rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
234{
235 v->cnt = new_value;
236}
237
246static inline void
248{
249 __sync_fetch_and_add(&v->cnt, inc);
250}
251
260static inline void
262{
263 __sync_fetch_and_sub(&v->cnt, dec);
264}
265
272static inline void
274
275#ifdef RTE_FORCE_INTRINSICS
276static inline void
278{
279 rte_atomic16_add(v, 1);
280}
281#endif
282
289static inline void
291
292#ifdef RTE_FORCE_INTRINSICS
293static inline void
295{
296 rte_atomic16_sub(v, 1);
297}
298#endif
299
313static inline int16_t
315{
316 return __sync_add_and_fetch(&v->cnt, inc);
317}
318
333static inline int16_t
335{
336 return __sync_sub_and_fetch(&v->cnt, dec);
337}
338
351
/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * @return
 *   True if the result after the increment is 0; false otherwise.
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif
358
371
/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * @return
 *   True if the result after the decrement is 0; false otherwise.
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
378
391
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set (non-zero), return 0 (failure);
 * otherwise set the counter to 1 and return non-zero (success).
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif
398
405static inline void rte_atomic16_clear(rte_atomic16_t *v)
406{
407 v->cnt = 0;
408}
409
410/*------------------------- 32 bit atomic operations -------------------------*/
411
/**
 * Atomic compare and set (32-bit).
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
438
/**
 * Atomic exchange (32-bit).
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	/* clang has no __atomic_exchange_4; use the type-generic builtin. */
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
468
/**
 * The atomic counter structure (32-bit).
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }
480
487static inline void
489{
490 v->cnt = 0;
491}
492
501static inline int32_t
503{
504 return v->cnt;
505}
506
515static inline void
516rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
517{
518 v->cnt = new_value;
519}
520
529static inline void
531{
532 __sync_fetch_and_add(&v->cnt, inc);
533}
534
543static inline void
545{
546 __sync_fetch_and_sub(&v->cnt, dec);
547}
548
555static inline void
557
558#ifdef RTE_FORCE_INTRINSICS
559static inline void
561{
562 rte_atomic32_add(v, 1);
563}
564#endif
565
572static inline void
574
575#ifdef RTE_FORCE_INTRINSICS
576static inline void
578{
579 rte_atomic32_sub(v,1);
580}
581#endif
582
596static inline int32_t
598{
599 return __sync_add_and_fetch(&v->cnt, inc);
600}
601
616static inline int32_t
618{
619 return __sync_sub_and_fetch(&v->cnt, dec);
620}
621
634
/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * @return
 *   True if the result after the increment is 0; false otherwise.
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif
641
654
/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * @return
 *   True if the result after the decrement is 0; false otherwise.
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
661
674
/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set (non-zero), return 0 (failure);
 * otherwise set the counter to 1 and return non-zero (success).
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif
681
688static inline void rte_atomic32_clear(rte_atomic32_t *v)
689{
690 v->cnt = 0;
691}
692
693/*------------------------- 64 bit atomic operations -------------------------*/
694
/**
 * Atomic compare and set (64-bit).
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
720
/**
 * Atomic exchange (64-bit).
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	/* clang has no __atomic_exchange_8; use the type-generic builtin. */
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
750
/**
 * The atomic counter structure (64-bit).
 */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
762
769static inline void
771
772#ifdef RTE_FORCE_INTRINSICS
773static inline void
775{
776#ifdef __LP64__
777 v->cnt = 0;
778#else
779 int success = 0;
780 uint64_t tmp;
781
782 while (success == 0) {
783 tmp = v->cnt;
784 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
785 tmp, 0);
786 }
787#endif
788}
789#endif
790
799static inline int64_t
801
802#ifdef RTE_FORCE_INTRINSICS
803static inline int64_t
805{
806#ifdef __LP64__
807 return v->cnt;
808#else
809 int success = 0;
810 uint64_t tmp;
811
812 while (success == 0) {
813 tmp = v->cnt;
814 /* replace the value by itself */
815 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
816 tmp, tmp);
817 }
818 return tmp;
819#endif
820}
821#endif
822
831static inline void
832rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
833
834#ifdef RTE_FORCE_INTRINSICS
835static inline void
836rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
837{
838#ifdef __LP64__
839 v->cnt = new_value;
840#else
841 int success = 0;
842 uint64_t tmp;
843
844 while (success == 0) {
845 tmp = v->cnt;
846 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
847 tmp, new_value);
848 }
849#endif
850}
851#endif
852
861static inline void
863
864#ifdef RTE_FORCE_INTRINSICS
865static inline void
866rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
867{
868 __sync_fetch_and_add(&v->cnt, inc);
869}
870#endif
871
880static inline void
882
883#ifdef RTE_FORCE_INTRINSICS
884static inline void
885rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
886{
887 __sync_fetch_and_sub(&v->cnt, dec);
888}
889#endif
890
897static inline void
899
900#ifdef RTE_FORCE_INTRINSICS
901static inline void
903{
904 rte_atomic64_add(v, 1);
905}
906#endif
907
914static inline void
916
917#ifdef RTE_FORCE_INTRINSICS
918static inline void
920{
921 rte_atomic64_sub(v, 1);
922}
923#endif
924
938static inline int64_t
940
941#ifdef RTE_FORCE_INTRINSICS
942static inline int64_t
944{
945 return __sync_add_and_fetch(&v->cnt, inc);
946}
947#endif
948
962static inline int64_t
964
965#ifdef RTE_FORCE_INTRINSICS
966static inline int64_t
968{
969 return __sync_sub_and_fetch(&v->cnt, dec);
970}
971#endif
972
985
/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * @return
 *   True if the result after the increment is 0; false otherwise.
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif
992
1005
/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * @return
 *   True if the result after the decrement is 0; false otherwise.
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif
1012
1025
/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set (non-zero), return 0 (failure);
 * otherwise set the counter to 1 and return non-zero (success).
 */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif
1032
1039static inline void rte_atomic64_clear(rte_atomic64_t *v);
1040
1041#ifdef RTE_FORCE_INTRINSICS
1042static inline void rte_atomic64_clear(rte_atomic64_t *v)
1043{
1044 rte_atomic64_set(v, 0);
1045}
1046#endif
1047
1048/*------------------------ 128 bit atomic operations -------------------------*/
1049
1054typedef struct {
1056 union {
1057 uint64_t val[2];
1058#ifdef RTE_ARCH_64
1059 __extension__ __int128 int128;
1060#endif
1061 };
1062} __rte_aligned(16) rte_int128_t;
1063
#ifdef __DOXYGEN__

/**
 * An atomic 128-bit compare and exchange.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value; updated with the actual value on failure.
 * @param src
 *   The new value.
 * @param weak
 *   A value which, when zero, prevents spurious failure.
 * @param success
 *   Memory order on success (one of the __ATOMIC_* constants).
 * @param failure
 *   Memory order on failure (one of the __ATOMIC_* constants).
 * @return
 *   Non-zero on success; 0 on failure.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
1115
1116#endif /* _RTE_ATOMIC_H_ */
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static void rte_atomic_thread_fence(int memorder)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_io_rmb(void)
static void rte_rmb(void)
static void rte_atomic32_clear(rte_atomic32_t *v)
Definition: rte_atomic.h:688
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static void rte_io_mb(void)
static void rte_io_wmb(void)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)
static void rte_smp_mb(void)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:334
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
Definition: rte_atomic.h:233
static void rte_atomic16_clear(rte_atomic16_t *v)
Definition: rte_atomic.h:405
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:247
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_mb(void)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_smp_wmb(void)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:261
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:617
static void rte_atomic16_init(rte_atomic16_t *v)
Definition: rte_atomic.h:205
static void rte_smp_rmb(void)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static __rte_experimental int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
Definition: rte_atomic.h:502
static void rte_wmb(void)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:530
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
Definition: rte_atomic.h:516
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:544
static void rte_atomic64_inc(rte_atomic64_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
Definition: rte_atomic.h:219
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_init(rte_atomic64_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:314
static void rte_atomic32_init(rte_atomic32_t *v)
Definition: rte_atomic.h:488
static void rte_atomic16_inc(rte_atomic16_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:597
#define RTE_STD_C11
Definition: rte_common.h:42
__extension__ struct rte_eth_link __rte_aligned(8)
volatile int16_t cnt
Definition: rte_atomic.h:190
volatile int32_t cnt
Definition: rte_atomic.h:473
volatile int64_t cnt
Definition: rte_atomic.h:755