/* Atomic compare-and-swap on an int.
 * If *p == t, stores s into *p; returns the value *p held before the
 * operation (== t on success).  The lock prefix makes the cmpxchg atomic
 * and, on x86, a full memory barrier. */
static inline int a_cas(volatile int *p, int t, int s)
{
	/* cmpxchg compares eax (t) with *p; on match it stores s into *p,
	 * otherwise it loads the current *p into eax.  Either way eax ends
	 * up holding the old value, which the "=a" output returns in t. */
	__asm__ __volatile__ (
		"lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" );
	return t;
}
#define a_cas_p a_cas_p
/* Atomic compare-and-swap on a pointer-sized object.
 * If *(void**)p == t, stores s; returns the old pointer value (== t on
 * success).  The cast through (void *volatile *) gives the asm a
 * pointer-width memory operand. */
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*(void *volatile *)p)
		: "a"(t), "r"(s) : "memory" );
	return t;
}
/* Atomic exchange: stores v into *p and returns the previous value of *p.
 * xchg with a memory operand is implicitly locked on x86, so no lock
 * prefix is needed. */
static inline int a_swap(volatile int *p, int v)
{
	__asm__ __volatile__( "xchg %0, %1"
		: "=r"(v), "=m"(*p) : "0"(v) : "memory" );
	return v;
}
#define a_fetch_add a_fetch_add
/* Atomic fetch-and-add: adds v to *p and returns the value *p held
 * before the addition.  lock xadd exchanges and adds in one atomic
 * step; the "0"(v) input ties v to the same register as the output. */
static inline int a_fetch_add(volatile int *p, int v)
{
	__asm__ __volatile__( "lock ; xadd %0, %1"
		: "=r"(v), "=m"(*p) : "0"(v) : "memory" );
	return v;
}
38 static inline void a_and(volatile int *p, int v)
42 : "=m"(*p) : "r"(v) : "memory" );
/* Atomically performs *p |= v. */
static inline void a_or(volatile int *p, int v)
{
	__asm__ __volatile__( "lock ; or %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
#define a_and_64 a_and_64
/* Atomically performs *p &= v on a 64-bit value (native single
 * instruction on x86_64). */
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	__asm__ __volatile__( "lock ; and %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
#define a_or_64 a_or_64
/* Atomically performs *p |= v on a 64-bit value (native single
 * instruction on x86_64). */
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	__asm__ __volatile__( "lock ; or %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
/* Atomically increments *p by one.  The dummy "m"(*p) input forces the
 * compiler to treat the old memory contents as consumed. */
static inline void a_inc(volatile int *p)
{
	__asm__ __volatile__( "lock ; incl %0"
		: "=m"(*p) : "m"(*p) : "memory" );
}
/* Atomically decrements *p by one.  The dummy "m"(*p) input forces the
 * compiler to treat the old memory contents as consumed. */
static inline void a_dec(volatile int *p)
{
	__asm__ __volatile__( "lock ; decl %0"
		: "=m"(*p) : "m"(*p) : "memory" );
}
#define a_store a_store
/* Atomic store with sequential-consistency ordering: writes x to *p,
 * then executes a locked or of $0 into the top of the stack.  The
 * locked no-op serves as a full memory barrier (an idiomatic cheaper
 * alternative to mfence) so the store cannot be reordered past later
 * loads. */
static inline void a_store(volatile int *p, int x)
{
	__asm__ __volatile__(
		"mov %1, %0 ; lock ; orl $0,(%%rsp)"
		: "=m"(*p) : "r"(x) : "memory" );
}
#define a_barrier a_barrier
/* Compiler-level memory barrier: the empty asm with a "memory" clobber
 * prevents the compiler from caching or reordering memory accesses
 * across this point.  No fence instruction is emitted; the lock-prefixed
 * atomics above provide the hardware ordering this arch relies on. */
static inline void a_barrier()
{
	__asm__ __volatile__( "" : : : "memory" );
}
/* Spin-wait hint: the pause instruction tells the CPU this is a busy-wait
 * loop, reducing power use and contention in spinlock back-off. */
static inline void a_spin()
{
	__asm__ __volatile__( "pause" : : : "memory" );
}
#define a_crash a_crash
/* Deliberately crashes the process: hlt is a privileged instruction, so
 * executing it in user mode raises an exception the kernel turns into a
 * fatal signal.  Used to abort on unrecoverable internal errors. */
static inline void a_crash()
{
	__asm__ __volatile__( "hlt" : : : "memory" );
}
#define a_ctz_64 a_ctz_64
/* Counts trailing zero bits of x (index of the lowest set bit).
 * Precondition: x != 0 — bsf leaves its destination undefined when the
 * source is zero. */
static inline int a_ctz_64(uint64_t x)
{
	__asm__( "bsf %1,%0" : "=r"(x) : "r"(x) );
	return x;
}
#define a_clz_64 a_clz_64
/* Counts leading zero bits of x.  bsr yields the index of the highest
 * set bit; xor with 63 converts that index to the leading-zero count.
 * Precondition: x != 0 — bsr leaves its destination undefined when the
 * source is zero. */
static inline int a_clz_64(uint64_t x)
{
	__asm__( "bsr %1,%0 ; xor $63,%0" : "=r"(x) : "r"(x) );
	return x;
}