/* Atomic compare-and-swap: if *p equals t, store s into *p.
 * Returns the value observed in *p — equal to t exactly when the
 * swap succeeded.
 *
 * Sequence: a leading "sync" orders all prior accesses before the
 * CAS; the lwarx/stwcx. pair forms the load-reserve/store-conditional
 * loop (stwcx. clears CR0.EQ and we retry via "bne- 1b" if the
 * reservation was lost); "isync" after a successful store prevents
 * later accesses from being speculated ahead of the CAS.
 *
 * Note the two "1:" labels are GNU as numeric local labels:
 * "bne 1f" branches forward to the trailing label (skip the store on
 * compare mismatch), while "bne- 1b" branches back to the lwarx.
 *
 * Constraints: %0 ("=&r", earlyclobber) receives the loaded value and
 * is returned through t; "+m"(*p) declares the memory operand so the
 * compiler knows *p is read and written; "cc" is clobbered by cmpw
 * and stwcx. */
static inline int a_cas(volatile int *p, int t, int s)
{
	__asm__("\n"
		"	sync\n"
		"1:	lwarx %0, 0, %4\n"
		"	cmpw %0, %2\n"
		"	bne 1f\n"
		"	stwcx. %3, 0, %4\n"
		"	bne- 1b\n"
		"	isync\n"
		"1:	\n"
		: "=&r"(t), "+m"(*p) : "r"(t), "r"(s), "r"(p) : "cc", "memory" );
	return t;
}
while (a_cas(p, old, old|v) != old);
}
/* Atomic or on a long-sized object: forwards to the 32-bit a_or.
 * NOTE(review): assumes sizeof(long) == sizeof(int) on this target
 * (32-bit powerpc) — confirm this header is not used where long is
 * 64 bits, since the high bits of v would be silently dropped. */
static inline void a_or_l(volatile void *p, long v)
{
	a_or(p, v);
}
+
/* Atomically and v into the 64-bit object at p, one 32-bit word at a
 * time. The two halves are updated by two independent a_and calls,
 * so the combined 64-bit update is NOT a single atomic operation.
 * The union pun keeps the code endian-agnostic: half[0] holds the
 * bytes of v that correspond to the word at the lower address of *p,
 * whatever the byte order. */
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t full; uint32_t half[2]; } bits = { v };
	volatile int *words = (volatile int *)p;

	a_and(words, bits.half[0]);
	a_and(words + 1, bits.half[1]);
}
/* Atomically or v into the 64-bit object at p, one 32-bit word at a
 * time (two independent a_or calls — not atomic as a 64-bit unit).
 * The union pun is endian-agnostic: half[0] always maps to the word
 * at the lower address of *p. */
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t full; uint32_t half[2]; } bits = { v };
	volatile int *words = (volatile int *)p;

	a_or(words, bits.half[0]);
	a_or(words + 1, bits.half[1]);
}
#endif