/* No-op statement, usable anywhere a single statement is required. */
#define nop	do { } while (0)

#ifdef __powerpc64__
/* ppc64: lwsync is sufficient (and cheaper than sync) for load ordering. */
#define read_barrier()	__asm__ __volatile__ ("lwsync" : : : "memory")
#else
/* 32-bit ppc has no lwsync; fall back to the full sync barrier. */
#define read_barrier()	__asm__ __volatile__ ("sync" : : : "memory")
#endif

/* Full heavyweight barrier, ordering stores as well as loads. */
#define write_barrier()	__asm__ __volatile__ ("sync" : : : "memory")
+
/*
 * Return the 0-based index of the highest set bit in the low 32 bits of
 * bitmask, i.e. floor(log2(bitmask)).  Result is undefined for
 * bitmask == 0 (callers such as arch_ffz guard against that case).
 */
static inline int __ilog2(unsigned long bitmask)
{
	int lz;

	/*
	 * Original code returned "31 - lz" with lz never assigned (UB);
	 * the intended ppc cntlzw (count leading zeros word) was missing.
	 * __builtin_clz compiles to cntlzw on ppc and is portable.
	 */
	lz = __builtin_clz((unsigned int) bitmask);
	return 31 - lz;
}
/*
 * Find-first-zero: return the 0-based index of the lowest clear bit in
 * bitmask, or 32 if the (low) word is all ones.
 */
static inline int arch_ffz(unsigned long bitmask)
{
	/* Invert so the lowest zero becomes the lowest set bit. */
	if ((bitmask = ~bitmask) == 0)
		return 32;
	/* bitmask & -bitmask isolates the lowest set bit. */
	return __ilog2(bitmask & -bitmask);
}

#define ARCH_HAVE_FFZ
+
/*
 * Minimal test-and-set spinlock: lock word is 0 when free, non-zero
 * when held.  Always initialize to zero.
 */
typedef struct {
	volatile unsigned int lock;
} spinlock_t;
+
+static inline void spin_trylock(spinlock_t *lock)
+{
+ unsigned long tmp;
+
+ __asm__ __volatile__(
+ "1: lwarx %0,0,%2\n\
+ cmpwi 0,%0,0\n\
+ bne- 2f\n\
+ stwcx. 1,0,%2\n\
+ bne- 1b\n\
+ isync\n\
+ 2:" : "=&r" (tmp)
+ : (&lock->lock)
+ : "cr0", "memory");
+
+ return tmp;
+}
+
+static inline void spin_lock(spinlock_t *lock)
+{
+ while (spin_trylock(lock))
+ ;
+}
+
+static inline void spin_unlock(spinlock_t *lock)
+{
+ read_barrier();
+ lock->lock = 0;
+}
#endif