Diffstat (limited to 'arch/parisc')
 arch/parisc/include/asm/atomic.h  |  4 ++--
 arch/parisc/include/asm/cmpxchg.h |  2 ++
 arch/parisc/lib/bitops.c          | 12 ++++++++++++
 3 files changed, 16 insertions(+), 2 deletions(-)
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index 118953d41763..03862320f779 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -136,8 +136,6 @@ ATOMIC_OPS(xor, ^=)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define ATOMIC_INIT(i)	{ (i) }
-
 #ifdef CONFIG_64BIT
 
 #define ATOMIC64_INIT(i)	{ (i) }
@@ -212,6 +210,8 @@ atomic64_set(atomic64_t *v, s64 i)
 	_atomic_spin_unlock_irqrestore(v, flags);
 }
 
+#define atomic64_set_release(v, i)	atomic64_set((v), (i))
+
 static __inline__ s64
 atomic64_read(const atomic64_t *v)
 {
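
Two changes land in atomic.h: the arch-local ATOMIC_INIT() definition goes away (the definition was centralized in the generic headers, so per-arch copies became redundant), and atomic64_set_release() is aliased to plain atomic64_set(). The alias is sound because parisc emulates 64-bit atomics under a hashed spinlock: a store performed while holding the lock is ordered against every later acquisition of that lock, which is at least as strong as a release store. Below is a minimal userspace sketch of that argument, using a pthread mutex as a stand-in for _atomic_spin_lock_irqsave(); all names here are illustrative, not kernel code.

#include <pthread.h>
#include <stdint.h>

/* Illustrative analogue of parisc's lock-based atomic64_t. */
typedef struct {
	pthread_mutex_t lock;	/* stands in for the hashed spinlock */
	int64_t counter;
} emu_atomic64_t;

static emu_atomic64_t v = { PTHREAD_MUTEX_INITIALIZER, 0 };

static void emu_atomic64_set(emu_atomic64_t *a, int64_t i)
{
	pthread_mutex_lock(&a->lock);
	a->counter = i;
	/* Unlocking orders the store before any later acquisition of the
	 * same lock (readers take it too), i.e. release-style ordering;
	 * this is why set_release can simply alias set here. */
	pthread_mutex_unlock(&a->lock);
}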
diff --git a/arch/parisc/include/asm/cmpxchg.h b/arch/parisc/include/asm/cmpxchg.h
index ab5c215cf46c..068958575871 100644
--- a/arch/parisc/include/asm/cmpxchg.h
+++ b/arch/parisc/include/asm/cmpxchg.h
@@ -60,6 +60,7 @@ extern void __cmpxchg_called_with_bad_pointer(void);
 extern unsigned long __cmpxchg_u32(volatile unsigned int *m, unsigned int old,
 				   unsigned int new_);
 extern u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new_);
+extern u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new_);
 
 /* don't worry...optimizer will get rid of most of this */
 static inline unsigned long
@@ -71,6 +72,7 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
 #endif
 	case 4: return __cmpxchg_u32((unsigned int *)ptr,
 				     (unsigned int)old, (unsigned int)new_);
+	case 1: return __cmpxchg_u8((u8 *)ptr, (u8)old, (u8)new_);
 	}
 	__cmpxchg_called_with_bad_pointer();
 	return old;
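
__cmpxchg() dispatches on the operand size, which is a compile-time constant at every call site. Before this change a byte-sized cmpxchg() fell through the switch to __cmpxchg_called_with_bad_pointer(), which is deliberately left undefined so any such call fails at link time; the new case 1 routes u8 operands to __cmpxchg_u8(). The following is a compile-only sketch of that dispatch-plus-link-error pattern; cas_u8() and friends are hypothetical stand-ins, not the kernel's symbols.

#include <stdint.h>

/* Stand-ins for the per-width helpers implemented elsewhere. */
uint64_t cas_u64(volatile uint64_t *p, uint64_t old, uint64_t new_);
uint32_t cas_u32(volatile uint32_t *p, uint32_t old, uint32_t new_);
uint8_t  cas_u8(volatile uint8_t *p, uint8_t old, uint8_t new_);

/* No definition anywhere: if the optimizer cannot prove the switch
 * below always hits a valid case, this reference survives into the
 * object file and the final link fails, flagging the bad size. */
void cas_called_with_bad_size(void);

static inline unsigned long
cas(volatile void *ptr, unsigned long old, unsigned long new_, int size)
{
	switch (size) {
	case 8: return cas_u64((volatile uint64_t *)ptr, old, new_);
	case 4: return cas_u32((volatile uint32_t *)ptr, old, new_);
	case 1: return cas_u8((volatile uint8_t *)ptr, old, new_);
	}
	cas_called_with_bad_size();	/* e.g. size == 2 still lands here */
	return old;
}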
diff --git a/arch/parisc/lib/bitops.c b/arch/parisc/lib/bitops.c
index 70ffbcf889b8..2e4d1f05a926 100644
--- a/arch/parisc/lib/bitops.c
+++ b/arch/parisc/lib/bitops.c
@@ -79,3 +79,15 @@ unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new_)
 	_atomic_spin_unlock_irqrestore(ptr, flags);
 	return (unsigned long)prev;
 }
+
+u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
+{
+	unsigned long flags;
+	u8 prev;
+
+	_atomic_spin_lock_irqsave(ptr, flags);
+	if ((prev = *ptr) == old)
+		*ptr = new;
+	_atomic_spin_unlock_irqrestore(ptr, flags);
+	return prev;
+}
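
The new __cmpxchg_u8() follows the existing __cmpxchg_u32()/__cmpxchg_u64() pattern exactly: take the spinlock covering the address, compare, conditionally store, unlock, and always return the previous value so the caller can check prev == old for success. The sketch below is a self-contained userspace analogue (a pthread mutex replaces _atomic_spin_lock_irqsave(), and one lock guards everything where the kernel hashes the address to one of an array of spinlocks); the names are illustrative.

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static pthread_mutex_t cas_lock = PTHREAD_MUTEX_INITIALIZER;

static uint8_t cas_u8(volatile uint8_t *ptr, uint8_t old, uint8_t new_)
{
	uint8_t prev;

	pthread_mutex_lock(&cas_lock);
	if ((prev = *ptr) == old)
		*ptr = new_;	/* store only on a successful compare */
	pthread_mutex_unlock(&cas_lock);
	return prev;		/* caller compares prev with old */
}

int main(void)
{
	uint8_t flag = 0;
	uint8_t prev;

	prev = cas_u8(&flag, 0, 1);	/* matches: flag becomes 1 */
	printf("prev=%d flag=%d\n", prev, flag);	/* prev=0 flag=1 */

	prev = cas_u8(&flag, 0, 2);	/* stale old: flag unchanged */
	printf("prev=%d flag=%d\n", prev, flag);	/* prev=1 flag=1 */
	return 0;
}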