Add cmpxchg_local to blackfin, replace __cmpxchg by generic cmpxchg

Use the new generic cmpxchg_local (disables interrupt). Also use the generic
cmpxchg as fallback if SMP is not set, since nobody seems to know why __cmpxchg
has been implemented in assembly in the first place rather than in plain C.

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Bryan Wu <bryan.wu@analog.com>
Cc: Michael Frysinger <michael.frysinger@analog.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
This commit is contained in:
Mathieu Desnoyers 2008-02-07 00:16:13 -08:00 committed by Linus Torvalds
parent 5e86c11d3e
commit 10b8827068
1 changed file with 11 additions and 46 deletions

View File

@ -183,55 +183,20 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
return tmp; return tmp;
} }
#include <asm-generic/cmpxchg-local.h>
/* /*
* Atomic compare and exchange. Compare OLD with MEM, if identical, * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
* store NEW in MEM. Return the initial value in MEM. Success is * them available.
* indicated by comparing RETURN with OLD.
*/ */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old, #define cmpxchg_local(ptr, o, n) \
unsigned long new, int size) ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
{
unsigned long tmp = 0;
unsigned long flags = 0;
local_irq_save(flags);
switch (size) {
case 1:
__asm__ __volatile__
("%0 = b%3 (z);\n\t"
"CC = %1 == %0;\n\t"
"IF !CC JUMP 1f;\n\t"
"b%3 = %2;\n\t"
"1:\n\t"
: "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
break;
case 2:
__asm__ __volatile__
("%0 = w%3 (z);\n\t"
"CC = %1 == %0;\n\t"
"IF !CC JUMP 1f;\n\t"
"w%3 = %2;\n\t"
"1:\n\t"
: "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
break;
case 4:
__asm__ __volatile__
("%0 = %3;\n\t"
"CC = %1 == %0;\n\t"
"IF !CC JUMP 1f;\n\t"
"%3 = %2;\n\t"
"1:\n\t"
: "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
break;
}
local_irq_restore(flags);
return tmp;
}
#define cmpxchg(ptr,o,n)\
((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
(unsigned long)(n), sizeof(*(ptr)))) (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#ifndef CONFIG_SMP
#include <asm-generic/cmpxchg.h>
#endif
#define prepare_to_switch() do { } while(0) #define prepare_to_switch() do { } while(0)