atomics/treewide: Make atomic_fetch_add_unless() optional
Several architectures these days have a near-identical implementation based on atomic_read() and atomic_cmpxchg() which we can instead define in <linux/atomic.h>, so let's do so, using something close to the existing x86 implementation with try_cmpxchg().

Where an architecture provides its own atomic_fetch_add_unless(), it must define a preprocessor symbol for it. The instrumented atomics are updated accordingly.

Note that arch/arc's existing atomic_fetch_add_unless() had redundant barriers, as these are already present in its atomic_cmpxchg() implementation.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Geert Uytterhoeven <geert@linux-m68k.org>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Vineet Gupta <vgupta@synopsys.com>
Link: https://lore.kernel.org/lkml/20180621121321.4761-7-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
commit eccc2da8c0
parent bef828204a
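For orientation before the per-architecture hunks below: the heart of the patch is the generic fallback added to <linux/atomic.h> (final hunk), condensed here as a sketch. Architectures that keep their own implementation opt out of it by defining a preprocessor symbol of the same name; atomic_try_cmpxchg() rewrites its expected-value argument on failure, which is what keeps the retry loop this short.

	/* Sketch of the generic fallback this patch introduces; the authoritative
	 * version is in the <linux/atomic.h> hunk at the end of the diff. */
	#ifndef atomic_fetch_add_unless
	static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
	{
		int c = atomic_read(v);

		do {
			/* Never perform the addition if the value is already @u. */
			if (unlikely(c == u))
				break;
			/* On failure, try_cmpxchg() updates 'c' with the value it saw. */
		} while (!atomic_try_cmpxchg(v, &c, c + a));

		/* Old value: callers can tell whether the addition happened. */
		return c;
	}
	#endif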
@@ -235,7 +235,7 @@ static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 	smp_mb();
 	return old;
 }
-
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 /**
  * atomic64_add_unless - add unless the number is a given value
@@ -308,34 +308,6 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v
- */
-#define atomic_fetch_add_unless(v, a, u)	\
-({	\
-	int c, old;	\
-	\
-	/*	\
-	 * Explicit full memory barrier needed before/after as	\
-	 * LLOCK/SCOND thmeselves don't provide any such semantics	\
-	 */	\
-	smp_mb();	\
-	\
-	c = atomic_read(v);	\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c)\
-		c = old;	\
-	\
-	smp_mb();	\
-	\
-	c;	\
-})
-
 #define atomic_inc(v) atomic_add(1, v)
 #define atomic_dec(v) atomic_sub(1, v)
 
@@ -156,6 +156,7 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 
 	return oldval;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 #else /* ARM_ARCH_6 */
 
@@ -215,16 +216,6 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-
-	c = atomic_read(v);
-	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
-		c = old;
-	return c;
-}
-
 #endif /* __LINUX_ARM_ARCH__ */
 
 #define ATOMIC_OPS(op, c_op, asm_op) \
@@ -125,7 +125,6 @@
 #define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
 #define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
 #define atomic_add_negative(i, v) (atomic_add_return((i), (v)) < 0)
-#define atomic_fetch_add_unless(v, a, u) ___atomic_add_unless(v, a, u,)
 #define atomic_andnot atomic_andnot
 
 /*
@@ -106,5 +106,6 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 	arch_local_irq_restore(flags);
 	return ret;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 #endif /* __ARCH_H8300_ATOMIC __ */
@@ -196,6 +196,7 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 	);
 	return __oldval;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 #define atomic_inc(v) atomic_add(1, (v))
 #define atomic_dec(v) atomic_sub(1, (v))
@@ -215,22 +215,6 @@ ATOMIC64_FETCH_OP(xor, ^)
 	(cmpxchg(&((v)->counter), old, new))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
-
 static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
 {
 	long c, old;
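The function removed above is representative: most of the removal hunks in this patch delete the same open-coded atomic_read()/atomic_cmpxchg() retry loop, which the new generic helper replaces. A representative copy of that loop, with explanatory comments added for readers comparing it against the try_cmpxchg() version:

	/* The per-architecture pattern being deleted throughout this patch. */
	static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
	{
		int c, old;

		c = atomic_read(v);			/* current value, our "expected" */
		for (;;) {
			if (unlikely(c == (u)))		/* already at the forbidden value: */
				break;			/* return it without adding */
			old = atomic_cmpxchg((v), c, c + (a));
			if (likely(old == c))		/* exchange succeeded */
				break;
			c = old;			/* raced with another update: retry */
		}
		return c;				/* value observed before any addition */
	}

The generic replacement folds the "c = old" bookkeeping into atomic_try_cmpxchg(), which updates its expected-value argument in place when the exchange fails.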
@@ -211,19 +211,4 @@ static inline int atomic_add_negative(int i, atomic_t *v)
 	return c != 0;
 }
 
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
 #endif /* __ARCH_M68K_ATOMIC __ */
@@ -274,30 +274,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
 #define atomic_inc_return(v) atomic_add_return(1, (v))
 
@@ -77,30 +77,6 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
 #define ATOMIC_OP(op, c_op) \
 static __inline__ void atomic_##op(int i, atomic_t *v) \
 { \
@@ -248,6 +248,7 @@ static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 
 	return t;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 /**
  * atomic_inc_not_zero - increment unless the number is zero
@@ -349,6 +349,7 @@ static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 		: "memory");
 	return prev;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 #ifndef CONFIG_GENERIC_ATOMIC64
 static __always_inline long __atomic64_add_unless(atomic64_t *v, long a, long u)
@@ -90,21 +90,6 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return __atomic_cmpxchg(&v->counter, old, new);
 }
 
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == u))
-			break;
-		old = atomic_cmpxchg(v, c, c + a);
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
 #define ATOMIC64_INIT(i) { (i) }
 
 static inline long atomic64_read(const atomic64_t *v)
@@ -45,31 +45,6 @@
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-
-	return c;
-}
-
 #endif /* CONFIG_CPU_J2 */
 
 #endif /* __ASM_SH_ATOMIC_H */
@@ -30,6 +30,8 @@ int atomic_xchg(atomic_t *, int);
 int atomic_fetch_add_unless(atomic_t *, int, int);
 void atomic_set(atomic_t *, int);
 
+#define atomic_fetch_add_unless atomic_fetch_add_unless
+
 #define atomic_set_release(v, i) atomic_set((v), (i))
 
 #define atomic_read(v) READ_ONCE((v)->counter)
@@ -89,21 +89,6 @@ static inline int atomic_xchg(atomic_t *v, int new)
 	return xchg(&v->counter, new);
 }
 
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
 #define atomic64_cmpxchg(v, o, n) \
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
@@ -253,27 +253,6 @@ static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
 	return val;
 }
 
-/**
- * arch_atomic_fetch_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns the old value of @v.
- */
-static __always_inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c = arch_atomic_read(v);
-
-	do {
-		if (unlikely(c == u))
-			break;
-	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));
-
-	return c;
-}
-
 #ifdef CONFIG_X86_32
 # include <asm/atomic64_32.h>
 #else
@@ -274,30 +274,6 @@ ATOMIC_OPS(xor)
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
-}
-
 #endif /* __KERNEL__ */
 
 #endif /* _XTENSA_ATOMIC_H */
@@ -84,12 +84,14 @@ static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 ne
 }
 #endif
 
+#ifdef arch_atomic_fetch_add_unless
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_fetch_add_unless(v, a, u);
 }
+#endif
 
 static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
 {
@@ -221,15 +221,4 @@ static inline void atomic_dec(atomic_t *v)
 #define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
 
-#ifndef atomic_fetch_add_unless
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
-		c = old;
-	return c;
-}
-#endif
-
 #endif /* __ASM_GENERIC_ATOMIC_H */
@@ -521,6 +521,29 @@
 #endif
 #endif /* xchg_relaxed */
 
+/**
+ * atomic_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns the original value of @v.
+ */
+#ifndef atomic_fetch_add_unless
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
+{
+	int c = atomic_read(v);
+
+	do {
+		if (unlikely(c == u))
+			break;
+	} while (!atomic_try_cmpxchg(v, &c, c + a));
+
+	return c;
+}
+#endif
+
 /**
  * atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
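Finally, a usage sketch (hypothetical caller, not part of this patch) showing why the helper returns the old value rather than a boolean: it lets callers distinguish "added" from "already at the forbidden value", which is exactly what reference-count acquire patterns need.

	/* Hypothetical caller: take a reference only if the object is still live,
	 * i.e. its refcount has not already dropped to zero. */
	static inline bool obj_tryget(atomic_t *refcount)
	{
		/* Returns the old count; an old value of 0 means no reference was taken. */
		return atomic_fetch_add_unless(refcount, 1, 0) != 0;
	}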