atomics/treewide: Make conditional inc/dec ops optional
The conditional inc/dec ops differ for atomic_t and atomic64_t:

- atomic_inc_unless_negative() is optional for atomic_t, and doesn't exist for atomic64_t.
- atomic_dec_unless_positive() is optional for atomic_t, and doesn't exist for atomic64_t.
- atomic_dec_if_positive() is optional for atomic_t, and is mandatory for atomic64_t.

Let's make these consistently optional for both. At the same time, let's clean up the existing fallbacks to use atomic_try_cmpxchg(). The instrumented atomics are updated accordingly. There should be no functional change as a result of this patch. Signed-off-by: Mark Rutland <mark.rutland@arm.com> Reviewed-by: Will Deacon <will.deacon@arm.com> Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org> Cc: Boqun Feng <boqun.feng@gmail.com> Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Thomas Gleixner <tglx@linutronix.de> Link: https://lore.kernel.org/lkml/20180621121321.4761-18-mark.rutland@arm.com Signed-off-by: Ingo Molnar <mingo@kernel.org>
This commit is contained in:
parent
9837559d8e
commit
b3a2a05f91
|
@@ -296,5 +296,6 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
|
|||
smp_mb();
|
||||
return old - 1;
|
||||
}
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
|
||||
#endif /* _ALPHA_ATOMIC_H */
|
||||
|
|
|
@@ -517,6 +517,7 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
|
|||
|
||||
return val;
|
||||
}
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
|
||||
/**
|
||||
* atomic64_fetch_add_unless - add unless the number is a given value
|
||||
|
|
|
@@ -474,6 +474,7 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
|
|||
|
||||
return result;
|
||||
}
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
|
||||
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
|
||||
long long u)
|
||||
|
|
|
@@ -159,5 +159,7 @@
|
|||
|
||||
#define atomic64_andnot atomic64_andnot
|
||||
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
|
||||
#endif
|
||||
#endif
|
||||
|
|
|
@@ -215,22 +215,6 @@ ATOMIC64_FETCH_OP(xor, ^)
|
|||
(cmpxchg(&((v)->counter), old, new))
|
||||
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
|
||||
|
||||
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
long c, old, dec;
|
||||
c = atomic64_read(v);
|
||||
for (;;) {
|
||||
dec = c - 1;
|
||||
if (unlikely(dec < 0))
|
||||
break;
|
||||
old = atomic64_cmpxchg((v), c, dec);
|
||||
if (likely(old == c))
|
||||
break;
|
||||
c = old;
|
||||
}
|
||||
return dec;
|
||||
}
|
||||
|
||||
#define atomic_add(i,v) (void)atomic_add_return((i), (v))
|
||||
#define atomic_sub(i,v) (void)atomic_sub_return((i), (v))
|
||||
|
||||
|
|
|
@@ -223,29 +223,6 @@ atomic64_read(const atomic64_t *v)
|
|||
((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
|
||||
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
|
||||
|
||||
/*
|
||||
* atomic64_dec_if_positive - decrement by 1 if old value positive
|
||||
* @v: pointer of type atomic_t
|
||||
*
|
||||
* The function returns the old value of *v minus 1, even if
|
||||
* the atomic variable, v, was not decremented.
|
||||
*/
|
||||
static inline long atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
long c, old, dec;
|
||||
c = atomic64_read(v);
|
||||
for (;;) {
|
||||
dec = c - 1;
|
||||
if (unlikely(dec < 0))
|
||||
break;
|
||||
old = atomic64_cmpxchg((v), c, dec);
|
||||
if (likely(old == c))
|
||||
break;
|
||||
c = old;
|
||||
}
|
||||
return dec;
|
||||
}
|
||||
|
||||
#endif /* !CONFIG_64BIT */
|
||||
|
||||
|
||||
|
|
|
@@ -488,6 +488,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
|
|||
|
||||
return t;
|
||||
}
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
|
||||
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
|
||||
#define atomic64_cmpxchg_relaxed(v, o, n) \
|
||||
|
|
|
@@ -145,23 +145,6 @@ ATOMIC64_OPS(xor)
|
|||
|
||||
#undef ATOMIC64_OPS
|
||||
|
||||
static inline long atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
long c, old, dec;
|
||||
|
||||
c = atomic64_read(v);
|
||||
for (;;) {
|
||||
dec = c - 1;
|
||||
if (unlikely(dec < 0))
|
||||
break;
|
||||
old = atomic64_cmpxchg((v), c, dec);
|
||||
if (likely(old == c))
|
||||
break;
|
||||
c = old;
|
||||
}
|
||||
return dec;
|
||||
}
|
||||
|
||||
#define atomic64_sub_return(_i, _v) atomic64_add_return(-(long)(_i), _v)
|
||||
#define atomic64_fetch_sub(_i, _v) atomic64_fetch_add(-(long)(_i), _v)
|
||||
#define atomic64_sub(_i, _v) atomic64_add(-(long)(_i), _v)
|
||||
|
|
|
@@ -62,5 +62,6 @@ static inline int atomic_xchg(atomic_t *v, int new)
|
|||
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
|
||||
|
||||
long atomic64_dec_if_positive(atomic64_t *v);
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
|
||||
#endif /* !(__ARCH_SPARC64_ATOMIC__) */
|
||||
|
|
|
@@ -254,6 +254,7 @@ static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
|
|||
return r;
|
||||
}
|
||||
|
||||
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
|
||||
static inline long long arch_atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
long long r;
|
||||
|
|
|
@@ -191,24 +191,6 @@ static inline long arch_atomic64_xchg(atomic64_t *v, long new)
|
|||
return xchg(&v->counter, new);
|
||||
}
|
||||
|
||||
/*
|
||||
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive
|
||||
* @v: pointer of type atomic_t
|
||||
*
|
||||
* The function returns the old value of *v minus 1, even if
|
||||
* the atomic variable, v, was not decremented.
|
||||
*/
|
||||
static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
s64 dec, c = arch_atomic64_read(v);
|
||||
do {
|
||||
dec = c - 1;
|
||||
if (unlikely(dec < 0))
|
||||
break;
|
||||
} while (!arch_atomic64_try_cmpxchg(v, &c, dec));
|
||||
return dec;
|
||||
}
|
||||
|
||||
static inline void arch_atomic64_and(long i, atomic64_t *v)
|
||||
{
|
||||
asm volatile(LOCK_PREFIX "andq %1,%0"
|
||||
|
|
|
@@ -243,11 +243,14 @@ static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
|
|||
}
|
||||
#endif
|
||||
|
||||
#ifdef arch_atomic64_dec_if_positive
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
kasan_check_write(v, sizeof(*v));
|
||||
return arch_atomic64_dec_if_positive(v);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef arch_atomic_dec_and_test
|
||||
#define atomic_dec_and_test atomic_dec_and_test
|
||||
|
|
|
@@ -51,6 +51,7 @@ ATOMIC64_OPS(xor)
|
|||
#undef ATOMIC64_OP
|
||||
|
||||
extern long long atomic64_dec_if_positive(atomic64_t *v);
|
||||
#define atomic64_dec_if_positive atomic64_dec_if_positive
|
||||
extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
|
||||
extern long long atomic64_xchg(atomic64_t *v, long long new);
|
||||
extern long long atomic64_fetch_add_unless(atomic64_t *v, long long a, long long u);
|
||||
|
|
|
@@ -683,28 +683,30 @@ static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
|
|||
#endif
|
||||
|
||||
#ifndef atomic_inc_unless_negative
|
||||
static inline bool atomic_inc_unless_negative(atomic_t *p)
|
||||
static inline bool atomic_inc_unless_negative(atomic_t *v)
|
||||
{
|
||||
int v, v1;
|
||||
for (v = 0; v >= 0; v = v1) {
|
||||
v1 = atomic_cmpxchg(p, v, v + 1);
|
||||
if (likely(v1 == v))
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
int c = atomic_read(v);
|
||||
|
||||
do {
|
||||
if (unlikely(c < 0))
|
||||
return false;
|
||||
} while (!atomic_try_cmpxchg(v, &c, c + 1));
|
||||
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef atomic_dec_unless_positive
|
||||
static inline bool atomic_dec_unless_positive(atomic_t *p)
|
||||
static inline bool atomic_dec_unless_positive(atomic_t *v)
|
||||
{
|
||||
int v, v1;
|
||||
for (v = 0; v <= 0; v = v1) {
|
||||
v1 = atomic_cmpxchg(p, v, v - 1);
|
||||
if (likely(v1 == v))
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
int c = atomic_read(v);
|
||||
|
||||
do {
|
||||
if (unlikely(c > 0))
|
||||
return false;
|
||||
} while (!atomic_try_cmpxchg(v, &c, c - 1));
|
||||
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@@ -718,17 +720,14 @@ static inline bool atomic_dec_unless_positive(atomic_t *p)
|
|||
#ifndef atomic_dec_if_positive
|
||||
static inline int atomic_dec_if_positive(atomic_t *v)
|
||||
{
|
||||
int c, old, dec;
|
||||
c = atomic_read(v);
|
||||
for (;;) {
|
||||
int dec, c = atomic_read(v);
|
||||
|
||||
do {
|
||||
dec = c - 1;
|
||||
if (unlikely(dec < 0))
|
||||
break;
|
||||
old = atomic_cmpxchg((v), c, dec);
|
||||
if (likely(old == c))
|
||||
break;
|
||||
c = old;
|
||||
}
|
||||
} while (!atomic_try_cmpxchg(v, &c, dec));
|
||||
|
||||
return dec;
|
||||
}
|
||||
#endif
|
||||
|
@@ -1290,6 +1289,56 @@ static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v
|
|||
}
|
||||
#endif
|
||||
|
||||
#ifndef atomic64_inc_unless_negative
|
||||
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
|
||||
{
|
||||
long long c = atomic64_read(v);
|
||||
|
||||
do {
|
||||
if (unlikely(c < 0))
|
||||
return false;
|
||||
} while (!atomic64_try_cmpxchg(v, &c, c + 1));
|
||||
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef atomic64_dec_unless_positive
|
||||
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
|
||||
{
|
||||
long long c = atomic64_read(v);
|
||||
|
||||
do {
|
||||
if (unlikely(c > 0))
|
||||
return false;
|
||||
} while (!atomic64_try_cmpxchg(v, &c, c - 1));
|
||||
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
/*
|
||||
* atomic64_dec_if_positive - decrement by 1 if old value positive
|
||||
* @v: pointer of type atomic64_t
|
||||
*
|
||||
* The function returns the old value of *v minus 1, even if
|
||||
* the atomic64 variable, v, was not decremented.
|
||||
*/
|
||||
#ifndef atomic64_dec_if_positive
|
||||
static inline long long atomic64_dec_if_positive(atomic64_t *v)
|
||||
{
|
||||
long long dec, c = atomic64_read(v);
|
||||
|
||||
do {
|
||||
dec = c - 1;
|
||||
if (unlikely(dec < 0))
|
||||
break;
|
||||
} while (!atomic64_try_cmpxchg(v, &c, dec));
|
||||
|
||||
return dec;
|
||||
}
|
||||
#endif
|
||||
|
||||
#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
|
||||
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
|
||||
|
||||
|
|
Loading…
Reference in New Issue