atomics/treewide: Make unconditional inc/dec ops optional
Many of the inc/dec ops are mandatory, but for most architectures inc/dec are
simply trivial wrappers around their corresponding add/sub ops.

Let's make all the inc/dec ops optional, so that we can get rid of these
boilerplate wrappers.

The instrumented atomics are updated accordingly.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-17-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
This commit is contained in:
parent 18cc1814d4
commit 9837559d8e
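For readers following the series: the mechanism that makes the deletions below safe is the kernel's macro-detection idiom. An architecture that implements an op itself also defines the op's name as a macro expanding to itself; the generic header then supplies a fallback only when that name is undefined. A minimal standalone sketch of the idiom (toy names and a GCC builtin, not the kernel's real headers):

#include <stdio.h>

typedef struct { int counter; } atomic_t;

static inline void atomic_add(int i, atomic_t *v)
{
	/* Stand-in for a real architecture's atomic add. */
	__atomic_fetch_add(&v->counter, i, __ATOMIC_RELAXED);
}

/* An arch with a dedicated increment instruction would provide its own
 * op and advertise it by defining the name to itself:
 *
 *	static inline void atomic_inc(atomic_t *v) { ... }
 *	#define atomic_inc atomic_inc
 */

/* The generic layer supplies the trivial wrapper only when the arch
 * did not, exactly as the include/linux/atomic.h hunks below do. */
#ifndef atomic_inc
#define atomic_inc(v) atomic_add(1, (v))
#endif

int main(void)
{
	atomic_t v = { .counter = 41 };
	atomic_inc(&v); /* expands to atomic_add(1, &v) here */
	printf("%d\n", v.counter);
	return 0;
}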
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -297,16 +297,4 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return old - 1;
 }

-#define atomic_dec_return(v) atomic_sub_return(1,(v))
-#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
-
-#define atomic_inc_return(v) atomic_add_return(1,(v))
-#define atomic64_inc_return(v) atomic64_add_return(1,(v))
-
-#define atomic_inc(v) atomic_add(1,(v))
-#define atomic64_inc(v) atomic64_add(1,(v))
-
-#define atomic_dec(v) atomic_sub(1,(v))
-#define atomic64_dec(v) atomic64_sub(1,(v))
-
 #endif /* _ALPHA_ATOMIC_H */
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -308,12 +308,6 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP

-#define atomic_inc(v) atomic_add(1, v)
-#define atomic_dec(v) atomic_sub(1, v)
-
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-
 #ifdef CONFIG_GENERIC_ATOMIC64

 #include <asm-generic/atomic64.h>
@@ -560,11 +554,6 @@ static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
 }
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless

-#define atomic64_inc(v) atomic64_add(1LL, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
-#define atomic64_dec(v) atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
-
 #endif /* !CONFIG_GENERIC_ATOMIC64 */

 #endif /* !__ASSEMBLY__ */
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -245,12 +245,6 @@ ATOMIC_OPS(xor, ^=, eor)

 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))

-#define atomic_inc(v) atomic_add(1, v)
-#define atomic_dec(v) atomic_sub(1, v)
-
-#define atomic_inc_return_relaxed(v) (atomic_add_return_relaxed(1, v))
-#define atomic_dec_return_relaxed(v) (atomic_sub_return_relaxed(1, v))
-
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
 	long long counter;
@@ -512,11 +506,6 @@ static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
 }
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless

-#define atomic64_inc(v) atomic64_add(1LL, (v))
-#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1LL, (v))
-#define atomic64_dec(v) atomic64_sub(1LL, (v))
-#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v))
-
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif
 #endif
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -50,21 +50,11 @@
 #define atomic_add_return_release atomic_add_return_release
 #define atomic_add_return atomic_add_return

-#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
-#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
-#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-
 #define atomic_sub_return_relaxed atomic_sub_return_relaxed
 #define atomic_sub_return_acquire atomic_sub_return_acquire
 #define atomic_sub_return_release atomic_sub_return_release
 #define atomic_sub_return atomic_sub_return

-#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
-#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
-#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-
 #define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
 #define atomic_fetch_add_release atomic_fetch_add_release
@@ -108,8 +98,6 @@
 	cmpxchg_release(&((v)->counter), (old), (new))
 #define atomic_cmpxchg(v, old, new) cmpxchg(&((v)->counter), (old), (new))

-#define atomic_inc(v) atomic_add(1, (v))
-#define atomic_dec(v) atomic_sub(1, (v))
 #define atomic_andnot atomic_andnot

 /*
@@ -124,21 +112,11 @@
 #define atomic64_add_return_release atomic64_add_return_release
 #define atomic64_add_return atomic64_add_return

-#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
-#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
-#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1, (v))
-
 #define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
 #define atomic64_sub_return_acquire atomic64_sub_return_acquire
 #define atomic64_sub_return_release atomic64_sub_return_release
 #define atomic64_sub_return atomic64_sub_return

-#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
-#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
-#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
-#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
-
 #define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
 #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
 #define atomic64_fetch_add_release atomic64_fetch_add_release
@@ -179,8 +157,6 @@
 #define atomic64_cmpxchg_release atomic_cmpxchg_release
 #define atomic64_cmpxchg atomic_cmpxchg

-#define atomic64_inc(v) atomic64_add(1, (v))
-#define atomic64_dec(v) atomic64_sub(1, (v))
 #define atomic64_andnot atomic64_andnot

 #endif
--- a/arch/h8300/include/asm/atomic.h
+++ b/arch/h8300/include/asm/atomic.h
@@ -69,13 +69,6 @@ ATOMIC_OPS(sub, -=)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP

-#define atomic_inc_return(v) atomic_add_return(1, v)
-#define atomic_dec_return(v) atomic_sub_return(1, v)
-
-#define atomic_inc(v) (void)atomic_inc_return(v)
-
-#define atomic_dec(v) (void)atomic_dec_return(v)
-
 static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	int ret;
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -198,10 +198,4 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 }
 #define atomic_fetch_add_unless atomic_fetch_add_unless

-#define atomic_inc(v) atomic_add(1, (v))
-#define atomic_dec(v) atomic_sub(1, (v))
-
-#define atomic_inc_return(v) (atomic_add_return(1, v))
-#define atomic_dec_return(v) (atomic_sub_return(1, v))
-
 #endif
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -231,19 +231,10 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }

-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1, (v))
-
 #define atomic_add(i,v) (void)atomic_add_return((i), (v))
 #define atomic_sub(i,v) (void)atomic_sub_return((i), (v))
-#define atomic_inc(v) atomic_add(1, (v))
-#define atomic_dec(v) atomic_sub(1, (v))

 #define atomic64_add(i,v) (void)atomic64_add_return((i), (v))
 #define atomic64_sub(i,v) (void)atomic64_sub_return((i), (v))
-#define atomic64_inc(v) atomic64_add(1, (v))
-#define atomic64_dec(v) atomic64_sub(1, (v))

 #endif /* _ASM_IA64_ATOMIC_H */
--- a/arch/m68k/include/asm/atomic.h
+++ b/arch/m68k/include/asm/atomic.h
@@ -126,11 +126,13 @@ static inline void atomic_inc(atomic_t *v)
 {
 	__asm__ __volatile__("addql #1,%0" : "+m" (*v));
 }
+#define atomic_inc atomic_inc

 static inline void atomic_dec(atomic_t *v)
 {
 	__asm__ __volatile__("subql #1,%0" : "+m" (*v));
 }
+#define atomic_dec atomic_dec

 static inline int atomic_dec_and_test(atomic_t *v)
 {
@@ -192,9 +194,6 @@ static inline int atomic_xchg(atomic_t *v, int new)

 #endif /* !CONFIG_RMW_INSNS */

-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-
 static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
 	char c;
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -274,31 +274,12 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-
 /*
  * atomic_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
  */
 #define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

-/*
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic_inc(v) atomic_add(1, (v))
-
-/*
- * atomic_dec - decrement and test
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic_dec(v) atomic_sub(1, (v))
-
 #ifdef CONFIG_64BIT

 #define ATOMIC64_INIT(i) { (i) }
@@ -554,31 +535,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

-#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1, (v))
-
 /*
  * atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic64_t
  */
 #define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

-/*
- * atomic64_inc - increment atomic variable
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic64_inc(v) atomic64_add(1, (v))
-
-/*
- * atomic64_dec - decrement and test
- * @v: pointer of type atomic64_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic64_dec(v) atomic64_sub(1, (v))
-
 #endif /* CONFIG_64BIT */

 #endif /* _ASM_ATOMIC_H */
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -136,12 +136,6 @@ ATOMIC_OPS(xor, ^=)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP

-#define atomic_inc(v) (atomic_add( 1,(v)))
-#define atomic_dec(v) (atomic_add( -1,(v)))
-
-#define atomic_inc_return(v) (atomic_add_return( 1,(v)))
-#define atomic_dec_return(v) (atomic_add_return( -1,(v)))
-
 #define ATOMIC_INIT(i) { (i) }

 #ifdef CONFIG_64BIT
@@ -224,12 +218,6 @@ atomic64_read(const atomic64_t *v)
 	return READ_ONCE((v)->counter);
 }

-#define atomic64_inc(v) (atomic64_add( 1,(v)))
-#define atomic64_dec(v) (atomic64_add( -1,(v)))
-
-#define atomic64_inc_return(v) (atomic64_add_return( 1,(v)))
-#define atomic64_dec_return(v) (atomic64_add_return( -1,(v)))
-
 /* exported interface */
 #define atomic64_cmpxchg(v, o, n) \
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -143,6 +143,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 	: "r" (&v->counter)
 	: "cc", "xer");
 }
+#define atomic_inc atomic_inc

 static __inline__ int atomic_inc_return_relaxed(atomic_t *v)
 {
@@ -175,6 +176,7 @@ static __inline__ void atomic_dec(atomic_t *v)
 	: "r" (&v->counter)
 	: "cc", "xer");
 }
+#define atomic_dec atomic_dec

 static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 {
@@ -411,6 +413,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 	: "r" (&v->counter)
 	: "cc", "xer");
 }
+#define atomic64_inc atomic64_inc

 static __inline__ long atomic64_inc_return_relaxed(atomic64_t *v)
 {
@@ -441,6 +444,7 @@ static __inline__ void atomic64_dec(atomic64_t *v)
 	: "r" (&v->counter)
 	: "cc", "xer");
 }
+#define atomic64_dec atomic64_dec

 static __inline__ long atomic64_dec_return_relaxed(atomic64_t *v)
 {
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -209,82 +209,6 @@ ATOMIC_OPS(xor, xor, i)
 #undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN

-#define ATOMIC_OP(op, func_op, I, c_type, prefix) \
-static __always_inline \
-void atomic##prefix##_##op(atomic##prefix##_t *v) \
-{ \
-	atomic##prefix##_##func_op(I, v); \
-}
-
-#define ATOMIC_FETCH_OP(op, func_op, I, c_type, prefix) \
-static __always_inline \
-c_type atomic##prefix##_fetch_##op##_relaxed(atomic##prefix##_t *v) \
-{ \
-	return atomic##prefix##_fetch_##func_op##_relaxed(I, v); \
-} \
-static __always_inline \
-c_type atomic##prefix##_fetch_##op(atomic##prefix##_t *v) \
-{ \
-	return atomic##prefix##_fetch_##func_op(I, v); \
-}
-
-#define ATOMIC_OP_RETURN(op, asm_op, c_op, I, c_type, prefix) \
-static __always_inline \
-c_type atomic##prefix##_##op##_return_relaxed(atomic##prefix##_t *v) \
-{ \
-	return atomic##prefix##_fetch_##op##_relaxed(v) c_op I; \
-} \
-static __always_inline \
-c_type atomic##prefix##_##op##_return(atomic##prefix##_t *v) \
-{ \
-	return atomic##prefix##_fetch_##op(v) c_op I; \
-}
-
-#ifdef CONFIG_GENERIC_ATOMIC64
-#define ATOMIC_OPS(op, asm_op, c_op, I) \
-	ATOMIC_OP( op, asm_op, I, int, ) \
-	ATOMIC_FETCH_OP( op, asm_op, I, int, ) \
-	ATOMIC_OP_RETURN(op, asm_op, c_op, I, int, )
-#else
-#define ATOMIC_OPS(op, asm_op, c_op, I) \
-	ATOMIC_OP( op, asm_op, I, int, ) \
-	ATOMIC_FETCH_OP( op, asm_op, I, int, ) \
-	ATOMIC_OP_RETURN(op, asm_op, c_op, I, int, ) \
-	ATOMIC_OP( op, asm_op, I, long, 64) \
-	ATOMIC_FETCH_OP( op, asm_op, I, long, 64) \
-	ATOMIC_OP_RETURN(op, asm_op, c_op, I, long, 64)
-#endif
-
-ATOMIC_OPS(inc, add, +, 1)
-ATOMIC_OPS(dec, add, +, -1)
-
-#define atomic_inc_return_relaxed atomic_inc_return_relaxed
-#define atomic_dec_return_relaxed atomic_dec_return_relaxed
-#define atomic_inc_return atomic_inc_return
-#define atomic_dec_return atomic_dec_return
-
-#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
-#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
-#define atomic_fetch_inc atomic_fetch_inc
-#define atomic_fetch_dec atomic_fetch_dec
-
-#ifndef CONFIG_GENERIC_ATOMIC64
-#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
-#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
-#define atomic64_inc_return atomic64_inc_return
-#define atomic64_dec_return atomic64_dec_return
-
-#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
-#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
-#define atomic64_fetch_inc atomic64_fetch_inc
-#define atomic64_fetch_dec atomic64_fetch_dec
-#endif
-
-#undef ATOMIC_OPS
-#undef ATOMIC_OP
-#undef ATOMIC_FETCH_OP
-#undef ATOMIC_OP_RETURN
-
 /* This is required to provide a full barrier on success. */
 static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -55,13 +55,9 @@ static inline void atomic_add(int i, atomic_t *v)
 	__atomic_add(i, &v->counter);
 }

-#define atomic_inc(_v) atomic_add(1, _v)
-#define atomic_inc_return(_v) atomic_add_return(1, _v)
 #define atomic_sub(_i, _v) atomic_add(-(int)(_i), _v)
 #define atomic_sub_return(_i, _v) atomic_add_return(-(int)(_i), _v)
 #define atomic_fetch_sub(_i, _v) atomic_fetch_add(-(int)(_i), _v)
-#define atomic_dec(_v) atomic_sub(1, _v)
-#define atomic_dec_return(_v) atomic_sub_return(1, _v)

 #define ATOMIC_OPS(op) \
 static inline void atomic_##op(int i, atomic_t *v) \
@@ -166,12 +162,8 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }

-#define atomic64_inc(_v) atomic64_add(1, _v)
-#define atomic64_inc_return(_v) atomic64_add_return(1, _v)
 #define atomic64_sub_return(_i, _v) atomic64_add_return(-(long)(_i), _v)
 #define atomic64_fetch_sub(_i, _v) atomic64_fetch_add(-(long)(_i), _v)
 #define atomic64_sub(_i, _v) atomic64_add(-(long)(_i), _v)
-#define atomic64_dec(_v) atomic64_sub(1, _v)
-#define atomic64_dec_return(_v) atomic64_sub_return(1, _v)

 #endif /* __ARCH_S390_ATOMIC__ */
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -32,12 +32,6 @@
 #include <asm/atomic-irq.h>
 #endif

-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-
-#define atomic_inc(v) atomic_add(1, (v))
-#define atomic_dec(v) atomic_sub(1, (v))
-
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))

--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -38,8 +38,6 @@ void atomic_set(atomic_t *, int);

 #define atomic_add(i, v) ((void)atomic_add_return( (int)(i), (v)))
 #define atomic_sub(i, v) ((void)atomic_add_return(-(int)(i), (v)))
-#define atomic_inc(v) ((void)atomic_add_return( 1, (v)))
-#define atomic_dec(v) ((void)atomic_add_return( -1, (v)))

 #define atomic_and(i, v) ((void)atomic_fetch_and((i), (v)))
 #define atomic_or(i, v) ((void)atomic_fetch_or((i), (v)))
@@ -48,7 +46,4 @@ void atomic_set(atomic_t *, int);
 #define atomic_sub_return(i, v) (atomic_add_return(-(int)(i), (v)))
 #define atomic_fetch_sub(i, v) (atomic_fetch_add (-(int)(i), (v)))

-#define atomic_inc_return(v) (atomic_add_return( 1, (v)))
-#define atomic_dec_return(v) (atomic_add_return( -1, (v)))
-
 #endif /* !(__ARCH_SPARC_ATOMIC__) */
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -50,18 +50,6 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP

-#define atomic_dec_return(v) atomic_sub_return(1, v)
-#define atomic64_dec_return(v) atomic64_sub_return(1, v)
-
-#define atomic_inc_return(v) atomic_add_return(1, v)
-#define atomic64_inc_return(v) atomic64_add_return(1, v)
-
-#define atomic_inc(v) atomic_add(1, v)
-#define atomic64_inc(v) atomic64_add(1, v)
-
-#define atomic_dec(v) atomic_sub(1, v)
-#define atomic64_dec(v) atomic64_sub(1, v)
-
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))

 static inline int atomic_xchg(atomic_t *v, int new)
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -92,6 +92,7 @@ static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
  *
  * Atomically increments @v by 1.
  */
+#define arch_atomic_inc arch_atomic_inc
 static __always_inline void arch_atomic_inc(atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "incl %0"
@@ -104,6 +105,7 @@ static __always_inline void arch_atomic_inc(atomic_t *v)
  *
  * Atomically decrements @v by 1.
  */
+#define arch_atomic_dec arch_atomic_dec
 static __always_inline void arch_atomic_dec(atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "decl %0"
@@ -177,9 +179,6 @@ static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
 	return arch_atomic_add_return(-i, v);
 }

-#define arch_atomic_inc_return(v) (arch_atomic_add_return(1, v))
-#define arch_atomic_dec_return(v) (arch_atomic_sub_return(1, v))
-
 static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
 {
 	return xadd(&v->counter, i);
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -158,6 +158,7 @@ static inline long long arch_atomic64_inc_return(atomic64_t *v)
 		     "S" (v) : "memory", "ecx");
 	return a;
 }
+#define arch_atomic64_inc_return arch_atomic64_inc_return

 static inline long long arch_atomic64_dec_return(atomic64_t *v)
 {
@@ -166,6 +167,7 @@ static inline long long arch_atomic64_dec_return(atomic64_t *v)
 		     "S" (v) : "memory", "ecx");
 	return a;
 }
+#define arch_atomic64_dec_return arch_atomic64_dec_return

 /**
  * arch_atomic64_add - add integer to atomic64 variable
@@ -203,6 +205,7 @@ static inline long long arch_atomic64_sub(long long i, atomic64_t *v)
  *
  * Atomically increments @v by 1.
  */
+#define arch_atomic64_inc arch_atomic64_inc
 static inline void arch_atomic64_inc(atomic64_t *v)
 {
 	__alternative_atomic64(inc, inc_return, /* no output */,
@@ -215,6 +218,7 @@ static inline void arch_atomic64_inc(atomic64_t *v)
  *
  * Atomically decrements @v by 1.
  */
+#define arch_atomic64_dec arch_atomic64_dec
 static inline void arch_atomic64_dec(atomic64_t *v)
 {
 	__alternative_atomic64(dec, dec_return, /* no output */,
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -83,6 +83,7 @@ static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
  *
  * Atomically increments @v by 1.
  */
+#define arch_atomic64_inc arch_atomic64_inc
 static __always_inline void arch_atomic64_inc(atomic64_t *v)
 {
 	asm volatile(LOCK_PREFIX "incq %0"
@@ -96,6 +97,7 @@ static __always_inline void arch_atomic64_inc(atomic64_t *v)
  *
  * Atomically decrements @v by 1.
  */
+#define arch_atomic64_dec arch_atomic64_dec
 static __always_inline void arch_atomic64_dec(atomic64_t *v)
 {
 	asm volatile(LOCK_PREFIX "decq %0"
@@ -173,9 +175,6 @@ static inline long arch_atomic64_fetch_sub(long i, atomic64_t *v)
 	return xadd(&v->counter, -i);
 }

-#define arch_atomic64_inc_return(v) (arch_atomic64_add_return(1, (v)))
-#define arch_atomic64_dec_return(v) (arch_atomic64_sub_return(1, (v)))
-
 static inline long arch_atomic64_cmpxchg(atomic64_t *v, long old, long new)
 {
 	return arch_cmpxchg(&v->counter, old, new);
--- a/arch/xtensa/include/asm/atomic.h
+++ b/arch/xtensa/include/asm/atomic.h
@@ -197,38 +197,6 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP

-/**
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic_inc(v) atomic_add(1,(v))
-
-/**
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic_inc_return(v) atomic_add_return(1,(v))
-
-/**
- * atomic_dec - decrement atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic_dec(v) atomic_sub(1,(v))
-
-/**
- * atomic_dec_return - decrement atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic_dec_return(v) atomic_sub_return(1,(v))
-
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))

--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -102,29 +102,41 @@ static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u
 }
 #endif

+#ifdef arch_atomic_inc
+#define atomic_inc atomic_inc
 static __always_inline void atomic_inc(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic_inc(v);
 }
+#endif

+#ifdef arch_atomic64_inc
+#define atomic64_inc atomic64_inc
 static __always_inline void atomic64_inc(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_inc(v);
 }
+#endif

+#ifdef arch_atomic_dec
+#define atomic_dec atomic_dec
 static __always_inline void atomic_dec(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic_dec(v);
 }
+#endif

+#ifdef arch_atomic64_dec
+#define atomic64_dec atomic64_dec
 static __always_inline void atomic64_dec(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_dec(v);
 }
+#endif

 static __always_inline void atomic_add(int i, atomic_t *v)
 {
@@ -186,29 +198,41 @@ static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
 	arch_atomic64_xor(i, v);
 }

+#ifdef arch_atomic_inc_return
+#define atomic_inc_return atomic_inc_return
 static __always_inline int atomic_inc_return(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_inc_return(v);
 }
+#endif

+#ifdef arch_atomic64_inc_return
+#define atomic64_inc_return atomic64_inc_return
 static __always_inline s64 atomic64_inc_return(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_return(v);
 }
+#endif

+#ifdef arch_atomic_dec_return
+#define atomic_dec_return atomic_dec_return
 static __always_inline int atomic_dec_return(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_dec_return(v);
 }
+#endif

+#ifdef arch_atomic64_dec_return
+#define atomic64_dec_return atomic64_dec_return
 static __always_inline s64 atomic64_dec_return(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_dec_return(v);
 }
+#endif

 #ifdef arch_atomic64_inc_not_zero
 #define atomic64_inc_not_zero atomic64_inc_not_zero
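The instrumented header above now follows the same convention as the arch headers: each KASAN-checking wrapper is compiled only when the corresponding arch_ op exists, and it defines its own name so the generic fallbacks in include/linux/atomic.h stay out of the way. A compilable sketch of that guard structure, with a stubbed kasan_check_write() and a hypothetical arch op standing in for the real kernel machinery:

#include <stddef.h>
#include <stdio.h>

typedef struct { int counter; } atomic_t;

/* Stub standing in for the real KASAN write check. */
static inline void kasan_check_write(const volatile void *p, size_t size)
{
	(void)p;
	(void)size;
}

/* Pretend this architecture provides its own increment. */
static inline void arch_atomic_inc(atomic_t *v)
{
	__atomic_fetch_add(&v->counter, 1, __ATOMIC_RELAXED);
}
#define arch_atomic_inc arch_atomic_inc

/* The instrumented wrapper exists only if the arch op does, and it
 * advertises its own name so no generic fallback is generated later. */
#ifdef arch_atomic_inc
#define atomic_inc atomic_inc
static inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#endif

int main(void)
{
	atomic_t v = { 0 };
	atomic_inc(&v);
	printf("%d\n", v.counter);
	return 0;
}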
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -196,19 +196,6 @@ static inline void atomic_sub(int i, atomic_t *v)
 	atomic_sub_return(i, v);
 }

-static inline void atomic_inc(atomic_t *v)
-{
-	atomic_add_return(1, v);
-}
-
-static inline void atomic_dec(atomic_t *v)
-{
-	atomic_sub_return(1, v);
-}
-
-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-
 #define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))

--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -56,9 +56,4 @@ extern long long atomic64_xchg(atomic64_t *v, long long new);
 extern long long atomic64_fetch_add_unless(atomic64_t *v, long long a, long long u);
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless

-#define atomic64_inc(v) atomic64_add(1LL, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
-#define atomic64_dec(v) atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
-
 #endif /* _ASM_GENERIC_ATOMIC64_H */
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -97,11 +97,23 @@
 #endif
 #endif /* atomic_add_return_relaxed */

+#ifndef atomic_inc
+#define atomic_inc(v) atomic_add(1, (v))
+#endif
+
 /* atomic_inc_return_relaxed */
 #ifndef atomic_inc_return_relaxed

+#ifndef atomic_inc_return
+#define atomic_inc_return(v) atomic_add_return(1, (v))
+#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
+#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
+#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
+#else /* atomic_inc_return */
 #define atomic_inc_return_relaxed atomic_inc_return
 #define atomic_inc_return_acquire atomic_inc_return
 #define atomic_inc_return_release atomic_inc_return
+#endif /* atomic_inc_return */

 #else /* atomic_inc_return_relaxed */

@@ -145,11 +157,23 @@
 #endif
 #endif /* atomic_sub_return_relaxed */

+#ifndef atomic_dec
+#define atomic_dec(v) atomic_sub(1, (v))
+#endif
+
 /* atomic_dec_return_relaxed */
 #ifndef atomic_dec_return_relaxed

+#ifndef atomic_dec_return
+#define atomic_dec_return(v) atomic_sub_return(1, (v))
+#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
+#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
+#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
+#else /* atomic_dec_return */
 #define atomic_dec_return_relaxed atomic_dec_return
 #define atomic_dec_return_acquire atomic_dec_return
 #define atomic_dec_return_release atomic_dec_return
+#endif /* atomic_dec_return */

 #else /* atomic_dec_return_relaxed */

@@ -748,11 +772,23 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_add_return_relaxed */

+#ifndef atomic64_inc
+#define atomic64_inc(v) atomic64_add(1, (v))
+#endif
+
 /* atomic64_inc_return_relaxed */
 #ifndef atomic64_inc_return_relaxed

+#ifndef atomic64_inc_return
+#define atomic64_inc_return(v) atomic64_add_return(1, (v))
+#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
+#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
+#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
+#else /* atomic64_inc_return */
 #define atomic64_inc_return_relaxed atomic64_inc_return
 #define atomic64_inc_return_acquire atomic64_inc_return
 #define atomic64_inc_return_release atomic64_inc_return
+#endif /* atomic64_inc_return */

 #else /* atomic64_inc_return_relaxed */

@@ -797,11 +833,23 @@
 #endif
 #endif /* atomic64_sub_return_relaxed */

+#ifndef atomic64_dec
+#define atomic64_dec(v) atomic64_sub(1, (v))
+#endif
+
 /* atomic64_dec_return_relaxed */
 #ifndef atomic64_dec_return_relaxed

+#ifndef atomic64_dec_return
+#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
+#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
+#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
+#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
+#else /* atomic64_dec_return */
 #define atomic64_dec_return_relaxed atomic64_dec_return
 #define atomic64_dec_return_acquire atomic64_dec_return
 #define atomic64_dec_return_release atomic64_dec_return
+#endif /* atomic64_dec_return */

 #else /* atomic64_dec_return_relaxed */

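One step in the include/linux/atomic.h hunks above deserves a comment: when an architecture provides only a fully ordered atomic_inc_return, the _relaxed/_acquire/_release names are aliased straight to it. That is sound because a fully ordered op satisfies every weaker ordering contract; callers asking for less may simply get more. A runnable illustration of the two fallback levels using GCC's __atomic builtins (toy code, not the kernel header):

#include <stdio.h>

typedef struct { int counter; } atomic_t;

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_fetch(&v->counter, i, __ATOMIC_SEQ_CST);
}

static inline int atomic_add_return_relaxed(int i, atomic_t *v)
{
	return __atomic_add_fetch(&v->counter, i, __ATOMIC_RELAXED);
}

/* Level 1: the arch gave us no atomic_inc_return at all, so every
 * ordering variant is derived from the matching add_return variant. */
#ifndef atomic_inc_return
#define atomic_inc_return(v) atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
#else
/* Level 2: only the fully ordered op exists; aliasing the weaker
 * names to it is safe because full ordering implies the rest. */
#define atomic_inc_return_relaxed atomic_inc_return
#endif

int main(void)
{
	atomic_t v = { 5 };
	printf("%d\n", atomic_inc_return(&v));         /* 6 */
	printf("%d\n", atomic_inc_return_relaxed(&v)); /* 7 */
	return 0;
}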