// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

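/*
 * Fallback pattern used throughout this file: each operation only requires
 * an architecture-provided _relaxed form. The ordered variants are built by
 * bracketing the relaxed op with the appropriate fences:
 *
 *	acquire: ret = op_relaxed(...); __atomic_acquire_fence(); return ret;
 *	release: __atomic_release_fence(); return op_relaxed(...);
 *	full:    __atomic_pre_full_fence(); op_relaxed(...); __atomic_post_full_fence();
 *
 * Conversely, if an architecture only provides the fully-ordered op, the
 * acquire/release/relaxed names simply alias it, since stronger ordering
 * always satisfies weaker ordering.
 */
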
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */

#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */

#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */

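/*
 * The __atomic_op_acquire()/__atomic_op_release()/__atomic_op_fence()
 * helpers used above come from <linux/atomic.h>. As a sketch,
 * __atomic_op_acquire(op, args...) expands to roughly:
 *
 *	({
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);
 *		__atomic_acquire_fence();
 *		__ret;
 *	})
 *
 * i.e. the same relaxed-op-plus-fence construction that is spelled out by
 * hand for the atomic_t operations below.
 */
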
#define arch_atomic_read atomic_read
#define arch_atomic_read_acquire atomic_read_acquire

#ifndef atomic_read_acquire
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic_read_acquire atomic_read_acquire
#endif

#define arch_atomic_set atomic_set
#define arch_atomic_set_release atomic_set_release

#ifndef atomic_set_release
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic_set_release atomic_set_release
#endif

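/*
 * Illustrative use of the acquire/release pair above (a sketch, not part of
 * this header): a producer can publish data with atomic_set_release() and a
 * consumer can observe it with atomic_read_acquire():
 *
 *	// producer:			// consumer:
 *	data = 42;			while (!atomic_read_acquire(&ready))
 *	atomic_set_release(&ready, 1);		cpu_relax();
 *					// data is guaranteed to be 42 here
 *
 * The release store orders the write to data before the flag update; the
 * acquire load orders the flag read before any subsequent reads.
 */
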
#define arch_atomic_add atomic_add

#define arch_atomic_add_return atomic_add_return
#define arch_atomic_add_return_acquire atomic_add_return_acquire
#define arch_atomic_add_return_release atomic_add_return_release
#define arch_atomic_add_return_relaxed atomic_add_return_relaxed

#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */

#define arch_atomic_fetch_add atomic_fetch_add
#define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release atomic_fetch_add_release
#define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed

#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */

#define arch_atomic_sub atomic_sub

#define arch_atomic_sub_return atomic_sub_return
#define arch_atomic_sub_return_acquire atomic_sub_return_acquire
#define arch_atomic_sub_return_release atomic_sub_return_release
#define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed

#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */

#define arch_atomic_fetch_sub atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release atomic_fetch_sub_release
#define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed

#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */

#define arch_atomic_inc atomic_inc

#ifndef atomic_inc
static __always_inline void
atomic_inc(atomic_t *v)
{
	atomic_add(1, v);
}
#define atomic_inc atomic_inc
#endif

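/*
 * For derived ops such as inc_return below, the fallback is two-level: if
 * the architecture provides no _relaxed form but does provide the plain op,
 * all four orderings alias the (fully-ordered) plain op; whatever is still
 * missing after that is built from the corresponding add/sub primitive.
 */
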
#define arch_atomic_inc_return atomic_inc_return
#define arch_atomic_inc_return_acquire atomic_inc_return_acquire
#define arch_atomic_inc_return_release atomic_inc_return_release
#define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed

#ifndef atomic_inc_return_relaxed
#ifdef atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#define atomic_inc_return_relaxed atomic_inc_return
#endif /* atomic_inc_return */

#ifndef atomic_inc_return
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	return atomic_add_return(1, v);
}
#define atomic_inc_return atomic_inc_return
#endif

#ifndef atomic_inc_return_acquire
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	return atomic_add_return_acquire(1, v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	return atomic_add_return_release(1, v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return_relaxed
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	return atomic_add_return_relaxed(1, v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	int ret = atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_inc_return atomic_inc_return
#endif

#endif /* atomic_inc_return_relaxed */

#define arch_atomic_fetch_inc atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#define arch_atomic_fetch_inc_release atomic_fetch_inc_release
#define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc_relaxed
#ifdef atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#endif /* atomic_fetch_inc */

#ifndef atomic_fetch_inc
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	return atomic_fetch_add(1, v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#ifndef atomic_fetch_inc_acquire
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	return atomic_fetch_add_acquire(1, v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	return atomic_fetch_add_release(1, v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc_relaxed
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	return atomic_fetch_add_relaxed(1, v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#endif /* atomic_fetch_inc_relaxed */

#define arch_atomic_dec atomic_dec

#ifndef atomic_dec
static __always_inline void
atomic_dec(atomic_t *v)
{
	atomic_sub(1, v);
}
#define atomic_dec atomic_dec
#endif

#define arch_atomic_dec_return atomic_dec_return
#define arch_atomic_dec_return_acquire atomic_dec_return_acquire
#define arch_atomic_dec_return_release atomic_dec_return_release
#define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed

#ifndef atomic_dec_return_relaxed
#ifdef atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#define atomic_dec_return_relaxed atomic_dec_return
#endif /* atomic_dec_return */

#ifndef atomic_dec_return
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	return atomic_sub_return(1, v);
}
#define atomic_dec_return atomic_dec_return
#endif

#ifndef atomic_dec_return_acquire
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	return atomic_sub_return_acquire(1, v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	return atomic_sub_return_release(1, v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return_relaxed
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	return atomic_sub_return_relaxed(1, v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	int ret = atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_dec_return atomic_dec_return
#endif

#endif /* atomic_dec_return_relaxed */

#define arch_atomic_fetch_dec atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#define arch_atomic_fetch_dec_release atomic_fetch_dec_release
#define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec_relaxed
#ifdef atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#endif /* atomic_fetch_dec */

#ifndef atomic_fetch_dec
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	return atomic_fetch_sub(1, v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#ifndef atomic_fetch_dec_acquire
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	return atomic_fetch_sub_acquire(1, v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	return atomic_fetch_sub_release(1, v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec_relaxed
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	return atomic_fetch_sub_relaxed(1, v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#endif /* atomic_fetch_dec_relaxed */

#define arch_atomic_and atomic_and

#define arch_atomic_fetch_and atomic_fetch_and
#define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release atomic_fetch_and_release
#define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed

#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */

#define arch_atomic_andnot atomic_andnot

#ifndef atomic_andnot
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
#define atomic_andnot atomic_andnot
#endif

#define arch_atomic_fetch_andnot atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
#define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed

#ifndef atomic_fetch_andnot_relaxed
#ifdef atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#ifndef atomic_fetch_andnot
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#ifndef atomic_fetch_andnot_acquire
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot_relaxed
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#endif /* atomic_fetch_andnot_relaxed */

#define arch_atomic_or atomic_or

#define arch_atomic_fetch_or atomic_fetch_or
#define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release atomic_fetch_or_release
#define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed

#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */

#define arch_atomic_xor atomic_xor

#define arch_atomic_fetch_xor atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release atomic_fetch_xor_release
#define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed

#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */

#define arch_atomic_xchg atomic_xchg
#define arch_atomic_xchg_acquire atomic_xchg_acquire
#define arch_atomic_xchg_release atomic_xchg_release
#define arch_atomic_xchg_relaxed atomic_xchg_relaxed

#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */

#define arch_atomic_cmpxchg atomic_cmpxchg
#define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#define arch_atomic_cmpxchg_release atomic_cmpxchg_release
#define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed

#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */

#define arch_atomic_try_cmpxchg atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed

#ifndef atomic_try_cmpxchg_relaxed
#ifdef atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg_relaxed
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#else /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#endif /* atomic_try_cmpxchg_relaxed */

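/*
 * Illustrative use of atomic_try_cmpxchg() (a sketch, not part of this
 * header): because it updates *old on failure, compare-and-swap loops avoid
 * re-reading the variable on every iteration, unlike an atomic_cmpxchg()
 * loop:
 *
 *	int new, old = atomic_read(&v);
 *
 *	do {
 *		new = old + 1;		// or any computation on old
 *	} while (!atomic_try_cmpxchg(&v, &old, new));
 *
 * The fallbacks below (fetch_add_unless, inc_unless_negative, ...) all use
 * this idiom.
 */
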
#define arch_atomic_sub_and_test atomic_sub_and_test

#ifndef atomic_sub_and_test
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#define arch_atomic_dec_and_test atomic_dec_and_test

#ifndef atomic_dec_and_test
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#define arch_atomic_inc_and_test atomic_inc_and_test

#ifndef atomic_inc_and_test
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#define arch_atomic_add_negative atomic_add_negative

#ifndef atomic_add_negative
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#define atomic_add_negative atomic_add_negative
#endif

#define arch_atomic_fetch_add_unless atomic_fetch_add_unless

#ifndef atomic_fetch_add_unless
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#define arch_atomic_add_unless atomic_add_unless

#ifndef atomic_add_unless
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}
#define atomic_add_unless atomic_add_unless
#endif

#define arch_atomic_inc_not_zero atomic_inc_not_zero

#ifndef atomic_inc_not_zero
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	return atomic_add_unless(v, 1, 0);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#define arch_atomic_inc_unless_negative atomic_inc_unless_negative

#ifndef atomic_inc_unless_negative
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#define arch_atomic_dec_unless_positive atomic_dec_unless_positive

#ifndef atomic_dec_unless_positive
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#define arch_atomic_dec_if_positive atomic_dec_if_positive

#ifndef atomic_dec_if_positive
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif

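/*
 * Note on the fallback above: atomic_dec_if_positive() returns the new
 * value when the decrement was performed, and a negative value (the old
 * value minus one) when @v was not positive and nothing was changed.
 */
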
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

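/*
 * The atomic64_t fallbacks below mirror the atomic_t ones above one-for-one,
 * with int replaced by s64; the ordering construction is identical.
 */
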
#define arch_atomic64_read atomic64_read
#define arch_atomic64_read_acquire atomic64_read_acquire

#ifndef atomic64_read_acquire
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

#define arch_atomic64_set atomic64_set
#define arch_atomic64_set_release atomic64_set_release

#ifndef atomic64_set_release
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic64_set_release atomic64_set_release
#endif

#define arch_atomic64_add atomic64_add

#define arch_atomic64_add_return atomic64_add_return
#define arch_atomic64_add_return_acquire atomic64_add_return_acquire
#define arch_atomic64_add_return_release atomic64_add_return_release
#define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed

#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */

#define arch_atomic64_fetch_add atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release atomic64_fetch_add_release
#define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed

#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */

#define arch_atomic64_sub atomic64_sub

#define arch_atomic64_sub_return atomic64_sub_return
#define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release atomic64_sub_return_release
#define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed

#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */

#define arch_atomic64_fetch_sub atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */

#define arch_atomic64_inc atomic64_inc

#ifndef atomic64_inc
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	atomic64_add(1, v);
}
#define atomic64_inc atomic64_inc
#endif

#define arch_atomic64_inc_return atomic64_inc_return
#define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
#define arch_atomic64_inc_return_release atomic64_inc_return_release
#define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed

#ifndef atomic64_inc_return_relaxed
#ifdef atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#define atomic64_inc_return_relaxed atomic64_inc_return
#endif /* atomic64_inc_return */

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	return atomic64_add_return(1, v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	return atomic64_add_return_acquire(1, v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	return atomic64_add_return_release(1, v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return_relaxed
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	return atomic64_add_return_relaxed(1, v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_inc_return atomic64_inc_return
#endif

#endif /* atomic64_inc_return_relaxed */

#define arch_atomic64_fetch_inc atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
#define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc_relaxed
#ifdef atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	return atomic64_fetch_add(1, v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return atomic64_fetch_add_acquire(1, v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	return atomic64_fetch_add_release(1, v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc_relaxed
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return atomic64_fetch_add_relaxed(1, v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#endif /* atomic64_fetch_inc_relaxed */

#define arch_atomic64_dec atomic64_dec

#ifndef atomic64_dec
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	atomic64_sub(1, v);
}
#define atomic64_dec atomic64_dec
#endif

#define arch_atomic64_dec_return atomic64_dec_return
#define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
#define arch_atomic64_dec_return_release atomic64_dec_return_release
#define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed

#ifndef atomic64_dec_return_relaxed
#ifdef atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#define atomic64_dec_return_relaxed atomic64_dec_return
#endif /* atomic64_dec_return */

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	return atomic64_sub_return(1, v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	return atomic64_sub_return_acquire(1, v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	return atomic64_sub_return_release(1, v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return_relaxed
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	return atomic64_sub_return_relaxed(1, v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_dec_return atomic64_dec_return
#endif

#endif /* atomic64_dec_return_relaxed */

#define arch_atomic64_fetch_dec atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
#define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec_relaxed
#ifdef atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	return atomic64_fetch_sub(1, v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return atomic64_fetch_sub_acquire(1, v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	return atomic64_fetch_sub_release(1, v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec_relaxed
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return atomic64_fetch_sub_relaxed(1, v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#endif /* atomic64_fetch_dec_relaxed */

#define arch_atomic64_and atomic64_and

#define arch_atomic64_fetch_and atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release atomic64_fetch_and_release
#define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed

#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */

#define arch_atomic64_andnot atomic64_andnot
|
|
|
|
#ifndef atomic64_andnot
|
|
static __always_inline void
|
|
atomic64_andnot(s64 i, atomic64_t *v)
|
|
{
|
|
atomic64_and(~i, v);
|
|
}
|
|
#define atomic64_andnot atomic64_andnot
|
|
#endif

#define arch_atomic64_fetch_andnot atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */
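/*
 * The block above also shows the other half of the generation scheme:
 * when the architecture provides only a fully-ordered
 * atomic64_fetch_andnot(), that single implementation is reused for the
 * _acquire/_release/_relaxed names; when it provides a _relaxed form,
 * the ordered variants are built from it with explicit fences instead.
 */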

#define arch_atomic64_or atomic64_or

#define arch_atomic64_fetch_or atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release atomic64_fetch_or_release
#define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed

#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */

#define arch_atomic64_xor atomic64_xor

#define arch_atomic64_fetch_xor atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */

#define arch_atomic64_xchg atomic64_xchg
#define arch_atomic64_xchg_acquire atomic64_xchg_acquire
#define arch_atomic64_xchg_release atomic64_xchg_release
#define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed

#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
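/*
 * Illustrative only: atomic64_xchg() returns the value @v held before the
 * store, which allows e.g. draining a statistics counter in one atomic
 * step:
 *
 *	total += atomic64_xchg(&stat, 0);
 */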

#define arch_atomic64_cmpxchg atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
#define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed

#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */

#define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed

#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */
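/*
 * Illustrative only: the try_cmpxchg() forms are intended for update
 * loops.  On failure they have already written the value they observed
 * back through @old, so no separate re-read is needed (MASK stands in
 * for a caller-defined value):
 *
 *	s64 old = atomic64_read(&v);
 *
 *	do {
 *		new = old | MASK;
 *	} while (!atomic64_try_cmpxchg(&v, &old, new));
 */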

#define arch_atomic64_sub_and_test atomic64_sub_and_test

#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#define arch_atomic64_dec_and_test atomic64_dec_and_test

#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#define arch_atomic64_inc_and_test atomic64_inc_and_test

#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#define arch_atomic64_add_negative atomic64_add_negative

#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif

#define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless

#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif
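/*
 * atomic64_fetch_add_unless() is the building block for the conditional
 * operations that follow: since it returns the value it started from, a
 * caller can tell whether the add actually happened by comparing the
 * result against @u, which is exactly what atomic64_add_unless() does
 * below.
 */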

#define arch_atomic64_add_unless atomic64_add_unless

#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif

#define arch_atomic64_inc_not_zero atomic64_inc_not_zero

#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative

#ifndef atomic64_inc_unless_negative
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive

#ifndef atomic64_dec_unless_positive
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#define arch_atomic64_dec_if_positive atomic64_dec_if_positive

#ifndef atomic64_dec_if_positive
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
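/*
 * Note that atomic64_dec_if_positive() returns the new value on success.
 * When @v was not decremented (it was already <= 0), the return value is
 * the negative result the decrement would have produced, so a caller can
 * treat a return value >= 0 as success.
 */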

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// 9d95b56f98d82a2a26c7b79ccdd0c47572d50a6f