ia64: define __smp_xxx
This defines __smp_xxx barriers for ia64, for use by virtualization. The smp_xxx barriers are removed, as they are defined correctly by asm-generic/barrier.h. This reduces the amount of arch-specific boilerplate code.

Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Tony Luck <tony.luck@intel.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
commit eebd1b9278
parent 27f6cabc0e
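For context, a minimal sketch of the layering this change targets, assuming the asm-generic/barrier.h pattern from the same series (simplified, not the verbatim header): the architecture supplies only the __smp_xxx primitives, and the generic header derives both the smp_xxx and the virtualization-facing virt_xxx variants from them.

/*
 * Simplified sketch of the asm-generic/barrier.h layering (an
 * approximation, not the verbatim header): once ia64 defines
 * __smp_mb(), the generic code can build the rest on top of it.
 */
#ifdef CONFIG_SMP
#ifndef smp_mb
#define smp_mb()	__smp_mb()	/* real memory barrier on SMP kernels */
#endif
#else  /* !CONFIG_SMP */
#ifndef smp_mb
#define smp_mb()	barrier()	/* compiler barrier is enough on UP */
#endif
#endif

/* Virtualization: a guest must order against the host even on a UP kernel. */
#define virt_mb()	__smp_mb()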
@@ -42,28 +42,24 @@
 #define dma_rmb()	mb()
 #define dma_wmb()	mb()
 
-#ifdef CONFIG_SMP
-# define smp_mb()	mb()
-#else
-# define smp_mb()	barrier()
-#endif
+# define __smp_mb()	mb()
 
-#define smp_mb__before_atomic()	barrier()
-#define smp_mb__after_atomic()	barrier()
+#define __smp_mb__before_atomic()	barrier()
+#define __smp_mb__after_atomic()	barrier()
 
 /*
  * IA64 GCC turns volatile stores into st.rel and volatile loads into ld.acq no
  * need for asm trickery!
  */
 
-#define smp_store_release(p, v)					\
+#define __smp_store_release(p, v)				\
 do {								\
 	compiletime_assert_atomic_type(*p);			\
 	barrier();						\
 	WRITE_ONCE(*p, v);					\
 } while (0)
 
-#define smp_load_acquire(p)					\
+#define __smp_load_acquire(p)					\
 ({								\
 	typeof(*p) ___p1 = READ_ONCE(*p);			\
 	compiletime_assert_atomic_type(*p);			\
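As a usage note, a minimal producer/consumer sketch of the release/acquire pairing that __smp_store_release()/__smp_load_acquire() back on ia64 (illustrative only; struct msg, published and do_something() are hypothetical names):

/* Hypothetical example of release/acquire publication. */
extern void do_something(int val);

struct msg { int payload; };

static struct msg m;
static struct msg *published;		/* hypothetical shared pointer */

void producer(void)
{
	m.payload = 42;			/* plain store to the data */
	/* barrier() + WRITE_ONCE(); the volatile store becomes st.rel on ia64 */
	smp_store_release(&published, &m);
}

void consumer(void)
{
	/* READ_ONCE(); the volatile load becomes ld.acq on ia64 */
	struct msg *p = smp_load_acquire(&published);

	if (p)
		do_something(p->payload);	/* guaranteed to see payload == 42 */
}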