xref: /linux/arch/s390/include/asm/barrier.h (revision e2be04c7f9958dde770eeb8b30e829ca969b37bb)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BARRIER "bcr 14,0\n"
#else
/* BCR with checkpoint synchronization on older machines */
#define __ASM_BARRIER "bcr 15,0\n"
#endif

#define mb() do {  asm volatile(__ASM_BARRIER : : : "memory"); } while (0)

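/*
 * Illustrative sketch only; the dev, desc and doorbell names below are
 * hypothetical. mb() is a mandatory barrier, so even on UP it can keep a
 * store to shared memory ahead of the MMIO write that makes the device go
 * and look at it:
 *
 *	desc->addr = dma_addr;		// publish work for the device
 *	mb();				// serialize before notifying it
 *	writel(1, dev->doorbell);	// device may now read desc->addr
 */
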
#define rmb()				barrier()
#define wmb()				barrier()
#define dma_rmb()			mb()
#define dma_wmb()			mb()
#define __smp_mb()			mb()
#define __smp_rmb()			rmb()
#define __smp_wmb()			wmb()

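/*
 * A minimal dma_wmb() sketch, again with hypothetical names: order the
 * descriptor payload write before the ownership flag that lets the device
 * consume it, both living in DMA-coherent memory:
 *
 *	desc->addr = dma_addr;		// fill in the descriptor first
 *	dma_wmb();			// then hand it over
 *	desc->status = DESC_OWNED_BY_DEVICE;
 */
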
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
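
/*
 * A minimal sketch of the release/acquire pairing that the generic
 * smp_store_release()/smp_load_acquire() wrappers build on top of these
 * macros; "data" and "ready" are hypothetical shared variables:
 *
 *	// producer				// consumer
 *	data = compute();			if (smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);			use(data);
 */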

#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
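
/*
 * A common pattern using the generic smp_mb__before_atomic() wrapper, shown
 * with a hypothetical refcounted object: the death mark must be visible
 * before the non-value-returning atomic op drops the reference:
 *
 *	obj->dead = 1;
 *	smp_mb__before_atomic();	// order the store above ...
 *	atomic_dec(&obj->ref_count);	// ... before the decrement
 */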

#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */