xref: /linux/arch/mips/include/asm/cmpxchg.h (revision e2be04c7f9958dde770eeb8b30e829ca969b37bb)
1 /*
2  * This file is subject to the terms and conditions of the GNU General Public
3  * License.  See the file "COPYING" in the main directory of this archive
4  * for more details.
5  *
6  * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
7  */
8 #ifndef __ASM_CMPXCHG_H
9 #define __ASM_CMPXCHG_H
10 
11 #include <linux/bug.h>
12 #include <linux/irqflags.h>
13 #include <asm/compiler.h>
14 #include <asm/war.h>
15 
16 /*
17  * Using a branch-likely instruction to check the result of an sc instruction
18  * works around a bug present in R10000 CPUs prior to revision 3.0 that could
19  * cause ll-sc sequences to execute non-atomically.
20  */
21 #if R10000_LLSC_WAR
22 # define __scbeqz "beqzl"
23 #else
24 # define __scbeqz "beqz"
25 #endif
26 
27 /*
28  * These functions doesn't exist, so if they are called you'll either:
29  *
30  * - Get an error at compile-time due to __compiletime_error, if supported by
31  *   your compiler.
32  *
33  * or:
34  *
35  * - Get an error at link-time due to the call to the missing function.
36  */
37 extern unsigned long __cmpxchg_called_with_bad_pointer(void)
38 	__compiletime_error("Bad argument size for cmpxchg");
39 extern unsigned long __xchg_called_with_bad_pointer(void)
40 	__compiletime_error("Bad argument size for xchg");
41 
/*
 * __xchg_asm() - atomically exchange the value in memory with a new one.
 * @ld:  load-linked mnemonic to use ("ll" for 32-bit, "lld" for 64-bit)
 * @st:  store-conditional mnemonic to use ("sc" or "scd")
 * @m:   pointer to the word being exchanged
 * @val: new value to store in *@m
 *
 * Evaluates to the value previously held in *@m.
 *
 * On CPUs with LL/SC the sequence loops until the store-conditional
 * succeeds; __scbeqz is "beqzl" on pre-rev-3.0 R10000 (see the workaround
 * comment at the top of this file) and "beqz" otherwise.  $at ($1) is used
 * as a scratch register for the new value, hence ".set noat".
 * NOTE(review): the ".set mips0" around the move appears to be a
 * historical assembler/ISA-level workaround — confirm before removing.
 *
 * Without LL/SC (kernel_uses_llsc false) the exchange falls back to a
 * plain read-modify-write under raw_local_irq_save(), which only provides
 * atomicity with respect to the local CPU.
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
72 
73 extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
74 				  unsigned int size);
75 
76 static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
77 				   int size)
78 {
79 	switch (size) {
80 	case 1:
81 	case 2:
82 		return __xchg_small(ptr, x, size);
83 
84 	case 4:
85 		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
86 
87 	case 8:
88 		if (!IS_ENABLED(CONFIG_64BIT))
89 			return __xchg_called_with_bad_pointer();
90 
91 		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
92 
93 	default:
94 		return __xchg_called_with_bad_pointer();
95 	}
96 }
97 
/*
 * xchg() - atomically exchange *@ptr with @x, returning the old value.
 *
 * Public entry point wrapping __xchg() with full memory ordering:
 * smp_mb__before_llsc() orders prior accesses before the exchange and
 * smp_llsc_mb() orders the exchange before subsequent accesses.
 */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
111 
/*
 * __cmpxchg_asm() - atomically compare *@m with @old and, if equal,
 * store @new.
 * @ld:  load-linked mnemonic ("ll" or "lld")
 * @st:  store-conditional mnemonic ("sc" or "scd")
 * @m:   pointer to the word being operated on
 * @old: value *@m must hold for the store to happen
 * @new: replacement value
 *
 * Evaluates to the value read from *@m: equal to @old on success,
 * the differing current value on failure.
 *
 * On LL/SC CPUs the loaded value is compared against @old; on mismatch the
 * branch to 2f aborts the attempt (the "move" that follows executes in the
 * branch delay slot but only clobbers the $at scratch register, which is
 * harmless).  A failed store-conditional retries from 1b via __scbeqz.
 * NOTE(review): as in __xchg_asm, the ".set mips0" around the move looks
 * like a historical ISA-level workaround — confirm before touching.
 *
 * Without LL/SC the compare-and-swap is done under raw_local_irq_save(),
 * which is only atomic with respect to the local CPU.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)		\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
145 
146 extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
147 				     unsigned long new, unsigned int size);
148 
149 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
150 				      unsigned long new, unsigned int size)
151 {
152 	switch (size) {
153 	case 1:
154 	case 2:
155 		return __cmpxchg_small(ptr, old, new, size);
156 
157 	case 4:
158 		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
159 				     (u32)old, new);
160 
161 	case 8:
162 		/* lld/scd are only available for MIPS64 */
163 		if (!IS_ENABLED(CONFIG_64BIT))
164 			return __cmpxchg_called_with_bad_pointer();
165 
166 		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
167 				     (u64)old, new);
168 
169 	default:
170 		return __cmpxchg_called_with_bad_pointer();
171 	}
172 }
173 
/*
 * cmpxchg_local() - compare-and-exchange without SMP memory barriers.
 *
 * The casts through __typeof__(*(ptr)) truncate/convert @old and @new to
 * the pointed-to type before widening to unsigned long for __cmpxchg().
 * Only atomic with respect to the local CPU's view; use cmpxchg() when
 * cross-CPU ordering is required.
 */
#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))
180 
/*
 * cmpxchg() - fully ordered compare-and-exchange.
 *
 * Wraps cmpxchg_local() with smp_mb__before_llsc()/smp_llsc_mb() so the
 * operation is ordered against accesses before and after it.  Returns the
 * value read from *@ptr (equal to @old iff the store happened).
 */
#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})
191 
/*
 * 64-bit compare-and-exchange.
 *
 * On 64-bit kernels these map straight onto cmpxchg()/cmpxchg_local(),
 * with a build-time check that the operand really is 8 bytes wide.
 * On 32-bit kernels there is no 64-bit LL/SC, so both fall back to the
 * generic irq-disabling implementation.
 * NOTE(review): the generic fallback is only atomic w.r.t. the local
 * CPU — callers on 32-bit SMP must not rely on cross-CPU atomicity here.
 */
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif
209 
210 #undef __scbeqz
211 
212 #endif /* __ASM_CMPXCHG_H */
213