xref: /linux/arch/mips/lib/mips-atomic.c (revision fbc872c38c8fed31948c85683b5326ee5ab9fccc)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1999 Silicon Graphics
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
#include <asm/irqflags.h>
#include <asm/hazards.h>
#include <linux/compiler.h>
#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/stringify.h>

#if !defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_MIPSR6)

/*
 * When disabling interrupts (the old cli()) we have to insert nops to make
 * sure that the new value has actually arrived in the Status register
 * before the end of this function.  The R4000/R4400 need three nops, the
 * R4600 two, and the R10000 none at all; the __irq_disable_hazard barrier
 * from <asm/hazards.h> used below emits whatever the configured CPU needs.
 */
/*
 * For TX49, operating on only the IE bit is not enough.
 *
 * If an mfc0 of $12 (Status) follows a store, the mfc0 is the last
 * instruction of a page, and fetching the next instruction causes a TLB
 * miss, the result of the mfc0 might wrongly have the EXL bit set.
 *
 * ERT-TX49H2-027, ERT-TX49H3-012, ERT-TX49HL3-006, ERT-TX49H4-008
 *
 * Workaround: mask the EXL bit of the result, or place a nop before the
 * mfc0 (an illustrative sketch of the masking variant follows
 * arch_local_irq_disable() below).
 */
notrace void arch_local_irq_disable(void)
{
	preempt_disable();

	/*
	 * The ori/xori pair clears Status[4:0] (IE, EXL, ERL, KSU): IE goes
	 * off, and a spuriously set EXL (see the TX49 note above) is never
	 * written back.
	 */
	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noat						\n"
	"	mfc0	$1,$12						\n"
	"	ori	$1,0x1f						\n"
	"	xori	$1,0x1f						\n"
	"	.set	noreorder					\n"
	"	mtc0	$1,$12						\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: /* no outputs */
	: /* no inputs */
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_disable);
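
/*
 * Illustrative sketch only -- not used anywhere in this file.  The TX49
 * erratum note above offers a second workaround: mask the possibly bogus
 * EXL bit (Status bit 1) out of the value read by mfc0.  A minimal version
 * of that approach could look like the helper below; the function name is
 * made up for this example.
 */
static inline unsigned long __maybe_unused __tx49_read_status_masked(void)
{
	unsigned long status;

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	mfc0	%0, $12						\n"
	"	.set	pop						\n"
	: "=r" (status));

	return status & ~0x2UL;		/* drop a spuriously set EXL bit */
}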

notrace unsigned long arch_local_irq_save(void)
{
	unsigned long flags;

	preempt_disable();

	/*
	 * Hand the old Status word back in flags, then write back a copy
	 * with Status[4:0] cleared, exactly as in arch_local_irq_disable().
	 */
	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	reorder						\n"
	"	.set	noat						\n"
	"	mfc0	%[flags], $12					\n"
	"	ori	$1, %[flags], 0x1f				\n"
	"	xori	$1, 0x1f					\n"
	"	.set	noreorder					\n"
	"	mtc0	$1, $12						\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: [flags] "=r" (flags)
	: /* no inputs */
	: "memory");

	preempt_enable();

	return flags;
}
EXPORT_SYMBOL(arch_local_irq_save);
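
/*
 * For reference: the value returned above is the old CP0 Status word, and
 * only its IE bit (bit 0) matters to arch_local_irq_restore() below.  The
 * helper here is a minimal sketch of testing such a flags word, roughly
 * what arch_irqs_disabled_flags() in <asm/irqflags.h> does; it is shown
 * for illustration only and is not part of this file.
 */
static inline int __maybe_unused __example_irqs_were_disabled(unsigned long flags)
{
	return !(flags & 0x1);		/* IE clear => interrupts were off */
}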

notrace void arch_local_irq_restore(unsigned long flags)
{
	unsigned long __tmp1;

	preempt_disable();

	/*
	 * Keep only the IE bit of the caller's flags, clear Status[4:0] of
	 * the current Status, OR the two together and write the result back.
	 */
	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	noat						\n"
	"	mfc0	$1, $12						\n"
	"	andi	%[flags], 1					\n"
	"	ori	$1, 0x1f					\n"
	"	xori	$1, 0x1f					\n"
	"	or	%[flags], $1					\n"
	"	mtc0	%[flags], $12					\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: [flags] "=r" (__tmp1)
	: "0" (flags)
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_restore);
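
/*
 * Usage sketch, for illustration only: the functions above back the generic
 * local_irq_save()/local_irq_restore() helpers on CPUs without MIPSR2/R6.
 * A caller brackets a short critical section as in the made-up example
 * below; this function is not part of this file or of any kernel API.
 */
static void __maybe_unused __example_critical_section(int *counter)
{
	unsigned long flags;

	flags = arch_local_irq_save();	/* IRQs off; old IE state kept in flags */
	(*counter)++;			/* work that must not be interrupted */
	arch_local_irq_restore(flags);	/* put the IE bit back as it was */
}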

#endif /* !CONFIG_CPU_MIPSR2 && !CONFIG_CPU_MIPSR6 */
