xref: /linux/arch/arm64/kernel/compat_alignment.c (revision cbdb1f163af2bb90d01be1f0263df1d8d5c9d9d3)
// SPDX-License-Identifier: GPL-2.0-only
// based on arch/arm/mm/alignment.c

#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/perf_event.h>
#include <linux/uaccess.h>

#include <asm/exception.h>
#include <asm/ptrace.h>
#include <asm/traps.h>

/*
 * 32-bit misaligned trap handler (c) 1998 San Mehat (CCC) -July 1998
 *
 * Speed optimisations and better fault handling by Russell King.
 */
#define CODING_BITS(i)	(i & 0x0e000000)

#define LDST_P_BIT(i)	(i & (1 << 24))		/* Preindex		*/
#define LDST_U_BIT(i)	(i & (1 << 23))		/* Add offset		*/
#define LDST_W_BIT(i)	(i & (1 << 21))		/* Writeback		*/
#define LDST_L_BIT(i)	(i & (1 << 20))		/* Load			*/

#define LDST_P_EQ_U(i)	((((i) ^ ((i) >> 1)) & (1 << 23)) == 0)	/* P == U	*/

#define LDSTHD_I_BIT(i)	(i & (1 << 22))		/* double/half-word immed */

#define RN_BITS(i)	((i >> 16) & 15)	/* Rn			*/
#define RD_BITS(i)	((i >> 12) & 15)	/* Rd			*/
#define RM_BITS(i)	(i & 15)		/* Rm			*/

#define REGMASK_BITS(i)	(i & 0xffff)

#define BAD_INSTR	0xdeadc0de

/* Thumb-2 32 bit format per ARMv7 DDI0406A A6.3, either e800h, f000h or f800h */
#define IS_T32(hi16) \
	(((hi16) & 0xe000) == 0xe000 && ((hi16) & 0x1800))
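
/*
 * e.g. the leading halfword of a 32-bit T2 STMDB (0xe92d) passes both tests
 * above, while a 16-bit Thumb PUSH such as 0xb530 fails the first test and
 * is decoded as a single halfword.
 */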

union offset_union {
	unsigned long un;
	  signed long sn;
};

#define TYPE_ERROR	0
#define TYPE_FAULT	1
#define TYPE_LDST	2
#define TYPE_DONE	3

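/*
 * Apply the addressing-mode side effects of the emulated instruction once the
 * access itself has been carried out: negate the offset when the U bit is
 * clear, step the address for post-indexed forms (P clear), and write the
 * final address back to Rn for post-indexed or writeback (W set) forms.
 */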
static void
do_alignment_finish_ldst(unsigned long addr, u32 instr, struct pt_regs *regs,
			 union offset_union offset)
{
	if (!LDST_U_BIT(instr))
		offset.un = -offset.un;

	if (!LDST_P_BIT(instr))
		addr += offset.un;

	if (!LDST_P_BIT(instr) || LDST_W_BIT(instr))
		regs->regs[RN_BITS(instr)] = addr;
}

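/*
 * Emulate LDRD/STRD by splitting the 64-bit transfer into two 32-bit user
 * accesses.  The T32 encoding (top byte 0xe8/0xe9) carries the second
 * register in bits [11:8] and selects load via the L bit; the A32 encoding
 * implies rd2 = rd + 1, requires an even rd other than r14, and encodes the
 * load variant as bits [7:4] == 0b1101.
 */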
static int
do_alignment_ldrdstrd(unsigned long addr, u32 instr, struct pt_regs *regs)
{
	unsigned int rd = RD_BITS(instr);
	unsigned int rd2;
	int load;

	if ((instr & 0xfe000000) == 0xe8000000) {
		/* ARMv7 Thumb-2 32-bit LDRD/STRD */
		rd2 = (instr >> 8) & 0xf;
		load = !!(LDST_L_BIT(instr));
	} else if (((rd & 1) == 1) || (rd == 14)) {
		return TYPE_ERROR;
	} else {
		load = ((instr & 0xf0) == 0xd0);
		rd2 = rd + 1;
	}

	if (load) {
		unsigned int val, val2;

		if (get_user(val, (u32 __user *)addr) ||
		    get_user(val2, (u32 __user *)(addr + 4)))
			return TYPE_FAULT;
		regs->regs[rd] = val;
		regs->regs[rd2] = val2;
	} else {
		if (put_user(regs->regs[rd], (u32 __user *)addr) ||
		    put_user(regs->regs[rd2], (u32 __user *)(addr + 4)))
			return TYPE_FAULT;
	}
	return TYPE_LDST;
}

/*
 * LDM/STM alignment handler.
 *
 * There are 4 variants of this instruction:
 *
 * B = rn pointer before instruction, A = rn pointer after instruction
 *              ------ increasing address ----->
 *	        |    | r0 | r1 | ... | rx |    |
 * PU = 01             B                    A
 * PU = 11        B                    A
 * PU = 00        A                    B
 * PU = 10             A                    B
 */
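/*
 * Worked example: LDMDB r0!, {r1-r3} transfers 3 * 4 = 12 bytes with P = 1,
 * U = 0, W = 1, so below nr_regs becomes -12, newaddr = eaddr = r0 - 12 and,
 * since P != U, no +4 correction is applied.  The words at r0 - 12, r0 - 8
 * and r0 - 4 are loaded into r1..r3 and the writeback sets r0 = r0 - 12.
 */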
static int
do_alignment_ldmstm(unsigned long addr, u32 instr, struct pt_regs *regs)
{
	unsigned int rd, rn, nr_regs, regbits;
	unsigned long eaddr, newaddr;
	unsigned int val;

	/* number of bytes to be transferred (4 per register in the mask) */
	nr_regs = hweight16(REGMASK_BITS(instr)) * 4;

	rn = RN_BITS(instr);
	newaddr = eaddr = regs->regs[rn];

	if (!LDST_U_BIT(instr))
		nr_regs = -nr_regs;
	newaddr += nr_regs;
	if (!LDST_U_BIT(instr))
		eaddr = newaddr;

	if (LDST_P_EQ_U(instr))	/* U = P */
		eaddr += 4;

	for (regbits = REGMASK_BITS(instr), rd = 0; regbits;
	     regbits >>= 1, rd += 1)
		if (regbits & 1) {
			if (LDST_L_BIT(instr)) {
				if (get_user(val, (u32 __user *)eaddr))
					return TYPE_FAULT;
				if (rd < 15)
					regs->regs[rd] = val;
				else
					regs->pc = val;
			} else {
				/*
				 * The PC register has a bias of +8 in ARM mode
				 * and +4 in Thumb mode. This means that a read
				 * of the value of PC should account for this.
				 * Since Thumb does not permit STM instructions
				 * to refer to PC, just add 8 here.
				 */
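				/* e.g. an A32 STM executed at address A stores A + 8 as PC */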
				val = (rd < 15) ? regs->regs[rd] : regs->pc + 8;
				if (put_user(val, (u32 __user *)eaddr))
					return TYPE_FAULT;
			}
			eaddr += 4;
		}

	if (LDST_W_BIT(instr))
		regs->regs[rn] = newaddr;

	return TYPE_DONE;
}

/*
 * Convert Thumb multi-word load/store instruction forms to equivalent ARM
 * instructions so we can reuse ARM userland alignment fault fixups for Thumb.
 *
 * This implementation was initially based on the algorithm found in
 * gdb/sim/arm/thumbemu.c. It is essentially a reduced version of that code,
 * converting only the Thumb ld/st instruction forms to their equivalent ARM
 * forms.
 *
 * NOTES:
 * 1. Comments below refer to ARM ARM DDI0100E Thumb Instruction sections.
 * 2. If for some reason we're passed a non-ld/st Thumb instruction to
 *    decode, we return 0xdeadc0de. This should never happen under normal
 *    circumstances but if it does, we've got other problems to deal with
 *    elsewhere and we obviously can't fix those problems here.
 */

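/*
 * Worked example: the Thumb PUSH {r4, r5, lr} encoding 0xb530 selects
 * subset[1] below and converts to 0xe92d4030, i.e. the A32
 * STMDB sp!, {r4, r5, lr}.
 */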
static unsigned long thumb2arm(u16 tinstr)
{
	u32 L = (tinstr & (1<<11)) >> 11;

	switch ((tinstr & 0xf800) >> 11) {
	/* 6.6.1 Format 1: */
	case 0xc000 >> 11:				/* 7.1.51 STMIA */
	case 0xc800 >> 11:				/* 7.1.25 LDMIA */
		{
			u32 Rn = (tinstr & (7<<8)) >> 8;
			u32 W = ((L<<Rn) & (tinstr&255)) ? 0 : 1<<21;

			return 0xe8800000 | W | (L<<20) | (Rn<<16) |
				(tinstr&255);
		}

	/* 6.6.1 Format 2: */
	case 0xb000 >> 11:				/* 7.1.48 PUSH */
	case 0xb800 >> 11:				/* 7.1.47 POP */
		if ((tinstr & (3 << 9)) == 0x0400) {
			static const u32 subset[4] = {
				0xe92d0000,	/* STMDB sp!,{registers} */
				0xe92d4000,	/* STMDB sp!,{registers,lr} */
				0xe8bd0000,	/* LDMIA sp!,{registers} */
				0xe8bd8000	/* LDMIA sp!,{registers,pc} */
			};
			return subset[(L<<1) | ((tinstr & (1<<8)) >> 8)] |
			    (tinstr & 255);		/* register_list */
		}
		fallthrough;	/* for illegal instruction case */

	default:
		return BAD_INSTR;
	}
}

/*
 * Convert a Thumb-2 32-bit LDM, STM, LDRD or STRD instruction into an
 * equivalent form that the ARM alignment handler can process, and pick the
 * corresponding handler, so that we can reuse the ARM userland alignment
 * fault fixups for Thumb.
 *
 * @pinstr: original Thumb-2 instruction; updated in place with the converted
 *          instruction where a conversion is needed
 * @regs: register context.
 * @poffset: return offset from faulted addr for later writeback
 *
 * NOTES:
 * 1. Comments below refer to ARMv7 DDI0406A Thumb Instruction sections.
 * 2. Register name Rt from ARMv7 is the same as Rd from ARMv6 (Rd is Rt).
 */
static void *
do_alignment_t32_to_handler(u32 *pinstr, struct pt_regs *regs,
			    union offset_union *poffset)
{
	u32 instr = *pinstr;
	u16 tinst1 = (instr >> 16) & 0xffff;
	u16 tinst2 = instr & 0xffff;

	switch (tinst1 & 0xffe0) {
	/* A6.3.5 Load/Store multiple */
	case 0xe880:		/* STM/STMIA/STMEA,LDM/LDMIA, PUSH/POP T2 */
	case 0xe8a0:		/* ...above writeback version */
	case 0xe900:		/* STMDB/STMFD, LDMDB/LDMEA */
	case 0xe920:		/* ...above writeback version */
		/* no offset decision needed; the LDM/STM handler computes the addresses */
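		/*
		 * The T2 first halfword already carries P, U, W, L and Rn in
		 * the bit positions the A32 LDM/STM decoder expects (e.g.
		 * PUSH {r0-r3} = 0xe92d000f reads as STMDB sp! with register
		 * mask 0x000f), so the instruction is passed on unchanged.
		 */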
		return do_alignment_ldmstm;

	case 0xf840:		/* POP/PUSH T3 (single register) */
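		/*
		 * e.g. POP {r1} (0xf85d 1b04) is rewritten below as
		 * 0xe8bd0002, i.e. LDMIA sp!, {r1}.
		 */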
		if (RN_BITS(instr) == 13 && (tinst2 & 0x09ff) == 0x0904) {
			u32 L = !!(LDST_L_BIT(instr));
			const u32 subset[2] = {
				0xe92d0000,	/* STMDB sp!,{registers} */
				0xe8bd0000,	/* LDMIA sp!,{registers} */
			};
			*pinstr = subset[L] | (1<<RD_BITS(instr));
			return do_alignment_ldmstm;
		}
		/* Else break out for the illegal instruction case (returns NULL) */
		break;

	/* A6.3.6 Load/store double, STRD/LDRD(immed, lit, reg) */
	case 0xe860:
	case 0xe960:
	case 0xe8e0:
	case 0xe9e0:
		poffset->un = (tinst2 & 0xff) << 2;
		fallthrough;

	case 0xe940:
	case 0xe9c0:
		return do_alignment_ldrdstrd;

	/*
	 * No need to handle load/store instructions up to word size
	 * since ARMv6 and later CPUs can perform unaligned accesses.
	 */
	default:
		break;
	}
	return NULL;
}

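/*
 * Fetch the faulting instruction (a word here, a Thumb halfword below) from
 * user memory and convert it from little-endian memory order to CPU order;
 * A32/T32 instructions are stored little-endian regardless of the data
 * endianness selected by CPSR.E.
 */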
static int alignment_get_arm(struct pt_regs *regs, __le32 __user *ip, u32 *inst)
{
	__le32 instr = 0;
	int fault;

	fault = get_user(instr, ip);
	if (fault)
		return fault;

	*inst = __le32_to_cpu(instr);
	return 0;
}

static int alignment_get_thumb(struct pt_regs *regs, __le16 __user *ip, u16 *inst)
{
	__le16 instr = 0;
	int fault;

	fault = get_user(instr, ip);
	if (fault)
		return fault;

	*inst = __le16_to_cpu(instr);
	return 0;
}

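/*
 * Fix up an alignment fault taken by a 32-bit (compat) user task: read the
 * faulting instruction, convert 16-bit Thumb forms via thumb2arm() and pick a
 * handler for 32-bit Thumb forms via do_alignment_t32_to_handler(), emulate
 * the access with the handlers above, apply any base-register writeback and
 * skip the faulting instruction.  Returns 0 on success, 1 if the instruction
 * could not be read or is not one we emulate.
 */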
int do_compat_alignment_fixup(unsigned long addr, struct pt_regs *regs)
{
	union offset_union offset;
	unsigned long instrptr;
	int (*handler)(unsigned long addr, u32 instr, struct pt_regs *regs);
	unsigned int type;
	u32 instr = 0;
	u16 tinstr = 0;
	int isize = 4;
	int thumb2_32b = 0;
	int fault;

	instrptr = instruction_pointer(regs);

	if (compat_thumb_mode(regs)) {
		__le16 __user *ptr = (__le16 __user *)(instrptr & ~1);

		fault = alignment_get_thumb(regs, ptr, &tinstr);
		if (!fault) {
			if (IS_T32(tinstr)) {
				/* Thumb-2 32-bit */
				u16 tinst2;
				fault = alignment_get_thumb(regs, ptr + 1, &tinst2);
				instr = ((u32)tinstr << 16) | tinst2;
				thumb2_32b = 1;
			} else {
				isize = 2;
				instr = thumb2arm(tinstr);
			}
		}
	} else {
		fault = alignment_get_arm(regs, (__le32 __user *)instrptr, &instr);
	}

	if (fault)
		return 1;

	switch (CODING_BITS(instr)) {
	case 0x00000000:	/* 3.13.4 load/store instruction extensions */
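		/*
		 * e.g. LDRD r2, r3, [r1, #8] is 0xe1c120d8: the I bit is set,
		 * so offset.un = (0x0 << 4) | 0x8 = 8, and bits [7:4] = 0b1101
		 * with L clear selects do_alignment_ldrdstrd.
		 */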
		if (LDSTHD_I_BIT(instr))
			offset.un = (instr & 0xf00) >> 4 | (instr & 15);
		else
			offset.un = regs->regs[RM_BITS(instr)];

		if ((instr & 0x001000f0) == 0x000000d0 || /* LDRD */
		    (instr & 0x001000f0) == 0x000000f0)   /* STRD */
			handler = do_alignment_ldrdstrd;
		else
			return 1;
		break;

	case 0x08000000:	/* ldm or stm, or thumb-2 32bit instruction */
		if (thumb2_32b) {
			offset.un = 0;
			handler = do_alignment_t32_to_handler(&instr, regs, &offset);
		} else {
			offset.un = 0;
			handler = do_alignment_ldmstm;
		}
		break;

	default:
		return 1;
	}

	if (!handler)
		return 1;

	type = handler(addr, instr, regs);

	if (type == TYPE_ERROR || type == TYPE_FAULT)
		return 1;

	if (type == TYPE_LDST)
		do_alignment_finish_ldst(addr, instr, regs, offset);

	perf_sw_event(PERF_COUNT_SW_ALIGNMENT_FAULTS, 1, regs, regs->pc);
	arm64_skip_faulting_instruction(regs, isize);

	return 0;
}