/*
 *  linux/arch/arm/mm/proc-arm946.S: utility functions for ARM946E-S
 *
 *  Copyright (C) 2004-2006 Hyok S. Choi (hyok.choi@samsung.com)
 *
 *  (Much of the cache code is taken from proc-arm926.S)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/ptrace.h>
#include "proc-macros.S"

/*
 * The ARM946E-S can be synthesized with a 0KB to 1MB D-cache; the
 * typical 8KB configuration comprises 256 lines of 32 bytes (8 words),
 * split across 4 segments.
 */
#define CACHE_DSIZE	(CONFIG_CPU_DCACHE_SIZE) /* typically 8KB. */
#define CACHE_DLINESIZE	32			/* fixed */
#define CACHE_DSEGMENTS	4			/* fixed */
#define CACHE_DENTRIES	(CACHE_DSIZE / CACHE_DSEGMENTS / CACHE_DLINESIZE)
#define CACHE_DLIMIT	(CACHE_DSIZE * 4)	/* benchmark needed */
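/*
 * Worked example, assuming the typical 8KB configuration noted above:
 *	CACHE_DENTRIES = 8192 / 4 / 32 = 64 lines per segment
 *	CACHE_DLIMIT   = 8192 * 4      = 32KB
 */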

	.text
/*
 * cpu_arm946_proc_init()
 * cpu_arm946_switch_mm()
 *
 * These are not required.
 */
ENTRY(cpu_arm946_proc_init)
ENTRY(cpu_arm946_switch_mm)
	mov	pc, lr

/*
 * cpu_arm946_proc_fin()
 */
ENTRY(cpu_arm946_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x00001000		@ i-cache
	bic	r0, r0, #0x00000004		@ d-cache
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	mov	pc, lr

/*
 * cpu_arm946_reset(loc)
 * Params  : r0 = address to jump to
 * Notes   : This sets up everything for a reset
 */
ENTRY(cpu_arm946_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x00000005		@ .............c.p
	bic	ip, ip, #0x00001000		@ i-cache
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
	mov	pc, r0

/*
 * cpu_arm946_do_idle()
 */
	.align	5
ENTRY(cpu_arm946_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	mov	pc, lr

/*
 *	flush_icache_all()
 *
 *	Unconditionally clean and invalidate the entire icache.
 */
ENTRY(arm946_flush_icache_all)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mov	pc, lr
ENDPROC(arm946_flush_icache_all)

/*
 *	flush_user_cache_all()
 */
ENTRY(arm946_flush_user_cache_all)
	/* FALLTHROUGH */

/*
 *	flush_kern_cache_all()
 *
 *	Clean and invalidate the entire cache.
 */
ENTRY(arm946_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
#else
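	/*
	 * Clean+invalidate the whole D-cache by index: as encoded below,
	 * r3 carries the segment in bits [30:29] and the line index from
	 * bit 4 upwards for the c7, c14, 2 operation.  The outer loop
	 * walks segments 3..0, the inner loop walks entries n..0.
	 */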
	mov	r1, #(CACHE_DSEGMENTS - 1) << 29 @ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 4 @ n entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean/flush D index
	subs	r3, r3, #1 << 4
	bcs	2b				@ entries n to 0
	subs	r1, r1, #1 << 29
	bcs	1b				@ segments 3 to 0
#endif
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	flush_user_cache_range(start, end, flags)
 *
 *	Clean and invalidate a range of cache entries in the
 *	specified address range.
 *
 *	- start	- start address (inclusive)
 *	- end	- end address (exclusive)
 *	- flags	- vm_flags describing address space
 * (same as arm926)
 */
ENTRY(arm946_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ calculate total size
	cmp	r3, #CACHE_DLIMIT
	bhs	__flush_whole_cache
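	@ large ranges are assumed cheaper to handle as a whole-cache
	@ clean (cf. the "benchmark needed" note on CACHE_DLIMIT above)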

1:	tst	r2, #VM_EXEC
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#else
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#endif
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	coherent_kern_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start, end.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 */
ENTRY(arm946_coherent_kern_range)
	/* FALLTHROUGH */

/*
 *	coherent_user_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start, end.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 * (same as arm926)
 */
ENTRY(arm946_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure no D cache aliasing occurs, either with itself or
 *	the I cache
 *
 *	- addr	- kernel address
 *	- size	- region size
 * (same as arm926)
 */
ENTRY(arm946_flush_kern_dcache_area)
	add	r1, r0, r1
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 * (same as arm926)
 */
arm946_dma_inv_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	tst	r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
#endif
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	dma_clean_range(start, end)
 *
 *	Clean the specified virtual address range.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 *
 * (same as arm926)
 */
arm946_dma_clean_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	dma_flush_range(start, end)
 *
 *	Clean and invalidate the specified virtual address range.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 *
 * (same as arm926)
 */
ENTRY(arm946_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
#else
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
#endif
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 *	dma_map_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
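/*
 * Dispatch on the Linux DMA direction values (DMA_BIDIRECTIONAL = 0,
 * DMA_TO_DEVICE = 1, DMA_FROM_DEVICE = 2):
 *	DMA_TO_DEVICE	- clean only
 *	DMA_FROM_DEVICE	- invalidate only
 *	otherwise	- clean and invalidate
 */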
ENTRY(arm946_dma_map_area)
	add	r1, r1, r0
	cmp	r2, #DMA_TO_DEVICE
	beq	arm946_dma_clean_range
	bcs	arm946_dma_inv_range
	b	arm946_dma_flush_range
ENDPROC(arm946_dma_map_area)

/*
 *	dma_unmap_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
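/*
 * Nothing to do on unmap for this CPU: all required maintenance is
 * performed in arm946_dma_map_area above.
 */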
ENTRY(arm946_dma_unmap_area)
	mov	pc, lr
ENDPROC(arm946_dma_unmap_area)

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions arm946

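/*
 *	dcache_clean_area(addr, size)
 *
 *	Clean (write back) the D cache over the given kernel address
 *	range, then drain the write buffer.  With a write-through
 *	D cache there is nothing to clean, so only the drain remains.
 *
 *	- addr	- kernel address
 *	- size	- region size
 */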
ENTRY(cpu_arm946_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

	__CPUINIT

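/*
 * __arm946_setup
 *
 * Invalidate the caches and drain the write buffer, then program the
 * protection unit: regions 3~7 disabled, region 0 as a 4GB background,
 * region 1 covering RAM and region 2 covering flash, with matching
 * cacheability, write-buffer and access-permission settings.  Returns
 * the desired control register value in r0 for the caller to write.
 */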
	.type	__arm946_setup, #function
__arm946_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c6, 0		@ invalidate D cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB

	mcr	p15, 0, r0, c6, c3, 0		@ disable memory regions 3~7
	mcr	p15, 0, r0, c6, c4, 0
	mcr	p15, 0, r0, c6, c5, 0
	mcr	p15, 0, r0, c6, c6, 0
	mcr	p15, 0, r0, c6, c7, 0

	mov	r0, #0x0000003F			@ base = 0, size = 4GB
	mcr	p15, 0, r0, c6, c0, 0		@ set region 0, default

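/*
 * Each protection region register is assumed to be laid out as:
 *	bits [31:12]	region base address
 *	bits [5:1]	size index N, giving a 2^(N+1) byte region
 *			(11 is the 4KB minimum, as noted below)
 *	bit  0		region enable
 * The loops below convert a size in 4KB pages into that index.
 */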
	ldr	r0, =(CONFIG_DRAM_BASE & 0xFFFFF000) @ base[31:12] of RAM
	ldr	r1, =(CONFIG_DRAM_SIZE >> 12)	@ size of RAM (must be >= 4KB)
	mov	r2, #10				@ 11 is the minimum (4KB)
1:	add	r2, r2, #1			@ area size *= 2
	movs	r1, r1, lsr #1			@ must set flags for the bne
	bne	1b				@ loop while size bits remain
	orr	r0, r0, r2, lsl #1		@ the region register value
	orr	r0, r0, #1			@ set enable bit
	mcr	p15, 0, r0, c6, c1, 0		@ set region 1, RAM

	ldr	r0, =(CONFIG_FLASH_MEM_BASE & 0xFFFFF000) @ base[31:12] of FLASH
	ldr	r1, =(CONFIG_FLASH_SIZE >> 12)	@ size of FLASH (must be >= 4KB)
	mov	r2, #10				@ 11 is the minimum (4KB)
1:	add	r2, r2, #1			@ area size *= 2
	movs	r1, r1, lsr #1			@ must set flags for the bne
	bne	1b				@ loop while size bits remain
	orr	r0, r0, r2, lsl #1		@ the region register value
	orr	r0, r0, #1			@ set enable bit
	mcr	p15, 0, r0, c6, c2, 0		@ set region 2, ROM/FLASH

	mov	r0, #0x06
	mcr	p15, 0, r0, c2, c0, 0		@ region 1,2 d-cacheable
	mcr	p15, 0, r0, c2, c0, 1		@ region 1,2 i-cacheable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mov	r0, #0x00			@ disable whole write buffer
#else
	mov	r0, #0x02			@ region 1 write buffered
#endif
	mcr	p15, 0, r0, c3, c0, 0

/*
 *  Access Permission Settings for future permission control by PU.
 *
 *				priv.	user
 *	region 0 (whole)	rw	--	: b0001
 *	region 1 (RAM)		rw	rw	: b0011
 *	region 2 (FLASH)	rw	r-	: b0010
 *	region 3~7 (none)	--	--	: b0000
 */
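/*
 * One 4-bit field per region, region 0 in bits [3:0]:
 * 0x031 covers regions 0 and 1, 0x200 covers region 2, giving 0x231.
 */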
	mov	r0, #0x00000031
	orr	r0, r0, #0x00000200
	mcr	p15, 0, r0, c5, c0, 2		@ set data access permission
	mcr	p15, 0, r0, c5, c0, 3		@ set inst. access permission

	mrc	p15, 0, r0, c1, c0		@ get control register
	orr	r0, r0, #0x00001000		@ I-cache
	orr	r0, r0, #0x00000005		@ MPU/D-cache
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
	orr	r0, r0, #0x00004000		@ .1.. .... .... ....
#endif
	mov	pc, lr

	.size	__arm946_setup, . - __arm946_setup

	__INITDATA

	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions arm946, dabort=nommu_early_abort, pabort=legacy_pabort, nommu=1

	.section ".rodata"

	string	cpu_arch_name, "armv5te"
	string	cpu_elf_name, "v5t"
	string	cpu_arm946_name, "ARM946E-S"

	.align

	.section ".proc.info.init", #alloc, #execinstr
	.type	__arm946_proc_info,#object
__arm946_proc_info:
	.long	0x41009460
	.long	0xff00fff0
	.long	0
	b	__arm946_setup
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm946_name
	.long	arm946_processor_functions
	.long	0
	.long	0
	.long	arm946_cache_fns
	.size	__arm946_proc_info, . - __arm946_proc_info