diff options
author | Michael Sevakis <jethead71@rockbox.org> | 2009-02-11 23:56:00 +0000 |
---|---|---|
committer | Michael Sevakis <jethead71@rockbox.org> | 2009-02-11 23:56:00 +0000 |
commit | 63e709c7c83a3c0a462275d6b7c053804127e295 (patch) | |
tree | ba6b42e381886d8803b607e34567e14ef77436fe /firmware/target/arm/mmu-arm.c | |
parent | 00a9685985881866dd08d9dc38ef58e93a27917f (diff) | |
download | rockbox-63e709c7c83a3c0a462275d6b7c053804127e295.tar.gz rockbox-63e709c7c83a3c0a462275d6b7c053804127e295.zip |
Refine the routines in mmu-arm.c and move them to mmu-arm.S since the code is now 100% assembly.
git-svn-id: svn://svn.rockbox.org/rockbox/trunk@19980 a1c6a512-1295-4272-9138-f99709370657
Diffstat (limited to 'firmware/target/arm/mmu-arm.c')
-rw-r--r-- | firmware/target/arm/mmu-arm.c | 322 |
1 files changed, 0 insertions, 322 deletions
diff --git a/firmware/target/arm/mmu-arm.c b/firmware/target/arm/mmu-arm.c deleted file mode 100644 index fae7fd0b8f..0000000000 --- a/firmware/target/arm/mmu-arm.c +++ /dev/null | |||
@@ -1,322 +0,0 @@ | |||
1 | /*************************************************************************** | ||
2 | * __________ __ ___. | ||
3 | * Open \______ \ ____ ____ | | _\_ |__ _______ ___ | ||
4 | * Source | _// _ \_/ ___\| |/ /| __ \ / _ \ \/ / | ||
5 | * Jukebox | | ( <_> ) \___| < | \_\ ( <_> > < < | ||
6 | * Firmware |____|_ /\____/ \___ >__|_ \|___ /\____/__/\_ \ | ||
7 | * \/ \/ \/ \/ \/ | ||
8 | * $Id$ | ||
9 | * | ||
10 | * Copyright (C) 2006,2007 by Greg White | ||
11 | * | ||
12 | * This program is free software; you can redistribute it and/or | ||
13 | * modify it under the terms of the GNU General Public License | ||
14 | * as published by the Free Software Foundation; either version 2 | ||
15 | * of the License, or (at your option) any later version. | ||
16 | * | ||
17 | * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY | ||
18 | * KIND, either express or implied. | ||
19 | * | ||
20 | ****************************************************************************/ | ||
21 | #include "cpu.h" | ||
22 | #include "mmu-arm.h" | ||
23 | #include "panic.h" | ||
24 | |||
/* Program the MMU translation table base register and open all memory
 * domains to manager access (no permission checks at the domain level).
 * Naked function: no compiler prologue/epilogue; returns via "bx lr". */
void __attribute__((naked)) ttb_init(void) {
    asm volatile
    (
        "mcr p15, 0, %[ttbB], c2, c0, 0 \n" /* Set the TTB base address */
        "mcr p15, 0, %[ffff], c3, c0, 0 \n" /* Set all domains to manager status */
        "bx lr \n"
        :
        : [ttbB] "r" (TTB_BASE),
          [ffff] "r" (0xFFFFFFFF)
    );
}
36 | |||
/* Write 'mb' consecutive 1MB section descriptors into the translation
 * table, mapping virtual address 'va' onto physical address 'pa' with
 * the cache/buffer 'flags' (descriptor bits 3:2).
 *
 * NOTE(review): this naked function interleaves C statements with
 * separate asm blocks and assumes the AAPCS argument registers
 * (pa=r0, va=r1, mb=r2, flags=r3) still hold their values when each
 * asm block executes.  The compiler guarantees none of that between
 * asm statements -- fragile; this is why the routine was rewritten in
 * pure assembly (mmu-arm.S). */
void __attribute__((naked)) map_section(unsigned int pa, unsigned int va, int mb, int flags) {
    asm volatile
    (
        /* pa &= (-1 << 20);   // align to 1MB */
        "mov r0, r0, lsr #20 \n"
        "mov r0, r0, lsl #20 \n"

        /* pa |= (flags | 0x412);
         * bit breakdown:
         *  10:     superuser - r/w, user - no access
         *  4:      should be "1"
         *  3,2:    Cache flags (flags (r3))
         *  1:      Section signature
         */

        "orr r0, r0, r3 \n"
        "orr r0, r0, #0x410 \n"
        "orr r0, r0, #0x2 \n"
        :
        :
    );

    /* force in r3 -- reuses the register 'flags' arrived in; flags is
     * already folded into pa (r0) above, so the clobber is deliberate */
    register unsigned long *ttb_base asm ("r3") = TTB_BASE;

    asm volatile
    (
        /* unsigned int* ttbPtr = TTB_BASE + (va >> 20);
         * sections are 1MB size
         */

        "mov r1, r1, lsr #20 \n"
        "add r1, %[ttbB], r1, lsl #0x2 \n"

        /* Add MB to pa, flags are already present in pa, but addition
         * should not effect them
         *
         * #define MB (1 << 20)
         * for( ; mb>0; mb--, pa += MB)
         * {
         *     *(ttbPtr++) = pa;
         * }
         * #undef MB
         */

        "cmp r2, #0 \n"
        "bxle lr \n"            /* nothing to map: return immediately */
        "mov r3, #0x0 \n"       /* loop counter; clobbers ttb_base, already copied into r1 */
        "loop: \n"              /* NOTE(review): global asm label -- clashes if emitted twice */
        "str r0, [r1], #4 \n"
        "add r0, r0, #0x100000 \n"
        "add r3, r3, #0x1 \n"
        "cmp r2, r3 \n"
        "bne loop \n"
        "bx lr \n"
        :
        : [ttbB] "r" (ttb_base) /* This /HAS/ to be in r3 */
    );
    /* silence unused-parameter warnings; the arguments are consumed in asm */
    (void) pa;
    (void) va;
    (void) mb;
    (void) flags;
}
99 | |||
/* Enable the MMU, data cache and instruction cache: invalidate the TLB
 * and both caches, then set the M, C and I bits in the CP15 control
 * register.  Naked function: returns via the asm's "bx lr". */
void __attribute__((naked)) enable_mmu(void) {
    asm volatile(
        "mov r0, #0 \n"
        "mcr p15, 0, r0, c8, c7, 0 \n" /* invalidate TLB */
        "mcr p15, 0, r0, c7, c7,0 \n"  /* invalidate both icache and dcache */
        "mrc p15, 0, r0, c1, c0, 0 \n" /* read control register */
        "orr r0, r0, #1 \n"            /* M bit: enable MMU */
        "orr r0, r0, #1<<2 \n"         /* C bit: enable dcache */
        "orr r0, r0, #1<<12 \n"        /* I bit: enable icache */
        "mcr p15, 0, r0, c1, c0, 0 \n" /* write control register back */
        "nop \n"                       /* let the pipeline settle after enabling translation */
        "nop \n"
        "nop \n"
        "nop \n"
        "bx lr \n"
        :
        :
        : "r0"
    );
}
120 | |||
121 | #if CONFIG_CPU == IMX31L | ||
/* IMX31L (ARM1136): clean and invalidate the dcache over
 * [base, base+size) with the block-transfer MCRR form, then issue a
 * data synchronization barrier.  Naked: base=r0, size=r1 per AAPCS. */
void __attribute__((naked)) invalidate_dcache_range(const void *base, unsigned int size)
{
    asm volatile(
        "add r1, r1, r0 \n"             /* r1 = end address */
        "mov r2, #0 \n"
        "mcrr p15, 0, r1, r0, c14 \n"   /* Clean and invalidate dcache range */
        "mcr p15, 0, r2, c7, c10, 4 \n" /* Data synchronization barrier */
        "bx lr \n"
    );
    (void)base; (void)size;
}
133 | #else | ||
/* Invalidate DCache for this range */
/* Will do write back (clean + invalidate, so no dirty data is lost) */
void invalidate_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (((int) base) & ~31);  /* Align start to cache line*/
    unsigned int end = ((addr+size) & ~31)+64; /* Align end to cache line, pad */
    /* NOTE(review): 'end' is derived from the rounded-down 'addr' and
     * padded by two lines, so up to 64 bytes past the caller's range may
     * also be cleaned+invalidated -- harmless but worth confirming.
     * Also, the asm modifies %0 even though it is declared as an
     * input-only constraint; strictly it should be "+r".  The loop is
     * unrolled 8x using conditional execution; 'inv_start' is a global
     * asm label and must not be emitted twice. */
    asm volatile(
        "inv_start: \n"
        "mcr p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "add %0, %0, #32 \n"
        "cmp %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
        "addne %0, %0, #32 \n"
        "cmpne %0, %1 \n"
        "bne inv_start \n"
        "mov %0, #0\n"
        "mcr p15,0,%0,c7,c10,4\n" /* Drain write buffer */
        : : "r" (addr), "r" (end)
    );
}
171 | #endif | ||
172 | |||
173 | |||
174 | #if CONFIG_CPU == IMX31L | ||
/* IMX31L (ARM1136): clean (write back, keep valid) the dcache over
 * [base, base+size) with the block-transfer MCRR form, then issue a
 * data synchronization barrier.  Naked: base=r0, size=r1 per AAPCS. */
void __attribute__((naked)) clean_dcache_range(const void *base, unsigned int size)
{
    asm volatile(
        "add r1, r1, r0 \n"             /* r1 = end address */
        "mov r2, #0 \n"
        "mcrr p15, 0, r1, r0, c12 \n"   /* Clean dcache range */
        "mcr p15, 0, r2, c7, c10, 4 \n" /* Data synchronization barrier */
        "bx lr \n"
    );
    (void)base; (void)size;
}
186 | #else | ||
/* clean DCache for this range */
/* forces DCache writeback for the specified range; lines stay valid */
void clean_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (int) base;
    unsigned int end = addr+size+32; /* pad by one line; start is aligned by the bic below */
    /* NOTE(review): the asm rewrites %0 (addr) although it is declared
     * as an input-only operand; strictly it should be "+r".  The loop
     * is unrolled 8x using "lo" conditional execution; 'clean_start' is
     * a global asm label and must not be emitted twice. */
    asm volatile(
        "bic %0, %0, #31 \n"            /* align start to a cache line */
        "clean_start: \n"
        "mcr p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "add %0, %0, #32 \n"
        "cmp %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
        "addlo %0, %0, #32 \n"
        "cmplo %0, %1 \n"
        "blo clean_start \n"
        "mov %0, #0\n"
        "mcr p15,0,%0,c7,c10,4 \n" /* Drain write buffer */
        : : "r" (addr), "r" (end));
}
224 | #endif | ||
225 | |||
226 | #if CONFIG_CPU == IMX31L | ||
/* IMX31L (ARM1136): invalidate (discard, no write back) the dcache over
 * [base, base+size) with the block-transfer MCRR form.
 * NOTE(review): unlike the clean/clean+invalidate siblings there is no
 * data synchronization barrier here -- confirm that is intentional.
 * Naked: base=r0, size=r1 per AAPCS. */
void __attribute__((naked)) dump_dcache_range(const void *base, unsigned int size)
{
    asm volatile(
        "add r1, r1, r0 \n"           /* r1 = end address */
        "mcrr p15, 0, r1, r0, c6 \n"  /* invalidate dcache range, no write back */
        "bx lr \n"
    );
    (void)base; (void)size;
}
236 | #else | ||
/* Dump DCache for this range */
/* Will *NOT* do write back except for partial lines at either boundary:
 * interior lines are discarded outright. */
void dump_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (int) base;
    unsigned int end = addr+size;
    /* NOTE(review): the first "add" below runs unconditionally, so for a
     * range smaller than one cache line addr can step past end before
     * the equality-tested loop starts, and "bne dump_start" would then
     * spin through the whole address space.  Callers must pass ranges of
     * at least one line.  %0/%1 are also modified despite being declared
     * input-only constraints ("+r" would be correct). */
    asm volatile(
        "tst %0, #31 \n"                /* Check to see if low five bits are set */
        "bic %0, %0, #31 \n"            /* Clear them */
        "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line, if those bits were set */
        "add %0, %0, #32 \n"            /* Move to the next cache line */
        "tst %1, #31 \n"                /* Check last line for bits set */
        "bic %1, %1, #31 \n"            /* Clear those bits */
        "mcrne p15, 0, %1, c7, c14, 1 \n" /* Clean and invalidate this line, if not cache aligned */
        "dump_start: \n"                /* NOTE(review): global asm label -- must be unique */
        "mcr p15, 0, %0, c7, c6, 1 \n"  /* Invalidate this line */
        "add %0, %0, #32 \n"            /* Next cache line */
        "cmp %0, %1 \n"
        "bne dump_start \n"
        "dump_end: \n"
        "mcr p15,0,%0,c7,c10,4 \n"      /* Drain write buffer */
        : : "r" (addr), "r" (end));
}
259 | #endif | ||
260 | |||
261 | #if CONFIG_CPU == IMX31L | ||
/* IMX31L (ARM1136): write back the whole data cache, then issue a data
 * synchronization barrier.  Naked: returns via the asm's "bx lr". */
void __attribute__((naked)) clean_dcache(void)
{
    asm volatile (
        /* Clean entire data cache */
        "mov r0, #0 \n"
        "mcr p15, 0, r0, c7, c10, 0 \n"
        /* Data synchronization barrier */
        "mcr p15, 0, r0, c7, c10, 4 \n"
        "bx lr \n"
    );
}
273 | #else | ||
274 | /* Cleans entire DCache */ | ||
275 | void clean_dcache(void) | ||
276 | { | ||
277 | unsigned int index, addr, low; | ||
278 | |||
279 | for(index = 0; index <= 63; index++) | ||
280 | { | ||
281 | for(low = 0;low <= 7; low++) | ||
282 | { | ||
283 | addr = (index << 26) | (low << 5); | ||
284 | asm volatile | ||
285 | ( | ||
286 | "mcr p15, 0, %[addr], c7, c10, 2 \n" /* Clean this entry by index */ | ||
287 | : | ||
288 | : [addr] "r" (addr) | ||
289 | ); | ||
290 | } | ||
291 | } | ||
292 | } | ||
293 | #endif | ||
294 | |||
295 | #if CONFIG_CPU == IMX31L | ||
/* IMX31L (ARM1136): flush the whole cache hierarchy so code just written
 * by the CPU becomes visible to instruction fetch -- clean+invalidate
 * dcache, invalidate icache/branch targets, then synchronize. */
void invalidate_idcache(void)
{
    asm volatile(
        /* Clean and invalidate entire data cache */
        "mcr p15, 0, %0, c7, c14, 0 \n"
        /* Invalidate entire instruction cache
         * Also flushes the branch target cache */
        "mcr p15, 0, %0, c7, c5, 0 \n"
        /* Data synchronization barrier */
        "mcr p15, 0, %0, c7, c10, 4 \n"
        /* Flush prefetch buffer */
        "mcr p15, 0, %0, c7, c5, 4 \n"
        : : "r"(0)
    );
}
311 | #else | ||
/* Generic ARM: write back the dcache, then invalidate the entire
 * icache so freshly written code is fetched from memory. */
void invalidate_idcache(void)
{
    clean_dcache();
    asm volatile(
        "mov r0, #0 \n"
        "mcr p15, 0, r0, c7, c5, 0 \n" /* invalidate entire icache */
        : : : "r0"
    );
}
321 | #endif | ||
322 | |||