summaryrefslogtreecommitdiff
path: root/firmware/target/arm/mmu-arm.c
diff options
context:
space:
mode:
Diffstat (limited to 'firmware/target/arm/mmu-arm.c')
-rw-r--r--firmware/target/arm/mmu-arm.c226
1 files changed, 226 insertions, 0 deletions
diff --git a/firmware/target/arm/mmu-arm.c b/firmware/target/arm/mmu-arm.c
new file mode 100644
index 0000000000..db7f5e59cd
--- /dev/null
+++ b/firmware/target/arm/mmu-arm.c
@@ -0,0 +1,226 @@
1/***************************************************************************
2 * __________ __ ___.
3 * Open \______ \ ____ ____ | | _\_ |__ _______ ___
4 * Source | _// _ \_/ ___\| |/ /| __ \ / _ \ \/ /
5 * Jukebox | | ( <_> ) \___| < | \_\ ( <_> > < <
6 * Firmware |____|_ /\____/ \___ >__|_ \|___ /\____/__/\_ \
7 * \/ \/ \/ \/ \/
8 * $Id$
9 *
10 * Copyright (C) 2006,2007 by Greg White
11 *
12 * All files in this archive are subject to the GNU General Public License.
13 * See the file COPYING in the source tree root for full license agreement.
14 *
15 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
16 * KIND, either express or implied.
17 *
18 ****************************************************************************/
19#include "cpu.h"
20#include "mmu-arm.h"
21#include "panic.h"
22
/* Mask that aligns an address down to a 1 MB section boundary.
 * NOTE: the original used (-1 << 20); left-shifting a negative value is
 * undefined behavior in C.  (~0U << 20) produces the same 0xFFF00000 bit
 * pattern on 32-bit targets without invoking UB. */
#define SECTION_ADDRESS_MASK (~0U << 20)
/* One megabyte — the size of an ARM first-level section mapping. */
#define MB (1 << 20)
25
26void ttb_init(void) {
27 unsigned int* ttbPtr;
28
29 /* must be 16Kb (0x4000) aligned - clear out the TTB */
30 for (ttbPtr=TTB_BASE; ttbPtr<(TTB_SIZE+TTB_BASE); ttbPtr++)
31 {
32 *ttbPtr = 0;
33 }
34
35 /* Set the TTB base address */
36 asm volatile("mcr p15, 0, %0, c2, c0, 0" : : "r" (TTB_BASE));
37
38 /* Set all domains to manager status */
39 asm volatile("mcr p15, 0, %0, c3, c0, 0" : : "r" (0xFFFFFFFF));
40}
41
42void map_section(unsigned int pa, unsigned int va, int mb, int cache_flags) {
43 unsigned int* ttbPtr;
44 int i;
45 int section_no;
46
47 section_no = va >> 20; /* sections are 1Mb size */
48 ttbPtr = TTB_BASE + section_no;
49 pa &= SECTION_ADDRESS_MASK; /* align to 1Mb */
50 for(i=0; i<mb; i++, pa += MB) {
51 *(ttbPtr + i) =
52 pa |
53 1 << 10 | /* superuser - r/w, user - no access */
54 0 << 5 | /* domain 0th */
55 1 << 4 | /* should be "1" */
56 cache_flags |
57 1 << 1; /* Section signature */
58 }
59}
60
61void enable_mmu(void) {
62 int regread;
63
64 asm volatile(
65 "MRC p15, 0, %r0, c1, c0, 0\n" /* Read reg1, control register */
66 : /* outputs */
67 "=r"(regread)
68 : /* inputs */
69 : /* clobbers */
70 "r0"
71 );
72
73 if ( !(regread & 0x04) || !(regread & 0x00001000) ) /* Was the ICache or DCache Enabled? */
74 clean_dcache(); /* If so we need to clean the DCache before invalidating below */
75
76 asm volatile("mov r0, #0\n"
77 "mcr p15, 0, r0, c8, c7, 0\n" /* invalidate TLB */
78
79 "mcr p15, 0, r0, c7, c7,0\n" /* invalidate both icache and dcache */
80
81 "mrc p15, 0, r0, c1, c0, 0\n"
82 "orr r0, r0, #1<<0\n" /* enable mmu bit, icache and dcache */
83 "orr r0, r0, #1<<2\n" /* enable dcache */
84 "orr r0, r0, #1<<12\n" /* enable icache */
85 "mcr p15, 0, r0, c1, c0, 0" : : : "r0");
86 asm volatile("nop \n nop \n nop \n nop");
87}
88
89/* Invalidate DCache for this range */
90/* Will do write back */
/* Invalidate DCache for this range */
/* Will do write back */
/* Each line is cleaned (written back) AND invalidated via CP15 c7,c14,1,
 * so no dirty data is lost.  The loop body is hand-unrolled 8x; the 'ne'
 * condition chains each unrolled step to the preceding cmp.
 * Termination relies on addr hitting 'end' exactly: both are 32-byte
 * aligned and end > addr by construction (the +64 pad), so the cmp/bne
 * must eventually match.
 * NOTE(review): the +64 pad means up to two cache lines past the
 * requested range are also cleaned+invalidated — harmless because of the
 * write-back, but worth confirming it is intentional. */
void invalidate_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (((int) base) & ~31); /* Align start to cache line*/
    unsigned int end = ((addr+size) & ~31)+64; /* Align end to cache line, pad */
    asm volatile(
"inv_start: \n"
    "mcr p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "add %0, %0, #32 \n"
    "cmp %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line */
    "addne %0, %0, #32 \n"
    "cmpne %0, %1 \n"
    "bne inv_start \n"
    "mov %0, #0\n"
    "mcr p15,0,%0,c7,c10,4\n" /* Drain write buffer */
    : : "r" (addr), "r" (end));
}
125
126/* clean DCache for this range */
127/* forces DCache writeback for the specified range */
/* clean DCache for this range */
/* forces DCache writeback for the specified range */
/* Lines are cleaned (written back, NOT invalidated) via CP15 c7,c10,1.
 * The start address is aligned down inside the asm ('bic'); the end is
 * padded by +32 so the final, possibly partial, line is covered.  The
 * loop body is hand-unrolled 8x and uses the unsigned 'lo' condition,
 * so it terminates even if addr steps past 'end' without hitting it
 * exactly. */
void clean_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (int) base;
    unsigned int end = addr+size+32;
    asm volatile(
    "bic %0, %0, #31 \n"
"clean_start: \n"
    "mcr p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "add %0, %0, #32 \n"
    "cmp %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "mcrlo p15, 0, %0, c7, c10, 1 \n" /* Clean this line */
    "addlo %0, %0, #32 \n"
    "cmplo %0, %1 \n"
    "blo clean_start \n"
    "mov %0, #0\n"
    "mcr p15,0,%0,c7,c10,4 \n" /* Drain write buffer */
    : : "r" (addr), "r" (end));
}
163
164/* Dump DCache for this range */
165/* Will *NOT* do write back */
/* Dump DCache for this range */
/* Will *NOT* do write back */
/* Full lines inside the range are discarded (invalidated without
 * writeback, CP15 c7,c6,1).  Partial lines at an unaligned start or end
 * are cleaned+invalidated instead (c7,c14,1) so data adjacent to the
 * range is not lost.
 * NOTE(review): when the start IS aligned the unconditional 'add' below
 * still advances past the first line without invalidating it — looks
 * like that 'add' should be conditional ('addne'); confirm intent.
 * NOTE(review): the loop exits on exact equality ('bne'); for a range
 * smaller than one cache line the advanced addr may step past 'end'
 * and never match — verify callers always pass line-sized ranges. */
void dump_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (int) base;
    unsigned int end = addr+size;
    asm volatile(
    "tst %0, #31 \n" /* Check to see if low five bits are set */
    "bic %0, %0, #31 \n" /* Clear them */
    "mcrne p15, 0, %0, c7, c14, 1 \n" /* Clean and invalidate this line, if those bits were set */
    "add %0, %0, #32 \n" /* Move to the next cache line */
    "tst %1, #31 \n" /* Check last line for bits set */
    "bic %1, %1, #31 \n" /* Clear those bits */
    "mcrne p15, 0, %1, c7, c14, 1 \n" /* Clean and invalidate this line, if not cache aligned */
"dump_start: \n"
    "mcr p15, 0, %0, c7, c6, 1 \n" /* Invalidate this line */
    "add %0, %0, #32 \n" /* Next cache line */
    "cmp %0, %1 \n"
    "bne dump_start \n"
"dump_end: \n"
    "mcr p15,0,%0,c7,c10,4 \n" /* Drain write buffer */
    : : "r" (addr), "r" (end));
}
186/* Cleans entire DCache */
187void clean_dcache(void)
188{
189 unsigned int index, addr;
190
191 for(index = 0; index <= 63; index++) {
192 addr = (0 << 5) | (index << 26);
193 asm volatile(
194 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
195 : : "r" (addr));
196 addr = (1 << 5) | (index << 26);
197 asm volatile(
198 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
199 : : "r" (addr));
200 addr = (2 << 5) | (index << 26);
201 asm volatile(
202 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
203 : : "r" (addr));
204 addr = (3 << 5) | (index << 26);
205 asm volatile(
206 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
207 : : "r" (addr));
208 addr = (4 << 5) | (index << 26);
209 asm volatile(
210 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
211 : : "r" (addr));
212 addr = (5 << 5) | (index << 26);
213 asm volatile(
214 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
215 : : "r" (addr));
216 addr = (6 << 5) | (index << 26);
217 asm volatile(
218 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
219 : : "r" (addr));
220 addr = (7 << 5) | (index << 26);
221 asm volatile(
222 "mcr p15, 0, %0, c7, c10, 2 \n" /* Clean this entry by index */
223 : : "r" (addr));
224 }
225}
226