Rev 1104 | Rev 1221 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1104 | Rev 1210 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright (C) 2006 Jakub Jermar |
2 | * Copyright (C) 2006 Jakub Jermar |
3 | * All rights reserved. |
3 | * All rights reserved. |
4 | * |
4 | * |
5 | * Redistribution and use in source and binary forms, with or without |
5 | * Redistribution and use in source and binary forms, with or without |
6 | * modification, are permitted provided that the following conditions |
6 | * modification, are permitted provided that the following conditions |
7 | * are met: |
7 | * are met: |
8 | * |
8 | * |
9 | * - Redistributions of source code must retain the above copyright |
9 | * - Redistributions of source code must retain the above copyright |
10 | * notice, this list of conditions and the following disclaimer. |
10 | * notice, this list of conditions and the following disclaimer. |
11 | * - Redistributions in binary form must reproduce the above copyright |
11 | * - Redistributions in binary form must reproduce the above copyright |
12 | * notice, this list of conditions and the following disclaimer in the |
12 | * notice, this list of conditions and the following disclaimer in the |
13 | * documentation and/or other materials provided with the distribution. |
13 | * documentation and/or other materials provided with the distribution. |
14 | * - The name of the author may not be used to endorse or promote products |
14 | * - The name of the author may not be used to endorse or promote products |
15 | * derived from this software without specific prior written permission. |
15 | * derived from this software without specific prior written permission. |
16 | * |
16 | * |
17 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
17 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
18 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
18 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
19 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
19 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
20 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
20 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
21 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
21 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
22 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
22 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
23 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
24 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | */ |
27 | */ |
28 | 28 | ||
29 | /* |
29 | /* |
30 | * TLB management. |
30 | * TLB management. |
31 | */ |
31 | */ |
32 | 32 | ||
33 | #include <mm/tlb.h> |
33 | #include <mm/tlb.h> |
34 | #include <mm/asid.h> |
34 | #include <mm/asid.h> |
35 | #include <mm/page.h> |
35 | #include <mm/page.h> |
36 | #include <mm/as.h> |
36 | #include <mm/as.h> |
37 | #include <arch/mm/tlb.h> |
37 | #include <arch/mm/tlb.h> |
38 | #include <arch/mm/page.h> |
38 | #include <arch/mm/page.h> |
- | 39 | #include <arch/mm/vhpt.h> |
|
39 | #include <arch/barrier.h> |
40 | #include <arch/barrier.h> |
40 | #include <arch/interrupt.h> |
41 | #include <arch/interrupt.h> |
41 | #include <arch/pal/pal.h> |
42 | #include <arch/pal/pal.h> |
42 | #include <arch/asm.h> |
43 | #include <arch/asm.h> |
43 | #include <typedefs.h> |
44 | #include <typedefs.h> |
44 | #include <panic.h> |
45 | #include <panic.h> |
45 | #include <print.h> |
46 | #include <print.h> |
46 | #include <arch.h> |
47 | #include <arch.h> |
47 | 48 | ||
/** Invalidate all TLB entries.
 *
 * Issues a loop of ptc.e (purge translation cache entry) operations whose
 * base address, iteration counts and strides are obtained from PAL, thereby
 * purging the entire translation cache of the local processor.
 */
void tlb_invalidate_all(void)
{
	ipl_t ipl;
	__address adr;
	__u32 count1, count2, stride1, stride2;
	
	int i,j;
	
	/*
	 * PAL PTCE info describes the two-level loop of ptc.e operations
	 * required to flush the whole translation cache: a base address,
	 * outer/inner iteration counts and the stride to add per iteration.
	 */
	adr = PAL_PTCE_INFO_BASE();
	count1 = PAL_PTCE_INFO_COUNT1();
	count2 = PAL_PTCE_INFO_COUNT2();
	stride1 = PAL_PTCE_INFO_STRIDE1();
	stride2 = PAL_PTCE_INFO_STRIDE2();
	
	/* The purge sequence must not be interleaved with interrupt handlers. */
	ipl = interrupts_disable();
	
	for(i = 0; i < count1; i++) {
		for(j = 0; j < count2; j++) {
			__asm__ volatile (
				"ptc.e %0 ;;"
				:
				: "r" (adr)
			);
			adr += stride2;
		}
		adr += stride1;
	}
	
	interrupts_restore(ipl);
	
	/* Serialize the data and instruction streams after the purge. */
	srlz_d();
	srlz_i();
#ifdef CONFIG_VHPT
	/* Keep the VHPT consistent with the now-empty translation cache. */
	vhpt_invalidate_all();
#endif
}
82 | 86 | ||
83 | /** Invalidate entries belonging to an address space. |
87 | /** Invalidate entries belonging to an address space. |
84 | * |
88 | * |
85 | * @param asid Address space identifier. |
89 | * @param asid Address space identifier. |
86 | */ |
90 | */ |
87 | void tlb_invalidate_asid(asid_t asid) |
91 | void tlb_invalidate_asid(asid_t asid) |
88 | { |
92 | { |
89 | tlb_invalidate_all(); |
93 | tlb_invalidate_all(); |
90 | } |
94 | } |
91 | 95 | ||
92 | 96 | ||
93 | void tlb_invalidate_pages(asid_t asid, __address page, count_t cnt) |
97 | void tlb_invalidate_pages(asid_t asid, __address page, count_t cnt) |
94 | { |
98 | { |
95 | region_register rr; |
99 | region_register rr; |
96 | bool restore_rr = false; |
100 | bool restore_rr = false; |
97 | int b = 0; |
101 | int b = 0; |
98 | int c = cnt; |
102 | int c = cnt; |
99 | 103 | ||
100 | __address va; |
104 | __address va; |
101 | va = page; |
105 | va = page; |
102 | 106 | ||
103 | rr.word = rr_read(VA2VRN(va)); |
107 | rr.word = rr_read(VA2VRN(va)); |
104 | if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) { |
108 | if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) { |
105 | /* |
109 | /* |
106 | * The selected region register does not contain required RID. |
110 | * The selected region register does not contain required RID. |
107 | * Save the old content of the register and replace the RID. |
111 | * Save the old content of the register and replace the RID. |
108 | */ |
112 | */ |
109 | region_register rr0; |
113 | region_register rr0; |
110 | 114 | ||
111 | rr0 = rr; |
115 | rr0 = rr; |
112 | rr0.map.rid = ASID2RID(asid, VA2VRN(va)); |
116 | rr0.map.rid = ASID2RID(asid, VA2VRN(va)); |
113 | rr_write(VA2VRN(va), rr0.word); |
117 | rr_write(VA2VRN(va), rr0.word); |
114 | srlz_d(); |
118 | srlz_d(); |
115 | srlz_i(); |
119 | srlz_i(); |
116 | } |
120 | } |
117 | 121 | ||
118 | while(c >>= 1) |
122 | while(c >>= 1) |
119 | b++; |
123 | b++; |
120 | b >>= 1; |
124 | b >>= 1; |
121 | __u64 ps; |
125 | __u64 ps; |
122 | 126 | ||
123 | switch (b) { |
127 | switch (b) { |
124 | case 0: /*cnt 1-3*/ |
128 | case 0: /*cnt 1-3*/ |
125 | ps = PAGE_WIDTH; |
129 | ps = PAGE_WIDTH; |
126 | break; |
130 | break; |
127 | case 1: /*cnt 4-15*/ |
131 | case 1: /*cnt 4-15*/ |
128 | /*cnt=((cnt-1)/4)+1;*/ |
132 | /*cnt=((cnt-1)/4)+1;*/ |
129 | ps = PAGE_WIDTH+2; |
133 | ps = PAGE_WIDTH+2; |
130 | va &= ~((1<<ps)-1); |
134 | va &= ~((1<<ps)-1); |
131 | break; |
135 | break; |
132 | case 2: /*cnt 16-63*/ |
136 | case 2: /*cnt 16-63*/ |
133 | /*cnt=((cnt-1)/16)+1;*/ |
137 | /*cnt=((cnt-1)/16)+1;*/ |
134 | ps = PAGE_WIDTH+4; |
138 | ps = PAGE_WIDTH+4; |
135 | va &= ~((1<<ps)-1); |
139 | va &= ~((1<<ps)-1); |
136 | break; |
140 | break; |
137 | case 3: /*cnt 64-255*/ |
141 | case 3: /*cnt 64-255*/ |
138 | /*cnt=((cnt-1)/64)+1;*/ |
142 | /*cnt=((cnt-1)/64)+1;*/ |
139 | ps = PAGE_WIDTH+6; |
143 | ps = PAGE_WIDTH+6; |
140 | va &= ~((1<<ps)-1); |
144 | va &= ~((1<<ps)-1); |
141 | break; |
145 | break; |
142 | case 4: /*cnt 256-1023*/ |
146 | case 4: /*cnt 256-1023*/ |
143 | /*cnt=((cnt-1)/256)+1;*/ |
147 | /*cnt=((cnt-1)/256)+1;*/ |
144 | ps = PAGE_WIDTH+8; |
148 | ps = PAGE_WIDTH+8; |
145 | va &= ~((1<<ps)-1); |
149 | va &= ~((1<<ps)-1); |
146 | break; |
150 | break; |
147 | case 5: /*cnt 1024-4095*/ |
151 | case 5: /*cnt 1024-4095*/ |
148 | /*cnt=((cnt-1)/1024)+1;*/ |
152 | /*cnt=((cnt-1)/1024)+1;*/ |
149 | ps = PAGE_WIDTH+10; |
153 | ps = PAGE_WIDTH+10; |
150 | va &= ~((1<<ps)-1); |
154 | va &= ~((1<<ps)-1); |
151 | break; |
155 | break; |
152 | case 6: /*cnt 4096-16383*/ |
156 | case 6: /*cnt 4096-16383*/ |
153 | /*cnt=((cnt-1)/4096)+1;*/ |
157 | /*cnt=((cnt-1)/4096)+1;*/ |
154 | ps = PAGE_WIDTH+12; |
158 | ps = PAGE_WIDTH+12; |
155 | va &= ~((1<<ps)-1); |
159 | va &= ~((1<<ps)-1); |
156 | break; |
160 | break; |
157 | case 7: /*cnt 16384-65535*/ |
161 | case 7: /*cnt 16384-65535*/ |
158 | case 8: /*cnt 65536-(256K-1)*/ |
162 | case 8: /*cnt 65536-(256K-1)*/ |
159 | /*cnt=((cnt-1)/16384)+1;*/ |
163 | /*cnt=((cnt-1)/16384)+1;*/ |
160 | ps = PAGE_WIDTH+14; |
164 | ps = PAGE_WIDTH+14; |
161 | va &= ~((1<<ps)-1); |
165 | va &= ~((1<<ps)-1); |
162 | break; |
166 | break; |
163 | default: |
167 | default: |
164 | /*cnt=((cnt-1)/(16384*16))+1;*/ |
168 | /*cnt=((cnt-1)/(16384*16))+1;*/ |
165 | ps=PAGE_WIDTH+18; |
169 | ps=PAGE_WIDTH+18; |
166 | va&=~((1<<ps)-1); |
170 | va&=~((1<<ps)-1); |
167 | break; |
171 | break; |
168 | } |
172 | } |
169 | /*cnt+=(page!=va);*/ |
173 | /*cnt+=(page!=va);*/ |
170 | for(; va<(page+cnt*(PAGE_SIZE)); va += (1<<ps)) { |
174 | for(; va<(page+cnt*(PAGE_SIZE)); va += (1<<ps)) { |
171 | __asm__ volatile ( |
175 | __asm__ volatile ( |
172 | "ptc.l %0,%1;;" |
176 | "ptc.l %0,%1;;" |
173 | : |
177 | : |
174 | : "r" (va), "r" (ps<<2) |
178 | : "r" (va), "r" (ps<<2) |
175 | ); |
179 | ); |
176 | } |
180 | } |
177 | srlz_d(); |
181 | srlz_d(); |
178 | srlz_i(); |
182 | srlz_i(); |
179 | 183 | ||
180 | if (restore_rr) { |
184 | if (restore_rr) { |
181 | rr_write(VA2VRN(va), rr.word); |
185 | rr_write(VA2VRN(va), rr.word); |
182 | srlz_d(); |
186 | srlz_d(); |
183 | srlz_i(); |
187 | srlz_i(); |
184 | } |
188 | } |
185 | } |
189 | } |
186 | 190 | ||
187 | 191 | ||
188 | /** Insert data into data translation cache. |
192 | /** Insert data into data translation cache. |
189 | * |
193 | * |
190 | * @param va Virtual page address. |
194 | * @param va Virtual page address. |
191 | * @param asid Address space identifier. |
195 | * @param asid Address space identifier. |
192 | * @param entry The rest of TLB entry as required by TLB insertion format. |
196 | * @param entry The rest of TLB entry as required by TLB insertion format. |
193 | */ |
197 | */ |
194 | void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry) |
198 | void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry) |
195 | { |
199 | { |
196 | tc_mapping_insert(va, asid, entry, true); |
200 | tc_mapping_insert(va, asid, entry, true); |
197 | } |
201 | } |
198 | 202 | ||
199 | /** Insert data into instruction translation cache. |
203 | /** Insert data into instruction translation cache. |
200 | * |
204 | * |
201 | * @param va Virtual page address. |
205 | * @param va Virtual page address. |
202 | * @param asid Address space identifier. |
206 | * @param asid Address space identifier. |
203 | * @param entry The rest of TLB entry as required by TLB insertion format. |
207 | * @param entry The rest of TLB entry as required by TLB insertion format. |
204 | */ |
208 | */ |
205 | void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry) |
209 | void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry) |
206 | { |
210 | { |
207 | tc_mapping_insert(va, asid, entry, false); |
211 | tc_mapping_insert(va, asid, entry, false); |
208 | } |
212 | } |
209 | 213 | ||
/** Insert data into instruction or data translation cache.
 *
 * If the region register for the VRN of va does not already hold the RID
 * derived from asid, it is temporarily replaced for the duration of the
 * insertion and restored afterwards.  The insertion itself runs with
 * PSR.ic cleared so that it cannot be disturbed by an interruption.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
	region_register rr;
	bool restore_rr = false;
	
	rr.word = rr_read(VA2VRN(va));
	if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
		/*
		 * The selected region register does not contain required RID.
		 * Save the old content of the register and replace the RID.
		 */
		region_register rr0;
		
		rr0 = rr;
		rr0.map.rid = ASID2RID(asid, VA2VRN(va));
		rr_write(VA2VRN(va), rr0.word);
		srlz_d();
		srlz_i();
	}
	
	/*
	 * Save PSR in r8, clear PSR.ic, set up cr.ifa/cr.itir and perform
	 * itc.i or itc.d depending on the dtc argument (%4); finally
	 * restore PSR from r8 and serialize.
	 */
	__asm__ volatile (
		"mov r8=psr;;\n"
		"rsm %0;;\n"			/* PSR_IC_MASK */
		"srlz.d;;\n"
		"srlz.i;;\n"
		"mov cr.ifa=%1\n"		/* va */
		"mov cr.itir=%2;;\n"		/* entry.word[1] */
		"cmp.eq p6,p7 = %4,r0;;\n"	/* decide between itc and dtc */
		"(p6) itc.i %3;;\n"
		"(p7) itc.d %3;;\n"
		"mov psr.l=r8;;\n"
		"srlz.d;;\n"
		:
		: "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
		: "p6", "p7", "r8"
	);
	
	if (restore_rr) {
		/* Put back the original region register content. */
		rr_write(VA2VRN(va), rr.word);
		srlz_d();
		srlz_i();
	}
}
260 | 264 | ||
261 | /** Insert data into instruction translation register. |
265 | /** Insert data into instruction translation register. |
262 | * |
266 | * |
263 | * @param va Virtual page address. |
267 | * @param va Virtual page address. |
264 | * @param asid Address space identifier. |
268 | * @param asid Address space identifier. |
265 | * @param entry The rest of TLB entry as required by TLB insertion format. |
269 | * @param entry The rest of TLB entry as required by TLB insertion format. |
266 | * @param tr Translation register. |
270 | * @param tr Translation register. |
267 | */ |
271 | */ |
268 | void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr) |
272 | void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr) |
269 | { |
273 | { |
270 | tr_mapping_insert(va, asid, entry, false, tr); |
274 | tr_mapping_insert(va, asid, entry, false, tr); |
271 | } |
275 | } |
272 | 276 | ||
273 | /** Insert data into data translation register. |
277 | /** Insert data into data translation register. |
274 | * |
278 | * |
275 | * @param va Virtual page address. |
279 | * @param va Virtual page address. |
276 | * @param asid Address space identifier. |
280 | * @param asid Address space identifier. |
277 | * @param entry The rest of TLB entry as required by TLB insertion format. |
281 | * @param entry The rest of TLB entry as required by TLB insertion format. |
278 | * @param tr Translation register. |
282 | * @param tr Translation register. |
279 | */ |
283 | */ |
280 | void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr) |
284 | void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr) |
281 | { |
285 | { |
282 | tr_mapping_insert(va, asid, entry, true, tr); |
286 | tr_mapping_insert(va, asid, entry, true, tr); |
283 | } |
287 | } |
284 | 288 | ||
/** Insert data into instruction or data translation register.
 *
 * If the region register for the VRN of va does not already hold the RID
 * derived from asid, it is temporarily replaced for the duration of the
 * insertion and restored afterwards.  The insertion itself runs with
 * PSR.ic cleared so that it cannot be disturbed by an interruption.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
	region_register rr;
	bool restore_rr = false;
	
	rr.word = rr_read(VA2VRN(va));
	if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
		/*
		 * The selected region register does not contain required RID.
		 * Save the old content of the register and replace the RID.
		 */
		region_register rr0;
		
		rr0 = rr;
		rr0.map.rid = ASID2RID(asid, VA2VRN(va));
		rr_write(VA2VRN(va), rr0.word);
		srlz_d();
		srlz_i();
	}
	
	/*
	 * Save PSR in r8, clear PSR.ic, set up cr.ifa/cr.itir and perform
	 * itr.i or itr.d into slot tr (%4) depending on the dtr argument
	 * (%5); finally restore PSR from r8 and serialize.
	 */
	__asm__ volatile (
		"mov r8=psr;;\n"
		"rsm %0;;\n"			/* PSR_IC_MASK */
		"srlz.d;;\n"
		"srlz.i;;\n"
		"mov cr.ifa=%1\n"		/* va */
		"mov cr.itir=%2;;\n"		/* entry.word[1] */
		"cmp.eq p6,p7=%5,r0;;\n"	/* decide between itr and dtr */
		"(p6) itr.i itr[%4]=%3;;\n"
		"(p7) itr.d dtr[%4]=%3;;\n"
		"mov psr.l=r8;;\n"
		"srlz.d;;\n"
		:
		: "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
		: "p6", "p7", "r8"
	);
	
	if (restore_rr) {
		/* Put back the original region register content. */
		rr_write(VA2VRN(va), rr.word);
		srlz_d();
		srlz_i();
	}
}
336 | 340 | ||
/** Insert a kernel mapping into DTLB.
 *
 * Builds a present, writeback, kernel-privilege, read/write TLB entry for
 * the given page/frame pair and inserts it into either a data translation
 * register or the data translation cache.
 *
 * @param page Virtual page address of the mapping.
 * @param frame Physical frame address backing the mapping.
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
 * @param tr Translation register if dtr is true, ignored otherwise.
 */
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
{
	tlb_entry_t entry;
	
	/* Start from an all-zero entry; bitfields below fill in the rest. */
	entry.word[0] = 0;
	entry.word[1] = 0;
	
	entry.p = true;			/* present */
	entry.ma = MA_WRITEBACK;
	entry.a = true;			/* already accessed */
	entry.d = true;			/* already dirty */
	entry.pl = PL_KERNEL;
	entry.ar = AR_READ | AR_WRITE;
	entry.ppn = frame >> PPN_SHIFT;
	entry.ps = PAGE_WIDTH;
	
	if (dtr)
		dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
	else
		dtc_mapping_insert(page, ASID_KERNEL, entry);
}
366 | 370 | ||
/** Copy content of PTE into data translation cache.
 *
 * Translates the software PTE fields into the hardware TLB insertion
 * format and inserts the result into the DTC (and, with CONFIG_VHPT,
 * into the VHPT as well).
 *
 * @param t PTE.
 */
void dtc_pte_copy(pte_t *t)
{
	tlb_entry_t entry;
	
	/* Start from an all-zero entry; bitfields below fill in the rest. */
	entry.word[0] = 0;
	entry.word[1] = 0;
	
	entry.p = t->p;
	entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;	/* cacheable -> writeback */
	entry.a = t->a;
	entry.d = t->d;
	entry.pl = t->k ? PL_KERNEL : PL_USER;
	/* NOTE(review): writable PTEs get AR_WRITE without AR_READ —
	   presumably this access-rights encoding implies readability; verify. */
	entry.ar = t->w ? AR_WRITE : AR_READ;
	entry.ppn = t->frame >> PPN_SHIFT;
	entry.ps = PAGE_WIDTH;
	
	dtc_mapping_insert(t->page, t->as->asid, entry);
#ifdef CONFIG_VHPT
	/* Mirror the insertion into the VHPT so hardware walks can find it. */
	vhpt_mapping_insert(t->page, t->as->asid, entry);
#endif
}
389 | 396 | ||
/** Copy content of PTE into instruction translation cache.
 *
 * Translates the software PTE fields into the hardware TLB insertion
 * format and inserts the result into the ITC (and, with CONFIG_VHPT,
 * into the VHPT as well).  The PTE must be executable.
 *
 * @param t PTE.
 */
void itc_pte_copy(pte_t *t)
{
	tlb_entry_t entry;
	
	/* Start from an all-zero entry; bitfields below fill in the rest. */
	entry.word[0] = 0;
	entry.word[1] = 0;
	
	/* An ITC insertion only makes sense for an executable page. */
	ASSERT(t->x);
	
	entry.p = t->p;
	entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;	/* cacheable -> writeback */
	entry.a = t->a;
	entry.pl = t->k ? PL_KERNEL : PL_USER;
	entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
	entry.ppn = t->frame >> PPN_SHIFT;
	entry.ps = PAGE_WIDTH;
	
	itc_mapping_insert(t->page, t->as->asid, entry);
#ifdef CONFIG_VHPT
	/* Mirror the insertion into the VHPT so hardware walks can find it. */
	vhpt_mapping_insert(t->page, t->as->asid, entry);
#endif
}
413 | 423 | ||
414 | /** Instruction TLB fault handler for faults with VHPT turned off. |
424 | /** Instruction TLB fault handler for faults with VHPT turned off. |
415 | * |
425 | * |
416 | * @param vector Interruption vector. |
426 | * @param vector Interruption vector. |
417 | * @param istate Structure with saved interruption state. |
427 | * @param istate Structure with saved interruption state. |
418 | */ |
428 | */ |
419 | void alternate_instruction_tlb_fault(__u64 vector, istate_t *istate) |
429 | void alternate_instruction_tlb_fault(__u64 vector, istate_t *istate) |
420 | { |
430 | { |
421 | region_register rr; |
431 | region_register rr; |
422 | __address va; |
432 | __address va; |
423 | pte_t *t; |
433 | pte_t *t; |
424 | 434 | ||
425 | va = istate->cr_ifa; /* faulting address */ |
435 | va = istate->cr_ifa; /* faulting address */ |
426 | page_table_lock(AS, true); |
436 | page_table_lock(AS, true); |
427 | t = page_mapping_find(AS, va); |
437 | t = page_mapping_find(AS, va); |
428 | if (t) { |
438 | if (t) { |
429 | /* |
439 | /* |
430 | * The mapping was found in software page hash table. |
440 | * The mapping was found in software page hash table. |
431 | * Insert it into data translation cache. |
441 | * Insert it into data translation cache. |
432 | */ |
442 | */ |
433 | itc_pte_copy(t); |
443 | itc_pte_copy(t); |
434 | page_table_unlock(AS, true); |
444 | page_table_unlock(AS, true); |
435 | } else { |
445 | } else { |
436 | /* |
446 | /* |
437 | * Forward the page fault to address space page fault handler. |
447 | * Forward the page fault to address space page fault handler. |
438 | */ |
448 | */ |
439 | page_table_unlock(AS, true); |
449 | page_table_unlock(AS, true); |
440 | if (!as_page_fault(va)) { |
450 | if (!as_page_fault(va)) { |
441 | panic("%s: va=%P, rid=%d, iip=%P\n", __FUNCTION__, istate->cr_ifa, rr.map.rid, istate->cr_iip); |
451 | panic("%s: va=%P, rid=%d, iip=%P\n", __FUNCTION__, istate->cr_ifa, rr.map.rid, istate->cr_iip); |
442 | } |
452 | } |
443 | } |
453 | } |
444 | } |
454 | } |
445 | 455 | ||
/** Data TLB fault handler for faults with VHPT turned off.
 *
 * Kernel-region faults with the kernel ASID are satisfied with an on-the-fly
 * identity (KA2PA) mapping; all other faults are looked up in the software
 * page hash table and, failing that, forwarded to the address space page
 * fault handler.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void alternate_data_tlb_fault(__u64 vector, istate_t *istate)
{
	region_register rr;
	rid_t rid;
	__address va;
	pte_t *t;
	
	va = istate->cr_ifa;	/* faulting address */
	rr.word = rr_read(VA2VRN(va));
	rid = rr.map.rid;
	if (RID2ASID(rid) == ASID_KERNEL) {
		if (VA2VRN(va) == VRN_KERNEL) {
			/*
			 * Provide KA2PA(identity) mapping for faulting piece of
			 * kernel address space.
			 */
			dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
			return;
		}
	}
	
	page_table_lock(AS, true);
	t = page_mapping_find(AS, va);
	if (t) {
		/*
		 * The mapping was found in software page hash table.
		 * Insert it into data translation cache.
		 */
		dtc_pte_copy(t);
		page_table_unlock(AS, true);
	} else {
		/*
		 * Forward the page fault to address space page fault handler.
		 */
		page_table_unlock(AS, true);
		if (!as_page_fault(va)) {
			panic("%s: va=%P, rid=%d, iip=%P\n", __FUNCTION__, va, rid, istate->cr_iip);
		}
	}
}
491 | 501 | ||
/** Data nested TLB fault handler.
 *
 * This fault should not occur.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void data_nested_tlb_fault(__u64 vector, istate_t *istate)
{
	/*
	 * A nested data TLB fault is never expected here; there is no
	 * recovery path, so stop the kernel with an identifying message.
	 */
	panic("%s\n", __FUNCTION__);
}
503 | 513 | ||
504 | /** Data Dirty bit fault handler. |
514 | /** Data Dirty bit fault handler. |
505 | * |
515 | * |
506 | * @param vector Interruption vector. |
516 | * @param vector Interruption vector. |
507 | * @param istate Structure with saved interruption state. |
517 | * @param istate Structure with saved interruption state. |
508 | */ |
518 | */ |
509 | void data_dirty_bit_fault(__u64 vector, istate_t *istate) |
519 | void data_dirty_bit_fault(__u64 vector, istate_t *istate) |
510 | { |
520 | { |
511 | pte_t *t; |
521 | pte_t *t; |
512 | 522 | ||
513 | page_table_lock(AS, true); |
523 | page_table_lock(AS, true); |
514 | t = page_mapping_find(AS, istate->cr_ifa); |
524 | t = page_mapping_find(AS, istate->cr_ifa); |
515 | ASSERT(t && t->p); |
525 | ASSERT(t && t->p); |
516 | if (t && t->p) { |
526 | if (t && t->p) { |
517 | /* |
527 | /* |
518 | * Update the Dirty bit in page tables and reinsert |
528 | * Update the Dirty bit in page tables and reinsert |
519 | * the mapping into DTC. |
529 | * the mapping into DTC. |
520 | */ |
530 | */ |
521 | t->d = true; |
531 | t->d = true; |
522 | dtc_pte_copy(t); |
532 | dtc_pte_copy(t); |
523 | } |
533 | } |
524 | page_table_unlock(AS, true); |
534 | page_table_unlock(AS, true); |
525 | } |
535 | } |
526 | 536 | ||
527 | /** Instruction access bit fault handler. |
537 | /** Instruction access bit fault handler. |
528 | * |
538 | * |
529 | * @param vector Interruption vector. |
539 | * @param vector Interruption vector. |
530 | * @param istate Structure with saved interruption state. |
540 | * @param istate Structure with saved interruption state. |
531 | */ |
541 | */ |
532 | void instruction_access_bit_fault(__u64 vector, istate_t *istate) |
542 | void instruction_access_bit_fault(__u64 vector, istate_t *istate) |
533 | { |
543 | { |
534 | pte_t *t; |
544 | pte_t *t; |
535 | 545 | ||
536 | page_table_lock(AS, true); |
546 | page_table_lock(AS, true); |
537 | t = page_mapping_find(AS, istate->cr_ifa); |
547 | t = page_mapping_find(AS, istate->cr_ifa); |
538 | ASSERT(t && t->p); |
548 | ASSERT(t && t->p); |
539 | if (t && t->p) { |
549 | if (t && t->p) { |
540 | /* |
550 | /* |
541 | * Update the Accessed bit in page tables and reinsert |
551 | * Update the Accessed bit in page tables and reinsert |
542 | * the mapping into ITC. |
552 | * the mapping into ITC. |
543 | */ |
553 | */ |
544 | t->a = true; |
554 | t->a = true; |
545 | itc_pte_copy(t); |
555 | itc_pte_copy(t); |
546 | } |
556 | } |
547 | page_table_unlock(AS, true); |
557 | page_table_unlock(AS, true); |
548 | } |
558 | } |
549 | 559 | ||
550 | /** Data access bit fault handler. |
560 | /** Data access bit fault handler. |
551 | * |
561 | * |
552 | * @param vector Interruption vector. |
562 | * @param vector Interruption vector. |
553 | * @param istate Structure with saved interruption state. |
563 | * @param istate Structure with saved interruption state. |
554 | */ |
564 | */ |
555 | void data_access_bit_fault(__u64 vector, istate_t *istate) |
565 | void data_access_bit_fault(__u64 vector, istate_t *istate) |
556 | { |
566 | { |
557 | pte_t *t; |
567 | pte_t *t; |
558 | 568 | ||
559 | page_table_lock(AS, true); |
569 | page_table_lock(AS, true); |
560 | t = page_mapping_find(AS, istate->cr_ifa); |
570 | t = page_mapping_find(AS, istate->cr_ifa); |
561 | ASSERT(t && t->p); |
571 | ASSERT(t && t->p); |
562 | if (t && t->p) { |
572 | if (t && t->p) { |
563 | /* |
573 | /* |
564 | * Update the Accessed bit in page tables and reinsert |
574 | * Update the Accessed bit in page tables and reinsert |
565 | * the mapping into DTC. |
575 | * the mapping into DTC. |
566 | */ |
576 | */ |
567 | t->a = true; |
577 | t->a = true; |
568 | dtc_pte_copy(t); |
578 | dtc_pte_copy(t); |
569 | } |
579 | } |
570 | page_table_unlock(AS, true); |
580 | page_table_unlock(AS, true); |
571 | } |
581 | } |
572 | 582 | ||
573 | /** Page not present fault handler. |
583 | /** Page not present fault handler. |
574 | * |
584 | * |
575 | * @param vector Interruption vector. |
585 | * @param vector Interruption vector. |
576 | * @param istate Structure with saved interruption state. |
586 | * @param istate Structure with saved interruption state. |
577 | */ |
587 | */ |
578 | void page_not_present(__u64 vector, istate_t *istate) |
588 | void page_not_present(__u64 vector, istate_t *istate) |
579 | { |
589 | { |
580 | region_register rr; |
590 | region_register rr; |
581 | __address va; |
591 | __address va; |
582 | pte_t *t; |
592 | pte_t *t; |
583 | 593 | ||
584 | va = istate->cr_ifa; /* faulting address */ |
594 | va = istate->cr_ifa; /* faulting address */ |
585 | page_table_lock(AS, true); |
595 | page_table_lock(AS, true); |
586 | t = page_mapping_find(AS, va); |
596 | t = page_mapping_find(AS, va); |
587 | ASSERT(t); |
597 | ASSERT(t); |
588 | 598 | ||
589 | if (t->p) { |
599 | if (t->p) { |
590 | /* |
600 | /* |
591 | * If the Present bit is set in page hash table, just copy it |
601 | * If the Present bit is set in page hash table, just copy it |
592 | * and update ITC/DTC. |
602 | * and update ITC/DTC. |
593 | */ |
603 | */ |
594 | if (t->x) |
604 | if (t->x) |
595 | itc_pte_copy(t); |
605 | itc_pte_copy(t); |
596 | else |
606 | else |
597 | dtc_pte_copy(t); |
607 | dtc_pte_copy(t); |
598 | page_table_unlock(AS, true); |
608 | page_table_unlock(AS, true); |
599 | } else { |
609 | } else { |
600 | page_table_unlock(AS, true); |
610 | page_table_unlock(AS, true); |
601 | if (!as_page_fault(va)) { |
611 | if (!as_page_fault(va)) { |
602 | panic("%s: va=%P, rid=%d\n", __FUNCTION__, va, rr.map.rid); |
612 | panic("%s: va=%P, rid=%d\n", __FUNCTION__, va, rr.map.rid); |
603 | } |
613 | } |
604 | } |
614 | } |
605 | } |
615 | } |
606 | 616 |