Subversion Repositories: HelenOS-historic

Comparison of Rev 935 and Rev 944 (text common to both revisions is shown once; revision-specific changes are noted inline).
/*
 * Copyright (C) 2006 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * TLB management.
 */

#include <mm/tlb.h>
#include <mm/asid.h>
#include <mm/page.h>
#include <mm/as.h>
#include <arch/mm/tlb.h>
#include <arch/mm/page.h>
#include <arch/barrier.h>
#include <arch/interrupt.h>
#include <arch/pal/pal.h>
#include <arch/asm.h>
#include <typedefs.h>
#include <panic.h>
#include <arch.h>

/** Invalidate all TLB entries. */
void tlb_invalidate_all(void)
{
        __address adr;
        __u32 count1, count2, stride1, stride2;

        int i, j;

        adr = PAL_PTCE_INFO_BASE();
        count1 = PAL_PTCE_INFO_COUNT1();
        count2 = PAL_PTCE_INFO_COUNT2();
        stride1 = PAL_PTCE_INFO_STRIDE1();
        stride2 = PAL_PTCE_INFO_STRIDE2();

        interrupts_disable();

        for (i = 0; i < count1; i++) {
                for (j = 0; j < count2; j++) {
                        asm volatile (
                                "ptc.e %0;;"
                                :
                                : "r" (adr)
                        );
                        adr += stride2;
                }
                adr += stride1;
        }

        interrupts_enable();

        srlz_d();
        srlz_i();
}

/** Invalidate entries belonging to an address space.
 *
 * @param asid Address space identifier.
 */
void tlb_invalidate_asid(asid_t asid)
{
        /* TODO */
        tlb_invalidate_all();
}
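
Until the TODO above is resolved, ASID-selective invalidation degenerates into a full TLB purge, so callers pay the cost of a complete ptc.e sweep. A minimal usage sketch (hypothetical call site, not part of this file; the local as pointer and its asid member follow the naming used by the PTE-copy routines below):

        /* Dropping all TLB entries of one address space currently costs a
         * full sweep via tlb_invalidate_all(). */
        tlb_invalidate_asid(as->asid);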

/* Empty helper introduced in rev 944; apparently a debugging hook, called
 * from tlb_invalidate_pages() below. */
extern void d(void);
void d(void)
{
}

/* Rev 935 declared this function as
 *   void tlb_invalidate_pages(asid_t asid, __address page, count_t cnt)
 * with an empty body; rev 944 renames the second parameter to va and adds
 * the implementation below. */

/** Invalidate TLB entries for a range of pages.
 *
 * @param asid Address space identifier.
 * @param va Virtual address of the first page in the range.
 * @param cnt Number of pages.
 */
void tlb_invalidate_pages(asid_t asid, __address va, count_t cnt)
{
        region_register rr;
        bool restore_rr = false;
        int b = 0;
        int c = cnt;
        int i;
        __u64 ps;

        rr.word = rr_read(VA2VRN(va));
        if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
                /*
                 * The selected region register does not contain required RID.
                 * Save the old content of the register and replace the RID.
                 */
                region_register rr0;

                rr0 = rr;
                rr0.map.rid = ASID2RID(asid, VA2VRN(va));
                rr_write(VA2VRN(va), rr0.word);
                srlz_d();
                srlz_i();
        }

        /* b becomes floor(log2(cnt)) >> 1, i.e. the power-of-4 size class of cnt. */
        while (c >>= 1)
                b++;
        b >>= 1;

        switch (b) {
        case 0:         /* cnt 1 - 3 */
                ps = PAGE_WIDTH;
                break;
        case 1:         /* cnt 4 - 15 */
                cnt = (cnt / 4) + 1;
                ps = PAGE_WIDTH + 2;
                va &= ~((1 << ps) - 1);
                break;
        case 2:         /* cnt 16 - 63 */
                cnt = (cnt / 16) + 1;
                ps = PAGE_WIDTH + 4;
                va &= ~((1 << ps) - 1);
                break;
        case 3:         /* cnt 64 - 255 */
                cnt = (cnt / 64) + 1;
                ps = PAGE_WIDTH + 6;
                va &= ~((1 << ps) - 1);
                break;
        case 4:         /* cnt 256 - 1023 */
                cnt = (cnt / 256) + 1;
                ps = PAGE_WIDTH + 8;
                va &= ~((1 << ps) - 1);
                break;
        case 5:         /* cnt 1024 - 4095 */
                cnt = (cnt / 1024) + 1;
                ps = PAGE_WIDTH + 10;
                va &= ~((1 << ps) - 1);
                break;
        case 6:         /* cnt 4096 - 16383 */
                cnt = (cnt / 4096) + 1;
                ps = PAGE_WIDTH + 12;
                va &= ~((1 << ps) - 1);
                break;
        case 7:         /* cnt 16384 - 65535 */
        case 8:         /* cnt 65536 - (256K-1) */
                cnt = (cnt / 16384) + 1;
                ps = PAGE_WIDTH + 14;
                va &= ~((1 << ps) - 1);
                break;
        default:
                cnt = (cnt / (16384 * 16)) + 1;
                ps = PAGE_WIDTH + 18;
                va &= ~((1 << ps) - 1);
                break;
        }
        d();
        for (i = 0; i < cnt; i++) {
                __asm__ volatile (
                        "ptc.l %0,%1;;"
                        :
                        : "r" (va), "r" (ps << 2)
                );
                va += (1 << ps);
        }
        srlz_d();
        srlz_i();

        if (restore_rr) {
                rr_write(VA2VRN(va), rr.word);
                srlz_d();
                srlz_i();
        }
}
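
The size-class computation above is terse; this standalone sketch (the helper name and form are mine, not repository code) shows how cnt is mapped to the power-of-4 bucket that drives the switch:

        /* Sketch: mirrors the b computation in tlb_invalidate_pages().
         * Returns floor(log2(cnt)) >> 1, i.e. one class per factor of 4.
         * Example: cnt == 100 yields 3 (the 64 - 255 bucket), so the range
         * is purged as two blocks of 64 pages (ps == PAGE_WIDTH + 6). */
        static int purge_size_class(count_t cnt)
        {
                int b = 0;
                int c = cnt;

                while (c >>= 1)
                        b++;
                return b >> 1;
        }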

/** Insert data into data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
        tc_mapping_insert(va, asid, entry, true);
}

/** Insert data into instruction translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
        tc_mapping_insert(va, asid, entry, false);
}

/** Insert data into instruction or data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
        region_register rr;
        bool restore_rr = false;

        rr.word = rr_read(VA2VRN(va));
        if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
                /*
                 * The selected region register does not contain required RID.
                 * Save the old content of the register and replace the RID.
                 */
                region_register rr0;

                rr0 = rr;
                rr0.map.rid = ASID2RID(asid, VA2VRN(va));
                rr_write(VA2VRN(va), rr0.word);
                srlz_d();
                srlz_i();
        }

        __asm__ volatile (
                "mov r8=psr;;\n"
                "rsm %0;;\n"                    /* PSR_IC_MASK */
                "srlz.d;;\n"
                "srlz.i;;\n"
                "mov cr.ifa=%1\n"               /* va */
                "mov cr.itir=%2;;\n"            /* entry.word[1] */
                "cmp.eq p6,p7 = %4,r0;;\n"      /* decide between itc and dtc */
                "(p6) itc.i %3;;\n"
                "(p7) itc.d %3;;\n"
                "mov psr.l=r8;;\n"
                "srlz.d;;\n"
                :
                : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
                : "p6", "p7", "r8"
        );

        if (restore_rr) {
                rr_write(VA2VRN(va), rr.word);
                srlz_d();
                srlz_i();
        }
}

/** Insert data into instruction translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
        tr_mapping_insert(va, asid, entry, false, tr);
}

/** Insert data into data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
        tr_mapping_insert(va, asid, entry, true, tr);
}

/** Insert data into instruction or data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
        region_register rr;
        bool restore_rr = false;

        rr.word = rr_read(VA2VRN(va));
        if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
                /*
                 * The selected region register does not contain required RID.
                 * Save the old content of the register and replace the RID.
                 */
                region_register rr0;

                rr0 = rr;
                rr0.map.rid = ASID2RID(asid, VA2VRN(va));
                rr_write(VA2VRN(va), rr0.word);
                srlz_d();
                srlz_i();
        }

        __asm__ volatile (
                "mov r8=psr;;\n"
                "rsm %0;;\n"                    /* PSR_IC_MASK */
                "srlz.d;;\n"
                "srlz.i;;\n"
                "mov cr.ifa=%1\n"               /* va */
                "mov cr.itir=%2;;\n"            /* entry.word[1] */
                "cmp.eq p6,p7=%5,r0;;\n"        /* decide between itr and dtr */
                "(p6) itr.i itr[%4]=%3;;\n"
                "(p7) itr.d dtr[%4]=%3;;\n"
                "mov psr.l=r8;;\n"
                "srlz.d;;\n"
                :
                : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
                : "p6", "p7", "r8"
        );

        if (restore_rr) {
                rr_write(VA2VRN(va), rr.word);
                srlz_d();
                srlz_i();
        }
}

/** Insert data into DTLB.
 *
 * @param page Virtual page address.
 * @param frame Physical frame address.
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
 * @param tr Translation register if dtr is true, ignored otherwise.
 */
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
{
        tlb_entry_t entry;

        entry.word[0] = 0;
        entry.word[1] = 0;

        entry.p = true;                 /* present */
        entry.ma = MA_WRITEBACK;
        entry.a = true;                 /* already accessed */
        entry.d = true;                 /* already dirty */
        entry.pl = PL_KERNEL;
        entry.ar = AR_READ | AR_WRITE;
        entry.ppn = frame >> PPN_SHIFT;
        entry.ps = PAGE_WIDTH;

        if (dtr)
                dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
        else
                dtc_mapping_insert(page, ASID_KERNEL, entry);
}
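
For reference, the only caller in this file is the data TLB fault handler below, which installs an identity mapping through the DTC; passing true together with a register index would pin the same mapping in a data translation register instead (the second call is a hypothetical variant, not repository code):

        /* As used by alternate_data_tlb_fault(): identity-map a faulting
         * kernel virtual address via the data translation cache. */
        dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);

        /* Hypothetical variant: pin the mapping in data translation
         * register 0 instead (dtr = true, tr = 0). */
        dtlb_kernel_mapping_insert(va, KA2PA(va), true, 0);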

/** Copy content of PTE into data translation cache.
 *
 * @param t PTE.
 */
void dtc_pte_copy(pte_t *t)
{
        tlb_entry_t entry;

        entry.word[0] = 0;
        entry.word[1] = 0;

        entry.p = t->p;
        entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
        entry.a = t->a;
        entry.d = t->d;
        entry.pl = t->k ? PL_KERNEL : PL_USER;
        entry.ar = t->w ? AR_WRITE : AR_READ;
        entry.ppn = t->frame >> PPN_SHIFT;
        entry.ps = PAGE_WIDTH;

        dtc_mapping_insert(t->page, t->as->asid, entry);
}

/** Copy content of PTE into instruction translation cache.
 *
 * @param t PTE.
 */
void itc_pte_copy(pte_t *t)
{
        tlb_entry_t entry;

        entry.word[0] = 0;
        entry.word[1] = 0;

        ASSERT(t->x);

        entry.p = t->p;
        entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
        entry.a = t->a;
        entry.pl = t->k ? PL_KERNEL : PL_USER;
        entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
        entry.ppn = t->frame >> PPN_SHIFT;
        entry.ps = PAGE_WIDTH;

        itc_mapping_insert(t->page, t->as->asid, entry);
}

/** Instruction TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
        region_register rr;
        __address va;
        pte_t *t;

        va = pstate->cr_ifa;    /* faulting address */
        t = page_mapping_find(AS, va);
        if (t) {
                /*
                 * The mapping was found in software page hash table.
                 * Insert it into instruction translation cache.
                 */
                itc_pte_copy(t);
        } else {
                /*
                 * Forward the page fault to address space page fault handler.
                 */
                if (!as_page_fault(va)) {
                        panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
                }
        }
}
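
Note that rr is declared but never loaded in this handler, so the rid printed by the panic() above is indeterminate; page_not_present() further down shares the same pattern. alternate_data_tlb_fault() shows the intended initialization, and presumably the same one-liner belongs here as well (a sketch of the likely fix, not a change present in either revision):

        rr.word = rr_read(VA2VRN(va));  /* as done in alternate_data_tlb_fault() */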

/** Data TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
        region_register rr;
        rid_t rid;
        __address va;
        pte_t *t;

        va = pstate->cr_ifa;    /* faulting address */
        rr.word = rr_read(VA2VRN(va));
        rid = rr.map.rid;
        if (RID2ASID(rid) == ASID_KERNEL) {
                if (VA2VRN(va) == VRN_KERNEL) {
                        /*
                         * Provide KA2PA(identity) mapping for faulting piece of
                         * kernel address space.
                         */
                        dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
                        return;
                }
        }

        t = page_mapping_find(AS, va);
        if (t) {
                /*
                 * The mapping was found in software page hash table.
                 * Insert it into data translation cache.
                 */
                dtc_pte_copy(t);
        } else {
                /*
                 * Forward the page fault to address space page fault handler.
                 */
                if (!as_page_fault(va)) {
                        panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
                }
        }
}

/** Data nested TLB fault handler.
 *
 * This fault should not occur.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
        panic("%s\n", __FUNCTION__);
}

/** Data Dirty bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
        pte_t *t;

        t = page_mapping_find(AS, pstate->cr_ifa);
        ASSERT(t && t->p);
        if (t && t->p) {
                /*
                 * Update the Dirty bit in page tables and reinsert
                 * the mapping into DTC.
                 */
                t->d = true;
                dtc_pte_copy(t);
        }
}

/** Instruction access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
        pte_t *t;

        t = page_mapping_find(AS, pstate->cr_ifa);
        ASSERT(t && t->p);
        if (t && t->p) {
                /*
                 * Update the Accessed bit in page tables and reinsert
                 * the mapping into ITC.
                 */
                t->a = true;
                itc_pte_copy(t);
        }
}

/** Data access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
        pte_t *t;

        t = page_mapping_find(AS, pstate->cr_ifa);
        ASSERT(t && t->p);
        if (t && t->p) {
                /*
                 * Update the Accessed bit in page tables and reinsert
                 * the mapping into DTC.
                 */
                t->a = true;
                dtc_pte_copy(t);
        }
}

/** Page not present fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void page_not_present(__u64 vector, struct exception_regdump *pstate)
{
        region_register rr;
        __address va;
        pte_t *t;

        va = pstate->cr_ifa;    /* faulting address */
        t = page_mapping_find(AS, va);
        ASSERT(t);

        if (t->p) {
                /*
                 * If the Present bit is set in page hash table, just copy it
                 * and update ITC/DTC.
                 */
                if (t->x)
                        itc_pte_copy(t);
                else
                        dtc_pte_copy(t);
        } else {
                if (!as_page_fault(va)) {
                        panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
                }
        }
}