Subversion Repositories HelenOS-historic


Rev 944 → Rev 945
/*
 * Copyright (C) 2006 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * TLB management.
 */

#include <mm/tlb.h>
#include <mm/asid.h>
#include <mm/page.h>
#include <mm/as.h>
#include <arch/mm/tlb.h>
#include <arch/mm/page.h>
#include <arch/barrier.h>
#include <arch/interrupt.h>
#include <arch/pal/pal.h>
#include <arch/asm.h>
#include <typedefs.h>
#include <panic.h>
#include <arch.h>

/** Invalidate all TLB entries. */
void tlb_invalidate_all(void)
{
    __address adr;
    __u32 count1, count2, stride1, stride2;
    int i, j;

    adr = PAL_PTCE_INFO_BASE();
    count1 = PAL_PTCE_INFO_COUNT1();
    count2 = PAL_PTCE_INFO_COUNT2();
    stride1 = PAL_PTCE_INFO_STRIDE1();
    stride2 = PAL_PTCE_INFO_STRIDE2();

    interrupts_disable();

    for (i = 0; i < count1; i++) {
        for (j = 0; j < count2; j++) {
            asm volatile (
                "ptc.e %0;;"
                :
                : "r" (adr)
            );
            adr += stride2;
        }
        adr += stride1;
    }

    interrupts_enable();

    srlz_d();
    srlz_i();
}
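
/*
 * Editorial note (not part of the revision): the purge parameters above come
 * from the PAL_PTCE_INFO firmware call, which is assumed to follow the usual
 * ia64 convention of count1 x count2 purge points separated by stride1
 * (outer loop) and stride2 (inner loop); issuing ptc.e at each point is what
 * covers the whole translation cache.
 */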

/** Invalidate entries belonging to an address space.
 *
 * @param asid Address space identifier.
 */
void tlb_invalidate_asid(asid_t asid)
{
    /* TODO */
    tlb_invalidate_all();
}

/*
 * Rev 944 defined an empty debugging stub here; Rev 945 removes it:
 *
 *     extern void d(void);
 *     void d(void)
 *     {
 *     }
 */

/** Invalidate entries mapping a range of pages.
 *
 * The range is rounded to one of the supported purge sizes, so more pages
 * than requested may be invalidated.
 *
 * @param asid Address space identifier.
 * @param va Virtual address of the first page in the range.
 * @param cnt Number of pages to invalidate.
 */
void tlb_invalidate_pages(asid_t asid, __address va, count_t cnt)
{
    region_register rr;
    bool restore_rr = false;
    int b = 0;
    int c = cnt;
    int i;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    while (c >>= 1) b++;
    b >>= 1;
    __u64 ps;

    switch (b) {
        case 0: /* cnt 1-3 */
        {
            ps = PAGE_WIDTH;
            break;
        }
        case 1: /* cnt 4-15 */
        {
            cnt = (cnt / 4) + 1;
            ps = PAGE_WIDTH + 2;
            va &= ~((1 << ps) - 1);
            break;
        }
        case 2: /* cnt 16-63 */
        {
            cnt = (cnt / 16) + 1;
            ps = PAGE_WIDTH + 4;
            va &= ~((1 << ps) - 1);
            break;
        }
        case 3: /* cnt 64-255 */
        {
            cnt = (cnt / 64) + 1;
            ps = PAGE_WIDTH + 6;
            va &= ~((1 << ps) - 1);
            break;
        }
        case 4: /* cnt 256-1023 */
        {
            cnt = (cnt / 256) + 1;
            ps = PAGE_WIDTH + 8;
            va &= ~((1 << ps) - 1);
            break;
        }
        case 5: /* cnt 1024-4095 */
        {
            cnt = (cnt / 1024) + 1;
            ps = PAGE_WIDTH + 10;
            va &= ~((1 << ps) - 1);
            break;
        }
        case 6: /* cnt 4096-16383 */
        {
            cnt = (cnt / 4096) + 1;
            ps = PAGE_WIDTH + 12;
            va &= ~((1 << ps) - 1);
            break;
        }
        case 7: /* cnt 16384-65535 */
        case 8: /* cnt 65536-(256K-1) */
        {
            cnt = (cnt / 16384) + 1;
            ps = PAGE_WIDTH + 14;
            va &= ~((1 << ps) - 1);
            break;
        }
        default:
        {
            cnt = (cnt / (16384 * 16)) + 1;
            ps = PAGE_WIDTH + 18;
            va &= ~((1 << ps) - 1);
            break;
        }
    }
    /* Rev 944 called the debugging stub d() here; Rev 945 removes the call. */
    for (i = 0; i < cnt; i++) {
        __asm__ volatile (
            "ptc.l %0,%1;;"
            :
            : "r" (va), "r" (ps << 2)
        );
        va += (1 << ps);
    }
    srlz_d();
    srlz_i();

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
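
/*
 * Worked example (editorial, not part of the revision): for cnt == 100,
 * the while loop above yields b == 6 and b >>= 1 gives 3, so case 3 picks
 * a purge size of 2^(PAGE_WIDTH + 6) bytes (64 pages), aligns va down to
 * that size and sets cnt to 100/64 + 1 == 2; the ptc.l loop then issues
 * two purges of 64 pages each.
 */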

/** Insert data into data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
    tc_mapping_insert(va, asid, entry, true);
}

/** Insert data into instruction translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
    tc_mapping_insert(va, asid, entry, false);
}

/** Insert data into instruction or data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"                /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
        "(p6) itc.i %3;;\n"
        "(p7) itc.d %3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
        : "p6", "p7", "r8"
    );

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}

/** Insert data into instruction translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
    tr_mapping_insert(va, asid, entry, false, tr);
}

/** Insert data into data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
    tr_mapping_insert(va, asid, entry, true, tr);
}

/** Insert data into instruction or data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"                /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
        "(p6) itr.i itr[%4]=%3;;\n"
        "(p7) itr.d dtr[%4]=%3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
        : "p6", "p7", "r8"
    );

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}

/** Insert data into DTLB.
 *
 * @param page Virtual page address (kernel address space).
 * @param frame Physical frame to map the page onto.
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
 * @param tr Translation register if dtr is true, ignored otherwise.
 */
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    entry.p = true;                 /* present */
    entry.ma = MA_WRITEBACK;
    entry.a = true;                 /* already accessed */
    entry.d = true;                 /* already dirty */
    entry.pl = PL_KERNEL;
    entry.ar = AR_READ | AR_WRITE;
    entry.ppn = frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    if (dtr)
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
    else
        dtc_mapping_insert(page, ASID_KERNEL, entry);
}
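
/*
 * Illustrative use (editorial, not from this revision): pin a kernel page
 * into data translation register 0 instead of the data translation cache:
 *
 *     dtlb_kernel_mapping_insert(page, KA2PA(page), true, 0);
 *
 * With dtr false (tr ignored), the same call goes through the DTC, as
 * alternate_data_tlb_fault() does below.
 */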

/** Copy content of PTE into data translation cache.
 *
 * @param t PTE.
 */
void dtc_pte_copy(pte_t *t)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    entry.p = t->p;
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
    entry.a = t->a;
    entry.d = t->d;
    entry.pl = t->k ? PL_KERNEL : PL_USER;
    entry.ar = t->w ? AR_WRITE : AR_READ;
    entry.ppn = t->frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    dtc_mapping_insert(t->page, t->as->asid, entry);
}

/** Copy content of PTE into instruction translation cache.
 *
 * @param t PTE.
 */
void itc_pte_copy(pte_t *t)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    ASSERT(t->x);

    entry.p = t->p;
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
    entry.a = t->a;
    entry.pl = t->k ? PL_KERNEL : PL_USER;
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
    entry.ppn = t->frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    itc_mapping_insert(t->page, t->as->asid, entry);
}

/** Instruction TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
    region_register rr;
    __address va;
    pte_t *t;

    va = pstate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));
    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into instruction translation cache.
         */
        itc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
        }
    }
}

/** Data TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
    region_register rr;
    rid_t rid;
    __address va;
    pte_t *t;

    va = pstate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));
    rid = rr.map.rid;
    if (RID2ASID(rid) == ASID_KERNEL) {
        if (VA2VRN(va) == VRN_KERNEL) {
            /*
             * Provide KA2PA(identity) mapping for faulting piece of
             * kernel address space.
             */
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
            return;
        }
    }

    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into data translation cache.
         */
        dtc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
        }
    }
}

/** Data nested TLB fault handler.
 *
 * This fault should not occur.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
    panic("%s\n", __FUNCTION__);
}

/** Data Dirty bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
    pte_t *t;

    t = page_mapping_find(AS, pstate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Dirty bit in page tables and reinsert
         * the mapping into DTC.
         */
        t->d = true;
        dtc_pte_copy(t);
    }
}

/** Instruction access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
    pte_t *t;

    t = page_mapping_find(AS, pstate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Accessed bit in page tables and reinsert
         * the mapping into ITC.
         */
        t->a = true;
        itc_pte_copy(t);
    }
}

/** Data access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
    pte_t *t;

    t = page_mapping_find(AS, pstate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Accessed bit in page tables and reinsert
         * the mapping into DTC.
         */
        t->a = true;
        dtc_pte_copy(t);
    }
}

/** Page not present fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void page_not_present(__u64 vector, struct exception_regdump *pstate)
{
    region_register rr;
    __address va;
    pte_t *t;

    va = pstate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));
    t = page_mapping_find(AS, va);
    ASSERT(t);

    if (t->p) {
        /*
         * If the Present bit is set in page hash table, just copy it
         * and update ITC/DTC.
         */
        if (t->x)
            itc_pte_copy(t);
        else
            dtc_pte_copy(t);
    } else {
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
        }
    }
}