Subversion Repositories HelenOS-historic

Rev

Rev 958 | Rev 1044 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 958 Rev 993
1
/*
1
/*
2
 * Copyright (C) 2006 Jakub Jermar
2
 * Copyright (C) 2006 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/*
29
/*
30
 * TLB management.
30
 * TLB management.
31
 */
31
 */
32
 
32
 
33
#include <mm/tlb.h>
33
#include <mm/tlb.h>
34
#include <mm/asid.h>
34
#include <mm/asid.h>
35
#include <mm/page.h>
35
#include <mm/page.h>
36
#include <mm/as.h>
36
#include <mm/as.h>
37
#include <arch/mm/tlb.h>
37
#include <arch/mm/tlb.h>
38
#include <arch/mm/page.h>
38
#include <arch/mm/page.h>
39
#include <arch/barrier.h>
39
#include <arch/barrier.h>
40
#include <arch/interrupt.h>
40
#include <arch/interrupt.h>
41
#include <arch/pal/pal.h>
41
#include <arch/pal/pal.h>
42
#include <arch/asm.h>
42
#include <arch/asm.h>
43
#include <typedefs.h>
43
#include <typedefs.h>
44
#include <panic.h>
44
#include <panic.h>
-
 
45
#include <print.h>
45
#include <arch.h>
46
#include <arch.h>
46
 
47
 
47
 
-
 
48
 
-
 
49
/** Invalidate all TLB entries. */
48
/** Invalidate all TLB entries. */
50
void tlb_invalidate_all(void)
49
void tlb_invalidate_all(void)
51
{
50
{
-
 
51
        ipl_t ipl;
52
        __address adr;
52
        __address adr;
53
        __u32 count1,count2,stride1,stride2;
53
        __u32 count1, count2, stride1, stride2;
54
       
54
       
55
        int i,j;
55
        int i,j;
56
       
56
       
57
        adr=PAL_PTCE_INFO_BASE();
57
        adr = PAL_PTCE_INFO_BASE();
58
        count1=PAL_PTCE_INFO_COUNT1();
58
        count1 = PAL_PTCE_INFO_COUNT1();
59
        count2=PAL_PTCE_INFO_COUNT2();
59
        count2 = PAL_PTCE_INFO_COUNT2();
60
        stride1=PAL_PTCE_INFO_STRIDE1();
60
        stride1 = PAL_PTCE_INFO_STRIDE1();
61
        stride2=PAL_PTCE_INFO_STRIDE2();
61
        stride2 = PAL_PTCE_INFO_STRIDE2();
62
       
62
       
63
        interrupts_disable();
63
        ipl = interrupts_disable();
64
 
64
 
65
        for(i=0;i<count1;i++)
65
        for(i = 0; i < count1; i++) {
66
        {
-
 
67
            for(j=0;j<count2;j++)
66
            for(j = 0; j < count2; j++) {
68
            {
-
 
69
                asm volatile
67
                __asm__ volatile (
70
                (
-
 
71
                    "ptc.e %0;;"
68
                    "ptc.e %0 ;;"
72
                    :
69
                    :
73
                    :"r" (adr)
70
                    : "r" (adr)
74
                );
71
                );
75
                adr+=stride2;
72
                adr += stride2;
76
            }
73
            }
77
            adr+=stride1;
74
            adr += stride1;
78
        }
75
        }
79
 
76
 
80
        interrupts_enable();
77
        interrupts_restore(ipl);
81
 
78
 
82
        srlz_d();
79
        srlz_d();
83
        srlz_i();
80
        srlz_i();
84
}
81
}
85
 
82
 
86
/** Invalidate entries belonging to an address space.
 *
 * @param asid Address space identifier.
 */
void tlb_invalidate_asid(asid_t asid)
{
    /*
     * TODO: ASID-selective invalidation is not implemented yet;
     * for now the whole TLB is flushed regardless of asid.
     */
    tlb_invalidate_all();
}
95
 
91
 
96
 
92
 
97
/** Invalidate TLB entries for an array of pages.
 *
 * Chooses a purge granularity (power-of-four multiple of the base page
 * size) from cnt, aligns the starting address down to that granularity
 * and issues ptc.l over the covered range. If the region register for
 * the page's VRN does not already carry the RID derived from asid, it
 * is temporarily replaced and restored afterwards.
 *
 * @param asid Address space identifier.
 * @param page First page whose entry is to be invalidated.
 * @param cnt Number of pages.
 */
void tlb_invalidate_pages(asid_t asid, __address page, count_t cnt)
{
    region_register rr;
    bool restore_rr = false;
    int b = 0;
    int c = cnt;
    __u64 ps;
    __address va;

    va = page;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }
    
    /* b = floor(log2(cnt)) / 2 selects the purge granularity below. */
    while (c >>= 1)
        b++;
    b >>= 1;
    
    /*
     * FIX: the shifts below used a plain int 1; with ps up to
     * PAGE_WIDTH+18 that overflows a 32-bit int (undefined behavior).
     * All shifts of 1 by ps now use a 64-bit constant.
     */
    switch (b) {
    case 0: /* cnt 1 - 3 */
        ps = PAGE_WIDTH;
        break;
    case 1: /* cnt 4 - 15 */
        ps = PAGE_WIDTH + 2;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    case 2: /* cnt 16 - 63 */
        ps = PAGE_WIDTH + 4;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    case 3: /* cnt 64 - 255 */
        ps = PAGE_WIDTH + 6;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    case 4: /* cnt 256 - 1023 */
        ps = PAGE_WIDTH + 8;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    case 5: /* cnt 1024 - 4095 */
        ps = PAGE_WIDTH + 10;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    case 6: /* cnt 4096 - 16383 */
        ps = PAGE_WIDTH + 12;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    case 7: /* cnt 16384 - 65535 */
    case 8: /* cnt 65536 - (256K - 1) */
        ps = PAGE_WIDTH + 14;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    default:
        ps = PAGE_WIDTH + 18;
        va &= ~(((__u64) 1 << ps) - 1);
        break;
    }

    /* Purge the whole range, one granule per ptc.l. */
    for (; va < (page + cnt * PAGE_SIZE); va += ((__u64) 1 << ps)) {
        __asm__ volatile (
            "ptc.l %0,%1;;"
            :
            : "r" (va), "r" (ps << 2)
        );
    }
    srlz_d();
    srlz_i();
    
    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
214
 
186
 
215
 
187
 
216
/** Insert data into data translation cache.
188
/** Insert data into data translation cache.
217
 *
189
 *
218
 * @param va Virtual page address.
190
 * @param va Virtual page address.
219
 * @param asid Address space identifier.
191
 * @param asid Address space identifier.
220
 * @param entry The rest of TLB entry as required by TLB insertion format.
192
 * @param entry The rest of TLB entry as required by TLB insertion format.
221
 */
193
 */
222
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
194
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
223
{
195
{
224
    tc_mapping_insert(va, asid, entry, true);
196
    tc_mapping_insert(va, asid, entry, true);
225
}
197
}
226
 
198
 
227
/** Insert data into instruction translation cache.
199
/** Insert data into instruction translation cache.
228
 *
200
 *
229
 * @param va Virtual page address.
201
 * @param va Virtual page address.
230
 * @param asid Address space identifier.
202
 * @param asid Address space identifier.
231
 * @param entry The rest of TLB entry as required by TLB insertion format.
203
 * @param entry The rest of TLB entry as required by TLB insertion format.
232
 */
204
 */
233
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
205
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
234
{
206
{
235
    tc_mapping_insert(va, asid, entry, false);
207
    tc_mapping_insert(va, asid, entry, false);
236
}
208
}
237
 
209
 
238
/** Insert data into instruction or data translation cache.
 *
 * If the region register for the page's VRN does not already carry the
 * RID derived from asid, it is temporarily replaced for the insertion
 * and restored afterwards.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }
    
    /*
     * Save PSR in r8, clear the bits in PSR_IC_MASK, program cr.ifa
     * and cr.itir with the insertion parameters, then issue itc.i or
     * itc.d depending on dtc and restore PSR. The exact instruction
     * order (including the srlz stops) is mandated by the architecture;
     * do not reorder.
     */
    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"            /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"       /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
        "(p6) itc.i %3;;\n"
        "(p7) itc.d %3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
        : "p6", "p7", "r8"
    );
    
    if (restore_rr) {
        /* Put the original region register contents back. */
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
288
 
260
 
289
/** Insert data into instruction translation register.
261
/** Insert data into instruction translation register.
290
 *
262
 *
291
 * @param va Virtual page address.
263
 * @param va Virtual page address.
292
 * @param asid Address space identifier.
264
 * @param asid Address space identifier.
293
 * @param entry The rest of TLB entry as required by TLB insertion format.
265
 * @param entry The rest of TLB entry as required by TLB insertion format.
294
 * @param tr Translation register.
266
 * @param tr Translation register.
295
 */
267
 */
296
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
268
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
297
{
269
{
298
    tr_mapping_insert(va, asid, entry, false, tr);
270
    tr_mapping_insert(va, asid, entry, false, tr);
299
}
271
}
300
 
272
 
301
/** Insert data into data translation register.
273
/** Insert data into data translation register.
302
 *
274
 *
303
 * @param va Virtual page address.
275
 * @param va Virtual page address.
304
 * @param asid Address space identifier.
276
 * @param asid Address space identifier.
305
 * @param entry The rest of TLB entry as required by TLB insertion format.
277
 * @param entry The rest of TLB entry as required by TLB insertion format.
306
 * @param tr Translation register.
278
 * @param tr Translation register.
307
 */
279
 */
308
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
280
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
309
{
281
{
310
    tr_mapping_insert(va, asid, entry, true, tr);
282
    tr_mapping_insert(va, asid, entry, true, tr);
311
}
283
}
312
 
284
 
313
/** Insert data into instruction or data translation register.
 *
 * Same region-register handling as tc_mapping_insert(), but the entry
 * is pinned in translation register tr instead of going into the
 * translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    /*
     * Save PSR in r8, clear the bits in PSR_IC_MASK, program cr.ifa
     * and cr.itir, then insert into itr[tr] or dtr[tr] depending on
     * dtr and restore PSR. The instruction order (including the srlz
     * stops) is mandated by the architecture; do not reorder.
     */
    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"            /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */         
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
        "(p6) itr.i itr[%4]=%3;;\n"
        "(p7) itr.d dtr[%4]=%3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
        : "p6", "p7", "r8"
    );
    
    if (restore_rr) {
        /* Put the original region register contents back. */
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
364
 
336
 
365
/** Insert data into DTLB.
337
/** Insert data into DTLB.
366
 *
338
 *
367
 * @param va Virtual page address.
339
 * @param va Virtual page address.
368
 * @param asid Address space identifier.
340
 * @param asid Address space identifier.
369
 * @param entry The rest of TLB entry as required by TLB insertion format.
341
 * @param entry The rest of TLB entry as required by TLB insertion format.
370
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
342
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
371
 * @param tr Translation register if dtr is true, ignored otherwise.
343
 * @param tr Translation register if dtr is true, ignored otherwise.
372
 */
344
 */
373
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
345
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
374
{
346
{
375
    tlb_entry_t entry;
347
    tlb_entry_t entry;
376
   
348
   
377
    entry.word[0] = 0;
349
    entry.word[0] = 0;
378
    entry.word[1] = 0;
350
    entry.word[1] = 0;
379
   
351
   
380
    entry.p = true;         /* present */
352
    entry.p = true;         /* present */
381
    entry.ma = MA_WRITEBACK;
353
    entry.ma = MA_WRITEBACK;
382
    entry.a = true;         /* already accessed */
354
    entry.a = true;         /* already accessed */
383
    entry.d = true;         /* already dirty */
355
    entry.d = true;         /* already dirty */
384
    entry.pl = PL_KERNEL;
356
    entry.pl = PL_KERNEL;
385
    entry.ar = AR_READ | AR_WRITE;
357
    entry.ar = AR_READ | AR_WRITE;
386
    entry.ppn = frame >> PPN_SHIFT;
358
    entry.ppn = frame >> PPN_SHIFT;
387
    entry.ps = PAGE_WIDTH;
359
    entry.ps = PAGE_WIDTH;
388
   
360
   
389
    if (dtr)
361
    if (dtr)
390
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
362
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
391
    else
363
    else
392
        dtc_mapping_insert(page, ASID_KERNEL, entry);
364
        dtc_mapping_insert(page, ASID_KERNEL, entry);
393
}
365
}
394
 
366
 
395
/** Copy content of PTE into data translation cache.
367
/** Copy content of PTE into data translation cache.
396
 *
368
 *
397
 * @param t PTE.
369
 * @param t PTE.
398
 */
370
 */
399
void dtc_pte_copy(pte_t *t)
371
void dtc_pte_copy(pte_t *t)
400
{
372
{
401
    tlb_entry_t entry;
373
    tlb_entry_t entry;
402
 
374
 
403
    entry.word[0] = 0;
375
    entry.word[0] = 0;
404
    entry.word[1] = 0;
376
    entry.word[1] = 0;
405
   
377
   
406
    entry.p = t->p;
378
    entry.p = t->p;
407
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
379
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
408
    entry.a = t->a;
380
    entry.a = t->a;
409
    entry.d = t->d;
381
    entry.d = t->d;
410
    entry.pl = t->k ? PL_KERNEL : PL_USER;
382
    entry.pl = t->k ? PL_KERNEL : PL_USER;
411
    entry.ar = t->w ? AR_WRITE : AR_READ;
383
    entry.ar = t->w ? AR_WRITE : AR_READ;
412
    entry.ppn = t->frame >> PPN_SHIFT;
384
    entry.ppn = t->frame >> PPN_SHIFT;
413
    entry.ps = PAGE_WIDTH;
385
    entry.ps = PAGE_WIDTH;
414
   
386
   
415
    dtc_mapping_insert(t->page, t->as->asid, entry);
387
    dtc_mapping_insert(t->page, t->as->asid, entry);
416
}
388
}
417
 
389
 
418
/** Copy content of PTE into instruction translation cache.
390
/** Copy content of PTE into instruction translation cache.
419
 *
391
 *
420
 * @param t PTE.
392
 * @param t PTE.
421
 */
393
 */
422
void itc_pte_copy(pte_t *t)
394
void itc_pte_copy(pte_t *t)
423
{
395
{
424
    tlb_entry_t entry;
396
    tlb_entry_t entry;
425
 
397
 
426
    entry.word[0] = 0;
398
    entry.word[0] = 0;
427
    entry.word[1] = 0;
399
    entry.word[1] = 0;
428
   
400
   
429
    ASSERT(t->x);
401
    ASSERT(t->x);
430
   
402
   
431
    entry.p = t->p;
403
    entry.p = t->p;
432
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
404
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
433
    entry.a = t->a;
405
    entry.a = t->a;
434
    entry.pl = t->k ? PL_KERNEL : PL_USER;
406
    entry.pl = t->k ? PL_KERNEL : PL_USER;
435
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
407
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
436
    entry.ppn = t->frame >> PPN_SHIFT;
408
    entry.ppn = t->frame >> PPN_SHIFT;
437
    entry.ps = PAGE_WIDTH;
409
    entry.ps = PAGE_WIDTH;
438
   
410
   
439
    itc_mapping_insert(t->page, t->as->asid, entry);
411
    itc_mapping_insert(t->page, t->as->asid, entry);
440
}
412
}
441
 
413
 
442
/** Instruction TLB fault handler for faults with VHPT turned off.
414
/** Instruction TLB fault handler for faults with VHPT turned off.
443
 *
415
 *
444
 * @param vector Interruption vector.
416
 * @param vector Interruption vector.
445
 * @param istate Structure with saved interruption state.
417
 * @param istate Structure with saved interruption state.
446
 */
418
 */
447
void alternate_instruction_tlb_fault(__u64 vector, istate_t *istate)
419
void alternate_instruction_tlb_fault(__u64 vector, istate_t *istate)
448
{
420
{
449
    region_register rr;
421
    region_register rr;
450
    __address va;
422
    __address va;
451
    pte_t *t;
423
    pte_t *t;
452
   
424
   
453
    va = istate->cr_ifa;    /* faulting address */
425
    va = istate->cr_ifa;    /* faulting address */
454
    t = page_mapping_find(AS, va);
426
    t = page_mapping_find(AS, va);
455
    if (t) {
427
    if (t) {
456
        /*
428
        /*
457
         * The mapping was found in software page hash table.
429
         * The mapping was found in software page hash table.
458
         * Insert it into data translation cache.
430
         * Insert it into data translation cache.
459
         */
431
         */
460
        itc_pte_copy(t);
432
        itc_pte_copy(t);
461
    } else {
433
    } else {
462
        /*
434
        /*
463
         * Forward the page fault to address space page fault handler.
435
         * Forward the page fault to address space page fault handler.
464
         */
436
         */
465
        if (!as_page_fault(va)) {
437
        if (!as_page_fault(va)) {
466
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
438
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
467
        }
439
        }
468
    }
440
    }
469
}
441
}
470
 
442
 
471
/** Data TLB fault handler for faults with VHPT turned off.
 *
 * Kernel-region faults under the kernel ASID get an identity
 * KA2PA mapping inserted directly; all other faults are looked up in
 * the software page hash table and, failing that, forwarded to the
 * address space page fault handler.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void alternate_data_tlb_fault(__u64 vector, istate_t *istate)
{
    region_register rr;
    rid_t rid;
    __address va;
    pte_t *t;
    
    va = istate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));
    rid = rr.map.rid;
    if (RID2ASID(rid) == ASID_KERNEL) {
        if (VA2VRN(va) == VRN_KERNEL) {
            /*
             * Provide KA2PA(identity) mapping for faulting piece of
             * kernel address space.
             */
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
            return;
        }
    }

    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into data translation cache.
         */
        dtc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d, iip=%P\n", __FUNCTION__, va, rid, istate->cr_iip);
        }
    }
}
513
 
485
 
514
/** Data nested TLB fault handler.
 *
 * This fault should not occur.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void data_nested_tlb_fault(__u64 vector, istate_t *istate)
{
    /* Reaching this handler indicates a kernel bug; stop immediately. */
    panic("%s\n", __FUNCTION__);
}
525
 
497
 
526
/** Data Dirty bit fault handler.
498
/** Data Dirty bit fault handler.
527
 *
499
 *
528
 * @param vector Interruption vector.
500
 * @param vector Interruption vector.
529
 * @param istate Structure with saved interruption state.
501
 * @param istate Structure with saved interruption state.
530
 */
502
 */
531
void data_dirty_bit_fault(__u64 vector, istate_t *istate)
503
void data_dirty_bit_fault(__u64 vector, istate_t *istate)
532
{
504
{
533
    pte_t *t;
505
    pte_t *t;
534
 
506
 
535
    t = page_mapping_find(AS, istate->cr_ifa);
507
    t = page_mapping_find(AS, istate->cr_ifa);
536
    ASSERT(t && t->p);
508
    ASSERT(t && t->p);
537
    if (t && t->p) {
509
    if (t && t->p) {
538
        /*
510
        /*
539
         * Update the Dirty bit in page tables and reinsert
511
         * Update the Dirty bit in page tables and reinsert
540
         * the mapping into DTC.
512
         * the mapping into DTC.
541
         */
513
         */
542
        t->d = true;
514
        t->d = true;
543
        dtc_pte_copy(t);
515
        dtc_pte_copy(t);
544
    }
516
    }
545
}
517
}
546
 
518
 
547
/** Instruction access bit fault handler.
519
/** Instruction access bit fault handler.
548
 *
520
 *
549
 * @param vector Interruption vector.
521
 * @param vector Interruption vector.
550
 * @param istate Structure with saved interruption state.
522
 * @param istate Structure with saved interruption state.
551
 */
523
 */
552
void instruction_access_bit_fault(__u64 vector, istate_t *istate)
524
void instruction_access_bit_fault(__u64 vector, istate_t *istate)
553
{
525
{
554
    pte_t *t;
526
    pte_t *t;
555
 
527
 
556
    t = page_mapping_find(AS, istate->cr_ifa);
528
    t = page_mapping_find(AS, istate->cr_ifa);
557
    ASSERT(t && t->p);
529
    ASSERT(t && t->p);
558
    if (t && t->p) {
530
    if (t && t->p) {
559
        /*
531
        /*
560
         * Update the Accessed bit in page tables and reinsert
532
         * Update the Accessed bit in page tables and reinsert
561
         * the mapping into ITC.
533
         * the mapping into ITC.
562
         */
534
         */
563
        t->a = true;
535
        t->a = true;
564
        itc_pte_copy(t);
536
        itc_pte_copy(t);
565
    }
537
    }
566
}
538
}
567
 
539
 
568
/** Data access bit fault handler.
540
/** Data access bit fault handler.
569
 *
541
 *
570
 * @param vector Interruption vector.
542
 * @param vector Interruption vector.
571
 * @param istate Structure with saved interruption state.
543
 * @param istate Structure with saved interruption state.
572
 */
544
 */
573
void data_access_bit_fault(__u64 vector, istate_t *istate)
545
void data_access_bit_fault(__u64 vector, istate_t *istate)
574
{
546
{
575
    pte_t *t;
547
    pte_t *t;
576
 
548
 
577
    t = page_mapping_find(AS, istate->cr_ifa);
549
    t = page_mapping_find(AS, istate->cr_ifa);
578
    ASSERT(t && t->p);
550
    ASSERT(t && t->p);
579
    if (t && t->p) {
551
    if (t && t->p) {
580
        /*
552
        /*
581
         * Update the Accessed bit in page tables and reinsert
553
         * Update the Accessed bit in page tables and reinsert
582
         * the mapping into DTC.
554
         * the mapping into DTC.
583
         */
555
         */
584
        t->a = true;
556
        t->a = true;
585
        dtc_pte_copy(t);
557
        dtc_pte_copy(t);
586
    }
558
    }
587
}
559
}
588
 
560
 
589
/** Page not present fault handler.
561
/** Page not present fault handler.
590
 *
562
 *
591
 * @param vector Interruption vector.
563
 * @param vector Interruption vector.
592
 * @param istate Structure with saved interruption state.
564
 * @param istate Structure with saved interruption state.
593
 */
565
 */
594
void page_not_present(__u64 vector, istate_t *istate)
566
void page_not_present(__u64 vector, istate_t *istate)
595
{
567
{
596
    region_register rr;
568
    region_register rr;
597
    __address va;
569
    __address va;
598
    pte_t *t;
570
    pte_t *t;
599
   
571
   
600
    va = istate->cr_ifa;    /* faulting address */
572
    va = istate->cr_ifa;    /* faulting address */
601
    t = page_mapping_find(AS, va);
573
    t = page_mapping_find(AS, va);
602
    ASSERT(t);
574
    ASSERT(t);
603
   
575
   
604
    if (t->p) {
576
    if (t->p) {
605
        /*
577
        /*
606
         * If the Present bit is set in page hash table, just copy it
578
         * If the Present bit is set in page hash table, just copy it
607
         * and update ITC/DTC.
579
         * and update ITC/DTC.
608
         */
580
         */
609
        if (t->x)
581
        if (t->x)
610
            itc_pte_copy(t);
582
            itc_pte_copy(t);
611
        else
583
        else
612
            dtc_pte_copy(t);
584
            dtc_pte_copy(t);
613
    } else {
585
    } else {
614
        if (!as_page_fault(va)) {
586
        if (!as_page_fault(va)) {
615
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
587
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, va, rr.map.rid);
616
        }
588
        }
617
    }
589
    }
618
}
590
}
619
 
591