Subversion Repositories HelenOS-historic

Rev 947 → Rev 958
/*
 * Copyright (C) 2006 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * TLB management.
 */

#include <mm/tlb.h>
#include <mm/asid.h>
#include <mm/page.h>
#include <mm/as.h>
#include <arch/mm/tlb.h>
#include <arch/mm/page.h>
#include <arch/barrier.h>
#include <arch/interrupt.h>
#include <arch/pal/pal.h>
#include <arch/asm.h>
#include <typedefs.h>
#include <panic.h>
#include <arch.h>

/** Invalidate all TLB entries. */
void tlb_invalidate_all(void)
{
    __address adr;
    __u32 count1, count2, stride1, stride2;

    int i, j;

    adr = PAL_PTCE_INFO_BASE();
    count1 = PAL_PTCE_INFO_COUNT1();
    count2 = PAL_PTCE_INFO_COUNT2();
    stride1 = PAL_PTCE_INFO_STRIDE1();
    stride2 = PAL_PTCE_INFO_STRIDE2();

    interrupts_disable();
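
    /*
     * PAL_PTCE_INFO describes a count1 x count2 grid of ptc.e targets:
     * the outer dimension is walked with stride1, the inner with stride2.
     * Issuing ptc.e at every address in the grid purges the entire local
     * translation cache (translation registers are not affected).
     */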
    for (i = 0; i < count1; i++) {
        for (j = 0; j < count2; j++) {
            asm volatile (
                "ptc.e %0;;"
                :
                : "r" (adr)
            );
            adr += stride2;
        }
        adr += stride1;
    }

    interrupts_enable();

    srlz_d();
    srlz_i();
}

/** Invalidate entries belonging to an address space.
 *
 * @param asid Address space identifier.
 */
void tlb_invalidate_asid(asid_t asid)
{
    /* TODO */
    tlb_invalidate_all();
}
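
/** Invalidate TLB entries for an address range within an address space.
 *
 * @param asid Address space identifier.
 * @param page Address of the first page whose entry is to be invalidated.
 * @param cnt Number of consecutive pages to invalidate.
 */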
void tlb_invalidate_pages(asid_t asid, __address page, count_t cnt)
{
    region_register rr;
    bool restore_rr = false;
    int b = 0;
    int c = cnt;
    __address va = page;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }
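
    /*
     * Compute b = floor(log2(cnt)) / 2; the switch below then purges in
     * blocks of roughly 4^b base pages (ps = PAGE_WIDTH + 2*b, saturating
     * for very large counts) and aligns va down to the block size.
     * Example: cnt = 20 gives b = 2, so ps = PAGE_WIDTH + 4 and the loop
     * further down issues one ptc.l per 16-page block.
     */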
    while (c >>= 1)
        b++;
    b >>= 1;

    __u64 ps;

    switch (b) {
    case 0: /* cnt 1 - 3 */
        ps = PAGE_WIDTH;
        break;
    case 1: /* cnt 4 - 15 */
        /* cnt = ((cnt - 1) / 4) + 1; */
        ps = PAGE_WIDTH + 2;
        va &= ~((1 << ps) - 1);
        break;
    case 2: /* cnt 16 - 63 */
        /* cnt = ((cnt - 1) / 16) + 1; */
        ps = PAGE_WIDTH + 4;
        va &= ~((1 << ps) - 1);
        break;
    case 3: /* cnt 64 - 255 */
        /* cnt = ((cnt - 1) / 64) + 1; */
        ps = PAGE_WIDTH + 6;
        va &= ~((1 << ps) - 1);
        break;
    case 4: /* cnt 256 - 1023 */
        /* cnt = ((cnt - 1) / 256) + 1; */
        ps = PAGE_WIDTH + 8;
        va &= ~((1 << ps) - 1);
        break;
    case 5: /* cnt 1024 - 4095 */
        /* cnt = ((cnt - 1) / 1024) + 1; */
        ps = PAGE_WIDTH + 10;
        va &= ~((1 << ps) - 1);
        break;
    case 6: /* cnt 4096 - 16383 */
        /* cnt = ((cnt - 1) / 4096) + 1; */
        ps = PAGE_WIDTH + 12;
        va &= ~((1 << ps) - 1);
        break;
    case 7: /* cnt 16384 - 65535 */
    case 8: /* cnt 65536 - (256K - 1) */
        /* cnt = ((cnt - 1) / 16384) + 1; */
        ps = PAGE_WIDTH + 14;
        va &= ~((1 << ps) - 1);
        break;
    default:
        /* cnt = ((cnt - 1) / (16384 * 16)) + 1; */
        ps = PAGE_WIDTH + 18;
        va &= ~((1 << ps) - 1);
        break;
    }
    /* cnt += (page != va); */
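
    /*
     * Purge the whole range with ptc.l, one 2^ps-byte (and 2^ps-aligned)
     * block at a time.
     */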
    for (; va < (page + cnt * PAGE_SIZE); va += (1 << ps)) {
        __asm__ volatile (
            "ptc.l %0,%1;;"
            :
            : "r" (va), "r" (ps << 2)
        );
    }
    srlz_d();
    srlz_i();

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
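
/*
 * Illustrative call (an assumption about the generic caller, not taken from
 * this file): invalidating an address space area during TLB shootdown would
 * look roughly like
 *
 *     tlb_invalidate_pages(as->asid, area->base, area->pages);
 */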

/** Insert data into data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
    tc_mapping_insert(va, asid, entry, true);
}

/** Insert data into instruction translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
    tc_mapping_insert(va, asid, entry, false);
}

/** Insert data into instruction or data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }
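
    /*
     * With interruption collection disabled (rsm of PSR.ic), program cr.ifa
     * and cr.itir and issue either itc.i or itc.d according to the dtc
     * argument, then restore the original PSR.
     */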
    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"                /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
        "(p6) itc.i %3;;\n"
        "(p7) itc.d %3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
        : "p6", "p7", "r8"
    );

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}

/** Insert data into instruction translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
    tr_mapping_insert(va, asid, entry, false, tr);
}

/** Insert data into data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
    tr_mapping_insert(va, asid, entry, true, tr);
}

/** Insert data into instruction or data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }
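
    /*
     * Same PSR.ic-disabled sequence as in tc_mapping_insert(), but the entry
     * is placed into the translation register selected by tr (itr.i/itr.d).
     */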
    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"                /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
        "(p6) itr.i itr[%4]=%3;;\n"
        "(p7) itr.d dtr[%4]=%3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
        : "p6", "p7", "r8"
    );

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}

/** Insert data into DTLB.
 *
 * @param page Virtual page address.
 * @param frame Physical frame address.
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
 * @param tr Translation register if dtr is true, ignored otherwise.
 */
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    entry.p = true;                 /* present */
    entry.ma = MA_WRITEBACK;
    entry.a = true;                 /* already accessed */
    entry.d = true;                 /* already dirty */
    entry.pl = PL_KERNEL;
    entry.ar = AR_READ | AR_WRITE;
    entry.ppn = frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    if (dtr)
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
    else
        dtc_mapping_insert(page, ASID_KERNEL, entry);
}
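
/*
 * Illustrative use (sketch, not part of this revision): the same helper can
 * pin a kernel page in a data translation register instead of the cache,
 * e.g.
 *
 *     dtlb_kernel_mapping_insert(va, KA2PA(va), true, 0);
 *
 * mirroring the cached variant used by alternate_data_tlb_fault() below.
 */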

/** Copy content of PTE into data translation cache.
 *
 * @param t PTE.
 */
void dtc_pte_copy(pte_t *t)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;
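
    /*
     * Translate the software PTE into the ia64 TLB insertion format:
     * cacheability selects the memory attribute, the kernel bit the
     * privilege level, and the writable bit the access rights.
     */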
    entry.p = t->p;
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
    entry.a = t->a;
    entry.d = t->d;
    entry.pl = t->k ? PL_KERNEL : PL_USER;
    entry.ar = t->w ? AR_WRITE : AR_READ;
    entry.ppn = t->frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    dtc_mapping_insert(t->page, t->as->asid, entry);
}

/** Copy content of PTE into instruction translation cache.
 *
 * @param t PTE.
 */
void itc_pte_copy(pte_t *t)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    ASSERT(t->x);
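
    /* Same conversion as in dtc_pte_copy(), but with execute access rights. */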
    entry.p = t->p;
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
    entry.a = t->a;
    entry.pl = t->k ? PL_KERNEL : PL_USER;
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
    entry.ppn = t->frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    itc_mapping_insert(t->page, t->as->asid, entry);
}

/** Instruction TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void alternate_instruction_tlb_fault(__u64 vector, istate_t *istate)
{
    region_register rr;
    __address va;
    pte_t *t;

    va = istate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));  /* needed only for the RID in the panic message below */
    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into instruction translation cache.
         */
        itc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
        }
    }
}

/** Data TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void alternate_data_tlb_fault(__u64 vector, istate_t *istate)
{
    region_register rr;
    rid_t rid;
    __address va;
    pte_t *t;

    va = istate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));
    rid = rr.map.rid;
    if (RID2ASID(rid) == ASID_KERNEL) {
        if (VA2VRN(va) == VRN_KERNEL) {
            /*
             * Provide KA2PA(identity) mapping for faulting piece of
             * kernel address space.
             */
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
            return;
        }
    }

    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into data translation cache.
         */
        dtc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
        }
    }
}

/** Data nested TLB fault handler.
 *
 * This fault should not occur.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void data_nested_tlb_fault(__u64 vector, istate_t *istate)
{
    panic("%s\n", __FUNCTION__);
}

/** Data Dirty bit fault handler.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void data_dirty_bit_fault(__u64 vector, istate_t *istate)
{
    pte_t *t;

    t = page_mapping_find(AS, istate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Dirty bit in page tables and reinsert
         * the mapping into DTC.
         */
        t->d = true;
        dtc_pte_copy(t);
    }
}

/** Instruction access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void instruction_access_bit_fault(__u64 vector, istate_t *istate)
{
    pte_t *t;

    t = page_mapping_find(AS, istate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Accessed bit in page tables and reinsert
         * the mapping into ITC.
         */
        t->a = true;
        itc_pte_copy(t);
    }
}

/** Data access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void data_access_bit_fault(__u64 vector, istate_t *istate)
{
    pte_t *t;

    t = page_mapping_find(AS, istate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Accessed bit in page tables and reinsert
         * the mapping into DTC.
         */
        t->a = true;
        dtc_pte_copy(t);
    }
}

/** Page not present fault handler.
 *
 * @param vector Interruption vector.
 * @param istate Structure with saved interruption state.
 */
void page_not_present(__u64 vector, istate_t *istate)
{
    region_register rr;
    __address va;
    pte_t *t;

    va = istate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));  /* needed only for the RID in the panic message below */
    t = page_mapping_find(AS, va);
    ASSERT(t);

    if (t->p) {
        /*
         * If the Present bit is set in page hash table, just copy it
         * and update ITC/DTC.
         */
        if (t->x)
            itc_pte_copy(t);
        else
            dtc_pte_copy(t);
    } else {
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
        }
    }
}
619