Subversion Repositories HelenOS-historic

Rev

Rev 928 | Rev 945 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 928 Rev 935
1
/*
1
/*
2
 * Copyright (C) 2006 Jakub Jermar
2
 * Copyright (C) 2006 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/*
29
/*
30
 * TLB management.
30
 * TLB management.
31
 */
31
 */
32
 
32
 
33
#include <mm/tlb.h>
33
#include <mm/tlb.h>
34
#include <mm/asid.h>
34
#include <mm/asid.h>
35
#include <mm/page.h>
35
#include <mm/page.h>
36
#include <mm/as.h>
36
#include <mm/as.h>
37
#include <arch/mm/tlb.h>
37
#include <arch/mm/tlb.h>
38
#include <arch/mm/page.h>
38
#include <arch/mm/page.h>
39
#include <arch/barrier.h>
39
#include <arch/barrier.h>
40
#include <arch/interrupt.h>
40
#include <arch/interrupt.h>
41
#include <arch/pal/pal.h>
41
#include <arch/pal/pal.h>
42
#include <arch/asm.h>
42
#include <arch/asm.h>
43
#include <typedefs.h>
43
#include <typedefs.h>
44
#include <panic.h>
44
#include <panic.h>
45
#include <arch.h>
45
#include <arch.h>
46
 
46
 
47
/** Invalidate all TLB entries. */
47
/** Invalidate all TLB entries. */
48
void tlb_invalidate_all(void)
48
void tlb_invalidate_all(void)
49
{
49
{
50
        __address adr;
50
        __address adr;
51
        __u32 count1,count2,stride1,stride2;
51
        __u32 count1,count2,stride1,stride2;
52
       
52
       
53
        int i,j;
53
        int i,j;
54
       
54
       
55
        adr=PAL_PTCE_INFO_BASE();
55
        adr=PAL_PTCE_INFO_BASE();
56
        count1=PAL_PTCE_INFO_COUNT1();
56
        count1=PAL_PTCE_INFO_COUNT1();
57
        count2=PAL_PTCE_INFO_COUNT2();
57
        count2=PAL_PTCE_INFO_COUNT2();
58
        stride1=PAL_PTCE_INFO_STRIDE1();
58
        stride1=PAL_PTCE_INFO_STRIDE1();
59
        stride2=PAL_PTCE_INFO_STRIDE2();
59
        stride2=PAL_PTCE_INFO_STRIDE2();
60
       
60
       
61
        interrupts_disable();
61
        interrupts_disable();
62
 
62
 
63
        for(i=0;i<count1;i++)
63
        for(i=0;i<count1;i++)
64
        {
64
        {
65
            for(j=0;j<count2;j++)
65
            for(j=0;j<count2;j++)
66
            {
66
            {
67
                asm volatile
67
                asm volatile
68
                (
68
                (
69
                    "ptc.e %0;;"
69
                    "ptc.e %0;;"
70
                    :
70
                    :
71
                    :"r" (adr)
71
                    :"r" (adr)
72
                );
72
                );
73
                adr+=stride2;
73
                adr+=stride2;
74
            }
74
            }
75
            adr+=stride1;
75
            adr+=stride1;
76
        }
76
        }
77
 
77
 
78
        interrupts_enable();
78
        interrupts_enable();
79
 
79
 
80
        srlz_d();
80
        srlz_d();
81
        srlz_i();
81
        srlz_i();
82
}
82
}
83
 
83
 
84
/** Invalidate entries belonging to an address space.
84
/** Invalidate entries belonging to an address space.
85
 *
85
 *
86
 * @param asid Address space identifier.
86
 * @param asid Address space identifier.
87
 */
87
 */
88
void tlb_invalidate_asid(asid_t asid)
88
void tlb_invalidate_asid(asid_t asid)
89
{
89
{
90
    /* TODO */
90
    /* TODO */
-
 
91
    tlb_invalidate_all();
91
}
92
}
92
 
93
 
-
 
94
 
-
 
95
/** Invalidate cnt pages starting at page belonging to an address space.
 *
 * @param asid Address space identifier.
 * @param page First virtual page address.
 * @param cnt Number of pages.
 *
 * NOTE(review): currently an unimplemented stub — it performs no
 * invalidation at all; callers must not yet rely on it.
 */
void tlb_invalidate_pages(asid_t asid, __address page, count_t cnt)
{
	/* TODO: implement per-page purge (e.g. via ptc.l/ptc.g). */
}
-
 
100
 
-
 
101
 
93
/** Insert data into data translation cache.
102
/** Insert data into data translation cache.
94
 *
103
 *
95
 * @param va Virtual page address.
104
 * @param va Virtual page address.
96
 * @param asid Address space identifier.
105
 * @param asid Address space identifier.
97
 * @param entry The rest of TLB entry as required by TLB insertion format.
106
 * @param entry The rest of TLB entry as required by TLB insertion format.
98
 */
107
 */
99
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
108
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
100
{
109
{
101
    tc_mapping_insert(va, asid, entry, true);
110
    tc_mapping_insert(va, asid, entry, true);
102
}
111
}
103
 
112
 
104
/** Insert data into instruction translation cache.
113
/** Insert data into instruction translation cache.
105
 *
114
 *
106
 * @param va Virtual page address.
115
 * @param va Virtual page address.
107
 * @param asid Address space identifier.
116
 * @param asid Address space identifier.
108
 * @param entry The rest of TLB entry as required by TLB insertion format.
117
 * @param entry The rest of TLB entry as required by TLB insertion format.
109
 */
118
 */
110
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
119
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
111
{
120
{
112
    tc_mapping_insert(va, asid, entry, false);
121
    tc_mapping_insert(va, asid, entry, false);
113
}
122
}
114
 
123
 
115
/** Insert data into instruction or data translation cache.
124
/** Insert data into instruction or data translation cache.
116
 *
125
 *
117
 * @param va Virtual page address.
126
 * @param va Virtual page address.
118
 * @param asid Address space identifier.
127
 * @param asid Address space identifier.
119
 * @param entry The rest of TLB entry as required by TLB insertion format.
128
 * @param entry The rest of TLB entry as required by TLB insertion format.
120
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
129
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
121
 */
130
 */
122
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
131
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
123
{
132
{
124
    region_register rr;
133
    region_register rr;
125
    bool restore_rr = false;
134
    bool restore_rr = false;
126
 
135
 
127
    rr.word = rr_read(VA2VRN(va));
136
    rr.word = rr_read(VA2VRN(va));
128
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
137
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
129
        /*
138
        /*
130
         * The selected region register does not contain required RID.
139
         * The selected region register does not contain required RID.
131
         * Save the old content of the register and replace the RID.
140
         * Save the old content of the register and replace the RID.
132
         */
141
         */
133
        region_register rr0;
142
        region_register rr0;
134
 
143
 
135
        rr0 = rr;
144
        rr0 = rr;
136
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
145
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
137
        rr_write(VA2VRN(va), rr0.word);
146
        rr_write(VA2VRN(va), rr0.word);
138
        srlz_d();
147
        srlz_d();
139
        srlz_i();
148
        srlz_i();
140
    }
149
    }
141
   
150
   
142
    __asm__ volatile (
151
    __asm__ volatile (
143
        "mov r8=psr;;\n"
152
        "mov r8=psr;;\n"
144
        "rsm %0;;\n"            /* PSR_IC_MASK */
153
        "rsm %0;;\n"            /* PSR_IC_MASK */
145
        "srlz.d;;\n"
154
        "srlz.d;;\n"
146
        "srlz.i;;\n"
155
        "srlz.i;;\n"
147
        "mov cr.ifa=%1\n"       /* va */
156
        "mov cr.ifa=%1\n"       /* va */
148
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
157
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
149
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
158
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
150
        "(p6) itc.i %3;;\n"
159
        "(p6) itc.i %3;;\n"
151
        "(p7) itc.d %3;;\n"
160
        "(p7) itc.d %3;;\n"
152
        "mov psr.l=r8;;\n"
161
        "mov psr.l=r8;;\n"
153
        "srlz.d;;\n"
162
        "srlz.d;;\n"
154
        :
163
        :
155
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
164
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
156
        : "p6", "p7", "r8"
165
        : "p6", "p7", "r8"
157
    );
166
    );
158
   
167
   
159
    if (restore_rr) {
168
    if (restore_rr) {
160
        rr_write(VA2VRN(va), rr.word);
169
        rr_write(VA2VRN(va), rr.word);
161
        srlz_d();
170
        srlz_d();
162
        srlz_i();
171
        srlz_i();
163
    }
172
    }
164
}
173
}
165
 
174
 
166
/** Insert data into instruction translation register.
175
/** Insert data into instruction translation register.
167
 *
176
 *
168
 * @param va Virtual page address.
177
 * @param va Virtual page address.
169
 * @param asid Address space identifier.
178
 * @param asid Address space identifier.
170
 * @param entry The rest of TLB entry as required by TLB insertion format.
179
 * @param entry The rest of TLB entry as required by TLB insertion format.
171
 * @param tr Translation register.
180
 * @param tr Translation register.
172
 */
181
 */
173
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
182
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
174
{
183
{
175
    tr_mapping_insert(va, asid, entry, false, tr);
184
    tr_mapping_insert(va, asid, entry, false, tr);
176
}
185
}
177
 
186
 
178
/** Insert data into data translation register.
187
/** Insert data into data translation register.
179
 *
188
 *
180
 * @param va Virtual page address.
189
 * @param va Virtual page address.
181
 * @param asid Address space identifier.
190
 * @param asid Address space identifier.
182
 * @param entry The rest of TLB entry as required by TLB insertion format.
191
 * @param entry The rest of TLB entry as required by TLB insertion format.
183
 * @param tr Translation register.
192
 * @param tr Translation register.
184
 */
193
 */
185
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
194
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
186
{
195
{
187
    tr_mapping_insert(va, asid, entry, true, tr);
196
    tr_mapping_insert(va, asid, entry, true, tr);
188
}
197
}
189
 
198
 
190
/** Insert data into instruction or data translation register.
199
/** Insert data into instruction or data translation register.
191
 *
200
 *
192
 * @param va Virtual page address.
201
 * @param va Virtual page address.
193
 * @param asid Address space identifier.
202
 * @param asid Address space identifier.
194
 * @param entry The rest of TLB entry as required by TLB insertion format.
203
 * @param entry The rest of TLB entry as required by TLB insertion format.
195
 * @param dtc If true, insert into data translation register, use instruction translation register otherwise.
204
 * @param dtc If true, insert into data translation register, use instruction translation register otherwise.
196
 * @param tr Translation register.
205
 * @param tr Translation register.
197
 */
206
 */
198
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
207
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
199
{
208
{
200
    region_register rr;
209
    region_register rr;
201
    bool restore_rr = false;
210
    bool restore_rr = false;
202
 
211
 
203
    rr.word = rr_read(VA2VRN(va));
212
    rr.word = rr_read(VA2VRN(va));
204
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
213
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
205
        /*
214
        /*
206
         * The selected region register does not contain required RID.
215
         * The selected region register does not contain required RID.
207
         * Save the old content of the register and replace the RID.
216
         * Save the old content of the register and replace the RID.
208
         */
217
         */
209
        region_register rr0;
218
        region_register rr0;
210
 
219
 
211
        rr0 = rr;
220
        rr0 = rr;
212
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
221
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
213
        rr_write(VA2VRN(va), rr0.word);
222
        rr_write(VA2VRN(va), rr0.word);
214
        srlz_d();
223
        srlz_d();
215
        srlz_i();
224
        srlz_i();
216
    }
225
    }
217
 
226
 
218
    __asm__ volatile (
227
    __asm__ volatile (
219
        "mov r8=psr;;\n"
228
        "mov r8=psr;;\n"
220
        "rsm %0;;\n"            /* PSR_IC_MASK */
229
        "rsm %0;;\n"            /* PSR_IC_MASK */
221
        "srlz.d;;\n"
230
        "srlz.d;;\n"
222
        "srlz.i;;\n"
231
        "srlz.i;;\n"
223
        "mov cr.ifa=%1\n"           /* va */         
232
        "mov cr.ifa=%1\n"           /* va */         
224
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
233
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
225
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
234
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
226
        "(p6) itr.i itr[%4]=%3;;\n"
235
        "(p6) itr.i itr[%4]=%3;;\n"
227
        "(p7) itr.d dtr[%4]=%3;;\n"
236
        "(p7) itr.d dtr[%4]=%3;;\n"
228
        "mov psr.l=r8;;\n"
237
        "mov psr.l=r8;;\n"
229
        "srlz.d;;\n"
238
        "srlz.d;;\n"
230
        :
239
        :
231
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
240
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
232
        : "p6", "p7", "r8"
241
        : "p6", "p7", "r8"
233
    );
242
    );
234
   
243
   
235
    if (restore_rr) {
244
    if (restore_rr) {
236
        rr_write(VA2VRN(va), rr.word);
245
        rr_write(VA2VRN(va), rr.word);
237
        srlz_d();
246
        srlz_d();
238
        srlz_i();
247
        srlz_i();
239
    }
248
    }
240
}
249
}
241
 
250
 
242
/** Insert data into DTLB.
251
/** Insert data into DTLB.
243
 *
252
 *
244
 * @param va Virtual page address.
253
 * @param va Virtual page address.
245
 * @param asid Address space identifier.
254
 * @param asid Address space identifier.
246
 * @param entry The rest of TLB entry as required by TLB insertion format.
255
 * @param entry The rest of TLB entry as required by TLB insertion format.
247
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
256
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
248
 * @param tr Translation register if dtr is true, ignored otherwise.
257
 * @param tr Translation register if dtr is true, ignored otherwise.
249
 */
258
 */
250
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
259
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
251
{
260
{
252
    tlb_entry_t entry;
261
    tlb_entry_t entry;
253
   
262
   
254
    entry.word[0] = 0;
263
    entry.word[0] = 0;
255
    entry.word[1] = 0;
264
    entry.word[1] = 0;
256
   
265
   
257
    entry.p = true;         /* present */
266
    entry.p = true;         /* present */
258
    entry.ma = MA_WRITEBACK;
267
    entry.ma = MA_WRITEBACK;
259
    entry.a = true;         /* already accessed */
268
    entry.a = true;         /* already accessed */
260
    entry.d = true;         /* already dirty */
269
    entry.d = true;         /* already dirty */
261
    entry.pl = PL_KERNEL;
270
    entry.pl = PL_KERNEL;
262
    entry.ar = AR_READ | AR_WRITE;
271
    entry.ar = AR_READ | AR_WRITE;
263
    entry.ppn = frame >> PPN_SHIFT;
272
    entry.ppn = frame >> PPN_SHIFT;
264
    entry.ps = PAGE_WIDTH;
273
    entry.ps = PAGE_WIDTH;
265
   
274
   
266
    if (dtr)
275
    if (dtr)
267
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
276
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
268
    else
277
    else
269
        dtc_mapping_insert(page, ASID_KERNEL, entry);
278
        dtc_mapping_insert(page, ASID_KERNEL, entry);
270
}
279
}
271
 
280
 
272
/** Copy content of PTE into data translation cache.
281
/** Copy content of PTE into data translation cache.
273
 *
282
 *
274
 * @param t PTE.
283
 * @param t PTE.
275
 */
284
 */
276
void dtc_pte_copy(pte_t *t)
285
void dtc_pte_copy(pte_t *t)
277
{
286
{
278
    tlb_entry_t entry;
287
    tlb_entry_t entry;
279
 
288
 
280
    entry.word[0] = 0;
289
    entry.word[0] = 0;
281
    entry.word[1] = 0;
290
    entry.word[1] = 0;
282
   
291
   
283
    entry.p = t->p;
292
    entry.p = t->p;
284
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
293
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
285
    entry.a = t->a;
294
    entry.a = t->a;
286
    entry.d = t->d;
295
    entry.d = t->d;
287
    entry.pl = t->k ? PL_KERNEL : PL_USER;
296
    entry.pl = t->k ? PL_KERNEL : PL_USER;
288
    entry.ar = t->w ? AR_WRITE : AR_READ;
297
    entry.ar = t->w ? AR_WRITE : AR_READ;
289
    entry.ppn = t->frame >> PPN_SHIFT;
298
    entry.ppn = t->frame >> PPN_SHIFT;
290
    entry.ps = PAGE_WIDTH;
299
    entry.ps = PAGE_WIDTH;
291
   
300
   
292
    dtc_mapping_insert(t->page, t->as->asid, entry);
301
    dtc_mapping_insert(t->page, t->as->asid, entry);
293
}
302
}
294
 
303
 
295
/** Copy content of PTE into instruction translation cache.
304
/** Copy content of PTE into instruction translation cache.
296
 *
305
 *
297
 * @param t PTE.
306
 * @param t PTE.
298
 */
307
 */
299
void itc_pte_copy(pte_t *t)
308
void itc_pte_copy(pte_t *t)
300
{
309
{
301
    tlb_entry_t entry;
310
    tlb_entry_t entry;
302
 
311
 
303
    entry.word[0] = 0;
312
    entry.word[0] = 0;
304
    entry.word[1] = 0;
313
    entry.word[1] = 0;
305
   
314
   
306
    ASSERT(t->x);
315
    ASSERT(t->x);
307
   
316
   
308
    entry.p = t->p;
317
    entry.p = t->p;
309
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
318
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
310
    entry.a = t->a;
319
    entry.a = t->a;
311
    entry.pl = t->k ? PL_KERNEL : PL_USER;
320
    entry.pl = t->k ? PL_KERNEL : PL_USER;
312
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
321
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
313
    entry.ppn = t->frame >> PPN_SHIFT;
322
    entry.ppn = t->frame >> PPN_SHIFT;
314
    entry.ps = PAGE_WIDTH;
323
    entry.ps = PAGE_WIDTH;
315
   
324
   
316
    itc_mapping_insert(t->page, t->as->asid, entry);
325
    itc_mapping_insert(t->page, t->as->asid, entry);
317
}
326
}
318
 
327
 
319
/** Instruction TLB fault handler for faults with VHPT turned off.
328
/** Instruction TLB fault handler for faults with VHPT turned off.
320
 *
329
 *
321
 * @param vector Interruption vector.
330
 * @param vector Interruption vector.
322
 * @param pstate Structure with saved interruption state.
331
 * @param pstate Structure with saved interruption state.
323
 */
332
 */
324
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
333
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
325
{
334
{
326
    region_register rr;
335
    region_register rr;
327
    __address va;
336
    __address va;
328
    pte_t *t;
337
    pte_t *t;
329
   
338
   
330
    va = pstate->cr_ifa;    /* faulting address */
339
    va = pstate->cr_ifa;    /* faulting address */
331
    t = page_mapping_find(AS, va);
340
    t = page_mapping_find(AS, va);
332
    if (t) {
341
    if (t) {
333
        /*
342
        /*
334
         * The mapping was found in software page hash table.
343
         * The mapping was found in software page hash table.
335
         * Insert it into data translation cache.
344
         * Insert it into data translation cache.
336
         */
345
         */
337
        itc_pte_copy(t);
346
        itc_pte_copy(t);
338
    } else {
347
    } else {
339
        /*
348
        /*
340
         * Forward the page fault to address space page fault handler.
349
         * Forward the page fault to address space page fault handler.
341
         */
350
         */
342
        if (!as_page_fault(va)) {
351
        if (!as_page_fault(va)) {
343
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
352
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
344
        }
353
        }
345
    }
354
    }
346
}
355
}
347
 
356
 
348
/** Data TLB fault handler for faults with VHPT turned off.
357
/** Data TLB fault handler for faults with VHPT turned off.
349
 *
358
 *
350
 * @param vector Interruption vector.
359
 * @param vector Interruption vector.
351
 * @param pstate Structure with saved interruption state.
360
 * @param pstate Structure with saved interruption state.
352
 */
361
 */
353
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
362
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
354
{
363
{
355
    region_register rr;
364
    region_register rr;
356
    rid_t rid;
365
    rid_t rid;
357
    __address va;
366
    __address va;
358
    pte_t *t;
367
    pte_t *t;
359
   
368
   
360
    va = pstate->cr_ifa;    /* faulting address */
369
    va = pstate->cr_ifa;    /* faulting address */
361
    rr.word = rr_read(VA2VRN(va));
370
    rr.word = rr_read(VA2VRN(va));
362
    rid = rr.map.rid;
371
    rid = rr.map.rid;
363
    if (RID2ASID(rid) == ASID_KERNEL) {
372
    if (RID2ASID(rid) == ASID_KERNEL) {
364
        if (VA2VRN(va) == VRN_KERNEL) {
373
        if (VA2VRN(va) == VRN_KERNEL) {
365
            /*
374
            /*
366
             * Provide KA2PA(identity) mapping for faulting piece of
375
             * Provide KA2PA(identity) mapping for faulting piece of
367
             * kernel address space.
376
             * kernel address space.
368
             */
377
             */
369
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
378
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
370
            return;
379
            return;
371
        }
380
        }
372
    }
381
    }
373
 
382
 
374
    t = page_mapping_find(AS, va);
383
    t = page_mapping_find(AS, va);
375
    if (t) {
384
    if (t) {
376
        /*
385
        /*
377
         * The mapping was found in software page hash table.
386
         * The mapping was found in software page hash table.
378
         * Insert it into data translation cache.
387
         * Insert it into data translation cache.
379
         */
388
         */
380
        dtc_pte_copy(t);
389
        dtc_pte_copy(t);
381
    } else {
390
    } else {
382
        /*
391
        /*
383
         * Forward the page fault to address space page fault handler.
392
         * Forward the page fault to address space page fault handler.
384
         */
393
         */
385
        if (!as_page_fault(va)) {
394
        if (!as_page_fault(va)) {
386
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
395
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
387
        }
396
        }
388
    }
397
    }
389
}
398
}
390
 
399
 
391
/** Data nested TLB fault handler.
400
/** Data nested TLB fault handler.
392
 *
401
 *
393
 * This fault should not occur.
402
 * This fault should not occur.
394
 *
403
 *
395
 * @param vector Interruption vector.
404
 * @param vector Interruption vector.
396
 * @param pstate Structure with saved interruption state.
405
 * @param pstate Structure with saved interruption state.
397
 */
406
 */
398
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
407
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
399
{
408
{
400
    panic("%s\n", __FUNCTION__);
409
    panic("%s\n", __FUNCTION__);
401
}
410
}
402
 
411
 
403
/** Data Dirty bit fault handler.
412
/** Data Dirty bit fault handler.
404
 *
413
 *
405
 * @param vector Interruption vector.
414
 * @param vector Interruption vector.
406
 * @param pstate Structure with saved interruption state.
415
 * @param pstate Structure with saved interruption state.
407
 */
416
 */
408
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
417
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
409
{
418
{
410
    pte_t *t;
419
    pte_t *t;
411
 
420
 
412
    t = page_mapping_find(AS, pstate->cr_ifa);
421
    t = page_mapping_find(AS, pstate->cr_ifa);
413
    ASSERT(t && t->p);
422
    ASSERT(t && t->p);
414
    if (t && t->p) {
423
    if (t && t->p) {
415
        /*
424
        /*
416
         * Update the Dirty bit in page tables and reinsert
425
         * Update the Dirty bit in page tables and reinsert
417
         * the mapping into DTC.
426
         * the mapping into DTC.
418
         */
427
         */
419
        t->d = true;
428
        t->d = true;
420
        dtc_pte_copy(t);
429
        dtc_pte_copy(t);
421
    }
430
    }
422
}
431
}
423
 
432
 
424
/** Instruction access bit fault handler.
433
/** Instruction access bit fault handler.
425
 *
434
 *
426
 * @param vector Interruption vector.
435
 * @param vector Interruption vector.
427
 * @param pstate Structure with saved interruption state.
436
 * @param pstate Structure with saved interruption state.
428
 */
437
 */
429
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
438
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
430
{
439
{
431
    pte_t *t;
440
    pte_t *t;
432
 
441
 
433
    t = page_mapping_find(AS, pstate->cr_ifa);
442
    t = page_mapping_find(AS, pstate->cr_ifa);
434
    ASSERT(t && t->p);
443
    ASSERT(t && t->p);
435
    if (t && t->p) {
444
    if (t && t->p) {
436
        /*
445
        /*
437
         * Update the Accessed bit in page tables and reinsert
446
         * Update the Accessed bit in page tables and reinsert
438
         * the mapping into ITC.
447
         * the mapping into ITC.
439
         */
448
         */
440
        t->a = true;
449
        t->a = true;
441
        itc_pte_copy(t);
450
        itc_pte_copy(t);
442
    }
451
    }
443
}
452
}
444
 
453
 
445
/** Data access bit fault handler.
454
/** Data access bit fault handler.
446
 *
455
 *
447
 * @param vector Interruption vector.
456
 * @param vector Interruption vector.
448
 * @param pstate Structure with saved interruption state.
457
 * @param pstate Structure with saved interruption state.
449
 */
458
 */
450
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
459
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
451
{
460
{
452
    pte_t *t;
461
    pte_t *t;
453
 
462
 
454
    t = page_mapping_find(AS, pstate->cr_ifa);
463
    t = page_mapping_find(AS, pstate->cr_ifa);
455
    ASSERT(t && t->p);
464
    ASSERT(t && t->p);
456
    if (t && t->p) {
465
    if (t && t->p) {
457
        /*
466
        /*
458
         * Update the Accessed bit in page tables and reinsert
467
         * Update the Accessed bit in page tables and reinsert
459
         * the mapping into DTC.
468
         * the mapping into DTC.
460
         */
469
         */
461
        t->a = true;
470
        t->a = true;
462
        dtc_pte_copy(t);
471
        dtc_pte_copy(t);
463
    }
472
    }
464
}
473
}
465
 
474
 
466
/** Page not present fault handler.
475
/** Page not present fault handler.
467
 *
476
 *
468
 * @param vector Interruption vector.
477
 * @param vector Interruption vector.
469
 * @param pstate Structure with saved interruption state.
478
 * @param pstate Structure with saved interruption state.
470
 */
479
 */
471
void page_not_present(__u64 vector, struct exception_regdump *pstate)
480
void page_not_present(__u64 vector, struct exception_regdump *pstate)
472
{
481
{
473
    region_register rr;
482
    region_register rr;
474
    __address va;
483
    __address va;
475
    pte_t *t;
484
    pte_t *t;
476
   
485
   
477
    va = pstate->cr_ifa;    /* faulting address */
486
    va = pstate->cr_ifa;    /* faulting address */
478
    t = page_mapping_find(AS, va);
487
    t = page_mapping_find(AS, va);
479
    ASSERT(t);
488
    ASSERT(t);
480
   
489
   
481
    if (t->p) {
490
    if (t->p) {
482
        /*
491
        /*
483
         * If the Present bit is set in page hash table, just copy it
492
         * If the Present bit is set in page hash table, just copy it
484
         * and update ITC/DTC.
493
         * and update ITC/DTC.
485
         */
494
         */
486
        if (t->x)
495
        if (t->x)
487
            itc_pte_copy(t);
496
            itc_pte_copy(t);
488
        else
497
        else
489
            dtc_pte_copy(t);
498
            dtc_pte_copy(t);
490
    } else {
499
    } else {
491
        if (!as_page_fault(va)) {
500
        if (!as_page_fault(va)) {
492
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
501
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
493
        }
502
        }
494
    }
503
    }
495
}
504
}
496
 
505