Subversion Repositories HelenOS-historic

Rev

Rev 902 | Rev 928 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 902 Rev 919
1
/*
1
/*
2
 * Copyright (C) 2006 Jakub Jermar
2
 * Copyright (C) 2006 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/*
29
/*
30
 * TLB management.
30
 * TLB management.
31
 */
31
 */
32
 
32
 
33
#include <mm/tlb.h>
33
#include <mm/tlb.h>
34
#include <mm/asid.h>
34
#include <mm/asid.h>
35
#include <mm/page.h>
35
#include <mm/page.h>
36
#include <mm/as.h>
36
#include <mm/as.h>
37
#include <arch/mm/tlb.h>
37
#include <arch/mm/tlb.h>
38
#include <arch/mm/page.h>
38
#include <arch/mm/page.h>
39
#include <arch/barrier.h>
39
#include <arch/barrier.h>
40
#include <arch/interrupt.h>
40
#include <arch/interrupt.h>
41
#include <typedefs.h>
41
#include <typedefs.h>
42
#include <panic.h>
42
#include <panic.h>
43
#include <arch.h>
43
#include <arch.h>
44
 
44
 
45
/** Invalidate all TLB entries.
 *
 * Placeholder: global TLB purge is not implemented yet on this
 * architecture.
 */
void tlb_invalidate_all(void)
{
    /* TODO */
}
50
 
50
 
51
/** Invalidate entries belonging to an address space.
51
/** Invalidate entries belonging to an address space.
52
 *
52
 *
53
 * @param asid Address space identifier.
53
 * @param asid Address space identifier.
54
 */
54
 */
55
void tlb_invalidate_asid(asid_t asid)
55
void tlb_invalidate_asid(asid_t asid)
56
{
56
{
57
    /* TODO */
57
    /* TODO */
58
}
58
}
59
 
59
 
60
/** Insert data into data translation cache.
60
/** Insert data into data translation cache.
61
 *
61
 *
62
 * @param va Virtual page address.
62
 * @param va Virtual page address.
63
 * @param asid Address space identifier.
63
 * @param asid Address space identifier.
64
 * @param entry The rest of TLB entry as required by TLB insertion format.
64
 * @param entry The rest of TLB entry as required by TLB insertion format.
65
 */
65
 */
66
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry) {
66
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
-
 
67
{
67
    tc_mapping_insert(va, asid, entry, true);
68
    tc_mapping_insert(va, asid, entry, true);
68
}
69
}
69
 
70
 
70
/** Insert data into instruction translation cache.
71
/** Insert data into instruction translation cache.
71
 *
72
 *
72
 * @param va Virtual page address.
73
 * @param va Virtual page address.
73
 * @param asid Address space identifier.
74
 * @param asid Address space identifier.
74
 * @param entry The rest of TLB entry as required by TLB insertion format.
75
 * @param entry The rest of TLB entry as required by TLB insertion format.
75
 */
76
 */
76
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry) {
77
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
-
 
78
{
77
    tc_mapping_insert(va, asid, entry, false);
79
    tc_mapping_insert(va, asid, entry, false);
78
}
80
}
79
 
81
 
80
/** Insert data into instruction or data translation cache.
 *
 * If the region register covering va does not already hold the RID derived
 * from asid, it is temporarily rewritten for the duration of the insertion
 * and restored afterwards.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }
    
    /*
     * itc.i/itc.d must execute with interruption collection disabled
     * (PSR.ic cleared) and cr.ifa/cr.itir set up; PSR is saved in r8
     * and restored via psr.l afterwards.  p6/p7 select between the
     * instruction and data variant based on the dtc flag.
     */
    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"            /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"       /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
        "(p6) itc.i %3;;\n"
        "(p7) itc.d %3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
        : "p6", "p7", "r8"
    );
    
    if (restore_rr) {
        /* Put back the original region register contents. */
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
130
 
132
 
131
/** Insert data into instruction translation register.
133
/** Insert data into instruction translation register.
132
 *
134
 *
133
 * @param va Virtual page address.
135
 * @param va Virtual page address.
134
 * @param asid Address space identifier.
136
 * @param asid Address space identifier.
135
 * @param entry The rest of TLB entry as required by TLB insertion format.
137
 * @param entry The rest of TLB entry as required by TLB insertion format.
136
 * @param tr Translation register.
138
 * @param tr Translation register.
137
 */
139
 */
138
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
140
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
139
{
141
{
140
    tr_mapping_insert(va, asid, entry, false, tr);
142
    tr_mapping_insert(va, asid, entry, false, tr);
141
}
143
}
142
 
144
 
143
/** Insert data into data translation register.
145
/** Insert data into data translation register.
144
 *
146
 *
145
 * @param va Virtual page address.
147
 * @param va Virtual page address.
146
 * @param asid Address space identifier.
148
 * @param asid Address space identifier.
147
 * @param entry The rest of TLB entry as required by TLB insertion format.
149
 * @param entry The rest of TLB entry as required by TLB insertion format.
148
 * @param tr Translation register.
150
 * @param tr Translation register.
149
 */
151
 */
150
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
152
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
151
{
153
{
152
    tr_mapping_insert(va, asid, entry, true, tr);
154
    tr_mapping_insert(va, asid, entry, true, tr);
153
}
155
}
154
 
156
 
155
/** Insert data into instruction or data translation register.
 *
 * If the region register covering va does not already hold the RID derived
 * from asid, it is temporarily rewritten for the duration of the insertion
 * and restored afterwards.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    /*
     * itr.i/itr.d must execute with interruption collection disabled
     * (PSR.ic cleared) and cr.ifa/cr.itir set up; PSR is saved in r8
     * and restored via psr.l afterwards.  p6/p7 select between the
     * instruction and data register file based on the dtr flag; tr
     * indexes the target translation register.
     */
    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"            /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */         
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
        "(p6) itr.i itr[%4]=%3;;\n"
        "(p7) itr.d dtr[%4]=%3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
        : "p6", "p7", "r8"
    );
    
    if (restore_rr) {
        /* Put back the original region register contents. */
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
206
 
208
 
207
/** Insert data into DTLB.
 *
 * Builds a present, dirty, accessed, writeback, kernel-privilege
 * read/write TLB entry mapping page to frame and inserts it into either
 * a data translation register or the data translation cache.
 *
 * @param page Virtual page address to be mapped.
 * @param frame Physical frame address backing the page.
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
 * @param tr Translation register if dtr is true, ignored otherwise.
 */
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
{
    tlb_entry_t entry;
    
    entry.word[0] = 0;
    entry.word[1] = 0;
    
    entry.p = true;         /* present */
    entry.ma = MA_WRITEBACK;
    entry.a = true;         /* already accessed */
    entry.d = true;         /* already dirty */
    entry.pl = PL_KERNEL;
    entry.ar = AR_READ | AR_WRITE;
    entry.ppn = frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;
    
    if (dtr)
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
    else
        dtc_mapping_insert(page, ASID_KERNEL, entry);
}
236
 
238
 
237
/** Copy content of PTE into data translation cache.
239
/** Copy content of PTE into data translation cache.
238
 *
240
 *
239
 * @param t PTE.
241
 * @param t PTE.
240
 */
242
 */
241
void dtc_pte_copy(pte_t *t)
243
void dtc_pte_copy(pte_t *t)
242
{
244
{
243
    tlb_entry_t entry;
245
    tlb_entry_t entry;
244
 
246
 
245
    entry.word[0] = 0;
247
    entry.word[0] = 0;
246
    entry.word[1] = 0;
248
    entry.word[1] = 0;
247
   
249
   
248
    entry.p = t->p;
250
    entry.p = t->p;
249
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
251
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
250
    entry.a = t->a;
252
    entry.a = t->a;
251
    entry.d = t->d;
253
    entry.d = t->d;
252
    entry.pl = t->k ? PL_KERNEL : PL_USER;
254
    entry.pl = t->k ? PL_KERNEL : PL_USER;
253
    entry.ar = t->w ? AR_WRITE : AR_READ;
255
    entry.ar = t->w ? AR_WRITE : AR_READ;
254
    entry.ppn = t->frame >> PPN_SHIFT;
256
    entry.ppn = t->frame >> PPN_SHIFT;
255
    entry.ps = PAGE_WIDTH;
257
    entry.ps = PAGE_WIDTH;
256
   
258
   
257
    dtc_mapping_insert(t->page, t->as->asid, entry);
259
    dtc_mapping_insert(t->page, t->as->asid, entry);
258
}
260
}
259
 
261
 
260
/** Copy content of PTE into instruction translation cache.
262
/** Copy content of PTE into instruction translation cache.
261
 *
263
 *
262
 * @param t PTE.
264
 * @param t PTE.
263
 */
265
 */
264
void itc_pte_copy(pte_t *t)
266
void itc_pte_copy(pte_t *t)
265
{
267
{
266
    tlb_entry_t entry;
268
    tlb_entry_t entry;
267
 
269
 
268
    entry.word[0] = 0;
270
    entry.word[0] = 0;
269
    entry.word[1] = 0;
271
    entry.word[1] = 0;
270
   
272
   
271
    ASSERT(t->x);
273
    ASSERT(t->x);
272
   
274
   
273
    entry.p = t->p;
275
    entry.p = t->p;
274
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
276
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
275
    entry.a = t->a;
277
    entry.a = t->a;
276
    entry.pl = t->k ? PL_KERNEL : PL_USER;
278
    entry.pl = t->k ? PL_KERNEL : PL_USER;
277
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
279
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
278
    entry.ppn = t->frame >> PPN_SHIFT;
280
    entry.ppn = t->frame >> PPN_SHIFT;
279
    entry.ps = PAGE_WIDTH;
281
    entry.ps = PAGE_WIDTH;
280
   
282
   
281
    itc_mapping_insert(t->page, t->as->asid, entry);
283
    itc_mapping_insert(t->page, t->as->asid, entry);
282
}
284
}
283
 
285
 
284
/** Instruction TLB fault handler for faults with VHPT turned off.
286
/** Instruction TLB fault handler for faults with VHPT turned off.
285
 *
287
 *
286
 * @param vector Interruption vector.
288
 * @param vector Interruption vector.
287
 * @param pstate Structure with saved interruption state.
289
 * @param pstate Structure with saved interruption state.
288
 */
290
 */
289
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
291
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
290
{
292
{
291
    region_register rr;
293
    region_register rr;
292
    __address va;
294
    __address va;
293
    pte_t *t;
295
    pte_t *t;
294
   
296
   
295
    va = pstate->cr_ifa;    /* faulting address */
297
    va = pstate->cr_ifa;    /* faulting address */
296
    t = page_mapping_find(AS, va);
298
    t = page_mapping_find(AS, va);
297
    if (t) {
299
    if (t) {
298
        /*
300
        /*
299
         * The mapping was found in software page hash table.
301
         * The mapping was found in software page hash table.
300
         * Insert it into data translation cache.
302
         * Insert it into data translation cache.
301
         */
303
         */
302
        itc_pte_copy(t);
304
        itc_pte_copy(t);
303
    } else {
305
    } else {
304
        /*
306
        /*
305
         * Forward the page fault to address space page fault handler.
307
         * Forward the page fault to address space page fault handler.
306
         */
308
         */
307
        if (!as_page_fault(va)) {
309
        if (!as_page_fault(va)) {
308
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
310
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
309
        }
311
        }
310
    }
312
    }
311
}
313
}
312
 
314
 
313
/** Data TLB fault handler for faults with VHPT turned off.
315
/** Data TLB fault handler for faults with VHPT turned off.
314
 *
316
 *
315
 * @param vector Interruption vector.
317
 * @param vector Interruption vector.
316
 * @param pstate Structure with saved interruption state.
318
 * @param pstate Structure with saved interruption state.
317
 */
319
 */
318
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
320
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
319
{
321
{
320
    region_register rr;
322
    region_register rr;
321
    rid_t rid;
323
    rid_t rid;
322
    __address va;
324
    __address va;
323
    pte_t *t;
325
    pte_t *t;
324
   
326
   
325
    va = pstate->cr_ifa;    /* faulting address */
327
    va = pstate->cr_ifa;    /* faulting address */
326
    rr.word = rr_read(VA2VRN(va));
328
    rr.word = rr_read(VA2VRN(va));
327
    rid = rr.map.rid;
329
    rid = rr.map.rid;
328
    if (RID2ASID(rid) == ASID_KERNEL) {
330
    if (RID2ASID(rid) == ASID_KERNEL) {
329
        if (VA2VRN(va) == VRN_KERNEL) {
331
        if (VA2VRN(va) == VRN_KERNEL) {
330
            /*
332
            /*
331
             * Provide KA2PA(identity) mapping for faulting piece of
333
             * Provide KA2PA(identity) mapping for faulting piece of
332
             * kernel address space.
334
             * kernel address space.
333
             */
335
             */
334
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
336
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
335
            return;
337
            return;
336
        }
338
        }
337
    }
339
    }
338
   
340
 
339
    t = page_mapping_find(AS, va);
341
    t = page_mapping_find(AS, va);
340
    if (t) {
342
    if (t) {
341
        /*
343
        /*
342
         * The mapping was found in software page hash table.
344
         * The mapping was found in software page hash table.
343
         * Insert it into data translation cache.
345
         * Insert it into data translation cache.
344
         */
346
         */
345
        dtc_pte_copy(t);
347
        dtc_pte_copy(t);
346
    } else {
348
    } else {
347
        /*
349
        /*
348
         * Forward the page fault to address space page fault handler.
350
         * Forward the page fault to address space page fault handler.
349
         */
351
         */
350
        if (!as_page_fault(va)) {
352
        if (!as_page_fault(va)) {
351
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
353
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
352
        }
354
        }
353
    }
355
    }
354
}
356
}
355
 
357
 
356
/** Data nested TLB fault handler.
358
/** Data nested TLB fault handler.
357
 *
359
 *
358
 * This fault should not occur.
360
 * This fault should not occur.
359
 *
361
 *
360
 * @param vector Interruption vector.
362
 * @param vector Interruption vector.
361
 * @param pstate Structure with saved interruption state.
363
 * @param pstate Structure with saved interruption state.
362
 */
364
 */
363
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
365
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
364
{
366
{
365
    panic("%s\n", __FUNCTION__);
367
    panic("%s\n", __FUNCTION__);
366
}
368
}
367
 
369
 
368
/** Data Dirty bit fault handler.
370
/** Data Dirty bit fault handler.
369
 *
371
 *
370
 * @param vector Interruption vector.
372
 * @param vector Interruption vector.
371
 * @param pstate Structure with saved interruption state.
373
 * @param pstate Structure with saved interruption state.
372
 */
374
 */
373
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
375
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
374
{
376
{
375
    pte_t *t;
377
    pte_t *t;
376
 
378
 
377
    t = page_mapping_find(AS, pstate->cr_ifa);
379
    t = page_mapping_find(AS, pstate->cr_ifa);
378
    ASSERT(t && t->p);
380
    ASSERT(t && t->p);
379
    if (t && t->p) {
381
    if (t && t->p) {
380
        /*
382
        /*
381
         * Update the Dirty bit in page tables and reinsert
383
         * Update the Dirty bit in page tables and reinsert
382
         * the mapping into DTC.
384
         * the mapping into DTC.
383
         */
385
         */
384
        t->d = true;
386
        t->d = true;
385
        dtc_pte_copy(t);
387
        dtc_pte_copy(t);
386
    }
388
    }
387
}
389
}
388
 
390
 
389
/** Instruction access bit fault handler.
391
/** Instruction access bit fault handler.
390
 *
392
 *
391
 * @param vector Interruption vector.
393
 * @param vector Interruption vector.
392
 * @param pstate Structure with saved interruption state.
394
 * @param pstate Structure with saved interruption state.
393
 */
395
 */
394
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
396
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
395
{
397
{
396
    pte_t *t;
398
    pte_t *t;
397
 
399
 
398
    t = page_mapping_find(AS, pstate->cr_ifa);
400
    t = page_mapping_find(AS, pstate->cr_ifa);
399
    ASSERT(t && t->p);
401
    ASSERT(t && t->p);
400
    if (t && t->p) {
402
    if (t && t->p) {
401
        /*
403
        /*
402
         * Update the Accessed bit in page tables and reinsert
404
         * Update the Accessed bit in page tables and reinsert
403
         * the mapping into ITC.
405
         * the mapping into ITC.
404
         */
406
         */
405
        t->a = true;
407
        t->a = true;
406
        itc_pte_copy(t);
408
        itc_pte_copy(t);
407
    }
409
    }
408
}
410
}
409
 
411
 
410
/** Data access bit fault handler.
412
/** Data access bit fault handler.
411
 *
413
 *
412
 * @param vector Interruption vector.
414
 * @param vector Interruption vector.
413
 * @param pstate Structure with saved interruption state.
415
 * @param pstate Structure with saved interruption state.
414
 */
416
 */
415
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
417
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
416
{
418
{
417
    pte_t *t;
419
    pte_t *t;
418
 
420
 
419
    t = page_mapping_find(AS, pstate->cr_ifa);
421
    t = page_mapping_find(AS, pstate->cr_ifa);
420
    ASSERT(t && t->p);
422
    ASSERT(t && t->p);
421
    if (t && t->p) {
423
    if (t && t->p) {
422
        /*
424
        /*
423
         * Update the Accessed bit in page tables and reinsert
425
         * Update the Accessed bit in page tables and reinsert
424
         * the mapping into DTC.
426
         * the mapping into DTC.
425
         */
427
         */
426
        t->a = true;
428
        t->a = true;
427
        dtc_pte_copy(t);
429
        dtc_pte_copy(t);
428
    }
430
    }
429
}
431
}
430
 
432
 
431
/** Page not present fault handler.
433
/** Page not present fault handler.
432
 *
434
 *
433
 * @param vector Interruption vector.
435
 * @param vector Interruption vector.
434
 * @param pstate Structure with saved interruption state.
436
 * @param pstate Structure with saved interruption state.
435
 */
437
 */
436
void page_not_present(__u64 vector, struct exception_regdump *pstate)
438
void page_not_present(__u64 vector, struct exception_regdump *pstate)
437
{
439
{
438
    region_register rr;
440
    region_register rr;
439
    __address va;
441
    __address va;
440
    pte_t *t;
442
    pte_t *t;
441
   
443
   
442
    va = pstate->cr_ifa;    /* faulting address */
444
    va = pstate->cr_ifa;    /* faulting address */
443
    t = page_mapping_find(AS, va);
445
    t = page_mapping_find(AS, va);
444
    ASSERT(t);
446
    ASSERT(t);
445
   
447
   
446
    if (t->p) {
448
    if (t->p) {
447
        /*
449
        /*
448
         * If the Present bit is set in page hash table, just copy it
450
         * If the Present bit is set in page hash table, just copy it
449
         * and update ITC/DTC.
451
         * and update ITC/DTC.
450
         */
452
         */
451
        if (t->x)
453
        if (t->x)
452
            itc_pte_copy(t);
454
            itc_pte_copy(t);
453
        else
455
        else
454
            dtc_pte_copy(t);
456
            dtc_pte_copy(t);
455
    } else {
457
    } else {
456
        if (!as_page_fault(va)) {
458
        if (!as_page_fault(va)) {
457
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
459
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
458
        }
460
        }
459
    }
461
    }
460
}
462
}
461
 
463