Subversion Repositories HelenOS-historic

Rev 919 → Rev 928

Compared to Rev 919, Rev 928 adds #include <arch/pal/pal.h> and #include <arch/asm.h>, and replaces the former /* TODO */ body of tlb_invalidate_all() with an implementation that purges the entire translation cache with ptc.e, driven by the PAL_PTCE_INFO parameters. The listing below is the file as of Rev 928.

/*
 * Copyright (C) 2006 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * TLB management.
 */

#include <mm/tlb.h>
#include <mm/asid.h>
#include <mm/page.h>
#include <mm/as.h>
#include <arch/mm/tlb.h>
#include <arch/mm/page.h>
#include <arch/barrier.h>
#include <arch/interrupt.h>
#include <arch/pal/pal.h>
#include <arch/asm.h>
#include <typedefs.h>
#include <panic.h>
#include <arch.h>
 
/** Invalidate all TLB entries. */
void tlb_invalidate_all(void)
{
    __address adr;
    __u32 count1, count2, stride1, stride2;
    int i, j;

    adr = PAL_PTCE_INFO_BASE();
    count1 = PAL_PTCE_INFO_COUNT1();
    count2 = PAL_PTCE_INFO_COUNT2();
    stride1 = PAL_PTCE_INFO_STRIDE1();
    stride2 = PAL_PTCE_INFO_STRIDE2();

    interrupts_disable();

    for (i = 0; i < count1; i++) {
        for (j = 0; j < count2; j++) {
            __asm__ volatile (
                "ptc.e %0;;"
                :
                : "r" (adr)
            );
            adr += stride2;
        }
        adr += stride1;
    }

    interrupts_enable();

    srlz_d();
    srlz_i();
}
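
/*
 * Worked example with made-up values: if PAL_PTCE_INFO reported
 * count1 = 2, count2 = 3, stride1 = 0x2000 and stride2 = 0x1000, the loops
 * above would issue ptc.e on base, base + 0x1000 and base + 0x2000, then
 * advance by stride1 and purge three further addresses, i.e. count1 * count2
 * purges in total, which is the sequence PAL prescribes for flushing the
 * whole translation cache.  Real values come from firmware through the
 * PAL_PTCE_INFO_* accessors brought in with <arch/pal/pal.h>.
 */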
 
/** Invalidate entries belonging to an address space.
 *
 * @param asid Address space identifier.
 */
void tlb_invalidate_asid(asid_t asid)
{
    /* TODO */
}
 
/** Insert data into data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void dtc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
    tc_mapping_insert(va, asid, entry, true);
}
 
/** Insert data into instruction translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 */
void itc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry)
{
    tc_mapping_insert(va, asid, entry, false);
}
 
/** Insert data into instruction or data translation cache.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtc If true, insert into data translation cache, use instruction translation cache otherwise.
 */
void tc_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtc)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"                /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7 = %4,r0;;\n"  /* decide between itc and dtc */
        "(p6) itc.i %3;;\n"
        "(p7) itc.d %3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (dtc)
        : "p6", "p7", "r8"
    );

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
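
/*
 * Note on the inline assembly above: the interruption control registers
 * cr.ifa and cr.itir, as well as the itc.i/itc.d instructions, may only be
 * used while PSR.ic is cleared, hence the rsm/srlz sequence before the
 * insertion and the restore of the saved PSR afterwards.  The cmp.eq
 * against r0 sets p6 when dtc is false (zero), so p6 predicates the itc.i
 * path and p7 the itc.d path.
 */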
 
/** Insert data into instruction translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void itr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
    tr_mapping_insert(va, asid, entry, false, tr);
}
 
/** Insert data into data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param tr Translation register.
 */
void dtr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, index_t tr)
{
    tr_mapping_insert(va, asid, entry, true, tr);
}
 
/** Insert data into instruction or data translation register.
 *
 * @param va Virtual page address.
 * @param asid Address space identifier.
 * @param entry The rest of TLB entry as required by TLB insertion format.
 * @param dtr If true, insert into data translation register, use instruction translation register otherwise.
 * @param tr Translation register.
 */
void tr_mapping_insert(__address va, asid_t asid, tlb_entry_t entry, bool dtr, index_t tr)
{
    region_register rr;
    bool restore_rr = false;

    rr.word = rr_read(VA2VRN(va));
    if ((restore_rr = (rr.map.rid != ASID2RID(asid, VA2VRN(va))))) {
        /*
         * The selected region register does not contain required RID.
         * Save the old content of the register and replace the RID.
         */
        region_register rr0;

        rr0 = rr;
        rr0.map.rid = ASID2RID(asid, VA2VRN(va));
        rr_write(VA2VRN(va), rr0.word);
        srlz_d();
        srlz_i();
    }

    __asm__ volatile (
        "mov r8=psr;;\n"
        "rsm %0;;\n"                /* PSR_IC_MASK */
        "srlz.d;;\n"
        "srlz.i;;\n"
        "mov cr.ifa=%1\n"           /* va */
        "mov cr.itir=%2;;\n"        /* entry.word[1] */
        "cmp.eq p6,p7=%5,r0;;\n"    /* decide between itr and dtr */
        "(p6) itr.i itr[%4]=%3;;\n"
        "(p7) itr.d dtr[%4]=%3;;\n"
        "mov psr.l=r8;;\n"
        "srlz.d;;\n"
        :
        : "i" (PSR_IC_MASK), "r" (va), "r" (entry.word[1]), "r" (entry.word[0]), "r" (tr), "r" (dtr)
        : "p6", "p7", "r8"
    );

    if (restore_rr) {
        rr_write(VA2VRN(va), rr.word);
        srlz_d();
        srlz_i();
    }
}
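
/*
 * Unlike translation cache entries inserted with itc.i/itc.d, which the
 * processor is free to evict at any time, entries inserted into a
 * translation register with itr.i/itr.d stay pinned at the given index
 * until they are explicitly purged or overwritten, which makes them
 * suitable for mappings that must never miss (e.g. the kernel image).
 */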
 
/** Insert data into DTLB.
 *
 * @param page Virtual page address.
 * @param frame Physical frame address.
 * @param dtr If true, insert into data translation register, use data translation cache otherwise.
 * @param tr Translation register if dtr is true, ignored otherwise.
 */
void dtlb_kernel_mapping_insert(__address page, __address frame, bool dtr, index_t tr)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    entry.p = true;         /* present */
    entry.ma = MA_WRITEBACK;
    entry.a = true;         /* already accessed */
    entry.d = true;         /* already dirty */
    entry.pl = PL_KERNEL;
    entry.ar = AR_READ | AR_WRITE;
    entry.ppn = frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    if (dtr)
        dtr_mapping_insert(page, ASID_KERNEL, entry, tr);
    else
        dtc_mapping_insert(page, ASID_KERNEL, entry);
}
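
/*
 * Usage sketch (hypothetical values, assuming the usual PA2KA() macro for
 * turning a physical address into its kernel virtual alias): pinning a
 * kernel mapping for physical frame F in data translation register 1 could
 * look like
 *
 *     dtlb_kernel_mapping_insert(PA2KA(F), F, true, 1);
 *
 * With dtr = false the same mapping would instead go into the data
 * translation cache, where the hardware may evict it again at any time.
 */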
 
/** Copy content of PTE into data translation cache.
 *
 * @param t PTE.
 */
void dtc_pte_copy(pte_t *t)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    entry.p = t->p;
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
    entry.a = t->a;
    entry.d = t->d;
    entry.pl = t->k ? PL_KERNEL : PL_USER;
    entry.ar = t->w ? AR_WRITE : AR_READ;
    entry.ppn = t->frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    dtc_mapping_insert(t->page, t->as->asid, entry);
}
 
/** Copy content of PTE into instruction translation cache.
 *
 * @param t PTE.
 */
void itc_pte_copy(pte_t *t)
{
    tlb_entry_t entry;

    entry.word[0] = 0;
    entry.word[1] = 0;

    ASSERT(t->x);

    entry.p = t->p;
    entry.ma = t->c ? MA_WRITEBACK : MA_UNCACHEABLE;
    entry.a = t->a;
    entry.pl = t->k ? PL_KERNEL : PL_USER;
    entry.ar = t->x ? (AR_EXECUTE | AR_READ) : AR_READ;
    entry.ppn = t->frame >> PPN_SHIFT;
    entry.ps = PAGE_WIDTH;

    itc_mapping_insert(t->page, t->as->asid, entry);
}
 
/** Instruction TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void alternate_instruction_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
    region_register rr;
    __address va;
    pte_t *t;

    va = pstate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));  /* needed for the rid in the panic() below */
    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into instruction translation cache.
         */
        itc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
        }
    }
}
 
/** Data TLB fault handler for faults with VHPT turned off.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void alternate_data_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
    region_register rr;
    rid_t rid;
    __address va;
    pte_t *t;

    va = pstate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));
    rid = rr.map.rid;
    if (RID2ASID(rid) == ASID_KERNEL) {
        if (VA2VRN(va) == VRN_KERNEL) {
            /*
             * Provide KA2PA(identity) mapping for faulting piece of
             * kernel address space.
             */
            dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0);
            return;
        }
    }

    t = page_mapping_find(AS, va);
    if (t) {
        /*
         * The mapping was found in software page hash table.
         * Insert it into data translation cache.
         */
        dtc_pte_copy(t);
    } else {
        /*
         * Forward the page fault to address space page fault handler.
         */
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
        }
    }
}
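
/*
 * In other words: when the faulting address lies in the kernel region
 * (VRN_KERNEL) and the kernel RID is active, the miss is resolved by
 * inserting a non-pinned identity mapping via
 * dtlb_kernel_mapping_insert(va, KA2PA(va), false, 0), without consulting
 * the page hash table; all other misses go through page_mapping_find()
 * and, failing that, as_page_fault().
 */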
 
/** Data nested TLB fault handler.
 *
 * This fault should not occur.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_nested_tlb_fault(__u64 vector, struct exception_regdump *pstate)
{
    panic("%s\n", __FUNCTION__);
}
 
/** Data Dirty bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_dirty_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
    pte_t *t;

    t = page_mapping_find(AS, pstate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Dirty bit in page tables and reinsert
         * the mapping into DTC.
         */
        t->d = true;
        dtc_pte_copy(t);
    }
}
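
/*
 * This implements software tracking of the Dirty bit: dtc_pte_copy()
 * copies t->d into the hardware entry, so a page that has not been written
 * yet is inserted with D = 0.  The first store through such an entry
 * raises this fault, the PTE is marked dirty and the mapping is reinserted
 * with D = 1, so subsequent stores proceed without faulting.
 */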
 
/** Instruction access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void instruction_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
    pte_t *t;

    t = page_mapping_find(AS, pstate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Accessed bit in page tables and reinsert
         * the mapping into ITC.
         */
        t->a = true;
        itc_pte_copy(t);
    }
}
 
/** Data access bit fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void data_access_bit_fault(__u64 vector, struct exception_regdump *pstate)
{
    pte_t *t;

    t = page_mapping_find(AS, pstate->cr_ifa);
    ASSERT(t && t->p);
    if (t && t->p) {
        /*
         * Update the Accessed bit in page tables and reinsert
         * the mapping into DTC.
         */
        t->a = true;
        dtc_pte_copy(t);
    }
}
 
/** Page not present fault handler.
 *
 * @param vector Interruption vector.
 * @param pstate Structure with saved interruption state.
 */
void page_not_present(__u64 vector, struct exception_regdump *pstate)
{
    region_register rr;
    __address va;
    pte_t *t;

    va = pstate->cr_ifa;    /* faulting address */
    rr.word = rr_read(VA2VRN(va));  /* needed for the rid in the panic() below */
    t = page_mapping_find(AS, va);
    ASSERT(t);

    if (t->p) {
        /*
         * If the Present bit is set in page hash table, just copy it
         * and update ITC/DTC.
         */
        if (t->x)
            itc_pte_copy(t);
        else
            dtc_pte_copy(t);
    } else {
        if (!as_page_fault(va)) {
            panic("%s: va=%P, rid=%d\n", __FUNCTION__, pstate->cr_ifa, rr.map.rid);
        }
    }
}