Subversion Repositories HelenOS

Rev 3770 → Rev 3862
/*
 * Copyright (c) 2006 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup sparc64interrupt
 * @{
 */
/**
 * @file
 * @brief This file contains fast MMU trap handlers.
 */
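
/*
 * The MMU miss handlers below first try to service the fault quickly in
 * assembly (a TSB refill and, for data misses, an on-the-fly identity
 * mapping of kernel pages); anything they cannot handle is passed on to
 * the preemptible C handlers.
 */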

#ifndef KERN_sparc64_sun4u_MMU_TRAP_H_
#define KERN_sparc64_sun4u_MMU_TRAP_H_

#include <arch/stack.h>
-#include <arch/sun4u/regdef.h>
+#include <arch/regdef.h>
-#include <arch/mm/sun4u/tlb.h>
-#include <arch/mm/sun4u/tlb.h>
+#include <arch/mm/tlb.h>
-#include <arch/mm/sun4u/mmu.h>
+#include <arch/mm/mmu.h>
-#include <arch/mm/sun4u/tte.h>
+#include <arch/mm/tte.h>
#include <arch/trap/regwin.h>
-#include <arch/sun4u/arch.h>
+#include <arch/arch.h>

#ifdef CONFIG_TSB
#include <arch/mm/tsb.h>
#endif

#define TT_FAST_INSTRUCTION_ACCESS_MMU_MISS 0x64
#define TT_FAST_DATA_ACCESS_MMU_MISS        0x68
#define TT_FAST_DATA_ACCESS_PROTECTION      0x6c

#define FAST_MMU_HANDLER_SIZE           128
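
/*
 * 128 bytes = four 32-byte trap table entries, matching the spacing of the
 * TT numbers above (0x64, 0x68, 0x6c); this is the trap table space
 * available to each fast MMU handler.
 */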

#ifdef __ASM__

.macro FAST_INSTRUCTION_ACCESS_MMU_MISS_HANDLER
    /*
     * First, try to refill TLB from TSB.
     */
#ifdef CONFIG_TSB
    ldxa [%g0] ASI_IMMU, %g1            ! read TSB Tag Target Register
    ldxa [%g0] ASI_IMMU_TSB_8KB_PTR_REG, %g2    ! read TSB 8K Pointer
    ldda [%g2] ASI_NUCLEUS_QUAD_LDD, %g4        ! 16-byte atomic load into %g4 and %g5
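    ! %g4 receives the TSB entry's tag, %g5 the entry's data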
    cmp %g1, %g4                    ! is this the entry we are looking for?
    bne,pn %xcc, 0f
    nop
    stxa %g5, [%g0] ASI_ITLB_DATA_IN_REG        ! copy mapping from ITSB to ITLB
    retry

#endif
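    /*
     * TSB miss (or TSB support not compiled in): switch from the MMU
     * globals to the alternate globals and let the preemptible C handler
     * service the miss.
     */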
0:
    wrpr %g0, PSTATE_PRIV_BIT | PSTATE_AG_BIT, %pstate
    PREEMPTIBLE_HANDLER fast_instruction_access_mmu_miss
.endm

.macro FAST_DATA_ACCESS_MMU_MISS_HANDLER tl
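    /*
     * The \tl argument gives the trap level for which the macro is
     * instantiated; a non-zero value means the miss was taken while
     * already handling another trap (e.g. a register window spill/fill).
     */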
    /*
     * First, try to refill TLB from TSB.
     */

#ifdef CONFIG_TSB
    ldxa [%g0] ASI_DMMU, %g1            ! read TSB Tag Target Register
    srlx %g1, TSB_TAG_TARGET_CONTEXT_SHIFT, %g2 ! is this a kernel miss?
    brz,pn %g2, 0f
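    ! kernel (context 0) misses bypass the TSB and are handled at 0: below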
    ldxa [%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g3    ! read TSB 8K Pointer
    ldda [%g3] ASI_NUCLEUS_QUAD_LDD, %g4        ! 16-byte atomic load into %g4 and %g5
    cmp %g1, %g4                    ! is this the entry we are looking for?
    bne,pn %xcc, 0f
    nop
    stxa %g5, [%g0] ASI_DTLB_DATA_IN_REG        ! copy mapping from DTSB to DTLB
    retry
#endif

    /*
     * Second, test if it is the portion of the kernel address space
     * which is faulting. If that is the case, immediately create
     * identity mapping for that page in DTLB. VPN 0 is excluded from
     * this treatment.
     *
     * Note that branch-delay slots are used in order to save space.
     */
0:
    mov VA_DMMU_TAG_ACCESS, %g1
    ldxa [%g1] ASI_DMMU, %g1            ! read the faulting Context and VPN
    set TLB_TAG_ACCESS_CONTEXT_MASK, %g2
    andcc %g1, %g2, %g3             ! get Context
    bnz 0f                      ! Context is non-zero
    andncc %g1, %g2, %g3                ! get page address into %g3
    bz 0f                       ! page address is zero

    sethi %hi(kernel_8k_tlb_data_template), %g2
    ldx [%g2 + %lo(kernel_8k_tlb_data_template)], %g2
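    ! %g2 now holds the kernel 8K TTE data template (attribute bits);
    ! OR-ing in the page address below yields the identity mapping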
    or %g3, %g2, %g2
    stxa %g2, [%g0] ASI_DTLB_DATA_IN_REG        ! identity map the kernel page
    retry

    /*
     * Third, catch and handle special cases when the trap is caused by
     * the userspace register window spill or fill handler. In case
     * one of these two traps caused this trap, we just lower the trap
     * level and service the DTLB miss. In the end, we restart
     * the offending SAVE or RESTORE.
     */
0:
.if (\tl > 0)
    wrpr %g0, 1, %tl
.endif

    /*
     * Switch from the MM globals.
     */
    wrpr %g0, PSTATE_PRIV_BIT | PSTATE_AG_BIT, %pstate

    /*
     * Read the Tag Access register for the higher-level handler.
     * This is necessary to survive nested DTLB misses.
     */
    mov VA_DMMU_TAG_ACCESS, %g2
    ldxa [%g2] ASI_DMMU, %g2

    /*
     * g2 will be passed as an argument to fast_data_access_mmu_miss().
     */
    PREEMPTIBLE_HANDLER fast_data_access_mmu_miss
.endm

.macro FAST_DATA_ACCESS_PROTECTION_HANDLER tl
    /*
     * The same special case as in FAST_DATA_ACCESS_MMU_MISS_HANDLER.
     */

.if (\tl > 0)
    wrpr %g0, 1, %tl
.endif

    /*
     * Switch from the MM globals.
     */
    wrpr %g0, PSTATE_PRIV_BIT | PSTATE_AG_BIT, %pstate

    /*
     * Read the Tag Access register for the higher-level handler.
     * This is necessary to survive nested DTLB misses.
     */
    mov VA_DMMU_TAG_ACCESS, %g2
    ldxa [%g2] ASI_DMMU, %g2

    /*
     * g2 will be passed as an argument to fast_data_access_protection().
     */
    PREEMPTIBLE_HANDLER fast_data_access_protection
.endm

#endif /* __ASM__ */

#endif

/** @}
 */