Rev 1212 | Rev 1278 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1212 | Rev 1222 | ||
---|---|---|---|
1 | # |
1 | # |
2 | # Copyright (C) 2003-2004 Jakub Jermar |
2 | # Copyright (C) 2003-2004 Jakub Jermar |
3 | # All rights reserved. |
3 | # All rights reserved. |
4 | # |
4 | # |
5 | # Redistribution and use in source and binary forms, with or without |
5 | # Redistribution and use in source and binary forms, with or without |
6 | # modification, are permitted provided that the following conditions |
6 | # modification, are permitted provided that the following conditions |
7 | # are met: |
7 | # are met: |
8 | # |
8 | # |
9 | # - Redistributions of source code must retain the above copyright |
9 | # - Redistributions of source code must retain the above copyright |
10 | # notice, this list of conditions and the following disclaimer. |
10 | # notice, this list of conditions and the following disclaimer. |
11 | # - Redistributions in binary form must reproduce the above copyright |
11 | # - Redistributions in binary form must reproduce the above copyright |
12 | # notice, this list of conditions and the following disclaimer in the |
12 | # notice, this list of conditions and the following disclaimer in the |
13 | # documentation and/or other materials provided with the distribution. |
13 | # documentation and/or other materials provided with the distribution. |
14 | # - The name of the author may not be used to endorse or promote products |
14 | # - The name of the author may not be used to endorse or promote products |
15 | # derived from this software without specific prior written permission. |
15 | # derived from this software without specific prior written permission. |
16 | # |
16 | # |
17 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
17 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
18 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
18 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
19 | # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
19 | # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
20 | # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
20 | # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
21 | # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
21 | # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
22 | # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
22 | # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
23 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
24 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
25 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 | # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | # |
27 | # |
28 | 28 | ||
29 | #include <arch/asm/regname.h> |
29 | #include <arch/asm/regname.h> |
30 | #include <arch/mm/page.h> |
30 | #include <arch/mm/page.h> |
31 | #include <arch/asm/boot.h> |
31 | #include <arch/asm/boot.h> |
32 | #include <arch/context_offset.h> |
32 | #include <arch/context_offset.h> |
33 | 33 | ||
34 | .text |
34 | .text |
35 | 35 | ||
36 | .set noat |
36 | .set noat |
37 | .set noreorder |
37 | .set noreorder |
38 | .set nomacro |
38 | .set nomacro |
39 | 39 | ||
40 | .global kernel_image_start |
40 | .global kernel_image_start |
41 | .global tlb_refill_entry |
41 | .global tlb_refill_entry |
42 | .global cache_error_entry |
42 | .global cache_error_entry |
43 | .global exception_entry |
43 | .global exception_entry |
44 | .global userspace_asm |
44 | .global userspace_asm |
45 | 45 | ||
46 | # Which status bits are thread-local |
46 | # Which status bits are thread-local |
47 | #define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE |
47 | #define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE |
48 | 48 | ||
49 | # Save registers to space defined by \r |
49 | # Save registers to space defined by \r |
50 | # We will change status: Disable ERL,EXL,UM,IE |
50 | # We will change status: Disable ERL,EXL,UM,IE |
51 | # These changes will be automatically reversed in REGISTER_LOAD |
51 | # These changes will be automatically reversed in REGISTER_LOAD |
52 | # SP is NOT saved as part of these registers |
52 | # SP is NOT saved as part of these registers |
53 | .macro REGISTERS_STORE_AND_EXC_RESET r |
53 | .macro REGISTERS_STORE_AND_EXC_RESET r |
54 | sw $at,EOFFSET_AT(\r) |
54 | sw $at,EOFFSET_AT(\r) |
55 | sw $v0,EOFFSET_V0(\r) |
55 | sw $v0,EOFFSET_V0(\r) |
56 | sw $v1,EOFFSET_V1(\r) |
56 | sw $v1,EOFFSET_V1(\r) |
57 | sw $a0,EOFFSET_A0(\r) |
57 | sw $a0,EOFFSET_A0(\r) |
58 | sw $a1,EOFFSET_A1(\r) |
58 | sw $a1,EOFFSET_A1(\r) |
59 | sw $a2,EOFFSET_A2(\r) |
59 | sw $a2,EOFFSET_A2(\r) |
60 | sw $a3,EOFFSET_A3(\r) |
60 | sw $a3,EOFFSET_A3(\r) |
61 | sw $t0,EOFFSET_T0(\r) |
61 | sw $t0,EOFFSET_T0(\r) |
62 | sw $t1,EOFFSET_T1(\r) |
62 | sw $t1,EOFFSET_T1(\r) |
63 | sw $t2,EOFFSET_T2(\r) |
63 | sw $t2,EOFFSET_T2(\r) |
64 | sw $t3,EOFFSET_T3(\r) |
64 | sw $t3,EOFFSET_T3(\r) |
65 | sw $t4,EOFFSET_T4(\r) |
65 | sw $t4,EOFFSET_T4(\r) |
66 | sw $t5,EOFFSET_T5(\r) |
66 | sw $t5,EOFFSET_T5(\r) |
67 | sw $t6,EOFFSET_T6(\r) |
67 | sw $t6,EOFFSET_T6(\r) |
68 | sw $t7,EOFFSET_T7(\r) |
68 | sw $t7,EOFFSET_T7(\r) |
69 | sw $t8,EOFFSET_T8(\r) |
69 | sw $t8,EOFFSET_T8(\r) |
70 | sw $t9,EOFFSET_T9(\r) |
70 | sw $t9,EOFFSET_T9(\r) |
71 | 71 | ||
72 | mflo $at |
72 | mflo $at |
73 | sw $at, EOFFSET_LO(\r) |
73 | sw $at, EOFFSET_LO(\r) |
74 | mfhi $at |
74 | mfhi $at |
75 | sw $at, EOFFSET_HI(\r) |
75 | sw $at, EOFFSET_HI(\r) |
76 | 76 | ||
77 | #ifdef CONFIG_DEBUG_ALLREGS |
77 | #ifdef CONFIG_DEBUG_ALLREGS |
78 | sw $s0,EOFFSET_S0(\r) |
78 | sw $s0,EOFFSET_S0(\r) |
79 | sw $s1,EOFFSET_S1(\r) |
79 | sw $s1,EOFFSET_S1(\r) |
80 | sw $s2,EOFFSET_S2(\r) |
80 | sw $s2,EOFFSET_S2(\r) |
81 | sw $s3,EOFFSET_S3(\r) |
81 | sw $s3,EOFFSET_S3(\r) |
82 | sw $s4,EOFFSET_S4(\r) |
82 | sw $s4,EOFFSET_S4(\r) |
83 | sw $s5,EOFFSET_S5(\r) |
83 | sw $s5,EOFFSET_S5(\r) |
84 | sw $s6,EOFFSET_S6(\r) |
84 | sw $s6,EOFFSET_S6(\r) |
85 | sw $s7,EOFFSET_S7(\r) |
85 | sw $s7,EOFFSET_S7(\r) |
86 | sw $s8,EOFFSET_S8(\r) |
86 | sw $s8,EOFFSET_S8(\r) |
87 | #endif |
87 | #endif |
88 | 88 | ||
89 | sw $gp,EOFFSET_GP(\r) |
89 | sw $gp,EOFFSET_GP(\r) |
90 | sw $ra,EOFFSET_RA(\r) |
90 | sw $ra,EOFFSET_RA(\r) |
91 | sw $k1,EOFFSET_K1(\r) |
91 | sw $k1,EOFFSET_K1(\r) |
92 | 92 | ||
93 | mfc0 $t0, $status |
93 | mfc0 $t0, $status |
94 | mfc0 $t1, $epc |
94 | mfc0 $t1, $epc |
95 | 95 | ||
96 | and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE |
96 | and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE |
97 | li $t3, ~(0x1f) |
97 | li $t3, ~(0x1f) |
98 | and $t0, $t0, $t3 # Clear KSU,EXL,ERL,IE |
98 | and $t0, $t0, $t3 # Clear KSU,EXL,ERL,IE |
99 | 99 | ||
100 | sw $t2,EOFFSET_STATUS(\r) |
100 | sw $t2,EOFFSET_STATUS(\r) |
101 | sw $t1,EOFFSET_EPC(\r) |
101 | sw $t1,EOFFSET_EPC(\r) |
102 | mtc0 $t0, $status |
102 | mtc0 $t0, $status |
103 | .endm |
103 | .endm |
104 | 104 | ||
105 | .macro REGISTERS_LOAD r |
105 | .macro REGISTERS_LOAD r |
106 | # Update only UM,EXL,ERL,IE from status, the rest |
106 | # Update only UM,EXL,ERL,IE from status, the rest |
107 | # is controlled by OS and not bound to task |
107 | # is controlled by OS and not bound to task |
108 | mfc0 $t0, $status |
108 | mfc0 $t0, $status |
109 | lw $t1,EOFFSET_STATUS(\r) |
109 | lw $t1,EOFFSET_STATUS(\r) |
110 | 110 | ||
111 | li $t2, ~REG_SAVE_MASK # Mask UM,EXL,ERL,IE |
111 | li $t2, ~REG_SAVE_MASK # Mask UM,EXL,ERL,IE |
112 | and $t0, $t0, $t2 |
112 | and $t0, $t0, $t2 |
113 | 113 | ||
114 | or $t0, $t0, $t1 # Copy UM,EXL,ERL,IE from saved status |
114 | or $t0, $t0, $t1 # Copy UM,EXL,ERL,IE from saved status |
115 | mtc0 $t0, $status |
115 | mtc0 $t0, $status |
116 | 116 | ||
117 | lw $v0,EOFFSET_V0(\r) |
117 | lw $v0,EOFFSET_V0(\r) |
118 | lw $v1,EOFFSET_V1(\r) |
118 | lw $v1,EOFFSET_V1(\r) |
119 | lw $a0,EOFFSET_A0(\r) |
119 | lw $a0,EOFFSET_A0(\r) |
120 | lw $a1,EOFFSET_A1(\r) |
120 | lw $a1,EOFFSET_A1(\r) |
121 | lw $a2,EOFFSET_A2(\r) |
121 | lw $a2,EOFFSET_A2(\r) |
122 | lw $a3,EOFFSET_A3(\r) |
122 | lw $a3,EOFFSET_A3(\r) |
123 | lw $t0,EOFFSET_T0(\r) |
123 | lw $t0,EOFFSET_T0(\r) |
124 | lw $t1,EOFFSET_T1(\r) |
124 | lw $t1,EOFFSET_T1(\r) |
125 | lw $t2,EOFFSET_T2(\r) |
125 | lw $t2,EOFFSET_T2(\r) |
126 | lw $t3,EOFFSET_T3(\r) |
126 | lw $t3,EOFFSET_T3(\r) |
127 | lw $t4,EOFFSET_T4(\r) |
127 | lw $t4,EOFFSET_T4(\r) |
128 | lw $t5,EOFFSET_T5(\r) |
128 | lw $t5,EOFFSET_T5(\r) |
129 | lw $t6,EOFFSET_T6(\r) |
129 | lw $t6,EOFFSET_T6(\r) |
130 | lw $t7,EOFFSET_T7(\r) |
130 | lw $t7,EOFFSET_T7(\r) |
131 | lw $t8,EOFFSET_T8(\r) |
131 | lw $t8,EOFFSET_T8(\r) |
132 | lw $t9,EOFFSET_T9(\r) |
132 | lw $t9,EOFFSET_T9(\r) |
133 | 133 | ||
134 | #ifdef CONFIG_DEBUG_ALLREGS |
134 | #ifdef CONFIG_DEBUG_ALLREGS |
135 | lw $s0,EOFFSET_S0(\r) |
135 | lw $s0,EOFFSET_S0(\r) |
136 | lw $s1,EOFFSET_S1(\r) |
136 | lw $s1,EOFFSET_S1(\r) |
137 | lw $s2,EOFFSET_S2(\r) |
137 | lw $s2,EOFFSET_S2(\r) |
138 | lw $s3,EOFFSET_S3(\r) |
138 | lw $s3,EOFFSET_S3(\r) |
139 | lw $s4,EOFFSET_S4(\r) |
139 | lw $s4,EOFFSET_S4(\r) |
140 | lw $s5,EOFFSET_S5(\r) |
140 | lw $s5,EOFFSET_S5(\r) |
141 | lw $s6,EOFFSET_S6(\r) |
141 | lw $s6,EOFFSET_S6(\r) |
142 | lw $s7,EOFFSET_S7(\r) |
142 | lw $s7,EOFFSET_S7(\r) |
143 | lw $s8,EOFFSET_S8(\r) |
143 | lw $s8,EOFFSET_S8(\r) |
144 | #endif |
144 | #endif |
145 | lw $gp,EOFFSET_GP(\r) |
145 | lw $gp,EOFFSET_GP(\r) |
146 | lw $ra,EOFFSET_RA(\r) |
146 | lw $ra,EOFFSET_RA(\r) |
147 | lw $k1,EOFFSET_K1(\r) |
147 | lw $k1,EOFFSET_K1(\r) |
148 | 148 | ||
149 | lw $at,EOFFSET_LO(\r) |
149 | lw $at,EOFFSET_LO(\r) |
150 | mtlo $at |
150 | mtlo $at |
151 | lw $at,EOFFSET_HI(\r) |
151 | lw $at,EOFFSET_HI(\r) |
152 | mthi $at |
152 | mthi $at |
153 | 153 | ||
154 | lw $at,EOFFSET_EPC(\r) |
154 | lw $at,EOFFSET_EPC(\r) |
155 | mtc0 $at, $epc |
155 | mtc0 $at, $epc |
156 | 156 | ||
157 | lw $at,EOFFSET_AT(\r) |
157 | lw $at,EOFFSET_AT(\r) |
158 | lw $sp,EOFFSET_SP(\r) |
158 | lw $sp,EOFFSET_SP(\r) |
159 | .endm |
159 | .endm |
160 | 160 | ||
161 | # Move kernel stack pointer address to register K0 |
161 | # Move kernel stack pointer address to register K0 |
162 | # - if we are in user mode, load the appropriate stack |
162 | # - if we are in user mode, load the appropriate stack |
163 | # address |
163 | # address |
164 | .macro KERNEL_STACK_TO_K0 |
164 | .macro KERNEL_STACK_TO_K0 |
165 | # If we are in user mode |
165 | # If we are in user mode |
166 | mfc0 $k0, $status |
166 | mfc0 $k0, $status |
167 | andi $k0, 0x10 |
167 | andi $k0, 0x10 |
168 | 168 | ||
169 | beq $k0, $0, 1f |
169 | beq $k0, $0, 1f |
170 | add $k0, $sp, 0 |
170 | add $k0, $sp, 0 |
171 | 171 | ||
172 | # Move $k0 pointer to kernel stack |
172 | # Move $k0 pointer to kernel stack |
173 | lui $k0, %hi(supervisor_sp) |
173 | lui $k0, %hi(supervisor_sp) |
174 | ori $k0, $k0, %lo(supervisor_sp) |
174 | ori $k0, $k0, %lo(supervisor_sp) |
175 | # Move $k0 (supervisor_sp) |
175 | # Move $k0 (supervisor_sp) |
176 | lw $k0, 0($k0) |
176 | lw $k0, 0($k0) |
177 | 1: |
177 | 1: |
178 | .endm |
178 | .endm |
179 | 179 | ||
180 | .org 0x0 |
180 | .org 0x0 |
181 | kernel_image_start: |
181 | kernel_image_start: |
182 | /* Load temporary stack */ |
182 | /* Load temporary stack */ |
183 | lui $sp, %hi(end_stack) |
183 | lui $sp, %hi(end_stack) |
184 | ori $sp, $sp, %lo(end_stack) |
184 | ori $sp, $sp, %lo(end_stack) |
185 | 185 | ||
186 | /* Not sure about this, but might be needed for PIC code???? */ |
186 | /* Not sure about this, but might be needed for PIC code???? */ |
187 | lui $gp, 0x8000 |
187 | lui $gp, 0x8000 |
188 | 188 | ||
- | 189 | jal arch_pre_main |
|
- | 190 | nop |
|
- | 191 | ||
189 | jal main_bsp |
192 | j main_bsp |
190 | nop |
193 | nop |
191 | - | ||
192 | 194 | ||
193 | .space TEMP_STACK_SIZE |
195 | .space TEMP_STACK_SIZE |
194 | end_stack: |
196 | end_stack: |
195 | 197 | ||
196 | tlb_refill_entry: |
198 | tlb_refill_entry: |
197 | j tlb_refill_handler |
199 | j tlb_refill_handler |
198 | nop |
200 | nop |
199 | 201 | ||
200 | cache_error_entry: |
202 | cache_error_entry: |
201 | j cache_error_handler |
203 | j cache_error_handler |
202 | nop |
204 | nop |
203 | 205 | ||
204 | exception_entry: |
206 | exception_entry: |
205 | j exception_handler |
207 | j exception_handler |
206 | nop |
208 | nop |
207 | 209 | ||
208 | 210 | ||
209 | 211 | ||
210 | exception_handler: |
212 | exception_handler: |
211 | KERNEL_STACK_TO_K0 |
213 | KERNEL_STACK_TO_K0 |
212 | sub $k0, REGISTER_SPACE |
214 | sub $k0, REGISTER_SPACE |
213 | sw $sp,EOFFSET_SP($k0) |
215 | sw $sp,EOFFSET_SP($k0) |
214 | move $sp, $k0 |
216 | move $sp, $k0 |
215 | 217 | ||
216 | mfc0 $k0, $cause |
218 | mfc0 $k0, $cause |
217 | 219 | ||
218 | sra $k0, $k0, 0x2 # cp0_exc_cause() part 1 |
220 | sra $k0, $k0, 0x2 # cp0_exc_cause() part 1 |
219 | andi $k0, $k0, 0x1f # cp0_exc_cause() part 2 |
221 | andi $k0, $k0, 0x1f # cp0_exc_cause() part 2 |
220 | sub $k0, 8 # 8=SYSCALL |
222 | sub $k0, 8 # 8=SYSCALL |
221 | 223 | ||
222 | beqz $k0, syscall_shortcut |
224 | beqz $k0, syscall_shortcut |
223 | add $k0, 8 # Revert $k0 back to correct exc number |
225 | add $k0, 8 # Revert $k0 back to correct exc number |
224 | 226 | ||
225 | REGISTERS_STORE_AND_EXC_RESET $sp |
227 | REGISTERS_STORE_AND_EXC_RESET $sp |
226 | 228 | ||
227 | move $a1, $sp |
229 | move $a1, $sp |
228 | jal exc_dispatch # exc_dispatch(excno, register_space) |
230 | jal exc_dispatch # exc_dispatch(excno, register_space) |
229 | move $a0, $k0 |
231 | move $a0, $k0 |
230 | 232 | ||
231 | REGISTERS_LOAD $sp |
233 | REGISTERS_LOAD $sp |
232 | # The $sp is automatically restored to former value |
234 | # The $sp is automatically restored to former value |
233 | eret |
235 | eret |
234 | 236 | ||
235 | # The MIPS o32 ABI reserves 16 bytes of argument save space on the stack for callees (used by variadic functions) — hence SS_ARG4 below |
237 | # The MIPS o32 ABI reserves 16 bytes of argument save space on the stack for callees (used by variadic functions) — hence SS_ARG4 below |
236 | #define SS_ARG4 16 |
238 | #define SS_ARG4 16 |
237 | #define SS_SP EOFFSET_SP |
239 | #define SS_SP EOFFSET_SP |
238 | #define SS_STATUS EOFFSET_STATUS |
240 | #define SS_STATUS EOFFSET_STATUS |
239 | #define SS_EPC EOFFSET_EPC |
241 | #define SS_EPC EOFFSET_EPC |
240 | syscall_shortcut: |
242 | syscall_shortcut: |
241 | # We have plenty of stack space free for our own use |
243 | # We have plenty of stack space free for our own use |
242 | mfc0 $t1, $epc |
244 | mfc0 $t1, $epc |
243 | mfc0 $t0, $status |
245 | mfc0 $t0, $status |
244 | sw $t1,SS_EPC($sp) # Save EPC |
246 | sw $t1,SS_EPC($sp) # Save EPC |
245 | 247 | ||
246 | and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE |
248 | and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE |
247 | li $t3, ~(0x1f) |
249 | li $t3, ~(0x1f) |
248 | and $t0, $t0, $t3 # Clear KSU,EXL,ERL,IE (IE is re-set just below) |
250 | and $t0, $t0, $t3 # Clear KSU,EXL,ERL,IE (IE is re-set just below) |
249 | ori $t0, $t0, 0x1 # Set IE |
251 | ori $t0, $t0, 0x1 # Set IE |
250 | 252 | ||
251 | sw $t2,SS_STATUS($sp) |
253 | sw $t2,SS_STATUS($sp) |
252 | mtc0 $t0, $status |
254 | mtc0 $t0, $status |
253 | 255 | ||
254 | li $t4, 2 # SYS_INT_CONTROL |
256 | li $t4, 2 # SYS_INT_CONTROL |
255 | beq $t4, $v0, sysc_int_control |
257 | beq $t4, $v0, sysc_int_control |
256 | nop |
258 | nop |
257 | 259 | ||
258 | # CALL Syscall handler |
260 | # CALL Syscall handler |
259 | jal syscall_handler |
261 | jal syscall_handler |
260 | sw $v0, SS_ARG4($sp) # save v0 - arg4 to stack |
262 | sw $v0, SS_ARG4($sp) # save v0 - arg4 to stack |
261 | 263 | ||
262 | sysc_exit: |
264 | sysc_exit: |
263 | # restore status |
265 | # restore status |
264 | mfc0 $t0, $status |
266 | mfc0 $t0, $status |
265 | lw $t1,SS_STATUS($sp) |
267 | lw $t1,SS_STATUS($sp) |
266 | 268 | ||
267 | # Change back to EXL=1(from last exception), otherwise |
269 | # Change back to EXL=1(from last exception), otherwise |
268 | # an interrupt could rewrite the CP0-EPC |
270 | # an interrupt could rewrite the CP0-EPC |
269 | li $t2, ~REG_SAVE_MASK # Mask UM,EXL,ERL,IE |
271 | li $t2, ~REG_SAVE_MASK # Mask UM,EXL,ERL,IE |
270 | and $t0, $t0, $t2 |
272 | and $t0, $t0, $t2 |
271 | or $t0, $t0, $t1 # Copy UM,EXL,ERL,IE from saved status |
273 | or $t0, $t0, $t1 # Copy UM,EXL,ERL,IE from saved status |
272 | mtc0 $t0, $status |
274 | mtc0 $t0, $status |
273 | 275 | ||
274 | # restore epc+4 |
276 | # restore epc+4 |
275 | lw $t0,SS_EPC($sp) |
277 | lw $t0,SS_EPC($sp) |
276 | addi $t0, $t0, 4 |
278 | addi $t0, $t0, 4 |
277 | mtc0 $t0, $epc |
279 | mtc0 $t0, $epc |
278 | 280 | ||
279 | lw $sp,SS_SP($sp) # restore sp |
281 | lw $sp,SS_SP($sp) # restore sp |
280 | 282 | ||
281 | eret |
283 | eret |
282 | 284 | ||
283 | sysc_int_control: |
285 | sysc_int_control: |
284 | jal ddi_int_control |
286 | jal ddi_int_control |
285 | addi $a1, $sp, SS_STATUS |
287 | addi $a1, $sp, SS_STATUS |
286 | 288 | ||
287 | j sysc_exit |
289 | j sysc_exit |
288 | nop |
290 | nop |
289 | 291 | ||
290 | tlb_refill_handler: |
292 | tlb_refill_handler: |
291 | KERNEL_STACK_TO_K0 |
293 | KERNEL_STACK_TO_K0 |
292 | sub $k0, REGISTER_SPACE |
294 | sub $k0, REGISTER_SPACE |
293 | REGISTERS_STORE_AND_EXC_RESET $k0 |
295 | REGISTERS_STORE_AND_EXC_RESET $k0 |
294 | sw $sp,EOFFSET_SP($k0) |
296 | sw $sp,EOFFSET_SP($k0) |
295 | add $sp, $k0, 0 |
297 | add $sp, $k0, 0 |
296 | 298 | ||
297 | add $a0, $sp, 0 |
299 | add $a0, $sp, 0 |
298 | jal tlb_refill /* tlb_refill(register_space) */ |
300 | jal tlb_refill /* tlb_refill(register_space) */ |
299 | nop |
301 | nop |
300 | 302 | ||
301 | REGISTERS_LOAD $sp |
303 | REGISTERS_LOAD $sp |
302 | 304 | ||
303 | eret |
305 | eret |
304 | 306 | ||
305 | cache_error_handler: |
307 | cache_error_handler: |
306 | KERNEL_STACK_TO_K0 |
308 | KERNEL_STACK_TO_K0 |
307 | sub $k0, REGISTER_SPACE |
309 | sub $k0, REGISTER_SPACE |
308 | REGISTERS_STORE_AND_EXC_RESET $k0 |
310 | REGISTERS_STORE_AND_EXC_RESET $k0 |
309 | sw $sp,EOFFSET_SP($k0) |
311 | sw $sp,EOFFSET_SP($k0) |
310 | add $sp, $k0, 0 |
312 | add $sp, $k0, 0 |
311 | 313 | ||
312 | jal cache_error |
314 | jal cache_error |
313 | nop |
315 | nop |
314 | 316 | ||
315 | REGISTERS_LOAD $sp |
317 | REGISTERS_LOAD $sp |
316 | 318 | ||
317 | eret |
319 | eret |
318 | 320 | ||
319 | userspace_asm: |
321 | userspace_asm: |
320 | add $sp, $a0, 0 |
322 | add $sp, $a0, 0 |
321 | add $v0, $a1, 0 |
323 | add $v0, $a1, 0 |
322 | add $t9, $a2, 0 # Set up correct entry into PIC code |
324 | add $t9, $a2, 0 # Set up correct entry into PIC code |
323 | eret |
325 | eret |
324 | - | ||
325 | 326 | ||
- | 327 | ||
- | 328 |
|
|
- | 329 | ||
- | 330 | ||
- | 331 |