Subversion Repositories HelenOS-historic

Rev

Rev 1386 | Rev 1749 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1386 Rev 1686
1
#
1
#
2
# Copyright (C) 2003-2004 Jakub Jermar
2
# Copyright (C) 2003-2004 Jakub Jermar
3
# All rights reserved.
3
# All rights reserved.
4
#
4
#
5
# Redistribution and use in source and binary forms, with or without
5
# Redistribution and use in source and binary forms, with or without
6
# modification, are permitted provided that the following conditions
6
# modification, are permitted provided that the following conditions
7
# are met:
7
# are met:
8
#
8
#
9
# - Redistributions of source code must retain the above copyright
9
# - Redistributions of source code must retain the above copyright
10
#   notice, this list of conditions and the following disclaimer.
10
#   notice, this list of conditions and the following disclaimer.
11
# - Redistributions in binary form must reproduce the above copyright
11
# - Redistributions in binary form must reproduce the above copyright
12
#   notice, this list of conditions and the following disclaimer in the
12
#   notice, this list of conditions and the following disclaimer in the
13
#   documentation and/or other materials provided with the distribution.
13
#   documentation and/or other materials provided with the distribution.
14
# - The name of the author may not be used to endorse or promote products
14
# - The name of the author may not be used to endorse or promote products
15
#   derived from this software without specific prior written permission.
15
#   derived from this software without specific prior written permission.
16
#
16
#
17
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
#
27
#
28
 
28
 
29
#include <arch/asm/regname.h>
29
#include <arch/asm/regname.h>
30
#include <arch/mm/page.h>
30
#include <arch/mm/page.h>
31
#include <arch/asm/boot.h>
31
#include <arch/asm/boot.h>
32
#include <arch/context_offset.h>
32
#include <arch/context_offset.h>
33
	
33
	
34
.text

# Assembler modes for hand-written exception-path code:
.set noat	# we manage $at ourselves (it carries live state here)
.set noreorder	# no instruction reordering; we fill branch delay slots by hand
.set nomacro	# no multi-instruction assembler macro expansions

.global kernel_image_start
.global tlb_refill_entry
.global cache_error_entry
.global exception_entry
.global userspace_asm

# Which status bits are thread-local
#define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE
48
	
48
	
49
# Save registers to space defined by \r
# We will change status: Disable ERL,EXL,UM,IE
# These changes will be automatically reversed in REGISTERS_LOAD
# SP is NOT saved as part of these registers
.macro REGISTERS_STORE_AND_EXC_RESET r
	sw $at,EOFFSET_AT(\r)
	sw $v0,EOFFSET_V0(\r)
	sw $v1,EOFFSET_V1(\r)
	sw $a0,EOFFSET_A0(\r)
	sw $a1,EOFFSET_A1(\r)
	sw $a2,EOFFSET_A2(\r)
	sw $a3,EOFFSET_A3(\r)
	sw $t0,EOFFSET_T0(\r)
	sw $t1,EOFFSET_T1(\r)
	sw $t2,EOFFSET_T2(\r)
	sw $t3,EOFFSET_T3(\r)
	sw $t4,EOFFSET_T4(\r)
	sw $t5,EOFFSET_T5(\r)
	sw $t6,EOFFSET_T6(\r)
	sw $t7,EOFFSET_T7(\r)
	sw $t8,EOFFSET_T8(\r)
	sw $t9,EOFFSET_T9(\r)

	# $at was already stored above, so reuse it as scratch for LO/HI.
	mflo $at
	sw $at, EOFFSET_LO(\r)
	mfhi $at
	sw $at, EOFFSET_HI(\r)
	
#ifdef CONFIG_DEBUG_ALLREGS	
	# Callee-saved registers are only captured in debug builds.
	sw $s0,EOFFSET_S0(\r)
	sw $s1,EOFFSET_S1(\r)
	sw $s2,EOFFSET_S2(\r)
	sw $s3,EOFFSET_S3(\r)
	sw $s4,EOFFSET_S4(\r)
	sw $s5,EOFFSET_S5(\r)
	sw $s6,EOFFSET_S6(\r)
	sw $s7,EOFFSET_S7(\r)
	sw $s8,EOFFSET_S8(\r)
#endif
	
	sw $gp,EOFFSET_GP(\r)
	sw $ra,EOFFSET_RA(\r)
	sw $k1,EOFFSET_K1(\r)

	# Snapshot CP0 Status and EPC, then drop to a clean kernel state.
	mfc0 $t0, $status
	mfc0 $t1, $epc
	
	and $t2, $t0, REG_SAVE_MASK  # Save only KSU,EXL,ERL,IE
	li $t3, ~(0x1f)
	and $t0, $t0, $t3           # Clear KSU,EXL,ERL,IE
	
	sw $t2,EOFFSET_STATUS(\r)
	sw $t1,EOFFSET_EPC(\r)
	mtc0 $t0, $status
.endm
104
 
104
 
105
# Inverse of REGISTERS_STORE_AND_EXC_RESET: restore registers and the
# thread-local Status bits from the save area pointed to by \r.
.macro REGISTERS_LOAD r
	# Update only UM,EXL,IE from status, the rest
	# is controlled by OS and not bound to task
	mfc0 $t0, $status
	lw $t1,EOFFSET_STATUS(\r)

	li $t2, ~REG_SAVE_MASK    # Mask UM,EXL,ERL,IE
	and $t0, $t0, $t2
	
	or $t0, $t0, $t1   # Copy UM,EXL,ERL,IE from saved status
	mtc0 $t0, $status
	
	lw $v0,EOFFSET_V0(\r)
	lw $v1,EOFFSET_V1(\r)
	lw $a0,EOFFSET_A0(\r)
	lw $a1,EOFFSET_A1(\r)
	lw $a2,EOFFSET_A2(\r)
	lw $a3,EOFFSET_A3(\r)
	lw $t0,EOFFSET_T0(\r)
	lw $t1,EOFFSET_T1(\r)
	lw $t2,EOFFSET_T2(\r)
	lw $t3,EOFFSET_T3(\r)
	lw $t4,EOFFSET_T4(\r)
	lw $t5,EOFFSET_T5(\r)
	lw $t6,EOFFSET_T6(\r)
	lw $t7,EOFFSET_T7(\r)
	lw $t8,EOFFSET_T8(\r)
	lw $t9,EOFFSET_T9(\r)
	
#ifdef CONFIG_DEBUG_ALLREGS	
	lw $s0,EOFFSET_S0(\r)
	lw $s1,EOFFSET_S1(\r)
	lw $s2,EOFFSET_S2(\r)
	lw $s3,EOFFSET_S3(\r)
	lw $s4,EOFFSET_S4(\r)
	lw $s5,EOFFSET_S5(\r)
	lw $s6,EOFFSET_S6(\r)
	lw $s7,EOFFSET_S7(\r)
	lw $s8,EOFFSET_S8(\r)
#endif
	lw $gp,EOFFSET_GP(\r)
	lw $ra,EOFFSET_RA(\r)
	lw $k1,EOFFSET_K1(\r)
	
	# Restore LO/HI through $at; $at itself is reloaded last below.
	lw $at,EOFFSET_LO(\r)
	mtlo $at
	lw $at,EOFFSET_HI(\r)
	mthi $at

	lw $at,EOFFSET_EPC(\r)
	mtc0 $at, $epc
	
	lw $at,EOFFSET_AT(\r)
	# Loaded last: callers pass \r = $sp, so this finally switches
	# the stack pointer back to its pre-exception value.
	lw $sp,EOFFSET_SP(\r)
.endm
160
 
160
 
161
# Move kernel stack pointer address to register K0
# - if we are in user mode, load the appropriate stack
# address
.macro KERNEL_STACK_TO_K0
	# If we are in user mode (Status.UM, bit 0x10)
	mfc0 $k0, $status
	andi $k0, 0x10
	
	beq $k0, $0, 1f
	add $k0, $sp, 0		# delay slot: kernel mode keeps current $sp
	
	# Move $k0 pointer to kernel stack
	lui $k0, %hi(supervisor_sp)
	ori $k0, $k0, %lo(supervisor_sp)
	# Dereference $k0 (supervisor_sp)
	lw $k0, 0($k0)
1:		
.endm
179
		
179
		
180
.org 0x0
kernel_image_start:
	/* Load temporary stack */
	lui $sp, %hi(end_stack)
	ori $sp, $sp, %lo(end_stack)
	
	/* $a1 contains physical address of bootinfo_t */
	/* $a2 contains size of bootinfo_t */
	
	# Nothing to copy if the loader passed no bootinfo.
	beq $a2, $0, bootinfo_end
	
	/* Not sure about this, but might be needed for PIC code???? */
	lui $gp, 0x8000		# executes in the branch delay slot above
	
	# Turn the physical bootinfo address into a KSEG0 virtual
	# address by adding 0x80000000.
	lui $a3, 0x8000
	addu $a1, $a1, $a3
	
	lui $a3, %hi(bootinfo)
	ori $a3, $a3, %lo(bootinfo)
	
	# Word-by-word copy of bootinfo_t into the kernel's own buffer.
	# NOTE(review): copies in 4-byte units, effectively rounding the
	# size up to a multiple of 4 — confirm sizeof(bootinfo_t) is
	# word-aligned.
	bootinfo_loop:
		
		lw $v0, 0($a1)
		sw $v0, 0($a3)
		
		addi $a1, $a1, 4
		addi $a3, $a3, 4
		addi $a2, $a2, -4
		
		bgtz $a2, bootinfo_loop
		nop			# branch delay slot
		
	bootinfo_end:
	
	jal arch_pre_main
	nop			# branch delay slot
	
	j main_bsp
	nop			# branch delay slot

	# Temporary boot stack grows down from end_stack.
	.space TEMP_STACK_SIZE
end_stack:
197
 
222
 
198
# Exception vector trampolines: each vector only jumps to its real
# handler; the nop fills the mandatory branch delay slot.
tlb_refill_entry:
	j tlb_refill_handler
	nop

cache_error_entry:
	j cache_error_handler
	nop

exception_entry:
	j exception_handler
	nop	
209
 
234
 
210
	
235
	
211
	
236
	
212
# General exception handler: carve a register-save frame off the kernel
# stack, decode the exception code from CP0 Cause, shortcut syscalls,
# and dispatch everything else to the C-level exc_dispatch().
exception_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	sw $sp,EOFFSET_SP($k0)		# remember pre-exception $sp in the frame
	move $sp, $k0
	
	mfc0 $k0, $cause
	
	sra $k0, $k0, 0x2     # cp0_exc_cause() part 1
	andi $k0, $k0, 0x1f   # cp0_exc_cause() part 2
	sub $k0, 8            # 8=SYSCALL
	
	beqz $k0, syscall_shortcut
	add $k0, 8            # delay slot: revert $k0 back to correct exc number
	
	REGISTERS_STORE_AND_EXC_RESET $sp
	
	move $a1, $sp
	jal exc_dispatch      # exc_dispatch(excno, register_space)
	move $a0, $k0         # delay slot: first argument = exception number
	
	REGISTERS_LOAD $sp
	# The $sp is automatically restored to former value
	eret
236
 
261
 
237
# it seems that mips reserves some space on stack for varfuncs???
#define SS_ARG4   16
#define SS_SP     EOFFSET_SP
#define SS_STATUS EOFFSET_STATUS
#define SS_EPC    EOFFSET_EPC
#define SS_K1     EOFFSET_K1

# Fast syscall path: avoids the full REGISTERS_STORE/LOAD round trip and
# only preserves EPC, $k1 and the thread-local Status bits.
# Entered from exception_handler with $sp already pointing at the frame.
syscall_shortcut:
	# We have a lot of space on the stack, with free use
	mfc0 $t1, $epc
	mfc0 $t0, $status
	sw $t1,SS_EPC($sp)  # Save EPC
	sw $k1,SS_K1($sp)   # Save k1, which is not saved during context switch
	
	and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE
	li $t3, ~(0x1f)
	and $t0, $t0, $t3           # Clear KSU,EXL,ERL
	ori $t0, $t0, 0x1           # Set IE — run the handler with interrupts on

	sw $t2,SS_STATUS($sp)
	mtc0 $t0, $status

	# CALL Syscall handler
	jal syscall_handler
	sw $v0, SS_ARG4($sp)        # delay slot: save v0 - arg4 to stack

	# restore status
	mfc0 $t0, $status
	lw $t1,SS_STATUS($sp)

	# Change back to EXL=1(from last exception), otherwise
	# an interrupt could rewrite the CP0-EPC
	li $t2, ~REG_SAVE_MASK      # Mask UM,EXL,ERL,IE
	and $t0, $t0, $t2
	or $t0, $t0, $t1            # Copy UM,EXL,ERL,IE from saved status
	mtc0 $t0, $status
			
	# restore epc+4 — resume AFTER the syscall instruction
	lw $t0,SS_EPC($sp)
	lw $k1,SS_K1($sp)
	addi $t0, $t0, 4
	mtc0 $t0, $epc
	
	lw $sp,SS_SP($sp) # restore sp
	
	eret
282
		
307
		
283
# TLB refill exception: save full register context on the kernel stack
# and hand off to the C-level tlb_refill() with the frame as argument.
tlb_refill_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp,EOFFSET_SP($k0)		# pre-exception $sp goes into the frame
	add $sp, $k0, 0

	jal tlb_refill /* tlb_refill(register_space) */
	add $a0, $sp, 0 	# delay slot: first argument = frame pointer

	REGISTERS_LOAD $sp

	eret
296
 
321
 
297
# Cache error exception: same frame setup as tlb_refill_handler, then
# dispatch to the C-level cache_error() routine.
cache_error_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp,EOFFSET_SP($k0)		# pre-exception $sp goes into the frame
	add $sp, $k0, 0

	jal cache_error
	nop			# branch delay slot

	REGISTERS_LOAD $sp

	eret
310
 
335
 
311
# Enter userspace.
# In: $a0 = userspace stack pointer
#     $a1 = value handed to userspace in $v0 — presumably the uspace
#           argument; confirm against the C caller
#     $a2 = userspace entry point (for PIC code, via $t9)
userspace_asm:
	add $sp, $a0, 0
	add $v0, $a1, 0 
	add $t9, $a2, 0   # Set up correct entry into PIC code 
	# eret resumes at CP0 EPC with pre-exception mode bits —
	# NOTE(review): EPC/Status are presumably prepared by the caller.
	eret
316
 
341