Subversion Repositories HelenOS

Rev 3022 → Rev 4055
#
# Copyright (c) 2003-2004 Jakub Jermar
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
# - The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

#include <arch/asm/regname.h>
#include <arch/mm/page.h>
#include <arch/asm/boot.h>
#include <arch/context_offset.h>
#include <arch/stack.h>

.text

.set noat
.set noreorder
.set nomacro
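
# Note: with .set noreorder in effect, the assembler does not fill branch
# delay slots for us. Throughout this file the instruction that textually
# follows a branch or jump (often a nop, sometimes an argument setup)
# executes in the delay slot, i.e. before control reaches the branch target.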

.global kernel_image_start
.global tlb_refill_entry
.global cache_error_entry
.global exception_entry
.global userspace_asm

# Which status bits are thread-local
#define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE
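#
# For reference, a sketch of the low Status bits this mask covers,
# assuming the standard MIPS32 CP0 Status layout:
#
#   bit 0    IE  - global interrupt enable
#   bit 1    EXL - exception level
#   bit 2    ERL - error level
#   bits 3-4 KSU - operating mode (bit 4 is the UM/user-mode bit)
#
# 0x1f selects exactly these five bits.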

# Save registers to the space pointed to by \r.
# We will change status: disable ERL, EXL, UM, IE.
# These changes will be automatically reversed in REGISTERS_LOAD.
# SP is NOT saved as part of these registers.
.macro REGISTERS_STORE_AND_EXC_RESET r
	sw $at, EOFFSET_AT(\r)
	sw $v0, EOFFSET_V0(\r)
	sw $v1, EOFFSET_V1(\r)
	sw $a0, EOFFSET_A0(\r)
	sw $a1, EOFFSET_A1(\r)
	sw $a2, EOFFSET_A2(\r)
	sw $a3, EOFFSET_A3(\r)
	sw $t0, EOFFSET_T0(\r)
	sw $t1, EOFFSET_T1(\r)
	sw $t2, EOFFSET_T2(\r)
	sw $t3, EOFFSET_T3(\r)
	sw $t4, EOFFSET_T4(\r)
	sw $t5, EOFFSET_T5(\r)
	sw $t6, EOFFSET_T6(\r)
	sw $t7, EOFFSET_T7(\r)
	sw $t8, EOFFSET_T8(\r)
	sw $t9, EOFFSET_T9(\r)

	mflo $at
	sw $at, EOFFSET_LO(\r)
	mfhi $at
	sw $at, EOFFSET_HI(\r)

# (Rev 3022 only; dropped in Rev 4055)
#ifdef CONFIG_DEBUG_ALLREGS
	sw $s0, EOFFSET_S0(\r)
	sw $s1, EOFFSET_S1(\r)
	sw $s2, EOFFSET_S2(\r)
	sw $s3, EOFFSET_S3(\r)
	sw $s4, EOFFSET_S4(\r)
	sw $s5, EOFFSET_S5(\r)
	sw $s6, EOFFSET_S6(\r)
	sw $s7, EOFFSET_S7(\r)
	sw $s8, EOFFSET_S8(\r)
#endif

	sw $gp, EOFFSET_GP(\r)
	sw $ra, EOFFSET_RA(\r)
	sw $k1, EOFFSET_K1(\r)

	mfc0 $t0, $status
	mfc0 $t1, $epc

	and $t2, $t0, REG_SAVE_MASK	# Save only KSU,EXL,ERL,IE
	li $t3, ~(0x1f)
	and $t0, $t0, $t3		# Clear KSU,EXL,ERL,IE

	sw $t2,EOFFSET_STATUS(\r)
	sw $t1,EOFFSET_EPC(\r)
	mtc0 $t0, $status
.endm

.macro REGISTERS_LOAD r
	# Update only UM, EXL, ERL, IE from the saved status; the rest
	# is controlled by the OS and not bound to the task
	mfc0 $t0, $status
	lw $t1,EOFFSET_STATUS(\r)

	li $t2, ~REG_SAVE_MASK		# Mask UM,EXL,ERL,IE
	and $t0, $t0, $t2

	or $t0, $t0, $t1		# Copy UM,EXL, ERL, IE from saved status
	mtc0 $t0, $status

	lw $v0, EOFFSET_V0(\r)
	lw $v1, EOFFSET_V1(\r)
	lw $a0, EOFFSET_A0(\r)
	lw $a1, EOFFSET_A1(\r)
	lw $a2, EOFFSET_A2(\r)
	lw $a3, EOFFSET_A3(\r)
	lw $t0, EOFFSET_T0(\r)
	lw $t1, EOFFSET_T1(\r)
	lw $t2, EOFFSET_T2(\r)
	lw $t3, EOFFSET_T3(\r)
	lw $t4, EOFFSET_T4(\r)
	lw $t5, EOFFSET_T5(\r)
	lw $t6, EOFFSET_T6(\r)
	lw $t7, EOFFSET_T7(\r)
	lw $t8, EOFFSET_T8(\r)
	lw $t9, EOFFSET_T9(\r)

# (Rev 3022 only; dropped in Rev 4055)
#ifdef CONFIG_DEBUG_ALLREGS
	lw $s0, EOFFSET_S0(\r)
	lw $s1, EOFFSET_S1(\r)
	lw $s2, EOFFSET_S2(\r)
	lw $s3, EOFFSET_S3(\r)
	lw $s4, EOFFSET_S4(\r)
	lw $s5, EOFFSET_S5(\r)
	lw $s6, EOFFSET_S6(\r)
	lw $s7, EOFFSET_S7(\r)
	lw $s8, EOFFSET_S8(\r)
#endif
	lw $gp, EOFFSET_GP(\r)
	lw $ra, EOFFSET_RA(\r)
	lw $k1, EOFFSET_K1(\r)

	lw $at, EOFFSET_LO(\r)
	mtlo $at
	lw $at, EOFFSET_HI(\r)
	mthi $at

	lw $at, EOFFSET_EPC(\r)
	mtc0 $at, $epc

	lw $at, EOFFSET_AT(\r)
	lw $sp, EOFFSET_SP(\r)
.endm

# Move the kernel stack pointer address to register $k0.
# If we are in user mode, load the appropriate kernel stack
# address instead.
.macro KERNEL_STACK_TO_K0
	# If we are in user mode
	mfc0 $k0, $status
	andi $k0, 0x10

	beq $k0, $0, 1f
	add $k0, $sp, 0

	# Move $k0 pointer to kernel stack
	lui $k0, %hi(supervisor_sp)
	ori $k0, $k0, %lo(supervisor_sp)
	# Load $k0 with the value of supervisor_sp
	lw $k0, 0($k0)
1:
.endm
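
# After this macro, $k0 holds the stack pointer the handler should use:
# the current $sp when the exception came from kernel mode, or the value
# of supervisor_sp when it came from user mode.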

.org 0x0
kernel_image_start:
	/* Load temporary stack */
	lui $sp, %hi(end_stack)
	ori $sp, $sp, %lo(end_stack)

	# (Rev 3022:)
	/* $a1 contains physical address of bootinfo_t */
	/* $a2 contains size of bootinfo_t */

	beq $a2, $0, bootinfo_end

	/* Not sure about this, but might be needed for PIC code???? */
	lui $gp, 0x8000

	lui $a3, %hi(bootinfo)
	ori $a3, $a3, %lo(bootinfo)

	bootinfo_loop:

		lw $v0, 0($a1)
		sw $v0, 0($a3)

		addi $a1, $a1, 4
		addi $a3, $a3, 4
		addi $a2, $a2, -4

		bgtz $a2, bootinfo_loop
		nop

	bootinfo_end:

	# (Rev 4055:)
	/* Not sure about this, but might
	   be needed for PIC code */
	lui $gp, 0x8000

	/* $a1 contains physical address of bootinfo_t */

	jal arch_pre_main
	nop

	j main_bsp
	nop

	.space TEMP_STACK_SIZE
end_stack:

tlb_refill_entry:
	j tlb_refill_handler
	nop

cache_error_entry:
	j cache_error_handler
	nop

exception_entry:
	j exception_handler
	nop

exception_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	sw $sp, EOFFSET_SP($k0)
	move $sp, $k0

	mfc0 $k0, $cause
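
	# The ExcCode field of the CP0 Cause register (bits 6..2 in the
	# standard MIPS32 layout) identifies the exception; code 8 is a
	# system call. The shift and mask below extract that field.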

	sra $k0, $k0, 0x2    # cp0_exc_cause() part 1
	andi $k0, $k0, 0x1f  # cp0_exc_cause() part 2
	sub $k0, 8           # 8 = SYSCALL

	beqz $k0, syscall_shortcut
	add $k0, 8           # Revert $k0 back to correct exc number

	REGISTERS_STORE_AND_EXC_RESET $sp

	move $a1, $sp
	jal exc_dispatch     # exc_dispatch(excno, register_space)
	move $a0, $k0

	REGISTERS_LOAD $sp
	# The $sp is automatically restored to former value
	eret

## Syscall entry
#
# Registers:
#
# @param v0		Syscall number.
# @param a0		1st argument.
# @param a1		2nd argument.
# @param a2		3rd argument.
# @param a3		4th argument.
# @param t0		5th argument.
# @param t1		6th argument.
#
# @return		The return value will be stored in v0.
#
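# For illustration, a hypothetical userspace invocation following the
# convention above (a sketch only; real syscall numbers and stubs live
# elsewhere in the tree):
#
#	li	$v0, 42			# syscall number (placeholder value)
#	move	$a0, $s0		# 1st argument
#	move	$a1, $s1		# 2nd argument
#	syscall
#	# on return, $v0 holds the result
#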
#define SS_SP		EOFFSET_SP
#define SS_STATUS	EOFFSET_STATUS
#define SS_EPC		EOFFSET_EPC
#define SS_K1		EOFFSET_K1
syscall_shortcut:
	# We have plenty of space on the stack and may use it freely
	mfc0 $t3, $epc
	mfc0 $t2, $status
	sw $t3, SS_EPC($sp)		# Save EPC
	sw $k1, SS_K1($sp)		# Save k1 not saved on context switch

	and $t4, $t2, REG_SAVE_MASK	# Save only KSU, EXL, ERL, IE
	li $t5, ~(0x1f)
	and $t2, $t2, $t5		# Clear KSU, EXL, ERL
	ori $t2, $t2, 0x1		# Set IE

	sw $t4, SS_STATUS($sp)
	mtc0 $t2, $status

	#
	# Call the higher level system call handler
	# We are going to reuse part of the unused exception stack frame
	#
	sw $t0, STACK_ARG4($sp)		# save the 5th argument on the stack
	sw $t1, STACK_ARG5($sp)		# save the 6th argument on the stack
	jal syscall_handler
	sw $v0, STACK_ARG6($sp)		# save the syscall number on the stack

	# restore status
	mfc0 $t2, $status
	lw $t3, SS_STATUS($sp)

	# Change back to EXL = 1 (from last exception), otherwise
	# an interrupt could rewrite the CP0 - EPC
	li $t4, ~REG_SAVE_MASK		# Mask UM, EXL, ERL, IE
	and $t2, $t2, $t4
	or $t2, $t2, $t3		# Copy saved UM, EXL, ERL, IE
	mtc0 $t2, $status

	# restore epc + 4
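	# (EPC still points at the syscall instruction itself; adding 4 makes
	# eret resume at the following instruction instead of re-issuing the
	# syscall. This assumes the syscall was not issued from a branch
	# delay slot.)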
	lw $t2, SS_EPC($sp)
	lw $k1, SS_K1($sp)
	addi $t2, $t2, 4
	mtc0 $t2, $epc

	lw $sp, SS_SP($sp)		# restore sp

	eret

tlb_refill_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp,EOFFSET_SP($k0)
	add $sp, $k0, 0

	jal tlb_refill
	add $a0, $sp, 0

	REGISTERS_LOAD $sp

	eret

cache_error_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp,EOFFSET_SP($k0)
	add $sp, $k0, 0

	jal cache_error
	add $a0, $sp, 0

	REGISTERS_LOAD $sp

	eret

userspace_asm:
	add $sp, $a0, 0
	add $v0, $a1, 0
	add $t9, $a2, 0			# Set up correct entry into PIC code
	# (Rev 4055 only:)
	xor $a0, $a0, $a0		# $a0 is defined to hold pcb_ptr
					# set it to 0
	eret