Subversion Repositories HelenOS

Diff of Rev 3890 against Rev 3922. The two revisions are identical except for
one region of kernel_image_start, which is shown below in both forms, each
marked with its revision number.
#
# Copyright (c) 2003-2004 Jakub Jermar
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
# - The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

#include <arch/asm/regname.h>
#include <arch/mm/page.h>
#include <arch/asm/boot.h>
#include <arch/context_offset.h>
#include <arch/stack.h>

.text

.set noat
.set noreorder
.set nomacro

.global kernel_image_start
.global tlb_refill_entry
.global cache_error_entry
.global exception_entry
.global userspace_asm

# Which status bits are thread-local
#define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE
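
# As a reading aid (a summary of the MIPS32 Status register layout that this
# file assumes, not logic of its own), the five bits covered by REG_SAVE_MASK
# are:
#
#	bit 0    IE  - global interrupt enable
#	bit 1    EXL - exception level, set on exception entry
#	bit 2    ERL - error level, set on reset/NMI/cache error
#	bits 3-4 KSU - processor mode; the UM bit is bit 4 (0x10), tested in
#	               KERNEL_STACK_TO_K0 below
#
# Hence 0x1f selects exactly the mode/exception/interrupt state that belongs
# to the running thread, while the remaining Status bits stay kernel-global.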

# Save registers to the space addressed by \r
# We will change status: disable ERL, EXL, UM, IE
# These changes will be automatically reversed in REGISTERS_LOAD
# SP is NOT saved as part of these registers
.macro REGISTERS_STORE_AND_EXC_RESET r
	sw $at, EOFFSET_AT(\r)
	sw $v0, EOFFSET_V0(\r)
	sw $v1, EOFFSET_V1(\r)
	sw $a0, EOFFSET_A0(\r)
	sw $a1, EOFFSET_A1(\r)
	sw $a2, EOFFSET_A2(\r)
	sw $a3, EOFFSET_A3(\r)
	sw $t0, EOFFSET_T0(\r)
	sw $t1, EOFFSET_T1(\r)
	sw $t2, EOFFSET_T2(\r)
	sw $t3, EOFFSET_T3(\r)
	sw $t4, EOFFSET_T4(\r)
	sw $t5, EOFFSET_T5(\r)
	sw $t6, EOFFSET_T6(\r)
	sw $t7, EOFFSET_T7(\r)
	sw $t8, EOFFSET_T8(\r)
	sw $t9, EOFFSET_T9(\r)

	mflo $at
	sw $at, EOFFSET_LO(\r)
	mfhi $at
	sw $at, EOFFSET_HI(\r)

	sw $gp, EOFFSET_GP(\r)
	sw $ra, EOFFSET_RA(\r)
	sw $k1, EOFFSET_K1(\r)

	mfc0 $t0, $status
	mfc0 $t1, $epc

	and $t2, $t0, REG_SAVE_MASK	# Save only KSU, EXL, ERL, IE
	li $t3, ~(0x1f)
	and $t0, $t0, $t3		# Clear KSU, EXL, ERL, IE

	sw $t2, EOFFSET_STATUS(\r)
	sw $t1, EOFFSET_EPC(\r)
	mtc0 $t0, $status
.endm

.macro REGISTERS_LOAD r
	# Update only KSU(UM), EXL, ERL, IE from the saved status; the rest
	# is controlled by the OS and not bound to the task
	mfc0 $t0, $status
	lw $t1, EOFFSET_STATUS(\r)

	li $t2, ~REG_SAVE_MASK		# Mask UM, EXL, ERL, IE
	and $t0, $t0, $t2

	or $t0, $t0, $t1		# Copy UM, EXL, ERL, IE from saved status
	mtc0 $t0, $status

	lw $v0, EOFFSET_V0(\r)
	lw $v1, EOFFSET_V1(\r)
	lw $a0, EOFFSET_A0(\r)
	lw $a1, EOFFSET_A1(\r)
	lw $a2, EOFFSET_A2(\r)
	lw $a3, EOFFSET_A3(\r)
	lw $t0, EOFFSET_T0(\r)
	lw $t1, EOFFSET_T1(\r)
	lw $t2, EOFFSET_T2(\r)
	lw $t3, EOFFSET_T3(\r)
	lw $t4, EOFFSET_T4(\r)
	lw $t5, EOFFSET_T5(\r)
	lw $t6, EOFFSET_T6(\r)
	lw $t7, EOFFSET_T7(\r)
	lw $t8, EOFFSET_T8(\r)
	lw $t9, EOFFSET_T9(\r)

	lw $gp, EOFFSET_GP(\r)
	lw $ra, EOFFSET_RA(\r)
	lw $k1, EOFFSET_K1(\r)

	lw $at, EOFFSET_LO(\r)
	mtlo $at
	lw $at, EOFFSET_HI(\r)
	mthi $at

	lw $at, EOFFSET_EPC(\r)
	mtc0 $at, $epc

	lw $at, EOFFSET_AT(\r)
	lw $sp, EOFFSET_SP(\r)
.endm

# Move the kernel stack pointer address to register $k0
# - if we are in user mode, load the appropriate stack address
.macro KERNEL_STACK_TO_K0
	# If we are in user mode
	mfc0 $k0, $status
	andi $k0, 0x10

	beq $k0, $0, 1f
	add $k0, $sp, 0

	# Point $k0 at the kernel stack
	lui $k0, %hi(supervisor_sp)
	ori $k0, $k0, %lo(supervisor_sp)
	# Dereference $k0 (load the value of supervisor_sp)
	lw $k0, 0($k0)
1:
.endm
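
# Note on KERNEL_STACK_TO_K0: because the file is assembled with
# .set noreorder, "add $k0, $sp, 0" sits in the delay slot of the beq and
# executes on both paths. If the CPU was already in kernel mode (the UM bit,
# 0x10, is clear), the branch to 1: is taken and $k0 keeps the current $sp;
# otherwise $k0 is overwritten with the value loaded from supervisor_sp.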

.org 0x0
kernel_image_start:
	/* Load temporary stack */
	lui $sp, %hi(end_stack)
	ori $sp, $sp, %lo(end_stack)

	# Rev 3890:

	/* $a1 contains physical address of bootinfo_t */
	/* $a2 contains size of bootinfo_t */

	beq $a2, $0, bootinfo_end

	/* Not sure about this, but might be needed for PIC code???? */
	lui $gp, 0x8000

	lui $a3, %hi(bootinfo)
	ori $a3, $a3, %lo(bootinfo)

	bootinfo_loop:

		lw $v0, 0($a1)
		sw $v0, 0($a3)

		addi $a1, $a1, 4
		addi $a3, $a3, 4
		addi $a2, $a2, -4

		bgtz $a2, bootinfo_loop
		nop

	bootinfo_end:

	# Rev 3922:

	/* Not sure about this, but might
	   be needed for PIC code */
	lui $gp, 0x8000

	/* $a1 contains physical address of bootinfo_t */

	jal arch_pre_main
	nop

	j main_bsp
	nop

	.space TEMP_STACK_SIZE
end_stack:

tlb_refill_entry:
	j tlb_refill_handler
	nop

cache_error_entry:
	j cache_error_handler
	nop

exception_entry:
	j exception_handler
	nop

exception_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	sw $sp, EOFFSET_SP($k0)
	move $sp, $k0

	mfc0 $k0, $cause

	sra $k0, $k0, 0x2    # cp0_exc_cause() part 1
	andi $k0, $k0, 0x1f  # cp0_exc_cause() part 2
	sub $k0, 8           # 8 = SYSCALL

	beqz $k0, syscall_shortcut
	add $k0, 8           # Revert $k0 back to correct exc number

	REGISTERS_STORE_AND_EXC_RESET $sp

	move $a1, $sp
	jal exc_dispatch     # exc_dispatch(excno, register_space)
	move $a0, $k0

	REGISTERS_LOAD $sp
	# The $sp is automatically restored to its former value
	eret
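
# A reading note for exception_handler above: with .set noreorder in effect,
# the instruction after each branch or jump is its delay slot and executes
# before control transfers. That is why "add $k0, 8" still runs when the
# branch to syscall_shortcut is taken, and why "move $a0, $k0" after
# "jal exc_dispatch" is what loads the first argument before exc_dispatch
# starts executing.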

## Syscall entry
#
# Registers:
#
# @param v0		Syscall number.
# @param a0		1st argument.
# @param a1		2nd argument.
# @param a2		3rd argument.
# @param a3		4th argument.
# @param t0		5th argument.
# @param t1		6th argument.
#
# @return		The return value will be stored in v0.
#
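# As an illustration of the convention above (a sketch only, not code from
# this file; the bracketed operands are placeholders), a userspace caller
# would do roughly:
#
#	li	$v0, <syscall number>
#	move	$a0, <arg1>		# ... $a1-$a3 carry arguments 2-4
#	move	$t0, <arg5>
#	move	$t1, <arg6>
#	syscall
#	nop
#	# the return value is now in $v0
#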
#define SS_SP		EOFFSET_SP
#define SS_STATUS	EOFFSET_STATUS
#define SS_EPC		EOFFSET_EPC
#define SS_K1		EOFFSET_K1
syscall_shortcut:
	# There is plenty of unused space on the exception stack frame
	# that we may use freely here
	mfc0 $t3, $epc
	mfc0 $t2, $status
	sw $t3, SS_EPC($sp)		# Save EPC
	sw $k1, SS_K1($sp)		# Save $k1, which is not saved on context switch

	and $t4, $t2, REG_SAVE_MASK	# Save only KSU, EXL, ERL, IE
	li $t5, ~(0x1f)
	and $t2, $t2, $t5		# Clear KSU, EXL, ERL
	ori $t2, $t2, 0x1		# Set IE

	sw $t4, SS_STATUS($sp)
	mtc0 $t2, $status

	#
	# Call the higher level system call handler
	# We are going to reuse part of the unused exception stack frame
	#
	sw $t0, STACK_ARG4($sp)		# save the 5th argument on the stack
	sw $t1, STACK_ARG5($sp)		# save the 6th argument on the stack
	jal syscall_handler
	sw $v0, STACK_ARG6($sp)		# save the syscall number on the stack

	# restore status
	mfc0 $t2, $status
	lw $t3, SS_STATUS($sp)

	# Change back to EXL = 1 (from the last exception), otherwise
	# an interrupt could rewrite the CP0 EPC
	li $t4, ~REG_SAVE_MASK		# Mask UM, EXL, ERL, IE
	and $t2, $t2, $t4
	or $t2, $t2, $t3		# Copy saved UM, EXL, ERL, IE
	mtc0 $t2, $status

	# restore EPC + 4, so that eret does not re-execute the syscall instruction
	lw $t2, SS_EPC($sp)
	lw $k1, SS_K1($sp)
	addi $t2, $t2, 4
	mtc0 $t2, $epc

	lw $sp, SS_SP($sp)		# restore sp

	eret

tlb_refill_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp, EOFFSET_SP($k0)
	add $sp, $k0, 0

	jal tlb_refill
	add $a0, $sp, 0

	REGISTERS_LOAD $sp

	eret

cache_error_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp, EOFFSET_SP($k0)
	add $sp, $k0, 0

	jal cache_error
	add $a0, $sp, 0

	REGISTERS_LOAD $sp

	eret
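
# userspace_asm below is the one-way transition to user space, not a
# subroutine: it does not set CP0 EPC or Status itself, so those are
# presumably prepared by the C code that calls it, and eret then drops to
# the user entry point. $t9 receives the entry address because, under the
# MIPS position-independent calling convention, a PIC function expects its
# own address in $t9 in order to compute $gp.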

userspace_asm:
	add $sp, $a0, 0
	add $v0, $a1, 0
	add $t9, $a2, 0			# Set up correct entry into PIC code
	xor $a0, $a0, $a0		# $a0 is defined to hold pcb_ptr
					# set it to 0
	eret