Subversion Repositories HelenOS-historic

Rev

Rev 1293 | Rev 1686 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1293 Rev 1386
1
#
1
#
2
# Copyright (C) 2003-2004 Jakub Jermar
2
# Copyright (C) 2003-2004 Jakub Jermar
3
# All rights reserved.
3
# All rights reserved.
4
#
4
#
5
# Redistribution and use in source and binary forms, with or without
5
# Redistribution and use in source and binary forms, with or without
6
# modification, are permitted provided that the following conditions
6
# modification, are permitted provided that the following conditions
7
# are met:
7
# are met:
8
#
8
#
9
# - Redistributions of source code must retain the above copyright
9
# - Redistributions of source code must retain the above copyright
10
#   notice, this list of conditions and the following disclaimer.
10
#   notice, this list of conditions and the following disclaimer.
11
# - Redistributions in binary form must reproduce the above copyright
11
# - Redistributions in binary form must reproduce the above copyright
12
#   notice, this list of conditions and the following disclaimer in the
12
#   notice, this list of conditions and the following disclaimer in the
13
#   documentation and/or other materials provided with the distribution.
13
#   documentation and/or other materials provided with the distribution.
14
# - The name of the author may not be used to endorse or promote products
14
# - The name of the author may not be used to endorse or promote products
15
#   derived from this software without specific prior written permission.
15
#   derived from this software without specific prior written permission.
16
#
16
#
17
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
#
27
#
28
 
28
 
29
#include <arch/asm/regname.h>
29
#include <arch/asm/regname.h>
30
#include <arch/mm/page.h>
30
#include <arch/mm/page.h>
31
#include <arch/asm/boot.h>
31
#include <arch/asm/boot.h>
32
#include <arch/context_offset.h>
32
#include <arch/context_offset.h>
33
	
33
	
34
.text
34
.text
35
 
35
 
36
.set noat
36
.set noat
37
.set noreorder
37
.set noreorder
38
.set nomacro
38
.set nomacro
39
 
39
 
40
.global kernel_image_start
40
.global kernel_image_start
41
.global tlb_refill_entry
41
.global tlb_refill_entry
42
.global cache_error_entry
42
.global cache_error_entry
43
.global exception_entry
43
.global exception_entry
44
.global userspace_asm
44
.global userspace_asm
45
 
45
 
46
# Which status bits are thread-local
46
# Which status bits are thread-local
47
#define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE
47
#define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE
48
	
48
	
49
# Save registers to space defined by \r
49
# Save registers to space defined by \r
50
# We will change status: Disable ERL,EXL,UM,IE
50
# We will change status: Disable ERL,EXL,UM,IE
51
# These changes will be automatically reversed in REGISTER_LOAD
51
# These changes will be automatically reversed in REGISTER_LOAD
52
# SP is NOT saved as part of these registers
52
# SP is NOT saved as part of these registers
53
.macro REGISTERS_STORE_AND_EXC_RESET r
53
.macro REGISTERS_STORE_AND_EXC_RESET r
54
	sw $at,EOFFSET_AT(\r)
54
	sw $at,EOFFSET_AT(\r)
55
	sw $v0,EOFFSET_V0(\r)
55
	sw $v0,EOFFSET_V0(\r)
56
	sw $v1,EOFFSET_V1(\r)
56
	sw $v1,EOFFSET_V1(\r)
57
	sw $a0,EOFFSET_A0(\r)
57
	sw $a0,EOFFSET_A0(\r)
58
	sw $a1,EOFFSET_A1(\r)
58
	sw $a1,EOFFSET_A1(\r)
59
	sw $a2,EOFFSET_A2(\r)
59
	sw $a2,EOFFSET_A2(\r)
60
	sw $a3,EOFFSET_A3(\r)
60
	sw $a3,EOFFSET_A3(\r)
61
	sw $t0,EOFFSET_T0(\r)
61
	sw $t0,EOFFSET_T0(\r)
62
	sw $t1,EOFFSET_T1(\r)
62
	sw $t1,EOFFSET_T1(\r)
63
	sw $t2,EOFFSET_T2(\r)
63
	sw $t2,EOFFSET_T2(\r)
64
	sw $t3,EOFFSET_T3(\r)
64
	sw $t3,EOFFSET_T3(\r)
65
	sw $t4,EOFFSET_T4(\r)
65
	sw $t4,EOFFSET_T4(\r)
66
	sw $t5,EOFFSET_T5(\r)
66
	sw $t5,EOFFSET_T5(\r)
67
	sw $t6,EOFFSET_T6(\r)
67
	sw $t6,EOFFSET_T6(\r)
68
	sw $t7,EOFFSET_T7(\r)
68
	sw $t7,EOFFSET_T7(\r)
69
	sw $t8,EOFFSET_T8(\r)
69
	sw $t8,EOFFSET_T8(\r)
70
	sw $t9,EOFFSET_T9(\r)
70
	sw $t9,EOFFSET_T9(\r)
71
 
71
 
72
	mflo $at
72
	mflo $at
73
	sw $at, EOFFSET_LO(\r)
73
	sw $at, EOFFSET_LO(\r)
74
	mfhi $at
74
	mfhi $at
75
	sw $at, EOFFSET_HI(\r)
75
	sw $at, EOFFSET_HI(\r)
76
	
76
	
77
#ifdef CONFIG_DEBUG_ALLREGS	
77
#ifdef CONFIG_DEBUG_ALLREGS	
78
	sw $s0,EOFFSET_S0(\r)
78
	sw $s0,EOFFSET_S0(\r)
79
	sw $s1,EOFFSET_S1(\r)
79
	sw $s1,EOFFSET_S1(\r)
80
	sw $s2,EOFFSET_S2(\r)
80
	sw $s2,EOFFSET_S2(\r)
81
	sw $s3,EOFFSET_S3(\r)
81
	sw $s3,EOFFSET_S3(\r)
82
	sw $s4,EOFFSET_S4(\r)
82
	sw $s4,EOFFSET_S4(\r)
83
	sw $s5,EOFFSET_S5(\r)
83
	sw $s5,EOFFSET_S5(\r)
84
	sw $s6,EOFFSET_S6(\r)
84
	sw $s6,EOFFSET_S6(\r)
85
	sw $s7,EOFFSET_S7(\r)
85
	sw $s7,EOFFSET_S7(\r)
86
	sw $s8,EOFFSET_S8(\r)
86
	sw $s8,EOFFSET_S8(\r)
87
#endif
87
#endif
88
	
88
	
89
	sw $gp,EOFFSET_GP(\r)
89
	sw $gp,EOFFSET_GP(\r)
90
	sw $ra,EOFFSET_RA(\r)
90
	sw $ra,EOFFSET_RA(\r)
91
	sw $k1,EOFFSET_K1(\r)
91
	sw $k1,EOFFSET_K1(\r)
92
 
92
 
93
	mfc0 $t0, $status
93
	mfc0 $t0, $status
94
	mfc0 $t1, $epc
94
	mfc0 $t1, $epc
95
	
95
	
96
	and $t2, $t0, REG_SAVE_MASK  # Save only KSU,EXL,ERL,IE
96
	and $t2, $t0, REG_SAVE_MASK  # Save only KSU,EXL,ERL,IE
97
	li $t3, ~(0x1f)
97
	li $t3, ~(0x1f)
98
	and $t0, $t0, $t3           # Clear KSU,EXL,ERL,IE
98
	and $t0, $t0, $t3           # Clear KSU,EXL,ERL,IE
99
	
99
	
100
	sw $t2,EOFFSET_STATUS(\r)
100
	sw $t2,EOFFSET_STATUS(\r)
101
	sw $t1,EOFFSET_EPC(\r)
101
	sw $t1,EOFFSET_EPC(\r)
102
	mtc0 $t0, $status
102
	mtc0 $t0, $status
103
.endm
103
.endm
104
 
104
 
105
.macro REGISTERS_LOAD r
105
.macro REGISTERS_LOAD r
106
	# Update only UM,EXL,IE from status, the rest
106
	# Update only UM,EXL,IE from status, the rest
107
	# is controlled by OS and not bound to task
107
	# is controlled by OS and not bound to task
108
	mfc0 $t0, $status
108
	mfc0 $t0, $status
109
	lw $t1,EOFFSET_STATUS(\r)
109
	lw $t1,EOFFSET_STATUS(\r)
110
 
110
 
111
	li $t2, ~REG_SAVE_MASK    # Mask UM,EXL,ERL,IE
111
	li $t2, ~REG_SAVE_MASK    # Mask UM,EXL,ERL,IE
112
	and $t0, $t0, $t2
112
	and $t0, $t0, $t2
113
	
113
	
114
	or $t0, $t0, $t1   # Copy UM,EXL,ERL,IE from saved status
114
	or $t0, $t0, $t1   # Copy UM,EXL,ERL,IE from saved status
115
	mtc0 $t0, $status
115
	mtc0 $t0, $status
116
	
116
	
117
	lw $v0,EOFFSET_V0(\r)
117
	lw $v0,EOFFSET_V0(\r)
118
	lw $v1,EOFFSET_V1(\r)
118
	lw $v1,EOFFSET_V1(\r)
119
	lw $a0,EOFFSET_A0(\r)
119
	lw $a0,EOFFSET_A0(\r)
120
	lw $a1,EOFFSET_A1(\r)
120
	lw $a1,EOFFSET_A1(\r)
121
	lw $a2,EOFFSET_A2(\r)
121
	lw $a2,EOFFSET_A2(\r)
122
	lw $a3,EOFFSET_A3(\r)
122
	lw $a3,EOFFSET_A3(\r)
123
	lw $t0,EOFFSET_T0(\r)
123
	lw $t0,EOFFSET_T0(\r)
124
	lw $t1,EOFFSET_T1(\r)
124
	lw $t1,EOFFSET_T1(\r)
125
	lw $t2,EOFFSET_T2(\r)
125
	lw $t2,EOFFSET_T2(\r)
126
	lw $t3,EOFFSET_T3(\r)
126
	lw $t3,EOFFSET_T3(\r)
127
	lw $t4,EOFFSET_T4(\r)
127
	lw $t4,EOFFSET_T4(\r)
128
	lw $t5,EOFFSET_T5(\r)
128
	lw $t5,EOFFSET_T5(\r)
129
	lw $t6,EOFFSET_T6(\r)
129
	lw $t6,EOFFSET_T6(\r)
130
	lw $t7,EOFFSET_T7(\r)
130
	lw $t7,EOFFSET_T7(\r)
131
	lw $t8,EOFFSET_T8(\r)
131
	lw $t8,EOFFSET_T8(\r)
132
	lw $t9,EOFFSET_T9(\r)
132
	lw $t9,EOFFSET_T9(\r)
133
	
133
	
134
#ifdef CONFIG_DEBUG_ALLREGS	
134
#ifdef CONFIG_DEBUG_ALLREGS	
135
	lw $s0,EOFFSET_S0(\r)
135
	lw $s0,EOFFSET_S0(\r)
136
	lw $s1,EOFFSET_S1(\r)
136
	lw $s1,EOFFSET_S1(\r)
137
	lw $s2,EOFFSET_S2(\r)
137
	lw $s2,EOFFSET_S2(\r)
138
	lw $s3,EOFFSET_S3(\r)
138
	lw $s3,EOFFSET_S3(\r)
139
	lw $s4,EOFFSET_S4(\r)
139
	lw $s4,EOFFSET_S4(\r)
140
	lw $s5,EOFFSET_S5(\r)
140
	lw $s5,EOFFSET_S5(\r)
141
	lw $s6,EOFFSET_S6(\r)
141
	lw $s6,EOFFSET_S6(\r)
142
	lw $s7,EOFFSET_S7(\r)
142
	lw $s7,EOFFSET_S7(\r)
143
	lw $s8,EOFFSET_S8(\r)
143
	lw $s8,EOFFSET_S8(\r)
144
#endif
144
#endif
145
	lw $gp,EOFFSET_GP(\r)
145
	lw $gp,EOFFSET_GP(\r)
146
	lw $ra,EOFFSET_RA(\r)
146
	lw $ra,EOFFSET_RA(\r)
147
	lw $k1,EOFFSET_K1(\r)
147
	lw $k1,EOFFSET_K1(\r)
148
	
148
	
149
	lw $at,EOFFSET_LO(\r)
149
	lw $at,EOFFSET_LO(\r)
150
	mtlo $at
150
	mtlo $at
151
	lw $at,EOFFSET_HI(\r)
151
	lw $at,EOFFSET_HI(\r)
152
	mthi $at
152
	mthi $at
153
 
153
 
154
	lw $at,EOFFSET_EPC(\r)
154
	lw $at,EOFFSET_EPC(\r)
155
	mtc0 $at, $epc
155
	mtc0 $at, $epc
156
	
156
	
157
	lw $at,EOFFSET_AT(\r)
157
	lw $at,EOFFSET_AT(\r)
158
	lw $sp,EOFFSET_SP(\r)
158
	lw $sp,EOFFSET_SP(\r)
159
.endm
159
.endm
160
 
160
 
161
# Move kernel stack pointer address to register K0
161
# Move kernel stack pointer address to register K0
162
# - if we are in user mode, load the appropriate stack
162
# - if we are in user mode, load the appropriate stack
163
# address
163
# address
164
.macro KERNEL_STACK_TO_K0
164
.macro KERNEL_STACK_TO_K0
165
	# If we are in user mode
165
	# If we are in user mode
166
	mfc0 $k0, $status
166
	mfc0 $k0, $status
167
	andi $k0, 0x10
167
	andi $k0, 0x10
168
	
168
	
169
	beq $k0, $0, 1f
169
	beq $k0, $0, 1f
170
	add $k0, $sp, 0
170
	add $k0, $sp, 0
171
	
171
	
172
	# Move $k0 pointer to kernel stack
172
	# Move $k0 pointer to kernel stack
173
	lui $k0, %hi(supervisor_sp)
173
	lui $k0, %hi(supervisor_sp)
174
	ori $k0, $k0, %lo(supervisor_sp)
174
	ori $k0, $k0, %lo(supervisor_sp)
175
	# Move $k0 (supervisor_sp)
175
	# Move $k0 (supervisor_sp)
176
	lw $k0, 0($k0)
176
	lw $k0, 0($k0)
177
1:		
177
1:		
178
.endm
178
.endm
179
		
179
		
180
.org 0x0
180
.org 0x0
181
kernel_image_start:
181
kernel_image_start:
182
	/* Load temporary stack */
182
	/* Load temporary stack */
183
	lui $sp, %hi(end_stack)
183
	lui $sp, %hi(end_stack)
184
	ori $sp, $sp, %lo(end_stack)
184
	ori $sp, $sp, %lo(end_stack)
185
 
185
 
186
	/* Not sure about this, but might be needed for PIC code???? */
186
	/* Not sure about this, but might be needed for PIC code???? */
187
	lui $gp, 0x8000
187
	lui $gp, 0x8000
188
	
188
	
189
	jal arch_pre_main
189
	jal arch_pre_main
190
	nop
190
	nop
191
	
191
	
192
	j main_bsp
192
	j main_bsp
193
	nop
193
	nop
194
 
194
 
195
	.space TEMP_STACK_SIZE
195
	.space TEMP_STACK_SIZE
196
end_stack:
196
end_stack:
197
 
197
 
198
tlb_refill_entry:
198
tlb_refill_entry:
199
	j tlb_refill_handler
199
	j tlb_refill_handler
200
	nop
200
	nop
201
 
201
 
202
cache_error_entry:
202
cache_error_entry:
203
	j cache_error_handler
203
	j cache_error_handler
204
	nop
204
	nop
205
 
205
 
206
exception_entry:
206
exception_entry:
207
	j exception_handler
207
	j exception_handler
208
	nop	
208
	nop	
209
 
209
 
210
	
210
	
211
	
211
	
212
exception_handler:
212
exception_handler:
213
	KERNEL_STACK_TO_K0
213
	KERNEL_STACK_TO_K0
214
	sub $k0, REGISTER_SPACE
214
	sub $k0, REGISTER_SPACE
215
	sw $sp,EOFFSET_SP($k0)
215
	sw $sp,EOFFSET_SP($k0)
216
	move $sp, $k0
216
	move $sp, $k0
217
	
217
	
218
	mfc0 $k0, $cause
218
	mfc0 $k0, $cause
219
	
219
	
220
	sra $k0, $k0, 0x2     # cp0_exc_cause() part 1
220
	sra $k0, $k0, 0x2     # cp0_exc_cause() part 1
221
	andi $k0, $k0, 0x1f   # cp0_exc_cause() part 2
221
	andi $k0, $k0, 0x1f   # cp0_exc_cause() part 2
222
	sub $k0, 8            # 8=SYSCALL
222
	sub $k0, 8            # 8=SYSCALL
223
	
223
	
224
	beqz $k0, syscall_shortcut
224
	beqz $k0, syscall_shortcut
225
	add $k0, 8            # Revert $k1 back to correct exc number
225
	add $k0, 8            # Revert $k0 back to correct exc number
226
	
226
	
227
	REGISTERS_STORE_AND_EXC_RESET $sp
227
	REGISTERS_STORE_AND_EXC_RESET $sp
228
	
228
	
229
	move $a1, $sp
229
	move $a1, $sp
230
	jal exc_dispatch      # exc_dispatch(excno, register_space)
230
	jal exc_dispatch      # exc_dispatch(excno, register_space)
231
	move $a0, $k0
231
	move $a0, $k0
232
 
232
 
233
	REGISTERS_LOAD $sp
233
	REGISTERS_LOAD $sp
234
	# The $sp is automatically restored to former value
234
	# The $sp is automatically restored to former value
235
	eret
235
	eret
236
 
236
 
237
# it seems that mips reserves some space on stack for varfuncs???
237
# it seems that mips reserves some space on stack for varfuncs???
238
#define SS_ARG4   16
238
#define SS_ARG4   16
239
#define SS_SP     EOFFSET_SP
239
#define SS_SP     EOFFSET_SP
240
#define SS_STATUS EOFFSET_STATUS
240
#define SS_STATUS EOFFSET_STATUS
241
#define SS_EPC    EOFFSET_EPC
241
#define SS_EPC    EOFFSET_EPC
-
 
242
#define SS_K1     EOFFSET_K1
242
syscall_shortcut:
243
syscall_shortcut:
243
	# We have a lot of space on the stack, with free use
244
	# We have a lot of space on the stack, with free use
244
	mfc0 $t1, $epc
245
	mfc0 $t1, $epc
245
	mfc0 $t0, $status
246
	mfc0 $t0, $status
246
	sw $t1,SS_EPC($sp)  # Save EPC
247
	sw $t1,SS_EPC($sp)  # Save EPC
-
 
248
	sw $k1,SS_K1($sp)   # Save k1, which is not saved during context switch
247
	
249
	
248
	and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE
250
	and $t2, $t0, REG_SAVE_MASK # Save only KSU,EXL,ERL,IE
249
	li $t3, ~(0x1f)
251
	li $t3, ~(0x1f)
250
	and $t0, $t0, $t3           # Clear KSU,EXL,ERL
252
	and $t0, $t0, $t3           # Clear KSU,EXL,ERL
251
	ori $t0, $t0, 0x1           # Set IE
253
	ori $t0, $t0, 0x1           # Set IE
252
 
254
 
253
	sw $t2,SS_STATUS($sp)
255
	sw $t2,SS_STATUS($sp)
254
	mtc0 $t0, $status
256
	mtc0 $t0, $status
255
 
257
 
256
	# CALL Syscall handler
258
	# CALL Syscall handler
257
	jal syscall_handler
259
	jal syscall_handler
258
	sw $v0, SS_ARG4($sp)        # save v0 - arg4 to stack
260
	sw $v0, SS_ARG4($sp)        # save v0 - arg4 to stack
259
 
261
 
260
	# restore status
262
	# restore status
261
	mfc0 $t0, $status
263
	mfc0 $t0, $status
262
	lw $t1,SS_STATUS($sp)
264
	lw $t1,SS_STATUS($sp)
263
 
265
 
264
	# Change back to EXL=1(from last exception), otherwise
266
	# Change back to EXL=1(from last exception), otherwise
265
	# an interrupt could rewrite the CP0-EPC
267
	# an interrupt could rewrite the CP0-EPC
266
	li $t2, ~REG_SAVE_MASK      # Mask UM,EXL,ERL,IE
268
	li $t2, ~REG_SAVE_MASK      # Mask UM,EXL,ERL,IE
267
	and $t0, $t0, $t2
269
	and $t0, $t0, $t2
268
	or $t0, $t0, $t1            # Copy UM,EXL,ERL,IE from saved status
270
	or $t0, $t0, $t1            # Copy UM,EXL,ERL,IE from saved status
269
	mtc0 $t0, $status
271
	mtc0 $t0, $status
270
			
272
			
271
	# restore epc+4
273
	# restore epc+4
272
	lw $t0,SS_EPC($sp)
274
	lw $t0,SS_EPC($sp)
-
 
275
	lw $k1,SS_K1($sp)
273
	addi $t0, $t0, 4
276
	addi $t0, $t0, 4
274
	mtc0 $t0, $epc
277
	mtc0 $t0, $epc
275
	
278
	
276
	lw $sp,SS_SP($sp) # restore sp
279
	lw $sp,SS_SP($sp) # restore sp
277
	
280
	
278
	eret
281
	eret
279
		
282
		
280
tlb_refill_handler:
283
tlb_refill_handler:
281
	KERNEL_STACK_TO_K0
284
	KERNEL_STACK_TO_K0
282
	sub $k0, REGISTER_SPACE
285
	sub $k0, REGISTER_SPACE
283
	REGISTERS_STORE_AND_EXC_RESET $k0
286
	REGISTERS_STORE_AND_EXC_RESET $k0
284
	sw $sp,EOFFSET_SP($k0)
287
	sw $sp,EOFFSET_SP($k0)
285
	add $sp, $k0, 0
288
	add $sp, $k0, 0
286
 
289
 
287
	jal tlb_refill /* tlb_refill(register_space) */
290
	jal tlb_refill /* tlb_refill(register_space) */
288
	add $a0, $sp, 0 
291
	add $a0, $sp, 0 
289
 
292
 
290
	REGISTERS_LOAD $sp
293
	REGISTERS_LOAD $sp
291
 
294
 
292
	eret
295
	eret
293
 
296
 
294
cache_error_handler:
297
cache_error_handler:
295
	KERNEL_STACK_TO_K0
298
	KERNEL_STACK_TO_K0
296
	sub $k0, REGISTER_SPACE
299
	sub $k0, REGISTER_SPACE
297
	REGISTERS_STORE_AND_EXC_RESET $k0
300
	REGISTERS_STORE_AND_EXC_RESET $k0
298
	sw $sp,EOFFSET_SP($k0)
301
	sw $sp,EOFFSET_SP($k0)
299
	add $sp, $k0, 0
302
	add $sp, $k0, 0
300
 
303
 
301
	jal cache_error
304
	jal cache_error
302
	nop
305
	nop
303
 
306
 
304
	REGISTERS_LOAD $sp
307
	REGISTERS_LOAD $sp
305
 
308
 
306
	eret
309
	eret
307
 
310
 
308
userspace_asm:
311
userspace_asm:
309
	add $sp, $a0, 0
312
	add $sp, $a0, 0
310
	add $v0, $a1, 0 
313
	add $v0, $a1, 0 
311
	add $t9, $a2, 0   # Set up correct entry into PIC code 
314
	add $t9, $a2, 0   # Set up correct entry into PIC code 
312
	eret
315
	eret
313
 
316
 
314
 
317
 
315

Generated by GNU Enscript 1.6.6.
318

Generated by GNU Enscript 1.6.6.
316
 
319
 
317
 
320
 
318
 
321