Subversion Repositories HelenOS


Rev 2607 | Rev 3022
#
# Copyright (c) 2003-2004 Jakub Jermar
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
# - The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

#include <arch/asm/regname.h>
#include <arch/mm/page.h>
#include <arch/asm/boot.h>
#include <arch/context_offset.h>
#include <arch/stack.h>

.text

.set noat
.set noreorder
.set nomacro

.global kernel_image_start
.global tlb_refill_entry
.global cache_error_entry
.global exception_entry
.global userspace_asm

# Which status bits are thread-local
#define REG_SAVE_MASK 0x1f # KSU(UM), EXL, ERL, IE
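#
# For reference (a sketch based on the MIPS32 CP0 Status layout, not stated
# in this file): REG_SAVE_MASK covers the five least significant Status bits:
#
#   bit 0     IE   - global interrupt enable
#   bit 1     EXL  - exception level
#   bit 2     ERL  - error level
#   bits 3-4  KSU  - processor mode (bit 4 is UM, tested by KERNEL_STACK_TO_K0)
#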

# Save registers to the space pointed to by \r
# We will change the status register: disable ERL, EXL, UM, IE
# These changes will be automatically reversed in REGISTERS_LOAD
# SP is NOT saved as part of these registers
.macro REGISTERS_STORE_AND_EXC_RESET r
	sw $at, EOFFSET_AT(\r)
	sw $v0, EOFFSET_V0(\r)
	sw $v1, EOFFSET_V1(\r)
	sw $a0, EOFFSET_A0(\r)
	sw $a1, EOFFSET_A1(\r)
	sw $a2, EOFFSET_A2(\r)
	sw $a3, EOFFSET_A3(\r)
	sw $t0, EOFFSET_T0(\r)
	sw $t1, EOFFSET_T1(\r)
	sw $t2, EOFFSET_T2(\r)
	sw $t3, EOFFSET_T3(\r)
	sw $t4, EOFFSET_T4(\r)
	sw $t5, EOFFSET_T5(\r)
	sw $t6, EOFFSET_T6(\r)
	sw $t7, EOFFSET_T7(\r)
	sw $t8, EOFFSET_T8(\r)
	sw $t9, EOFFSET_T9(\r)

	mflo $at
	sw $at, EOFFSET_LO(\r)
	mfhi $at
	sw $at, EOFFSET_HI(\r)

#ifdef CONFIG_DEBUG_ALLREGS
	sw $s0, EOFFSET_S0(\r)
	sw $s1, EOFFSET_S1(\r)
	sw $s2, EOFFSET_S2(\r)
	sw $s3, EOFFSET_S3(\r)
	sw $s4, EOFFSET_S4(\r)
	sw $s5, EOFFSET_S5(\r)
	sw $s6, EOFFSET_S6(\r)
	sw $s7, EOFFSET_S7(\r)
	sw $s8, EOFFSET_S8(\r)
#endif

	sw $gp, EOFFSET_GP(\r)
	sw $ra, EOFFSET_RA(\r)
	sw $k1, EOFFSET_K1(\r)

	mfc0 $t0, $status
	mfc0 $t1, $epc

	and $t2, $t0, REG_SAVE_MASK	# Save only KSU, EXL, ERL, IE
	li $t3, ~(0x1f)
	and $t0, $t0, $t3		# Clear KSU, EXL, ERL, IE

	sw $t2, EOFFSET_STATUS(\r)
	sw $t1, EOFFSET_EPC(\r)
	mtc0 $t0, $status
.endm
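#
# Typical use, as in the handlers below (e.g. tlb_refill_handler): carve
# REGISTER_SPACE bytes out of the kernel stack first, then store into it:
#
#	KERNEL_STACK_TO_K0
#	sub $k0, REGISTER_SPACE
#	REGISTERS_STORE_AND_EXC_RESET $k0
#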

.macro REGISTERS_LOAD r
	# Update only UM, EXL, ERL, IE from the saved status; the rest
	# is controlled by the OS and not bound to the task
	mfc0 $t0, $status
	lw $t1, EOFFSET_STATUS(\r)

	li $t2, ~REG_SAVE_MASK		# Mask UM, EXL, ERL, IE
	and $t0, $t0, $t2

	or $t0, $t0, $t1		# Copy UM, EXL, ERL, IE from saved status
	mtc0 $t0, $status

	lw $v0, EOFFSET_V0(\r)
	lw $v1, EOFFSET_V1(\r)
	lw $a0, EOFFSET_A0(\r)
	lw $a1, EOFFSET_A1(\r)
	lw $a2, EOFFSET_A2(\r)
	lw $a3, EOFFSET_A3(\r)
	lw $t0, EOFFSET_T0(\r)
	lw $t1, EOFFSET_T1(\r)
	lw $t2, EOFFSET_T2(\r)
	lw $t3, EOFFSET_T3(\r)
	lw $t4, EOFFSET_T4(\r)
	lw $t5, EOFFSET_T5(\r)
	lw $t6, EOFFSET_T6(\r)
	lw $t7, EOFFSET_T7(\r)
	lw $t8, EOFFSET_T8(\r)
	lw $t9, EOFFSET_T9(\r)

#ifdef CONFIG_DEBUG_ALLREGS
	lw $s0, EOFFSET_S0(\r)
	lw $s1, EOFFSET_S1(\r)
	lw $s2, EOFFSET_S2(\r)
	lw $s3, EOFFSET_S3(\r)
	lw $s4, EOFFSET_S4(\r)
	lw $s5, EOFFSET_S5(\r)
	lw $s6, EOFFSET_S6(\r)
	lw $s7, EOFFSET_S7(\r)
	lw $s8, EOFFSET_S8(\r)
#endif
	lw $gp, EOFFSET_GP(\r)
	lw $ra, EOFFSET_RA(\r)
	lw $k1, EOFFSET_K1(\r)

	lw $at, EOFFSET_LO(\r)
	mtlo $at
	lw $at, EOFFSET_HI(\r)
	mthi $at

	lw $at, EOFFSET_EPC(\r)
	mtc0 $at, $epc

	lw $at, EOFFSET_AT(\r)
	lw $sp, EOFFSET_SP(\r)
.endm
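#
# Note: REGISTERS_LOAD is the counterpart of REGISTERS_STORE_AND_EXC_RESET.
# It reloads $sp from EOFFSET_SP as its last step, which is why the callers
# below do not restore the stack pointer themselves.
#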

# Move the kernel stack pointer address to register $k0
# - if we are in user mode, load the appropriate kernel stack
#   address
.macro KERNEL_STACK_TO_K0
	# If we are in user mode
	mfc0 $k0, $status
	andi $k0, 0x10

	beq $k0, $0, 1f
	add $k0, $sp, 0

	# Point $k0 at the kernel stack
	lui $k0, %hi(supervisor_sp)
	ori $k0, $k0, %lo(supervisor_sp)
	# Load the value stored in supervisor_sp
	lw $k0, 0($k0)
1:
.endm
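#
# (A brief note, relying on the MIPS32 Status layout assumed above: the andi
# with 0x10 tests the UM bit, so the branch is taken when we already run in
# kernel mode. Because of .set noreorder, the add in the delay slot executes
# in either case, so $k0 starts out as a copy of the current $sp and is only
# replaced with supervisor_sp on the user-mode path.)
#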

.org 0x0
kernel_image_start:
	/* Load temporary stack */
	lui $sp, %hi(end_stack)
	ori $sp, $sp, %lo(end_stack)

	/* $a1 contains physical address of bootinfo_t */
	/* $a2 contains size of bootinfo_t */
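	/*
	 * Copy the bootinfo_t structure word by word from the physical
	 * address in $a1 to the kernel's bootinfo symbol; $a2 holds the
	 * remaining byte count, and a size of 0 skips the copy entirely.
	 */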

	beq $a2, $0, bootinfo_end

	/* Not sure about this, but might be needed for PIC code???? */
	lui $gp, 0x8000

	lui $a3, %hi(bootinfo)
	ori $a3, $a3, %lo(bootinfo)

	bootinfo_loop:

		lw $v0, 0($a1)
		sw $v0, 0($a3)

		addi $a1, $a1, 4
		addi $a3, $a3, 4
		addi $a2, $a2, -4

		bgtz $a2, bootinfo_loop
		nop

	bootinfo_end:

	jal arch_pre_main
	nop

	j main_bsp
	nop

	.space TEMP_STACK_SIZE
end_stack:

tlb_refill_entry:
	j tlb_refill_handler
	nop

cache_error_entry:
	j cache_error_handler
	nop

exception_entry:
	j exception_handler
	nop

exception_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	sw $sp, EOFFSET_SP($k0)
	move $sp, $k0

	mfc0 $k0, $cause

	sra $k0, $k0, 0x2		# cp0_exc_cause() part 1
	andi $k0, $k0, 0x1f		# cp0_exc_cause() part 2
	sub $k0, 8			# 8 = SYSCALL

	beqz $k0, syscall_shortcut
	add $k0, 8			# Revert $k0 back to correct exc number
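	#
	# (Background, from the MIPS32 Cause register layout rather than this
	# file: ExcCode occupies Cause bits 2..6, hence the shift-and-mask
	# above; ExcCode 8 is the Sys exception raised by the SYSCALL
	# instruction, which is what the shortcut below handles.)
	#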

	REGISTERS_STORE_AND_EXC_RESET $sp

	move $a1, $sp
	jal exc_dispatch		# exc_dispatch(excno, register_space)
	move $a0, $k0

	REGISTERS_LOAD $sp
	# The $sp is automatically restored to its former value by REGISTERS_LOAD
	eret

## Syscall entry
#
# Registers:
#
# @param v0		Syscall number.
# @param a0		1st argument.
# @param a1		2nd argument.
# @param a2		3rd argument.
# @param a3		4th argument.
# @param t0		5th argument.
# @param t1		6th argument.
#
# @return		The return value will be stored in v0.
#
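# A caller-side sketch of this convention (illustrative only, not part of
# this file): userspace loads the syscall number into $v0 and up to six
# arguments into $a0-$a3, $t0 and $t1, then issues the SYSCALL instruction;
# the result comes back in $v0.
#
#	li	$v0, 42			# hypothetical syscall number
#	move	$a0, $s0		# 1st argument
#	syscall
#	# result now in $v0
#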
#define SS_SP		EOFFSET_SP
#define SS_STATUS	EOFFSET_STATUS
#define SS_EPC		EOFFSET_EPC
#define SS_K1		EOFFSET_K1
syscall_shortcut:
	# We have plenty of space on the stack that we are free to use
	mfc0 $t3, $epc
	mfc0 $t2, $status
	sw $t3, SS_EPC($sp)		# Save EPC
	sw $k1, SS_K1($sp)		# Save $k1, which is not saved on context switch

	and $t4, $t2, REG_SAVE_MASK	# Save only KSU, EXL, ERL, IE
	li $t5, ~(0x1f)
	and $t2, $t2, $t5		# Clear KSU, EXL, ERL, IE
	ori $t2, $t2, 0x1		# Set IE

	sw $t4, SS_STATUS($sp)
	mtc0 $t2, $status

	#
	# Call the higher-level system call handler
	# We are going to reuse part of the unused exception stack frame
	#
	sw $t0, STACK_ARG4($sp)		# save the 5th argument on the stack
	sw $t1, STACK_ARG5($sp)		# save the 6th argument on the stack
	jal syscall_handler
	sw $v0, STACK_ARG6($sp)		# save the syscall number on the stack

	# restore status
	mfc0 $t2, $status
	lw $t3, SS_STATUS($sp)

	# Change back to EXL = 1 (from the last exception), otherwise
	# an interrupt could overwrite CP0 EPC
	li $t4, ~REG_SAVE_MASK		# Mask UM, EXL, ERL, IE
	and $t2, $t2, $t4
	or $t2, $t2, $t3		# Copy saved UM, EXL, ERL, IE
	mtc0 $t2, $status

	# restore EPC + 4 (return past the SYSCALL instruction)
	lw $t2, SS_EPC($sp)
	lw $k1, SS_K1($sp)
	addi $t2, $t2, 4
	mtc0 $t2, $epc

	lw $sp, SS_SP($sp)		# restore sp

	eret

tlb_refill_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp, EOFFSET_SP($k0)
	add $sp, $k0, 0

	jal tlb_refill
	add $a0, $sp, 0

	REGISTERS_LOAD $sp

	eret

cache_error_handler:
	KERNEL_STACK_TO_K0
	sub $k0, REGISTER_SPACE
	REGISTERS_STORE_AND_EXC_RESET $k0
	sw $sp, EOFFSET_SP($k0)
	add $sp, $k0, 0

	jal cache_error
	add $a0, $sp, 0

	REGISTERS_LOAD $sp

	eret
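
#
# The routine below transfers control to userspace. A reading of its
# convention (partly assumed, not spelled out in this file): $a0 is the new
# userspace stack pointer, $a1 is handed to the new thread in $v0, and $a2
# is placed in $t9 as the entry point for position-independent code. The
# caller is presumably expected to have prepared CP0 EPC and Status so that
# eret lands at the userspace entry point in user mode.
#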
userspace_asm:
	add $sp, $a0, 0
	add $v0, $a1, 0
	add $t9, $a2, 0			# Set up correct entry into PIC code
	eret