#
# Copyright (c) 2005 Ondrej Palkovsky
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
# - The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

#define IREGISTER_SPACE 120

#define IOFFSET_RAX 0x0
#define IOFFSET_RBX 0x8
#define IOFFSET_RCX 0x10
#define IOFFSET_RDX 0x18
#define IOFFSET_RSI 0x20
#define IOFFSET_RDI 0x28
#define IOFFSET_R8 0x30
#define IOFFSET_R9 0x38
#define IOFFSET_R10 0x40
#define IOFFSET_R11 0x48
#define IOFFSET_R12 0x50
#define IOFFSET_R13 0x58
#define IOFFSET_R14 0x60
#define IOFFSET_R15 0x68
#define IOFFSET_RBP 0x70
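#
# The offsets above describe 15 consecutive 8-byte register slots (0x0-0x70),
# which is where IREGISTER_SPACE = 15 * 8 = 120 comes from. The layout is
# assumed to mirror the C-level istate structure handed to exc_dispatch(),
# so the two must be kept in sync.
#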

# Mask for interrupts 0 - 31 (bits 0 - 31) where 0 means that the interrupt
# has no error word and 1 means an interrupt with an error word
#define ERROR_WORD_INTERRUPT_LIST 0x00027D00
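# (0x00027D00 sets bits 8, 10-14 and 17, i.e. #DF, #TS, #NP, #SS, #GP, #PF
# and #AC, the exceptions for which the CPU pushes an error code.)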

#include <arch/pm.h>
#include <arch/mm/page.h>

.text
.global interrupt_handlers
.global syscall_entry
.global panic_printf

panic_printf:
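	# Overwrite our own return address with the address of halt() and
	# tail-jump to printf(): when printf() returns, it "returns" into halt().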
	movq $halt, (%rsp)
	jmp printf

.global cpuid
.global has_cpuid
.global get_cycle
.global read_efer_flag
.global set_efer_flag
.global memcpy
.global memcpy_from_uspace
.global memcpy_to_uspace
.global memcpy_from_uspace_failover_address
.global memcpy_to_uspace_failover_address

#define MEMCPY_DST	%rdi
#define MEMCPY_SRC	%rsi
#define MEMCPY_SIZE	%rdx

/**
 * Copy memory from/to userspace.
 *
 * This is almost a conventional memcpy().
 * The difference is that there is a failover part
 * to which control is returned from the page fault
 * handler if a page fault occurs during copy_from_uspace()
 * or copy_to_uspace().
 *
 * @param MEMCPY_DST	Destination address.
 * @param MEMCPY_SRC	Source address.
 * @param MEMCPY_SIZE	Number of bytes to copy.
 *
 * @return MEMCPY_SRC on success, 0 on failure.
 */
memcpy:
memcpy_from_uspace:
memcpy_to_uspace:
	movq MEMCPY_SRC, %rax

	movq MEMCPY_SIZE, %rcx
	shrq $3, %rcx			/* size / 8 */

	rep movsq			/* copy as much as possible word by word */

	movq MEMCPY_SIZE, %rcx
	andq $7, %rcx			/* size % 8 */
	jz 0f

	rep movsb			/* copy the rest byte by byte */

0:
	ret				/* return MEMCPY_SRC, success */

memcpy_from_uspace_failover_address:
memcpy_to_uspace_failover_address:
	xorq %rax, %rax			/* return 0, failure */
	ret
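
/*
 * A sketch of how a caller would use the userspace copy routine above
 * (the symbols kernel_buf, user_ptr and copy_failed are hypothetical;
 * the real callers live in the C part of the kernel):
 *
 *	movq $kernel_buf, %rdi		# MEMCPY_DST
 *	movq user_ptr(%rip), %rsi	# MEMCPY_SRC
 *	movq $64, %rdx			# MEMCPY_SIZE
 *	call memcpy_from_uspace
 *	testq %rax, %rax
 *	jz copy_failed			# 0 means a page fault aborted the copy
 *
 * On a page fault inside the copy, the page fault handler is expected to
 * resume execution at memcpy_*_failover_address, which reports failure.
 */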

## Determine CPUID support
#
# Return zero in EAX if CPUID is not supported, non-zero if it is.
#
has_cpuid:
	pushfq			# store flags
	popq %rax		# read flags
	movq %rax,%rdx		# copy flags
	btcl $21,%edx		# flip the ID bit
	pushq %rdx
	popfq			# propagate the change into flags
	pushfq
	popq %rdx		# read flags
	andl $(1<<21),%eax	# interested only in ID bit
	andl $(1<<21),%edx
	xorl %edx,%eax		# zero if not supported, non-zero if supported
	ret

cpuid:
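	# Execute CPUID with %eax set to the command passed in the first
	# argument (%edi) and store the resulting EAX, EBX, ECX and EDX into
	# four consecutive 32-bit slots of the buffer pointed to by the second
	# argument (%rsi) - presumably the cpu_info structure on the C side.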
136
	movq %rbx, %r10  # we have to preserve rbx across function calls
242 palkovsky 137
 
251 palkovsky 138
	movl %edi,%eax	# load the command into %eax
139
 
140
	cpuid	
141
	movl %eax,0(%rsi)
142
	movl %ebx,4(%rsi)
143
	movl %ecx,8(%rsi)
144
	movl %edx,12(%rsi)
145
 
146
	movq %r10, %rbx
147
	ret
148
 
get_cycle:
	xorq %rdx, %rdx
	rdtsc				# TSC -> %edx:%eax
	shlq $32, %rdx
	orq %rdx, %rax			# return the full 64-bit value in %rax
	ret

set_efer_flag:
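	# MSR 0xc0000080 is IA32_EFER (the extended feature enable register).
	# This routine sets bit %edi in it; read_efer_flag below returns its
	# lower 32 bits in %eax.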
155
	movq $0xc0000080, %rcx
156
	rdmsr
157
	btsl %edi, %eax
158
	wrmsr
159
	ret
242 palkovsky 160
 
251 palkovsky 161
read_efer_flag:	
162
	movq $0xc0000080, %rcx
163
	rdmsr
164
	ret 		
242 palkovsky 165
 
# Save the general purpose registers into the istate area on the stack.
# Only the scratch registers are saved unless CONFIG_DEBUG_ALLREGS is
# enabled; %rsp is never saved here.
.macro save_all_gpr
	movq %rax, IOFFSET_RAX(%rsp)
	movq %rcx, IOFFSET_RCX(%rsp)
	movq %rdx, IOFFSET_RDX(%rsp)
	movq %rsi, IOFFSET_RSI(%rsp)
	movq %rdi, IOFFSET_RDI(%rsp)
	movq %r8, IOFFSET_R8(%rsp)
	movq %r9, IOFFSET_R9(%rsp)
	movq %r10, IOFFSET_R10(%rsp)
	movq %r11, IOFFSET_R11(%rsp)
#ifdef CONFIG_DEBUG_ALLREGS
	movq %rbx, IOFFSET_RBX(%rsp)
	movq %rbp, IOFFSET_RBP(%rsp)
	movq %r12, IOFFSET_R12(%rsp)
	movq %r13, IOFFSET_R13(%rsp)
	movq %r14, IOFFSET_R14(%rsp)
	movq %r15, IOFFSET_R15(%rsp)
#endif
.endm

.macro restore_all_gpr
	movq IOFFSET_RAX(%rsp), %rax
	movq IOFFSET_RCX(%rsp), %rcx
	movq IOFFSET_RDX(%rsp), %rdx
	movq IOFFSET_RSI(%rsp), %rsi
	movq IOFFSET_RDI(%rsp), %rdi
	movq IOFFSET_R8(%rsp), %r8
	movq IOFFSET_R9(%rsp), %r9
	movq IOFFSET_R10(%rsp), %r10
	movq IOFFSET_R11(%rsp), %r11
#ifdef CONFIG_DEBUG_ALLREGS
	movq IOFFSET_RBX(%rsp), %rbx
	movq IOFFSET_RBP(%rsp), %rbp
	movq IOFFSET_R12(%rsp), %r12
	movq IOFFSET_R13(%rsp), %r13
	movq IOFFSET_R14(%rsp), %r14
	movq IOFFSET_R15(%rsp), %r15
#endif
.endm

#ifdef CONFIG_DEBUG_ALLREGS
# define INTERRUPT_ALIGN 256
#else
# define INTERRUPT_ALIGN 128
#endif
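# (With CONFIG_DEBUG_ALLREGS each handler saves and restores six more
# registers and is therefore longer, which is presumably why its alignment
# slot is doubled to 256 bytes.)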

## Declare interrupt handlers
#
# Declare interrupt handlers for n interrupt
# vectors starting at vector i.
#
# The handlers call exc_dispatch().
#
.macro handler i n

	/*
	 * Choose between version with error code and version without error code.
	 * Both versions have to be of the same size. amd64 assembly is, however,
	 * a little bit tricky. For instance, subq $0x80, %rsp and subq $0x78, %rsp
	 * can result in two instructions with different op-code lengths.
	 * Therefore we align the interrupt handlers.
	 */
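	/*
	 * Example: for vector 14 (#PF) bit 14 is set in
	 * ERROR_WORD_INTERRUPT_LIST, so the CPU has already pushed an error
	 * word and only IREGISTER_SPACE bytes are reserved. For vector 32
	 * (the first vector beyond the exceptions) there is no error word,
	 * so an extra 8 bytes are reserved to keep the istate layout and the
	 * common epilogue below identical for both cases.
	 */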

	.iflt \i-32
		.if (1 << \i) & ERROR_WORD_INTERRUPT_LIST
			/*
			 * Version with error word.
			 */
			subq $IREGISTER_SPACE, %rsp
		.else
			/*
			 * Version without error word.
			 */
			subq $(IREGISTER_SPACE+8), %rsp
		.endif
	.else
		/*
		 * Version without error word.
		 */
		subq $(IREGISTER_SPACE+8), %rsp
	.endif

	save_all_gpr

	movq $(\i), %rdi   	# %rdi - first parameter
	movq %rsp, %rsi   	# %rsi - pointer to istate
	call exc_dispatch 	# exc_dispatch(i, istate)

	restore_all_gpr
	# +8: skip the error word (or its placeholder)
	addq $(IREGISTER_SPACE+8), %rsp
	iretq

	.align INTERRUPT_ALIGN
	.if (\n-\i)-1
	handler "(\i+1)",\n
	.endif
.endm

.align INTERRUPT_ALIGN
interrupt_handlers:
h_start:
	handler 0 IDT_ITEMS
h_end:


syscall_entry:
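	# The syscall instruction leaves the return address in %rcx and the
	# saved RFLAGS in %r11, and does not switch stacks on its own. We
	# therefore pick up a per-CPU kernel stack page via the hidden %gs
	# base (loaded by swapgs), save the user %rsp, %rcx and %r11 there,
	# and restore them before sysretq. Because %rcx is clobbered, the
	# userspace stub presumably passes the argument that would normally
	# travel in %rcx in %r9 instead; it is moved back below.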
	# Switch to hidden gs
	swapgs
	# %gs:0 now points to pointer to stack page
	mov %gs:0, %r10     # We have a ptr to stack page in r10
	addq $PAGE_SIZE-16, %r10 # We need some space to store old %sp

	movq %rsp, 0(%r10)  # Save old stack pointer to stack
	movq %r10, %rsp     # Change to new stack
	pushq %rcx          # Return address
	pushq %r11          # Save flags

	# Switch back to remain consistent
	swapgs

	sti
	movq %r9, %rcx      # Exchange last parameter as a third

	call syscall_handler
	cli                 # We will be touching stack pointer

	popq %r11
	popq %rcx
	movq 0(%rsp), %rsp
	sysretq


.data
.global interrupt_handler_size

interrupt_handler_size: .quad (h_end-h_start)/IDT_ITEMS
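
# Every handler instance generated above occupies the same number of bytes,
# so the C code can presumably locate the entry point of vector i as
# interrupt_handlers + i * interrupt_handler_size when filling in the IDT.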