Rev 1288 | Rev 2018 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1288 | Rev 1787 | ||
---|---|---|---|
1 | # |
1 | # |
2 | # Copyright (C) 2005 Ondrej Palkovsky |
2 | # Copyright (C) 2005 Ondrej Palkovsky |
3 | # All rights reserved. |
3 | # All rights reserved. |
4 | # |
4 | # |
5 | # Redistribution and use in source and binary forms, with or without |
5 | # Redistribution and use in source and binary forms, with or without |
6 | # modification, are permitted provided that the following conditions |
6 | # modification, are permitted provided that the following conditions |
7 | # are met: |
7 | # are met: |
8 | # |
8 | # |
9 | # - Redistributions of source code must retain the above copyright |
9 | # - Redistributions of source code must retain the above copyright |
10 | # notice, this list of conditions and the following disclaimer. |
10 | # notice, this list of conditions and the following disclaimer. |
11 | # - Redistributions in binary form must reproduce the above copyright |
11 | # - Redistributions in binary form must reproduce the above copyright |
12 | # notice, this list of conditions and the following disclaimer in the |
12 | # notice, this list of conditions and the following disclaimer in the |
13 | # documentation and/or other materials provided with the distribution. |
13 | # documentation and/or other materials provided with the distribution. |
14 | # - The name of the author may not be used to endorse or promote products |
14 | # - The name of the author may not be used to endorse or promote products |
15 | # derived from this software without specific prior written permission. |
15 | # derived from this software without specific prior written permission. |
16 | # |
16 | # |
17 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
17 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
18 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
18 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
19 | # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
19 | # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
20 | # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
20 | # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
21 | # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
21 | # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
22 | # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
22 | # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
23 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
24 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
25 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 | # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | # |
27 | # |
28 | 28 | ||
29 | #define IREGISTER_SPACE 120 |
29 | #define IREGISTER_SPACE 120 |
30 | 30 | ||
31 | #define IOFFSET_RAX 0x0 |
31 | #define IOFFSET_RAX 0x0 |
32 | #define IOFFSET_RBX 0x8 |
32 | #define IOFFSET_RBX 0x8 |
33 | #define IOFFSET_RCX 0x10 |
33 | #define IOFFSET_RCX 0x10 |
34 | #define IOFFSET_RDX 0x18 |
34 | #define IOFFSET_RDX 0x18 |
35 | #define IOFFSET_RSI 0x20 |
35 | #define IOFFSET_RSI 0x20 |
36 | #define IOFFSET_RDI 0x28 |
36 | #define IOFFSET_RDI 0x28 |
37 | #define IOFFSET_R8 0x30 |
37 | #define IOFFSET_R8 0x30 |
38 | #define IOFFSET_R9 0x38 |
38 | #define IOFFSET_R9 0x38 |
39 | #define IOFFSET_R10 0x40 |
39 | #define IOFFSET_R10 0x40 |
40 | #define IOFFSET_R11 0x48 |
40 | #define IOFFSET_R11 0x48 |
41 | #define IOFFSET_R12 0x50 |
41 | #define IOFFSET_R12 0x50 |
42 | #define IOFFSET_R13 0x58 |
42 | #define IOFFSET_R13 0x58 |
43 | #define IOFFSET_R14 0x60 |
43 | #define IOFFSET_R14 0x60 |
44 | #define IOFFSET_R15 0x68 |
44 | #define IOFFSET_R15 0x68 |
45 | #define IOFFSET_RBP 0x70 |
45 | #define IOFFSET_RBP 0x70 |
46 | 46 | ||
47 | # Mask for interrupts 0 - 31 (bits 0 - 31) where 0 means that int has no error word |
47 | # Mask for interrupts 0 - 31 (bits 0 - 31) where 0 means that int has no error word |
48 | # and 1 means interrupt with error word |
48 | # and 1 means interrupt with error word |
49 | #define ERROR_WORD_INTERRUPT_LIST 0x00027D00 |
49 | #define ERROR_WORD_INTERRUPT_LIST 0x00027D00 |
50 | 50 | ||
51 | #include <arch/pm.h> |
51 | #include <arch/pm.h> |
52 | #include <arch/mm/page.h> |
52 | #include <arch/mm/page.h> |
53 | 53 | ||
54 | .text |
54 | .text |
55 | .global interrupt_handlers |
55 | .global interrupt_handlers |
56 | .global syscall_entry |
56 | .global syscall_entry |
57 | .global panic_printf |
57 | .global panic_printf |
58 | 58 | ||
59 | panic_printf: |
59 | panic_printf: |
60 | movq $halt, (%rsp) |
60 | movq $halt, (%rsp) |
61 | jmp printf |
61 | jmp printf |
62 | 62 | ||
63 | .global cpuid |
63 | .global cpuid |
64 | .global has_cpuid |
64 | .global has_cpuid |
65 | .global rdtsc |
65 | .global rdtsc |
66 | .global read_efer_flag |
66 | .global read_efer_flag |
67 | .global set_efer_flag |
67 | .global set_efer_flag |
68 | .global memcpy |
68 | .global memcpy |
69 | .global memcpy_from_uspace |
69 | .global memcpy_from_uspace |
70 | .global memcpy_to_uspace |
70 | .global memcpy_to_uspace |
71 | .global memcpy_from_uspace_failover_address |
71 | .global memcpy_from_uspace_failover_address |
72 | .global memcpy_to_uspace_failover_address |
72 | .global memcpy_to_uspace_failover_address |
73 | 73 | ||
74 | #define MEMCPY_DST %rdi |
74 | #define MEMCPY_DST %rdi |
75 | #define MEMCPY_SRC %rsi |
75 | #define MEMCPY_SRC %rsi |
76 | #define MEMCPY_SIZE %rdx |
76 | #define MEMCPY_SIZE %rdx |
77 | 77 | ||
78 | /** |
78 | /** |
79 | * Copy memory from/to userspace. |
79 | * Copy memory from/to userspace. |
80 | * |
80 | * |
81 | * This is almost conventional memcpy(). |
81 | * This is almost conventional memcpy(). |
82 | * The difference is that there is a failover part |
82 | * The difference is that there is a failover part |
83 | * to where control is returned from a page fault if |
83 | * to where control is returned from a page fault if |
84 | * the page fault occurs during copy_from_uspace() |
84 | * the page fault occurs during copy_from_uspace() |
85 | * or copy_to_uspace(). |
85 | * or copy_to_uspace(). |
86 | * |
86 | * |
87 | * @param MEMCPY_DST Destination address. |
87 | * @param MEMCPY_DST Destination address. |
88 | * @param MEMCPY_SRC Source address. |
88 | * @param MEMCPY_SRC Source address. |
89 | * @param MEMCPY_SIZE Number of bytes to copy. |
89 | * @param MEMCPY_SIZE Number of bytes to copy. |
90 | * |
90 | * |
91 | * @return MEMCPY_SRC on success, 0 on failure. |
91 | * @return MEMCPY_SRC on success, 0 on failure. |
92 | */ |
92 | */ |
93 | memcpy: |
93 | memcpy: |
94 | memcpy_from_uspace: |
94 | memcpy_from_uspace: |
95 | memcpy_to_uspace: |
95 | memcpy_to_uspace: |
96 | movq MEMCPY_SRC, %rax |
96 | movq MEMCPY_SRC, %rax |
97 | 97 | ||
98 | movq MEMCPY_SIZE, %rcx |
98 | movq MEMCPY_SIZE, %rcx |
99 | shrq $3, %rcx /* size / 8 */ |
99 | shrq $3, %rcx /* size / 8 */ |
100 | 100 | ||
101 | rep movsq /* copy as much as possible word by word */ |
101 | rep movsq /* copy as much as possible word by word */ |
102 | 102 | ||
103 | movq MEMCPY_SIZE, %rcx |
103 | movq MEMCPY_SIZE, %rcx |
104 | andq $7, %rcx /* size % 8 */ |
104 | andq $7, %rcx /* size % 8 */ |
105 | jz 0f |
105 | jz 0f |
106 | 106 | ||
107 | rep movsb /* copy the rest byte by byte */ |
107 | rep movsb /* copy the rest byte by byte */ |
108 | 108 | ||
109 | 0: |
109 | 0: |
110 | ret /* return MEMCPY_SRC, success */ |
110 | ret /* return MEMCPY_SRC, success */ |
111 | 111 | ||
112 | memcpy_from_uspace_failover_address: |
112 | memcpy_from_uspace_failover_address: |
113 | memcpy_to_uspace_failover_address: |
113 | memcpy_to_uspace_failover_address: |
114 | xorq %rax, %rax /* return 0, failure */ |
114 | xorq %rax, %rax /* return 0, failure */ |
115 | ret |
115 | ret |
116 | 116 | ||
117 | ## Determine CPUID support |
117 | ## Determine CPUID support |
118 | # |
118 | # |
119 | # Return 0 in EAX if CPUID is not supported, non-zero (the ID flag mask) if it is. |
119 | # Return 0 in EAX if CPUID is not supported, non-zero (the ID flag mask) if it is. |
120 | # |
120 | # |
121 | has_cpuid: |
121 | has_cpuid: |
122 | pushfq # store flags |
122 | pushfq # store flags |
123 | popq %rax # read flags |
123 | popq %rax # read flags |
124 | movq %rax,%rdx # copy flags |
124 | movq %rax,%rdx # copy flags |
125 | btcl $21,%edx # swap the ID bit |
125 | btcl $21,%edx # swap the ID bit |
126 | pushq %rdx |
126 | pushq %rdx |
127 | popfq # propagate the change into flags |
127 | popfq # propagate the change into flags |
128 | pushfq |
128 | pushfq |
129 | popq %rdx # read flags |
129 | popq %rdx # read flags |
130 | andl $(1<<21),%eax # interested only in ID bit |
130 | andl $(1<<21),%eax # interested only in ID bit |
131 | andl $(1<<21),%edx |
131 | andl $(1<<21),%edx |
132 | xorl %edx,%eax # zero if not supported, non-zero if supported |
132 | xorl %edx,%eax # zero if not supported, non-zero if supported |
133 | ret |
133 | ret |
134 | 134 | ||
135 | cpuid: |
135 | cpuid: |
136 | movq %rbx, %r10 # we have to preserve rbx across function calls |
136 | movq %rbx, %r10 # we have to preserve rbx across function calls |
137 | 137 | ||
138 | movl %edi,%eax # load the command into %eax |
138 | movl %edi,%eax # load the command into %eax |
139 | 139 | ||
140 | cpuid |
140 | cpuid |
141 | movl %eax,0(%rsi) |
141 | movl %eax,0(%rsi) |
142 | movl %ebx,4(%rsi) |
142 | movl %ebx,4(%rsi) |
143 | movl %ecx,8(%rsi) |
143 | movl %ecx,8(%rsi) |
144 | movl %edx,12(%rsi) |
144 | movl %edx,12(%rsi) |
145 | 145 | ||
146 | movq %r10, %rbx |
146 | movq %r10, %rbx |
147 | ret |
147 | ret |
148 | 148 | ||
149 | rdtsc: |
149 | rdtsc: |
150 | xorq %rax,%rax |
150 | xorq %rax,%rax |
151 | rdtsc |
151 | rdtsc |
152 | ret |
152 | ret |
153 | 153 | ||
154 | set_efer_flag: |
154 | set_efer_flag: |
155 | movq $0xc0000080, %rcx |
155 | movq $0xc0000080, %rcx |
156 | rdmsr |
156 | rdmsr |
157 | btsl %edi, %eax |
157 | btsl %edi, %eax |
158 | wrmsr |
158 | wrmsr |
159 | ret |
159 | ret |
160 | 160 | ||
161 | read_efer_flag: |
161 | read_efer_flag: |
162 | movq $0xc0000080, %rcx |
162 | movq $0xc0000080, %rcx |
163 | rdmsr |
163 | rdmsr |
164 | ret |
164 | ret |
165 | 165 | ||
166 | # Push all general purpose registers on stack except %rbp, %rsp |
166 | # Push all general purpose registers on stack except %rbp, %rsp |
167 | .macro save_all_gpr |
167 | .macro save_all_gpr |
168 | movq %rax, IOFFSET_RAX(%rsp) |
168 | movq %rax, IOFFSET_RAX(%rsp) |
169 | movq %rcx, IOFFSET_RCX(%rsp) |
169 | movq %rcx, IOFFSET_RCX(%rsp) |
170 | movq %rdx, IOFFSET_RDX(%rsp) |
170 | movq %rdx, IOFFSET_RDX(%rsp) |
171 | movq %rsi, IOFFSET_RSI(%rsp) |
171 | movq %rsi, IOFFSET_RSI(%rsp) |
172 | movq %rdi, IOFFSET_RDI(%rsp) |
172 | movq %rdi, IOFFSET_RDI(%rsp) |
173 | movq %r8, IOFFSET_R8(%rsp) |
173 | movq %r8, IOFFSET_R8(%rsp) |
174 | movq %r9, IOFFSET_R9(%rsp) |
174 | movq %r9, IOFFSET_R9(%rsp) |
175 | movq %r10, IOFFSET_R10(%rsp) |
175 | movq %r10, IOFFSET_R10(%rsp) |
176 | movq %r11, IOFFSET_R11(%rsp) |
176 | movq %r11, IOFFSET_R11(%rsp) |
177 | #ifdef CONFIG_DEBUG_ALLREGS |
177 | #ifdef CONFIG_DEBUG_ALLREGS |
178 | movq %rbx, IOFFSET_RBX(%rsp) |
178 | movq %rbx, IOFFSET_RBX(%rsp) |
179 | movq %rbp, IOFFSET_RBP(%rsp) |
179 | movq %rbp, IOFFSET_RBP(%rsp) |
180 | movq %r12, IOFFSET_R12(%rsp) |
180 | movq %r12, IOFFSET_R12(%rsp) |
181 | movq %r13, IOFFSET_R13(%rsp) |
181 | movq %r13, IOFFSET_R13(%rsp) |
182 | movq %r14, IOFFSET_R14(%rsp) |
182 | movq %r14, IOFFSET_R14(%rsp) |
183 | movq %r15, IOFFSET_R15(%rsp) |
183 | movq %r15, IOFFSET_R15(%rsp) |
184 | #endif |
184 | #endif |
185 | .endm |
185 | .endm |
186 | 186 | ||
187 | .macro restore_all_gpr |
187 | .macro restore_all_gpr |
188 | movq IOFFSET_RAX(%rsp), %rax |
188 | movq IOFFSET_RAX(%rsp), %rax |
189 | movq IOFFSET_RCX(%rsp), %rcx |
189 | movq IOFFSET_RCX(%rsp), %rcx |
190 | movq IOFFSET_RDX(%rsp), %rdx |
190 | movq IOFFSET_RDX(%rsp), %rdx |
191 | movq IOFFSET_RSI(%rsp), %rsi |
191 | movq IOFFSET_RSI(%rsp), %rsi |
192 | movq IOFFSET_RDI(%rsp), %rdi |
192 | movq IOFFSET_RDI(%rsp), %rdi |
193 | movq IOFFSET_R8(%rsp), %r8 |
193 | movq IOFFSET_R8(%rsp), %r8 |
194 | movq IOFFSET_R9(%rsp), %r9 |
194 | movq IOFFSET_R9(%rsp), %r9 |
195 | movq IOFFSET_R10(%rsp), %r10 |
195 | movq IOFFSET_R10(%rsp), %r10 |
196 | movq IOFFSET_R11(%rsp), %r11 |
196 | movq IOFFSET_R11(%rsp), %r11 |
197 | #ifdef CONFIG_DEBUG_ALLREGS |
197 | #ifdef CONFIG_DEBUG_ALLREGS |
198 | movq IOFFSET_RBX(%rsp), %rbx |
198 | movq IOFFSET_RBX(%rsp), %rbx |
199 | movq IOFFSET_RBP(%rsp), %rbp |
199 | movq IOFFSET_RBP(%rsp), %rbp |
200 | movq IOFFSET_R12(%rsp), %r12 |
200 | movq IOFFSET_R12(%rsp), %r12 |
201 | movq IOFFSET_R13(%rsp), %r13 |
201 | movq IOFFSET_R13(%rsp), %r13 |
202 | movq IOFFSET_R14(%rsp), %r14 |
202 | movq IOFFSET_R14(%rsp), %r14 |
203 | movq IOFFSET_R15(%rsp), %r15 |
203 | movq IOFFSET_R15(%rsp), %r15 |
204 | #endif |
204 | #endif |
205 | .endm |
205 | .endm |
206 | 206 | ||
207 | #ifdef CONFIG_DEBUG_ALLREGS |
207 | #ifdef CONFIG_DEBUG_ALLREGS |
208 | # define INTERRUPT_ALIGN 256 |
208 | # define INTERRUPT_ALIGN 256 |
209 | #else |
209 | #else |
210 | # define INTERRUPT_ALIGN 128 |
210 | # define INTERRUPT_ALIGN 128 |
211 | #endif |
211 | #endif |
212 | 212 | ||
213 | ## Declare interrupt handlers |
213 | ## Declare interrupt handlers |
214 | # |
214 | # |
215 | # Declare interrupt handlers for n interrupt |
215 | # Declare interrupt handlers for n interrupt |
216 | # vectors starting at vector i. |
216 | # vectors starting at vector i. |
217 | # |
217 | # |
218 | # The handlers call exc_dispatch(). |
218 | # The handlers call exc_dispatch(). |
219 | # |
219 | # |
220 | .macro handler i n |
220 | .macro handler i n |
221 | 221 | ||
222 | /* |
222 | /* |
223 | * Choose between version with error code and version without error code. |
223 | * Choose between version with error code and version without error code. |
224 | * Both versions have to be of the same size. amd64 assembly is, however, |
224 | * Both versions have to be of the same size. amd64 assembly is, however, |
225 | * a little bit tricky. For instance, subq $0x80, %rsp and subq $0x78, %rsp |
225 | * a little bit tricky. For instance, subq $0x80, %rsp and subq $0x78, %rsp |
226 | * can result in two instructions with different op-code lengths. |
226 | * can result in two instructions with different op-code lengths. |
227 | * Therefore we align the interrupt handlers. |
227 | * Therefore we align the interrupt handlers. |
228 | */ |
228 | */ |
229 | 229 | ||
230 | .iflt \i-32 |
230 | .iflt \i-32 |
231 | .if (1 << \i) & ERROR_WORD_INTERRUPT_LIST |
231 | .if (1 << \i) & ERROR_WORD_INTERRUPT_LIST |
232 | /* |
232 | /* |
233 | * Version with error word. |
233 | * Version with error word. |
234 | */ |
234 | */ |
235 | subq $IREGISTER_SPACE, %rsp |
235 | subq $IREGISTER_SPACE, %rsp |
236 | .else |
236 | .else |
237 | /* |
237 | /* |
238 | * Version without error word. |
238 | * Version without error word. |
239 | */ |
239 | */ |
240 | subq $(IREGISTER_SPACE+8), %rsp |
240 | subq $(IREGISTER_SPACE+8), %rsp |
241 | .endif |
241 | .endif |
242 | .else |
242 | .else |
243 | /* |
243 | /* |
244 | * Version without error word. |
244 | * Version without error word. |
245 | */ |
245 | */ |
246 | subq $(IREGISTER_SPACE+8), %rsp |
246 | subq $(IREGISTER_SPACE+8), %rsp |
247 | .endif |
247 | .endif |
248 | 248 | ||
249 | save_all_gpr |
249 | save_all_gpr |
250 | 250 | ||
251 | movq $(\i), %rdi # %rdi - first parameter |
251 | movq $(\i), %rdi # %rdi - first parameter |
252 | movq %rsp, %rsi # %rsi - pointer to istate |
252 | movq %rsp, %rsi # %rsi - pointer to istate |
253 | call exc_dispatch # exc_dispatch(i, istate) |
253 | call exc_dispatch # exc_dispatch(i, istate) |
254 | 254 | ||
255 | restore_all_gpr |
255 | restore_all_gpr |
256 | # $8 = Skip error word |
256 | # $8 = Skip error word |
257 | addq $(IREGISTER_SPACE+8), %rsp |
257 | addq $(IREGISTER_SPACE+8), %rsp |
258 | iretq |
258 | iretq |
259 | 259 | ||
260 | .align INTERRUPT_ALIGN |
260 | .align INTERRUPT_ALIGN |
261 | .if (\n-\i)-1 |
261 | .if (\n-\i)-1 |
262 | handler "(\i+1)",\n |
262 | handler "(\i+1)",\n |
263 | .endif |
263 | .endif |
264 | .endm |
264 | .endm |
265 | 265 | ||
266 | .align INTERRUPT_ALIGN |
266 | .align INTERRUPT_ALIGN |
267 | interrupt_handlers: |
267 | interrupt_handlers: |
268 | h_start: |
268 | h_start: |
269 | handler 0 IDT_ITEMS |
269 | handler 0 IDT_ITEMS |
270 | h_end: |
270 | h_end: |
271 | 271 | ||
272 | 272 | ||
273 | syscall_entry: |
273 | syscall_entry: |
274 | # Switch to hidden gs |
274 | # Switch to hidden gs |
275 | swapgs |
275 | swapgs |
276 | # %gs:0 now points to pointer to stack page |
276 | # %gs:0 now points to pointer to stack page |
277 | mov %gs:0, %r10 # We have a ptr to stack page in r10 |
277 | mov %gs:0, %r10 # We have a ptr to stack page in r10 |
278 | addq $PAGE_SIZE-16, %r10 # We need some space to store old %sp |
278 | addq $PAGE_SIZE-16, %r10 # We need some space to store old %sp |
279 | 279 | ||
280 | movq %rsp, 0(%r10) # Save old stack pointer to stack |
280 | movq %rsp, 0(%r10) # Save old stack pointer to stack |
281 | movq %r10, %rsp # Change to new stack |
281 | movq %r10, %rsp # Change to new stack |
282 | pushq %rcx # Return address |
282 | pushq %rcx # Return address |
283 | pushq %r11 # Save flags |
283 | pushq %r11 # Save flags |
284 | 284 | ||
285 | # Switch back to remain consistent |
285 | # Switch back to remain consistent |
286 | swapgs |
286 | swapgs |
287 | 287 | ||
288 | sti |
288 | sti |
289 | movq %r9, %rcx # Exchange last parameter as a third |
289 | movq %r9, %rcx # Exchange last parameter as a third |
290 | 290 | ||
291 | call syscall_handler |
291 | call syscall_handler |
292 | cli # We will be touching stack pointer |
292 | cli # We will be touching stack pointer |
293 | 293 | ||
294 | popq %r11 |
294 | popq %r11 |
295 | popq %rcx |
295 | popq %rcx |
296 | movq 0(%rsp), %rsp |
296 | movq 0(%rsp), %rsp |
297 | sysretq |
297 | sysretq |
298 | 298 | ||
299 | 299 | ||
300 | .data |
300 | .data |
301 | .global interrupt_handler_size |
301 | .global interrupt_handler_size |
302 | 302 | ||
303 | interrupt_handler_size: .quad (h_end-h_start)/IDT_ITEMS |
303 | interrupt_handler_size: .quad (h_end-h_start)/IDT_ITEMS |
304 | 304 |