#
# Copyright (c) 2005 Ondrej Palkovsky
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
# - The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

#define IREGISTER_SPACE	120

#define IOFFSET_RAX	0x0
#define IOFFSET_RBX	0x8
#define IOFFSET_RCX	0x10
#define IOFFSET_RDX	0x18
#define IOFFSET_RSI	0x20
#define IOFFSET_RDI	0x28
#define IOFFSET_R8	0x30
#define IOFFSET_R9	0x38
#define IOFFSET_R10	0x40
#define IOFFSET_R11	0x48
#define IOFFSET_R12	0x50
#define IOFFSET_R13	0x58
#define IOFFSET_R14	0x60
#define IOFFSET_R15	0x68
#define IOFFSET_RBP	0x70

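#
# The IOFFSET_* values describe the layout of the register save area the
# interrupt handler stubs reserve on the stack: 15 general purpose registers
# of 8 bytes each, which is why IREGISTER_SPACE is 0x78 (120) bytes.
#
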
# Mask for interrupts 0 - 31 (bits 0 - 31): a 0 bit means the interrupt has
# no error word, a 1 bit means the interrupt pushes an error word.
#define ERROR_WORD_INTERRUPT_LIST 0x00027D00
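#
# For reference, the vectors that push an error word on amd64 are 8 (#DF),
# 10 (#TS), 11 (#NP), 12 (#SS), 13 (#GP), 14 (#PF) and 17 (#AC), hence:
#
#   (1 << 8) | (1 << 10) | (1 << 11) | (1 << 12) | (1 << 13) | (1 << 14) |
#   (1 << 17) = 0x00027D00
#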

#include <arch/pm.h>
#include <arch/mm/page.h>

.text
.global interrupt_handlers
.global syscall_entry
.global panic_printf

panic_printf:
	movq $halt, (%rsp)	# overwrite the return address with halt,
	jmp printf		# so that printf() "returns" into halt()

.global cpuid
.global has_cpuid
.global get_cycle
.global read_efer_flag
.global set_efer_flag
.global memsetb
.global memsetw
.global memcpy
.global memcpy_from_uspace
.global memcpy_to_uspace
.global memcpy_from_uspace_failover_address
.global memcpy_to_uspace_failover_address

# Wrapper for generic memsetb
memsetb:
	jmp _memsetb

# Wrapper for generic memsetw
memsetw:
	jmp _memsetw

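#
# The destination, source and size arguments arrive in %rdi, %rsi and %rdx,
# the first three argument registers of the SysV AMD64 ABI; %rdi and %rsi are
# also exactly the registers the rep movs instructions below operate on.
#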
#define MEMCPY_DST	%rdi
#define MEMCPY_SRC	%rsi
#define MEMCPY_SIZE	%rdx

/**
 * Copy memory from/to userspace.
 *
 * This is almost a conventional memcpy().
 * The difference is that there is a failover part
 * to which control is returned from the page fault
 * handler if a page fault occurs during
 * copy_from_uspace() or copy_to_uspace().
 *
 * @param MEMCPY_DST	Destination address.
 * @param MEMCPY_SRC	Source address.
 * @param MEMCPY_SIZE	Number of bytes to copy.
 *
 * @return MEMCPY_DST on success, 0 on failure.
 */
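/*
 * The copy is done in two passes; for example, with MEMCPY_SIZE = 27 the
 * rep movsq pass copies 3 quadwords (24 bytes) and the rep movsb pass copies
 * the remaining 3 bytes. If a page fault occurs inside the copy, the page
 * fault path is expected to resume execution at the failover address below
 * rather than retry the faulting access.
 */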
memcpy:
memcpy_from_uspace:
memcpy_to_uspace:
	movq MEMCPY_DST, %rax

	movq MEMCPY_SIZE, %rcx
	shrq $3, %rcx			/* size / 8 */

	rep movsq			/* copy as much as possible word by word */

	movq MEMCPY_SIZE, %rcx
	andq $7, %rcx			/* size % 8 */
	jz 0f

	rep movsb			/* copy the rest byte by byte */

0:
	ret				/* return MEMCPY_DST, success */

memcpy_from_uspace_failover_address:
memcpy_to_uspace_failover_address:
	xorq %rax, %rax			/* return 0, failure */
	ret

## Determine CPUID support
#
# Return 0 in EAX if CPUID is not supported, non-zero (the ID bit) if it is.
#
has_cpuid:
	pushfq			# store flags
	popq %rax		# read flags
	movq %rax, %rdx		# copy flags
	btcl $21, %edx		# toggle the ID bit
	pushq %rdx
	popfq			# propagate the change into flags
	pushfq
	popq %rdx		# read flags
	andl $(1 << 21), %eax	# interested only in the ID bit
	andl $(1 << 21), %edx
	xorl %edx, %eax		# zero if not supported, non-zero if supported
	ret

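## Execute the CPUID instruction
#
# The leaf to query is passed in %edi; the resulting EAX, EBX, ECX and EDX
# are stored, in that order, into the four 32-bit slots of the buffer pointed
# to by %rsi (presumably a cpu_info_t-like structure on the C side).
#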
cpuid:
	movq %rbx, %r10		# we have to preserve rbx across function calls

	movl %edi, %eax		# load the command into %eax

	cpuid
	movl %eax, 0(%rsi)
	movl %ebx, 4(%rsi)
	movl %ecx, 8(%rsi)
	movl %edx, 12(%rsi)

	movq %r10, %rbx
	ret

get_cycle:
	rdtsc			# TSC is returned in %edx:%eax
	shlq $32, %rdx
	orq %rdx, %rax		# assemble the full 64-bit value in %rax
	ret

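#
# EFER is the Extended Feature Enable Register, MSR 0xc0000080; it holds,
# among others, the SCE (syscall enable), LME (long mode enable) and NXE
# bits. set_efer_flag sets the bit whose index is passed in %edi,
# read_efer_flag returns the lower half of the register in %eax.
#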
set_efer_flag:
	movq $0xc0000080, %rcx
	rdmsr
	btsl %edi, %eax
	wrmsr
	ret

read_efer_flag:
	movq $0xc0000080, %rcx
	rdmsr
	ret

# Save the scratch general purpose registers on the stack (all except %rsp).
# The callee-saved registers (%rbx, %rbp, %r12 - %r15) are preserved by the
# C code called from the handlers, so they are stored only when
# CONFIG_DEBUG_ALLREGS is enabled.
.macro save_all_gpr
	movq %rax, IOFFSET_RAX(%rsp)
	movq %rcx, IOFFSET_RCX(%rsp)
	movq %rdx, IOFFSET_RDX(%rsp)
	movq %rsi, IOFFSET_RSI(%rsp)
	movq %rdi, IOFFSET_RDI(%rsp)
	movq %r8, IOFFSET_R8(%rsp)
	movq %r9, IOFFSET_R9(%rsp)
	movq %r10, IOFFSET_R10(%rsp)
	movq %r11, IOFFSET_R11(%rsp)
#ifdef CONFIG_DEBUG_ALLREGS
	movq %rbx, IOFFSET_RBX(%rsp)
	movq %rbp, IOFFSET_RBP(%rsp)
	movq %r12, IOFFSET_R12(%rsp)
	movq %r13, IOFFSET_R13(%rsp)
	movq %r14, IOFFSET_R14(%rsp)
	movq %r15, IOFFSET_R15(%rsp)
#endif
.endm

# Restore the registers saved by save_all_gpr.
.macro restore_all_gpr
	movq IOFFSET_RAX(%rsp), %rax
	movq IOFFSET_RCX(%rsp), %rcx
	movq IOFFSET_RDX(%rsp), %rdx
	movq IOFFSET_RSI(%rsp), %rsi
	movq IOFFSET_RDI(%rsp), %rdi
	movq IOFFSET_R8(%rsp), %r8
	movq IOFFSET_R9(%rsp), %r9
	movq IOFFSET_R10(%rsp), %r10
	movq IOFFSET_R11(%rsp), %r11
#ifdef CONFIG_DEBUG_ALLREGS
	movq IOFFSET_RBX(%rsp), %rbx
	movq IOFFSET_RBP(%rsp), %rbp
	movq IOFFSET_R12(%rsp), %r12
	movq IOFFSET_R13(%rsp), %r13
	movq IOFFSET_R14(%rsp), %r14
	movq IOFFSET_R15(%rsp), %r15
#endif
.endm

#ifdef CONFIG_DEBUG_ALLREGS
# define INTERRUPT_ALIGN 256
#else
# define INTERRUPT_ALIGN 128
#endif

## Declare interrupt handlers
#
# Declare interrupt handlers for n interrupt
# vectors starting at vector i.
#
# The handlers call exc_dispatch().
#
.macro handler i n

	/*
	 * Choose between the version with an error code and the version
	 * without one. Both versions have to be of the same size. amd64
	 * assembly is, however, a little bit tricky. For instance,
	 * subq $0x80, %rsp and subq $0x78, %rsp can result in two
	 * instructions with different op-code lengths.
	 * Therefore we align the interrupt handlers.
	 */

	.iflt \i-32
		.if (1 << \i) & ERROR_WORD_INTERRUPT_LIST
			/*
			 * Version with error word.
			 */
			subq $IREGISTER_SPACE, %rsp
		.else
			/*
			 * Version without error word.
			 */
			subq $(IREGISTER_SPACE+8), %rsp
		.endif
	.else
		/*
		 * Version without error word.
		 */
		subq $(IREGISTER_SPACE+8), %rsp
	.endif

	save_all_gpr
	cld

	movq $(\i), %rdi	# %rdi - first parameter
	movq %rsp, %rsi		# %rsi - pointer to istate
	call exc_dispatch	# exc_dispatch(i, istate)

	restore_all_gpr
	# $8 = skip the error word (pushed by the CPU or reserved above)
	addq $(IREGISTER_SPACE+8), %rsp
	iretq

	.align INTERRUPT_ALIGN
	.if (\n-\i)-1
		handler "(\i+1)",\n
	.endif
.endm


.align INTERRUPT_ALIGN
interrupt_handlers:
h_start:
	handler 0 IDT_ITEMS
h_end:

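#
# Each handler stub above is padded to INTERRUPT_ALIGN bytes, so the stub for
# vector i sits at interrupt_handlers + i * interrupt_handler_size (see the
# .data section at the end of this file), which is presumably how the IDT
# setup code locates it.
#
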
## Low-level syscall handler
#
# Registers on entry:
#
# @param rcx		Userspace return address.
# @param r11		Userspace RFLAGS.
#
# @param rax		Syscall number.
# @param rdi		1st syscall argument.
# @param rsi		2nd syscall argument.
# @param rdx		3rd syscall argument.
# @param r10		4th syscall argument. Used instead of RCX because
#			the SYSCALL instruction clobbers it.
# @param r8		5th syscall argument.
# @param r9		6th syscall argument.
#
# @return		Return value is in rax.
#
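#
# For illustration, a userspace stub following this convention might look
# like the sketch below (SYS_FOO and the argN values are hypothetical):
#
#	movq $SYS_FOO, %rax	# syscall number
#	movq arg1, %rdi
#	movq arg2, %rsi
#	movq arg3, %rdx
#	movq arg4, %r10		# 4th argument goes in %r10, not %rcx
#	movq arg5, %r8
#	movq arg6, %r9
#	syscall			# %rcx := return RIP, %r11 := RFLAGS
#				# result comes back in %rax
#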
syscall_entry:
	swapgs			# Switch to the hidden gs
	#
	# %gs:0	Scratch space for this thread's user RSP
	# %gs:8	Address to be used as this thread's kernel RSP
	#
	movq %rsp, %gs:0	# Save this thread's user RSP
	movq %gs:8, %rsp	# Set this thread's kernel RSP
	swapgs			# Switch back to remain consistent
	sti

	pushq %rcx
	pushq %r11

	movq %r10, %rcx		# Copy the 4th argument where the C code expects it
	pushq %rax		# Pass the syscall number on the stack
				# (the 7th argument position in the C ABI)
	call syscall_handler
	addq $8, %rsp

	popq %r11
	popq %rcx

	cli
	swapgs
	movq %gs:0, %rsp	# Restore the user RSP
	swapgs

	sysretq

.data
.global interrupt_handler_size

interrupt_handler_size: .quad (h_end - h_start) / IDT_ITEMS