/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __amd64_ASM_H__
#define __amd64_ASM_H__

#include <arch/types.h>
#include <config.h>

extern void asm_delay_loop(__u32 t);
extern void asm_fake_loop(__u32 t);

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 */
static inline __address get_stack_base(void)
{
	__address v;
	
	__asm__ volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((__u64)STACK_SIZE-1)));
	
	return v;
}
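
/*
 * Illustrative note (not part of the original header): get_stack_base() relies
 * on the stack being STACK_SIZE bytes long and STACK_SIZE-aligned, so masking
 * RSP with ~(STACK_SIZE - 1) yields the stack base. A minimal sketch of the
 * same arithmetic, assuming a hypothetical STACK_SIZE of 0x4000 (16 KiB):
 *
 *	__u64 rsp  = 0xffff800000127f30ULL;        // example stack pointer
 *	__u64 base = rsp & ~((__u64)0x4000 - 1);   // yields 0xffff800000124000
 */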

static inline void cpu_sleep(void) { __asm__ volatile ("hlt\n"); }
static inline void cpu_halt(void) { __asm__ volatile ("hlt\n"); }

/** Byte from port
 *
 * Get byte from port
 *
 * @param port Port to read from
 * @return Value read
 */
static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; }

/** Byte to port
 *
 * Output byte to port
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); }
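
/*
 * Illustrative usage sketch (not part of the original header): inb()/outb()
 * wrap the x86 IN/OUT instructions for port-mapped I/O. The port number below
 * (0x60, the legacy PS/2 keyboard data port) is only an example.
 *
 *	__u8 scancode = inb(0x60);	// read a byte from an I/O port
 *	outb(0x60, 0xf4);		// write a byte to the same port
 */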

/** Swap hidden part of GS register with the visible one */
static inline void swapgs(void) { __asm__ volatile("swapgs"); }

/** Enable interrupts.
 *
 * Enable interrupts and return the previous value of RFLAGS.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void) {
	ipl_t v;
	__asm__ volatile (
		"pushfq\n"
		"popq %0\n"
		"sti\n"
		: "=r" (v)
	);
	return v;
}

/** Disable interrupts.
 *
 * Disable interrupts and return the previous value of RFLAGS.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void) {
	ipl_t v;
	__asm__ volatile (
		"pushfq\n"
		"popq %0\n"
		"cli\n"
		: "=r" (v)
	);
	return v;
}

/** Restore interrupt priority level.
 *
 * Restore RFLAGS.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl) {
	__asm__ volatile (
		"pushq %0\n"
		"popfq\n"
		: : "r" (ipl)
	);
}
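
/*
 * Illustrative usage sketch (not part of the original header): the intended
 * pattern is to save and disable interrupts around a critical section and
 * then restore the saved priority level:
 *
 *	ipl_t ipl = interrupts_disable();
 *	... critical section ...
 *	interrupts_restore(ipl);
 */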

/** Return interrupt priority level.
 *
 * Return RFLAGS.
 *
 * @return Current interrupt priority level.
 */
static inline ipl_t interrupts_read(void) {
	ipl_t v;
	__asm__ volatile (
		"pushfq\n"
		"popq %0\n"
		: "=r" (v)
	);
	return v;
}

/** Read CR0
 *
 * Return value in CR0
 *
 * @return Value read.
 */
static inline __u64 read_cr0(void)
{
	__u64 v;
	__asm__ volatile ("movq %%cr0,%0\n" : "=r" (v));
	return v;
}

/** Read CR2
 *
 * Return value in CR2
 *
 * @return Value read.
 */
static inline __u64 read_cr2(void)
{
	__u64 v;
	__asm__ volatile ("movq %%cr2,%0\n" : "=r" (v));
	return v;
}

/** Write CR3
 *
 * Write value to CR3.
 *
 * @param v Value to be written.
 */
static inline void write_cr3(__u64 v)
{
	__asm__ volatile ("movq %0,%%cr3\n" : : "r" (v));
}

/** Read CR3
 *
 * Return value in CR3
 *
 * @return Value read.
 */
static inline __u64 read_cr3(void)
{
	__u64 v;
	__asm__ volatile ("movq %%cr3,%0" : "=r" (v));
	return v;
}
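
/*
 * Illustrative usage sketch (not part of the original header): on amd64, CR3
 * holds the physical address of the root page table, so writing CR3 switches
 * the address space, and rewriting its current value flushes all non-global
 * TLB entries:
 *
 *	write_cr3(read_cr3());		// flush non-global TLB entries
 */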

/** Write to MSR */
static inline void write_msr(__u32 msr, __u64 value)
{
	__asm__ volatile (
		"wrmsr;" : : "c" (msr),
		"a" ((__u32)(value)),
		"d" ((__u32)(value >> 32))
	);
}

/** Read from MSR */
static inline __native read_msr(__u32 msr)
{
	__u32 ax, dx;
	
	__asm__ volatile (
		"rdmsr;" : "=a"(ax), "=d"(dx) : "c" (msr)
	);
	return ((__u64)dx << 32) | ax;
}
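
/*
 * Illustrative usage sketch (not part of the original header): WRMSR/RDMSR
 * transfer the 64-bit MSR value split across EDX:EAX, which the wrappers
 * above hide. The MSR number 0xc0000100 (IA32_FS_BASE) is used here only as
 * an example:
 *
 *	write_msr(0xc0000100, 0xffff800000100000ULL);
 *	__u64 fs_base = read_msr(0xc0000100);
 */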

/** Enable local APIC
 *
 * Enable local APIC in MSR.
 */
static inline void enable_l_apic_in_msr(void)
{
	__asm__ volatile (
		"movl $0x1b, %%ecx\n"		/* IA32_APIC_BASE MSR */
		"rdmsr\n"
		"orl $(1<<11),%%eax\n"		/* set the APIC global enable bit */
		"orl $(0xfee00000),%%eax\n"	/* default local APIC base address */
		"wrmsr\n"
		:
		:
		: "%eax", "%ecx", "%edx"
	);
}

/** Return current instruction pointer. */
static inline __address * get_ip(void)
{
	__address *ip;
	
	__asm__ volatile (
		/* RIP cannot be read with mov; a RIP-relative lea yields the address of the next instruction */
		"leaq 0(%%rip), %0"
		: "=r" (ip)
	);
	return ip;
}

/** Invalidate TLB Entry.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
 */
static inline void invlpg(__address addr)
{
	__asm__ volatile ("invlpg %0\n" :: "m" (*((__native *)addr)));
}
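
/*
 * Illustrative usage sketch (not part of the original header): after a page
 * table entry is modified, the stale translation for that page must be
 * invalidated before the new mapping is guaranteed to be used:
 *
 *	... remap 'page' in the page tables, then:
 *	invlpg(page);			// 'page' is any address within the page
 */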

extern size_t interrupt_handler_size;
extern void interrupt_handlers(void);

#endif
173 | jermar | 256 | #endif |