/*
 * NOTE(review): SVN web-viewer navigation chrome removed from this
 * extraction (revision 1831, diffed against rev 1840).
 */
1 | /* |
1 | /* |
2 | * Copyright (C) 2001-2004 Jakub Jermar |
2 | * Copyright (C) 2001-2004 Jakub Jermar |
3 | * Copyright (C) 2005 Sergey Bondari |
3 | * Copyright (C) 2005 Sergey Bondari |
- | 4 | * Copyright (C) 2006 Martin Decky |
|
4 | * All rights reserved. |
5 | * All rights reserved. |
5 | * |
6 | * |
6 | * Redistribution and use in source and binary forms, with or without |
7 | * Redistribution and use in source and binary forms, with or without |
7 | * modification, are permitted provided that the following conditions |
8 | * modification, are permitted provided that the following conditions |
8 | * are met: |
9 | * are met: |
9 | * |
10 | * |
10 | * - Redistributions of source code must retain the above copyright |
11 | * - Redistributions of source code must retain the above copyright |
11 | * notice, this list of conditions and the following disclaimer. |
12 | * notice, this list of conditions and the following disclaimer. |
12 | * - Redistributions in binary form must reproduce the above copyright |
13 | * - Redistributions in binary form must reproduce the above copyright |
13 | * notice, this list of conditions and the following disclaimer in the |
14 | * notice, this list of conditions and the following disclaimer in the |
14 | * documentation and/or other materials provided with the distribution. |
15 | * documentation and/or other materials provided with the distribution. |
15 | * - The name of the author may not be used to endorse or promote products |
16 | * - The name of the author may not be used to endorse or promote products |
16 | * derived from this software without specific prior written permission. |
17 | * derived from this software without specific prior written permission. |
17 | * |
18 | * |
18 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
19 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
19 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
20 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
20 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
21 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
21 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
22 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
22 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
23 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
23 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
24 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
24 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
25 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
25 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
26 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
26 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
27 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
27 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
28 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
28 | */ |
29 | */ |
29 | 30 | ||
30 | /** @addtogroup xen32 |
31 | /** @addtogroup xen32 |
31 | * @{ |
32 | * @{ |
32 | */ |
33 | */ |
33 | /** @file |
34 | /** @file |
34 | */ |
35 | */ |
35 | 36 | ||
36 | #ifndef __xen32_ASM_H__ |
37 | #ifndef __xen32_ASM_H__ |
37 | #define __xen32_ASM_H__ |
38 | #define __xen32_ASM_H__ |
38 | 39 | ||
39 | #include <arch/pm.h> |
40 | #include <arch/pm.h> |
40 | #include <arch/types.h> |
41 | #include <arch/types.h> |
41 | #include <arch/barrier.h> |
42 | #include <arch/barrier.h> |
42 | #include <config.h> |
43 | #include <config.h> |
43 | 44 | ||
44 | extern void enable_l_apic_in_msr(void); |
45 | extern void enable_l_apic_in_msr(void); |
45 | 46 | ||
46 | 47 | ||
47 | extern void asm_delay_loop(uint32_t t); |
48 | extern void asm_delay_loop(uint32_t t); |
48 | extern void asm_fake_loop(uint32_t t); |
49 | extern void asm_fake_loop(uint32_t t); |
49 | 50 | ||
50 | 51 | ||
/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 *
 * NOTE(review): both operations are deliberate no-ops here —
 * presumably because a paravirtualized Xen guest may not execute
 * the privileged hlt instruction directly; confirm against the
 * port's scheduler/idle code.
 */
#define cpu_halt() ((void) 0)
#define cpu_sleep() ((void) 0)
57 | 58 | ||
/** Generate a reader for a control/debug register.
 *
 * Expands to a static inline function read_<reg>(void) that returns
 * the current value of %<reg> via a movl into a general register.
 */
#define GEN_READ_REG(reg) static inline unative_t read_ ##reg (void) \
{ \
	unative_t res; \
	__asm__ volatile ("movl %%" #reg ", %0" : "=r" (res) ); \
	return res; \
}

/** Generate a writer for a control/debug register.
 *
 * Expands to a static inline function write_<reg>(unative_t) that
 * moves its argument into %<reg>.
 */
#define GEN_WRITE_REG(reg) static inline void write_ ##reg (unative_t regn) \
{ \
	__asm__ volatile ("movl %0, %%" #reg : : "r" (regn)); \
}

/* Control registers: read-only accessors (cr0 flags, cr2 fault address). */
GEN_READ_REG(cr0);
GEN_READ_REG(cr2);

/* Debug registers: readers... */
GEN_READ_REG(dr0);
GEN_READ_REG(dr1);
GEN_READ_REG(dr2);
GEN_READ_REG(dr3);
GEN_READ_REG(dr6);
GEN_READ_REG(dr7);

/* ...and writers. */
GEN_WRITE_REG(dr0);
GEN_WRITE_REG(dr1);
GEN_WRITE_REG(dr2);
GEN_WRITE_REG(dr3);
GEN_WRITE_REG(dr6);
GEN_WRITE_REG(dr7);
86 | 87 | ||
/** Byte to port
 *
 * Output byte to port.
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outb(uint16_t port, uint8_t val)
{
	/* "a"/"d" pin val into %al and port into %dx, as outb requires. */
	__asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) );
}
95 | 96 | ||
/** Word to port
 *
 * Output word to port.
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outw(uint16_t port, uint16_t val)
{
	/* "a"/"d" pin val into %ax and port into %dx, as outw requires. */
	__asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) );
}
104 | 105 | ||
/** Double word to port
 *
 * Output double word to port.
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outl(uint16_t port, uint32_t val)
{
	/*
	 * Fixed: the original used "outl %l0, %w1".  'l' is not a valid
	 * i386 operand modifier in GCC extended asm (it is reserved for
	 * label operands) and current compilers reject it.  Plain %0
	 * already names the full 32-bit register (%eax) selected by the
	 * "a" constraint.
	 */
	__asm__ volatile ("outl %0, %w1\n" : : "a" (val), "d" (port) );
}
113 | 114 | ||
/** Byte from port
 *
 * Get byte from port.
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint8_t inb(uint16_t port)
{
	uint8_t val;
	
	/* inb reads from the port in %dx into %al ("=a"). */
	__asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) );
	return val;
}
122 | 123 | ||
/** Word from port
 *
 * Get word from port.
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint16_t inw(uint16_t port)
{
	uint16_t val;
	
	/* inw reads from the port in %dx into %ax ("=a"). */
	__asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) );
	return val;
}
131 | 132 | ||
/** Double word from port
 *
 * Get double word from port.
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint32_t inl(uint16_t port)
{
	uint32_t val;
	
	/*
	 * Fixed: the original used "inl %w1, %l0".  'l' is not a valid
	 * i386 operand modifier in GCC extended asm (it is reserved for
	 * label operands) and current compilers reject it.  Plain %0
	 * already names the full 32-bit register (%eax) selected by the
	 * "=a" constraint.
	 */
	__asm__ volatile ("inl %w1, %0 \n" : "=a" (val) : "d" (port) );
	return val;
}
140 | 141 | ||
/** Enable interrupts.
 *
 * Enable interrupts and return previous
 * value of EFLAGS.
 *
 * NOTE(review): in this Xen port, "interrupt state" is the per-VCPU
 * event-channel upcall mask in the shared info page, not the real
 * EFLAGS.IF bit; the returned value is the old mask (nonzero = masked).
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void)
{
	// FIXME SMP
	
	/* Save the old mask, then clear it.  The write barriers order the
	   mask update against the subsequent pending-upcall check so that
	   an event raised while masked is not missed. */
	ipl_t v = shared_info.vcpu_info[0].evtchn_upcall_mask;
	write_barrier();
	shared_info.vcpu_info[0].evtchn_upcall_mask = 0;
	write_barrier();
	/* Deliver any event that became pending while upcalls were masked. */
	if (shared_info.vcpu_info[0].evtchn_upcall_pending)
		force_evtchn_callback();
	
	return v;
}
161 | 162 | ||
/** Disable interrupts.
 *
 * Disable interrupts and return previous
 * value of EFLAGS.
 *
 * NOTE(review): as with interrupts_enable(), this manipulates the
 * per-VCPU event-channel upcall mask, not the hardware EFLAGS.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void)
{
	// FIXME SMP
	
	/* Save the old mask, set it, and make the store visible before
	   returning to the (now interrupt-protected) caller. */
	ipl_t v = shared_info.vcpu_info[0].evtchn_upcall_mask;
	shared_info.vcpu_info[0].evtchn_upcall_mask = 1;
	write_barrier();
	
	return v;
}
179 | 180 | ||
180 | /** Restore interrupt priority level. |
181 | /** Restore interrupt priority level. |
181 | * |
182 | * |
182 | * Restore EFLAGS. |
183 | * Restore EFLAGS. |
183 | * |
184 | * |
184 | * @param ipl Saved interrupt priority level. |
185 | * @param ipl Saved interrupt priority level. |
185 | */ |
186 | */ |
186 | static inline void interrupts_restore(ipl_t ipl) |
187 | static inline void interrupts_restore(ipl_t ipl) |
187 | { |
188 | { |
188 | if (ipl == 0) |
189 | if (ipl == 0) |
189 | interrupts_enable(); |
190 | interrupts_enable(); |
190 | else |
191 | else |
191 | interrupts_disable(); |
192 | interrupts_disable(); |
192 | } |
193 | } |
193 | 194 | ||
/** Return interrupt priority level.
 *
 * NOTE(review): returns the per-VCPU event-channel upcall mask
 * (nonzero = interrupts/upcalls masked), standing in for EFLAGS
 * in this paravirtualized port.
 *
 * @return EFLAGS.
 */
static inline ipl_t interrupts_read(void)
{
	// FIXME SMP
	
	return shared_info.vcpu_info[0].evtchn_upcall_mask;
}
204 | 205 | ||
/** Return base address of current stack
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
	uintptr_t v;
	
	/* The "0" input constraint seeds the output register with the mask
	   ~(STACK_SIZE - 1); andl with %esp then clears the offset bits,
	   yielding the STACK_SIZE-aligned base.  Requires STACK_SIZE to be
	   a power of two. */
	__asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
	
	return v;
}
219 | 220 | ||
/** Read the CPU time-stamp counter.
 *
 * @return Current 64-bit TSC value; the "=A" constraint binds the
 *         edx:eax register pair that the rdtsc instruction writes.
 */
static inline uint64_t rdtsc(void)
{
	uint64_t v;
	
	__asm__ volatile("rdtsc\n" : "=A" (v));
	
	return v;
}
228 | 229 | ||
/** Return current IP address
 *
 * Fixed: IA-32 has no architecturally addressable instruction pointer,
 * so the original "mov %%eip, %0" is not a valid instruction and does
 * not assemble.  The conventional way to read EIP is a near call whose
 * pushed return address is immediately popped.
 *
 * @return Address of the instruction following the call/pop sequence.
 */
static inline uintptr_t * get_ip(void)
{
	uintptr_t *ip;
	
	__asm__ volatile (
		"call 0f\n"
		"0:\n"
		"\tpop %0\n"
		: "=r" (ip)
	);
	return ip;
}
240 | 241 | ||
/** Invalidate TLB Entry.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
 */
static inline void invlpg(uintptr_t addr)
{
	/* The "m" constraint presents a memory operand at addr; invlpg
	   flushes the TLB entry covering that linear address.
	   NOTE(review): invlpg is privileged — confirm how the Xen domain
	   is permitted to issue it (direct ring privilege or trap). */
	__asm__ volatile ("invlpg %0\n" :: "m" (*(unative_t *)addr));
}
249 | 250 | ||
/** Load GDTR register from memory.
 *
 * @param gdtr_reg Address of memory from where to load GDTR.
 */
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
{
	/* lgdtl takes a 6-byte memory operand: 16-bit limit + 32-bit base. */
	__asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
}
258 | 259 | ||
/** Store GDTR register to memory.
 *
 * @param gdtr_reg Address of memory to where to load GDTR.
 */
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
{
	/* sgdtl writes the current limit+base pair to the given memory. */
	__asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
}
267 | 268 | ||
/** Load TR from descriptor table.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 */
static inline void tr_load(uint16_t sel)
{
	/* sel is 16-bit, so %0 resolves to a 16-bit register name,
	   matching the operand size ltr expects. */
	__asm__ volatile ("ltr %0" : : "r" (sel));
}
276 | 277 | ||
277 | #endif |
278 | #endif |
278 | 279 | ||
279 | /** @} |
280 | /** @} |
280 | */ |
281 | */ |
281 | 282 |