Rev 1251 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1251 | Rev 1702 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright (C) 2001-2004 Jakub Jermar |
2 | * Copyright (C) 2001-2004 Jakub Jermar |
3 | * Copyright (C) 2005 Sergey Bondari |
3 | * Copyright (C) 2005 Sergey Bondari |
4 | * All rights reserved. |
4 | * All rights reserved. |
5 | * |
5 | * |
6 | * Redistribution and use in source and binary forms, with or without |
6 | * Redistribution and use in source and binary forms, with or without |
7 | * modification, are permitted provided that the following conditions |
7 | * modification, are permitted provided that the following conditions |
8 | * are met: |
8 | * are met: |
9 | * |
9 | * |
10 | * - Redistributions of source code must retain the above copyright |
10 | * - Redistributions of source code must retain the above copyright |
11 | * notice, this list of conditions and the following disclaimer. |
11 | * notice, this list of conditions and the following disclaimer. |
12 | * - Redistributions in binary form must reproduce the above copyright |
12 | * - Redistributions in binary form must reproduce the above copyright |
13 | * notice, this list of conditions and the following disclaimer in the |
13 | * notice, this list of conditions and the following disclaimer in the |
14 | * documentation and/or other materials provided with the distribution. |
14 | * documentation and/or other materials provided with the distribution. |
15 | * - The name of the author may not be used to endorse or promote products |
15 | * - The name of the author may not be used to endorse or promote products |
16 | * derived from this software without specific prior written permission. |
16 | * derived from this software without specific prior written permission. |
17 | * |
17 | * |
18 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
18 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
19 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
19 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
20 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
20 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
21 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
21 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
22 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
22 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
23 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
24 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
25 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
26 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
27 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
28 | */ |
28 | */ |
29 | 29 | ||
- | 30 | /** @addtogroup ia32 |
|
- | 31 | * @{ |
|
- | 32 | */ |
|
- | 33 | /** @file |
|
- | 34 | */ |
|
- | 35 | ||
30 | #ifndef __ia32_ASM_H__ |
36 | #ifndef __ia32_ASM_H__ |
31 | #define __ia32_ASM_H__ |
37 | #define __ia32_ASM_H__ |
32 | 38 | ||
33 | #include <arch/pm.h> |
39 | #include <arch/pm.h> |
34 | #include <arch/types.h> |
40 | #include <arch/types.h> |
35 | #include <config.h> |
41 | #include <config.h> |
36 | 42 | ||
37 | extern __u32 interrupt_handler_size; |
43 | extern __u32 interrupt_handler_size; |
38 | 44 | ||
39 | extern void paging_on(void); |
45 | extern void paging_on(void); |
40 | 46 | ||
41 | extern void interrupt_handlers(void); |
47 | extern void interrupt_handlers(void); |
42 | 48 | ||
43 | extern void enable_l_apic_in_msr(void); |
49 | extern void enable_l_apic_in_msr(void); |
44 | 50 | ||
45 | 51 | ||
46 | extern void asm_delay_loop(__u32 t); |
52 | extern void asm_delay_loop(__u32 t); |
47 | extern void asm_fake_loop(__u32 t); |
53 | extern void asm_fake_loop(__u32 t); |
48 | 54 | ||
49 | 55 | ||
/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 */
static inline void cpu_halt(void)
{
	/* hlt resumes on the next (unmasked) interrupt. */
	__asm__ volatile ("hlt\n");
}

/** Sleep CPU
 *
 * Sleep the current CPU until interrupt event.
 * On ia32 this is the same hlt instruction as cpu_halt().
 */
static inline void cpu_sleep(void)
{
	__asm__ volatile ("hlt\n");
}
56 | 62 | ||
/** Generate an accessor that reads privileged register @p reg.
 *
 * Expands to a static inline function read_<reg>(void) that moves
 * the register's current value into a general-purpose register and
 * returns it as __native.
 */
#define GEN_READ_REG(reg) static inline __native read_ ##reg (void) \
    { \
	__native res; \
	__asm__ volatile ("movl %%" #reg ", %0" : "=r" (res) ); \
	return res; \
    }
63 | 69 | ||
/** Generate an accessor that writes privileged register @p reg.
 *
 * Expands to a static inline function write_<reg>(__native regn)
 * that loads @p regn into the register.
 */
#define GEN_WRITE_REG(reg) static inline void write_ ##reg (__native regn) \
    { \
	__asm__ volatile ("movl %0, %%" #reg : : "r" (regn)); \
    }
68 | 74 | ||
/* Control register accessors: read_cr0(), read_cr2(), read_cr3(), write_cr3(). */
GEN_READ_REG(cr0);
GEN_READ_REG(cr2);
GEN_READ_REG(cr3);
GEN_WRITE_REG(cr3);

/* Debug register read accessors for DR0-DR3, DR6 and DR7
 * (DR4/DR5 are reserved on ia32). */
GEN_READ_REG(dr0);
GEN_READ_REG(dr1);
GEN_READ_REG(dr2);
GEN_READ_REG(dr3);
GEN_READ_REG(dr6);
GEN_READ_REG(dr7);

/* Debug register write accessors for DR0-DR3, DR6 and DR7. */
GEN_WRITE_REG(dr0);
GEN_WRITE_REG(dr1);
GEN_WRITE_REG(dr2);
GEN_WRITE_REG(dr3);
GEN_WRITE_REG(dr6);
GEN_WRITE_REG(dr7);
87 | 93 | ||
88 | /** Byte to port |
94 | /** Byte to port |
89 | * |
95 | * |
90 | * Output byte to port |
96 | * Output byte to port |
91 | * |
97 | * |
92 | * @param port Port to write to |
98 | * @param port Port to write to |
93 | * @param val Value to write |
99 | * @param val Value to write |
94 | */ |
100 | */ |
95 | static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); } |
101 | static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); } |
96 | 102 | ||
97 | /** Word to port |
103 | /** Word to port |
98 | * |
104 | * |
99 | * Output word to port |
105 | * Output word to port |
100 | * |
106 | * |
101 | * @param port Port to write to |
107 | * @param port Port to write to |
102 | * @param val Value to write |
108 | * @param val Value to write |
103 | */ |
109 | */ |
104 | static inline void outw(__u16 port, __u16 val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); } |
110 | static inline void outw(__u16 port, __u16 val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); } |
105 | 111 | ||
106 | /** Double word to port |
112 | /** Double word to port |
107 | * |
113 | * |
108 | * Output double word to port |
114 | * Output double word to port |
109 | * |
115 | * |
110 | * @param port Port to write to |
116 | * @param port Port to write to |
111 | * @param val Value to write |
117 | * @param val Value to write |
112 | */ |
118 | */ |
113 | static inline void outl(__u16 port, __u32 val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); } |
119 | static inline void outl(__u16 port, __u32 val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); } |
114 | 120 | ||
115 | /** Byte from port |
121 | /** Byte from port |
116 | * |
122 | * |
117 | * Get byte from port |
123 | * Get byte from port |
118 | * |
124 | * |
119 | * @param port Port to read from |
125 | * @param port Port to read from |
120 | * @return Value read |
126 | * @return Value read |
121 | */ |
127 | */ |
122 | static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; } |
128 | static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; } |
123 | 129 | ||
124 | /** Word from port |
130 | /** Word from port |
125 | * |
131 | * |
126 | * Get word from port |
132 | * Get word from port |
127 | * |
133 | * |
128 | * @param port Port to read from |
134 | * @param port Port to read from |
129 | * @return Value read |
135 | * @return Value read |
130 | */ |
136 | */ |
131 | static inline __u16 inw(__u16 port) { __u16 val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; } |
137 | static inline __u16 inw(__u16 port) { __u16 val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; } |
132 | 138 | ||
133 | /** Double word from port |
139 | /** Double word from port |
134 | * |
140 | * |
135 | * Get double word from port |
141 | * Get double word from port |
136 | * |
142 | * |
137 | * @param port Port to read from |
143 | * @param port Port to read from |
138 | * @return Value read |
144 | * @return Value read |
139 | */ |
145 | */ |
140 | static inline __u32 inl(__u16 port) { __u32 val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; } |
146 | static inline __u32 inl(__u16 port) { __u32 val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; } |
141 | 147 | ||
142 | /** Enable interrupts. |
148 | /** Enable interrupts. |
143 | * |
149 | * |
144 | * Enable interrupts and return previous |
150 | * Enable interrupts and return previous |
145 | * value of EFLAGS. |
151 | * value of EFLAGS. |
146 | * |
152 | * |
147 | * @return Old interrupt priority level. |
153 | * @return Old interrupt priority level. |
148 | */ |
154 | */ |
149 | static inline ipl_t interrupts_enable(void) |
155 | static inline ipl_t interrupts_enable(void) |
150 | { |
156 | { |
151 | ipl_t v; |
157 | ipl_t v; |
152 | __asm__ volatile ( |
158 | __asm__ volatile ( |
153 | "pushf\n\t" |
159 | "pushf\n\t" |
154 | "popl %0\n\t" |
160 | "popl %0\n\t" |
155 | "sti\n" |
161 | "sti\n" |
156 | : "=r" (v) |
162 | : "=r" (v) |
157 | ); |
163 | ); |
158 | return v; |
164 | return v; |
159 | } |
165 | } |
160 | 166 | ||
161 | /** Disable interrupts. |
167 | /** Disable interrupts. |
162 | * |
168 | * |
163 | * Disable interrupts and return previous |
169 | * Disable interrupts and return previous |
164 | * value of EFLAGS. |
170 | * value of EFLAGS. |
165 | * |
171 | * |
166 | * @return Old interrupt priority level. |
172 | * @return Old interrupt priority level. |
167 | */ |
173 | */ |
168 | static inline ipl_t interrupts_disable(void) |
174 | static inline ipl_t interrupts_disable(void) |
169 | { |
175 | { |
170 | ipl_t v; |
176 | ipl_t v; |
171 | __asm__ volatile ( |
177 | __asm__ volatile ( |
172 | "pushf\n\t" |
178 | "pushf\n\t" |
173 | "popl %0\n\t" |
179 | "popl %0\n\t" |
174 | "cli\n" |
180 | "cli\n" |
175 | : "=r" (v) |
181 | : "=r" (v) |
176 | ); |
182 | ); |
177 | return v; |
183 | return v; |
178 | } |
184 | } |
179 | 185 | ||
180 | /** Restore interrupt priority level. |
186 | /** Restore interrupt priority level. |
181 | * |
187 | * |
182 | * Restore EFLAGS. |
188 | * Restore EFLAGS. |
183 | * |
189 | * |
184 | * @param ipl Saved interrupt priority level. |
190 | * @param ipl Saved interrupt priority level. |
185 | */ |
191 | */ |
186 | static inline void interrupts_restore(ipl_t ipl) |
192 | static inline void interrupts_restore(ipl_t ipl) |
187 | { |
193 | { |
188 | __asm__ volatile ( |
194 | __asm__ volatile ( |
189 | "pushl %0\n\t" |
195 | "pushl %0\n\t" |
190 | "popf\n" |
196 | "popf\n" |
191 | : : "r" (ipl) |
197 | : : "r" (ipl) |
192 | ); |
198 | ); |
193 | } |
199 | } |
194 | 200 | ||
/** Return interrupt priority level.
 *
 * Reads EFLAGS via pushf/pop without modifying the interrupt state.
 *
 * @return EFLAGS.
 */
static inline ipl_t interrupts_read(void)
{
	ipl_t v;
	__asm__ volatile (
		"pushf\n\t"
		"popl %0\n"
		: "=r" (v)
	);
	return v;
}
209 | 215 | ||
/** Return base address of current stack
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on page boundary.
 *
 * Works by masking the low bits of ESP; assumes STACK_SIZE is a
 * power of two. The "0" input constraint seeds the output register
 * with the mask before the andl.
 */
static inline __address get_stack_base(void)
{
	__address v;

	__asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));

	return v;
}
224 | 230 | ||
/** Read the CPU time-stamp counter.
 *
 * The "=A" constraint binds the 64-bit result to the EDX:EAX pair,
 * which is exactly where rdtsc places it.
 *
 * @return Current time-stamp counter value.
 */
static inline __u64 rdtsc(void)
{
	__u64 v;

	__asm__ volatile("rdtsc\n" : "=A" (v));

	return v;
}
233 | 239 | ||
234 | /** Return current IP address */ |
240 | /** Return current IP address */ |
235 | static inline __address * get_ip() |
241 | static inline __address * get_ip() |
236 | { |
242 | { |
237 | __address *ip; |
243 | __address *ip; |
238 | 244 | ||
239 | __asm__ volatile ( |
245 | __asm__ volatile ( |
240 | "mov %%eip, %0" |
246 | "mov %%eip, %0" |
241 | : "=r" (ip) |
247 | : "=r" (ip) |
242 | ); |
248 | ); |
243 | return ip; |
249 | return ip; |
244 | } |
250 | } |
245 | 251 | ||
/** Invalidate TLB Entry.
 *
 * The "m" constraint on the dereferenced address makes the operand a
 * memory reference, which is what invlpg requires.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
 */
static inline void invlpg(__address addr)
{
	__asm__ volatile ("invlpg %0\n" :: "m" (*(__native *)addr));
}
254 | 260 | ||
/** Load GDTR register from memory.
 *
 * Executes lgdtl on the 48-bit limit:base descriptor pointed to by
 * @p gdtr_reg.
 *
 * @param gdtr_reg Address of memory from where to load GDTR.
 */
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
{
	__asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
}
263 | 269 | ||
/** Store GDTR register to memory.
 *
 * Executes sgdtl, writing the current GDTR into the 48-bit
 * limit:base descriptor pointed to by @p gdtr_reg.
 *
 * @param gdtr_reg Address of memory to where to store GDTR.
 */
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
{
	__asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
}
272 | 278 | ||
/** Load IDTR register from memory.
 *
 * Executes lidtl on the 48-bit limit:base descriptor pointed to by
 * @p idtr_reg.
 *
 * @param idtr_reg Address of memory from where to load IDTR.
 */
static inline void idtr_load(ptr_16_32_t *idtr_reg)
{
	__asm__ volatile ("lidtl %0\n" : : "m" (*idtr_reg));
}
281 | 287 | ||
/** Load TR from descriptor table.
 *
 * Loads the task register with @p sel, making the referenced TSS the
 * current task-state segment.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 */
static inline void tr_load(__u16 sel)
{
	__asm__ volatile ("ltr %0" : : "r" (sel));
}
290 | 296 | ||
291 | #endif |
297 | #endif |
- | 298 | ||
- | 299 | /** @} |
|
- | 300 | */ |
|
- | 301 | ||
292 | 302 |