Subversion Repositories HelenOS-historic

Rev

Rev 1187 | Rev 1702 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1187 Rev 1251
1
/*
1
/*
2
 * Copyright (C) 2001-2004 Jakub Jermar
2
 * Copyright (C) 2001-2004 Jakub Jermar
3
 * Copyright (C) 2005 Sergey Bondari
3
 * Copyright (C) 2005 Sergey Bondari
4
 * All rights reserved.
4
 * All rights reserved.
5
 *
5
 *
6
 * Redistribution and use in source and binary forms, with or without
6
 * Redistribution and use in source and binary forms, with or without
7
 * modification, are permitted provided that the following conditions
7
 * modification, are permitted provided that the following conditions
8
 * are met:
8
 * are met:
9
 *
9
 *
10
 * - Redistributions of source code must retain the above copyright
10
 * - Redistributions of source code must retain the above copyright
11
 *   notice, this list of conditions and the following disclaimer.
11
 *   notice, this list of conditions and the following disclaimer.
12
 * - Redistributions in binary form must reproduce the above copyright
12
 * - Redistributions in binary form must reproduce the above copyright
13
 *   notice, this list of conditions and the following disclaimer in the
13
 *   notice, this list of conditions and the following disclaimer in the
14
 *   documentation and/or other materials provided with the distribution.
14
 *   documentation and/or other materials provided with the distribution.
15
 * - The name of the author may not be used to endorse or promote products
15
 * - The name of the author may not be used to endorse or promote products
16
 *   derived from this software without specific prior written permission.
16
 *   derived from this software without specific prior written permission.
17
 *
17
 *
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
 */
28
 */
29
 
29
 
30
#ifndef __ia32_ASM_H__
30
#ifndef __ia32_ASM_H__
31
#define __ia32_ASM_H__
31
#define __ia32_ASM_H__
32
 
32
 
33
#include <arch/pm.h>
33
#include <arch/pm.h>
34
#include <arch/types.h>
34
#include <arch/types.h>
35
#include <config.h>
35
#include <config.h>
36
 
36
 
37
extern __u32 interrupt_handler_size;
37
extern __u32 interrupt_handler_size;
38
 
38
 
39
extern void paging_on(void);
39
extern void paging_on(void);
40
 
40
 
41
extern void interrupt_handlers(void);
41
extern void interrupt_handlers(void);
42
 
42
 
43
extern void enable_l_apic_in_msr(void);
43
extern void enable_l_apic_in_msr(void);
44
 
44
 
45
 
45
 
46
extern void asm_delay_loop(__u32 t);
46
extern void asm_delay_loop(__u32 t);
47
extern void asm_fake_loop(__u32 t);
47
extern void asm_fake_loop(__u32 t);
48
 
48
 
49
 
49
 
50
/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 */
static inline void cpu_halt(void)
{
    /* Explicit volatile; also drop the stray ';' after the definition,
     * which is not valid at file scope in strict ISO C. */
    __asm__ volatile ("hlt\n");
}

/** Sleep CPU
 *
 * Wait in a halted state until the next interrupt arrives.
 */
static inline void cpu_sleep(void)
{
    __asm__ volatile ("hlt\n");
}
56
 
56
 
57
/** Generate a reader for a special register.
 *
 * Expands to a static inline function read_<reg>() that copies the
 * named register into a general-purpose register and returns it.
 */
#define GEN_READ_REG(reg) static inline __native read_ ##reg (void) \
    { \
    __native res; \
    __asm__ volatile ("movl %%" #reg ", %0" : "=r" (res) ); \
    return res; \
    }

/** Generate a writer for a special register.
 *
 * Expands to a static inline function write_<reg>(regn) that moves
 * its argument into the named register.
 */
#define GEN_WRITE_REG(reg) static inline void write_ ##reg (__native regn) \
    { \
    __asm__ volatile ("movl %0, %%" #reg : : "r" (regn)); \
    }

/* Control register readers; only cr3 gets a writer below. */
GEN_READ_REG(cr0);
GEN_READ_REG(cr2);
GEN_READ_REG(cr3);
GEN_WRITE_REG(cr3);

/* Debug register readers (dr4/dr5 are intentionally absent). */
GEN_READ_REG(dr0);
GEN_READ_REG(dr1);
GEN_READ_REG(dr2);
GEN_READ_REG(dr3);
GEN_READ_REG(dr6);
GEN_READ_REG(dr7);

/* Debug register writers for the same set. */
GEN_WRITE_REG(dr0);
GEN_WRITE_REG(dr1);
GEN_WRITE_REG(dr2);
GEN_WRITE_REG(dr3);
GEN_WRITE_REG(dr6);
GEN_WRITE_REG(dr7);
87
 
87
 
88
/** Byte to port
88
/** Byte to port
89
 *
89
 *
90
 * Output byte to port
90
 * Output byte to port
91
 *
91
 *
92
 * @param port Port to write to
92
 * @param port Port to write to
93
 * @param val Value to write
93
 * @param val Value to write
94
 */
94
 */
95
static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); }
95
static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); }
96
 
96
 
97
/** Word to port
97
/** Word to port
98
 *
98
 *
99
 * Output word to port
99
 * Output word to port
100
 *
100
 *
101
 * @param port Port to write to
101
 * @param port Port to write to
102
 * @param val Value to write
102
 * @param val Value to write
103
 */
103
 */
104
static inline void outw(__u16 port, __u16 val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); }
104
static inline void outw(__u16 port, __u16 val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); }
105
 
105
 
106
/** Double word to port
106
/** Double word to port
107
 *
107
 *
108
 * Output double word to port
108
 * Output double word to port
109
 *
109
 *
110
 * @param port Port to write to
110
 * @param port Port to write to
111
 * @param val Value to write
111
 * @param val Value to write
112
 */
112
 */
113
static inline void outl(__u16 port, __u32 val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); }
113
static inline void outl(__u16 port, __u32 val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); }
114
 
114
 
115
/** Byte from port
115
/** Byte from port
116
 *
116
 *
117
 * Get byte from port
117
 * Get byte from port
118
 *
118
 *
119
 * @param port Port to read from
119
 * @param port Port to read from
120
 * @return Value read
120
 * @return Value read
121
 */
121
 */
122
static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; }
122
static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; }
123
 
123
 
124
/** Word from port
124
/** Word from port
125
 *
125
 *
126
 * Get word from port
126
 * Get word from port
127
 *
127
 *
128
 * @param port Port to read from
128
 * @param port Port to read from
129
 * @return Value read
129
 * @return Value read
130
 */
130
 */
131
static inline __u16 inw(__u16 port) { __u16 val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; }
131
static inline __u16 inw(__u16 port) { __u16 val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; }
132
 
132
 
133
/** Double word from port
133
/** Double word from port
134
 *
134
 *
135
 * Get double word from port
135
 * Get double word from port
136
 *
136
 *
137
 * @param port Port to read from
137
 * @param port Port to read from
138
 * @return Value read
138
 * @return Value read
139
 */
139
 */
140
static inline __u32 inl(__u16 port) { __u32 val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; }
140
static inline __u32 inl(__u16 port) { __u32 val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; }
141
 
141
 
142
/** Enable interrupts.
142
/** Enable interrupts.
143
 *
143
 *
144
 * Enable interrupts and return previous
144
 * Enable interrupts and return previous
145
 * value of EFLAGS.
145
 * value of EFLAGS.
146
 *
146
 *
147
 * @return Old interrupt priority level.
147
 * @return Old interrupt priority level.
148
 */
148
 */
149
static inline ipl_t interrupts_enable(void)
149
static inline ipl_t interrupts_enable(void)
150
{
150
{
151
    ipl_t v;
151
    ipl_t v;
152
    __asm__ volatile (
152
    __asm__ volatile (
153
        "pushf\n\t"
153
        "pushf\n\t"
154
        "popl %0\n\t"
154
        "popl %0\n\t"
155
        "sti\n"
155
        "sti\n"
156
        : "=r" (v)
156
        : "=r" (v)
157
    );
157
    );
158
    return v;
158
    return v;
159
}
159
}
160
 
160
 
161
/** Disable interrupts.
161
/** Disable interrupts.
162
 *
162
 *
163
 * Disable interrupts and return previous
163
 * Disable interrupts and return previous
164
 * value of EFLAGS.
164
 * value of EFLAGS.
165
 *
165
 *
166
 * @return Old interrupt priority level.
166
 * @return Old interrupt priority level.
167
 */
167
 */
168
static inline ipl_t interrupts_disable(void)
168
static inline ipl_t interrupts_disable(void)
169
{
169
{
170
    ipl_t v;
170
    ipl_t v;
171
    __asm__ volatile (
171
    __asm__ volatile (
172
        "pushf\n\t"
172
        "pushf\n\t"
173
        "popl %0\n\t"
173
        "popl %0\n\t"
174
        "cli\n"
174
        "cli\n"
175
        : "=r" (v)
175
        : "=r" (v)
176
    );
176
    );
177
    return v;
177
    return v;
178
}
178
}
179
 
179
 
180
/** Restore interrupt priority level.
180
/** Restore interrupt priority level.
181
 *
181
 *
182
 * Restore EFLAGS.
182
 * Restore EFLAGS.
183
 *
183
 *
184
 * @param ipl Saved interrupt priority level.
184
 * @param ipl Saved interrupt priority level.
185
 */
185
 */
186
static inline void interrupts_restore(ipl_t ipl)
186
static inline void interrupts_restore(ipl_t ipl)
187
{
187
{
188
    __asm__ volatile (
188
    __asm__ volatile (
189
        "pushl %0\n\t"
189
        "pushl %0\n\t"
190
        "popf\n"
190
        "popf\n"
191
        : : "r" (ipl)
191
        : : "r" (ipl)
192
    );
192
    );
193
}
193
}
194
 
194
 
195
/** Return interrupt priority level.
195
/** Return interrupt priority level.
196
 *
196
 *
197
 * @return EFLAFS.
197
 * @return EFLAFS.
198
 */
198
 */
199
static inline ipl_t interrupts_read(void)
199
static inline ipl_t interrupts_read(void)
200
{
200
{
201
    ipl_t v;
201
    ipl_t v;
202
    __asm__ volatile (
202
    __asm__ volatile (
203
        "pushf\n\t"
203
        "pushf\n\t"
204
        "popl %0\n"
204
        "popl %0\n"
205
        : "=r" (v)
205
        : "=r" (v)
206
    );
206
    );
207
    return v;
207
    return v;
208
}
208
}
209
 
209
 
210
/** Return base address of current stack
210
/** Return base address of current stack
211
 *
211
 *
212
 * Return the base address of the current stack.
212
 * Return the base address of the current stack.
213
 * The stack is assumed to be STACK_SIZE bytes long.
213
 * The stack is assumed to be STACK_SIZE bytes long.
214
 * The stack must start on page boundary.
214
 * The stack must start on page boundary.
215
 */
215
 */
216
static inline __address get_stack_base(void)
216
static inline __address get_stack_base(void)
217
{
217
{
218
    __address v;
218
    __address v;
219
   
219
   
220
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
220
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
221
   
221
   
222
    return v;
222
    return v;
223
}
223
}
224
 
224
 
225
/** Read the CPU time-stamp counter.
 *
 * @return 64-bit counter value (EDX:EAX combined via the "A" constraint).
 */
static inline __u64 rdtsc(void)
{
    __u64 tsc;

    __asm__ volatile("rdtsc\n" : "=A" (tsc));
    return tsc;
}
233
 
233
 
234
/** Return current IP address */
234
/** Return current IP address */
235
static inline __address * get_ip()
235
static inline __address * get_ip()
236
{
236
{
237
    __address *ip;
237
    __address *ip;
238
 
238
 
239
    __asm__ volatile (
239
    __asm__ volatile (
240
        "mov %%eip, %0"
240
        "mov %%eip, %0"
241
        : "=r" (ip)
241
        : "=r" (ip)
242
        );
242
        );
243
    return ip;
243
    return ip;
244
}
244
}
245
 
245
 
246
/** Invalidate TLB Entry.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
 */
static inline void invlpg(__address addr)
{
    /* The "m" constraint makes the compiler emit a genuine memory
     * reference to the page for the invlpg operand. */
    __asm__ volatile ("invlpg %0\n" :: "m" (*(__native *)addr));
}
254
 
254
 
255
/** Load GDTR register from memory.
 *
 * @param gdtr_reg Address of memory from where to load GDTR.
 */
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
{
    /* lgdtl: explicit 32-bit operand-size suffix for the AT&T assembler. */
    __asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
}
263
 
263
 
264
/** Store GDTR register to memory.
 *
 * @param gdtr_reg Address of memory to where to store GDTR.
 */
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
{
    /* sgdtl: explicit 32-bit operand-size suffix for the AT&T assembler. */
    __asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
}
272
 
272
 
273
/** Load IDTR register from memory.
 *
 * @param idtr_reg Address of memory from where to load IDTR.
 */
static inline void idtr_load(ptr_16_32_t *idtr_reg)
{
    /* lidtl: explicit 32-bit operand-size suffix for the AT&T assembler. */
    __asm__ volatile ("lidtl %0\n" : : "m" (*idtr_reg));
}
281
 
281
 
282
/** Load TR from descriptor table.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 */
static inline void tr_load(__u16 sel)
{
    __asm__ volatile ("ltr %0" : : "r" (sel));
}
290
 
290
 
291
#endif
291
#endif
292
 
292