Subversion Repositories HelenOS-historic

Rev

Rev 1702 | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1702 Rev 1780
1
/*
1
/*
2
 * Copyright (C) 2001-2004 Jakub Jermar
2
 * Copyright (C) 2001-2004 Jakub Jermar
3
 * Copyright (C) 2005 Sergey Bondari
3
 * Copyright (C) 2005 Sergey Bondari
4
 * All rights reserved.
4
 * All rights reserved.
5
 *
5
 *
6
 * Redistribution and use in source and binary forms, with or without
6
 * Redistribution and use in source and binary forms, with or without
7
 * modification, are permitted provided that the following conditions
7
 * modification, are permitted provided that the following conditions
8
 * are met:
8
 * are met:
9
 *
9
 *
10
 * - Redistributions of source code must retain the above copyright
10
 * - Redistributions of source code must retain the above copyright
11
 *   notice, this list of conditions and the following disclaimer.
11
 *   notice, this list of conditions and the following disclaimer.
12
 * - Redistributions in binary form must reproduce the above copyright
12
 * - Redistributions in binary form must reproduce the above copyright
13
 *   notice, this list of conditions and the following disclaimer in the
13
 *   notice, this list of conditions and the following disclaimer in the
14
 *   documentation and/or other materials provided with the distribution.
14
 *   documentation and/or other materials provided with the distribution.
15
 * - The name of the author may not be used to endorse or promote products
15
 * - The name of the author may not be used to endorse or promote products
16
 *   derived from this software without specific prior written permission.
16
 *   derived from this software without specific prior written permission.
17
 *
17
 *
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
 */
28
 */
29
 
29
 
30
 /** @addtogroup ia32  
30
 /** @addtogroup ia32  
31
 * @{
31
 * @{
32
 */
32
 */
33
/** @file
33
/** @file
34
 */
34
 */
35
 
35
 
36
#ifndef __ia32_ASM_H__
36
#ifndef __ia32_ASM_H__
37
#define __ia32_ASM_H__
37
#define __ia32_ASM_H__
38
 
38
 
39
#include <arch/pm.h>
39
#include <arch/pm.h>
40
#include <arch/types.h>
40
#include <arch/types.h>
41
#include <config.h>
41
#include <config.h>
42
 
42
 
43
extern __u32 interrupt_handler_size;
43
extern uint32_t interrupt_handler_size;
44
 
44
 
45
extern void paging_on(void);
45
extern void paging_on(void);
46
 
46
 
47
extern void interrupt_handlers(void);
47
extern void interrupt_handlers(void);
48
 
48
 
49
extern void enable_l_apic_in_msr(void);
49
extern void enable_l_apic_in_msr(void);
50
 
50
 
51
 
51
 
52
extern void asm_delay_loop(__u32 t);
52
extern void asm_delay_loop(uint32_t t);
53
extern void asm_fake_loop(__u32 t);
53
extern void asm_fake_loop(uint32_t t);
54
 
54
 
55
 
55
 
56
/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 */
static inline void cpu_halt(void)
{
	__asm__ volatile ("hlt\n");
}

/** Sleep CPU
 *
 * On ia32 this is the same as cpu_halt(): wait for the next interrupt.
 */
static inline void cpu_sleep(void)
{
	__asm__ volatile ("hlt\n");
}
62
 
62
 
63
/** Generate a static inline accessor that reads register @p reg. */
#define GEN_READ_REG(reg) static inline unative_t read_ ##reg (void) \
	{ \
	unative_t res; \
	__asm__ volatile ("movl %%" #reg ", %0" : "=r" (res) ); \
	return res; \
	}

/** Generate a static inline accessor that writes register @p reg. */
#define GEN_WRITE_REG(reg) static inline void write_ ##reg (unative_t regn) \
	{ \
	__asm__ volatile ("movl %0, %%" #reg : : "r" (regn)); \
	}
74
 
74
 
75
/* Accessors for control registers. */
GEN_READ_REG(cr0);
GEN_READ_REG(cr2);
GEN_READ_REG(cr3);
GEN_WRITE_REG(cr3);

/* Accessors for debug registers. */
GEN_READ_REG(dr0);
GEN_READ_REG(dr1);
GEN_READ_REG(dr2);
GEN_READ_REG(dr3);
GEN_READ_REG(dr6);
GEN_READ_REG(dr7);

GEN_WRITE_REG(dr0);
GEN_WRITE_REG(dr1);
GEN_WRITE_REG(dr2);
GEN_WRITE_REG(dr3);
GEN_WRITE_REG(dr6);
GEN_WRITE_REG(dr7);
93
 
93
 
94
/** Byte to port
 *
 * Output byte to port.
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outb(uint16_t port, uint8_t val)
{
	__asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port));
}
102
 
102
 
103
/** Word to port
 *
 * Output word to port.
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outw(uint16_t port, uint16_t val)
{
	__asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port));
}
111
 
111
 
112
/** Double word to port
 *
 * Output double word to port.
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outl(uint16_t port, uint32_t val)
{
	/*
	 * NOTE(review): the original used "%l0"; on ia32, the 'l' operand
	 * modifier is for labels, not registers, and fails to assemble with
	 * GCC. Plain %0 with the "a" constraint selects %eax as intended.
	 */
	__asm__ volatile ("outl %0, %w1\n" : : "a" (val), "d" (port));
}
120
 
120
 
121
/** Byte from port
 *
 * Get byte from port.
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint8_t inb(uint16_t port)
{
	uint8_t val;

	__asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port));
	return val;
}
129
 
129
 
130
/** Word from port
 *
 * Get word from port.
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint16_t inw(uint16_t port)
{
	uint16_t val;

	__asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port));
	return val;
}
138
 
138
 
139
/** Double word from port
 *
 * Get double word from port.
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint32_t inl(uint16_t port)
{
	uint32_t val;

	/*
	 * NOTE(review): the original used "%l0"; on ia32, the 'l' operand
	 * modifier is for labels, not registers, and fails to assemble with
	 * GCC. Plain %0 with the "=a" constraint selects %eax as intended.
	 */
	__asm__ volatile ("inl %w1, %0 \n" : "=a" (val) : "d" (port));
	return val;
}
147
 
147
 
148
/** Enable interrupts.
148
/** Enable interrupts.
149
 *
149
 *
150
 * Enable interrupts and return previous
150
 * Enable interrupts and return previous
151
 * value of EFLAGS.
151
 * value of EFLAGS.
152
 *
152
 *
153
 * @return Old interrupt priority level.
153
 * @return Old interrupt priority level.
154
 */
154
 */
155
static inline ipl_t interrupts_enable(void)
155
static inline ipl_t interrupts_enable(void)
156
{
156
{
157
    ipl_t v;
157
    ipl_t v;
158
    __asm__ volatile (
158
    __asm__ volatile (
159
        "pushf\n\t"
159
        "pushf\n\t"
160
        "popl %0\n\t"
160
        "popl %0\n\t"
161
        "sti\n"
161
        "sti\n"
162
        : "=r" (v)
162
        : "=r" (v)
163
    );
163
    );
164
    return v;
164
    return v;
165
}
165
}
166
 
166
 
167
/** Disable interrupts.
167
/** Disable interrupts.
168
 *
168
 *
169
 * Disable interrupts and return previous
169
 * Disable interrupts and return previous
170
 * value of EFLAGS.
170
 * value of EFLAGS.
171
 *
171
 *
172
 * @return Old interrupt priority level.
172
 * @return Old interrupt priority level.
173
 */
173
 */
174
static inline ipl_t interrupts_disable(void)
174
static inline ipl_t interrupts_disable(void)
175
{
175
{
176
    ipl_t v;
176
    ipl_t v;
177
    __asm__ volatile (
177
    __asm__ volatile (
178
        "pushf\n\t"
178
        "pushf\n\t"
179
        "popl %0\n\t"
179
        "popl %0\n\t"
180
        "cli\n"
180
        "cli\n"
181
        : "=r" (v)
181
        : "=r" (v)
182
    );
182
    );
183
    return v;
183
    return v;
184
}
184
}
185
 
185
 
186
/** Restore interrupt priority level.
186
/** Restore interrupt priority level.
187
 *
187
 *
188
 * Restore EFLAGS.
188
 * Restore EFLAGS.
189
 *
189
 *
190
 * @param ipl Saved interrupt priority level.
190
 * @param ipl Saved interrupt priority level.
191
 */
191
 */
192
static inline void interrupts_restore(ipl_t ipl)
192
static inline void interrupts_restore(ipl_t ipl)
193
{
193
{
194
    __asm__ volatile (
194
    __asm__ volatile (
195
        "pushl %0\n\t"
195
        "pushl %0\n\t"
196
        "popf\n"
196
        "popf\n"
197
        : : "r" (ipl)
197
        : : "r" (ipl)
198
    );
198
    );
199
}
199
}
200
 
200
 
201
/** Return interrupt priority level.
201
/** Return interrupt priority level.
202
 *
202
 *
203
 * @return EFLAFS.
203
 * @return EFLAFS.
204
 */
204
 */
205
static inline ipl_t interrupts_read(void)
205
static inline ipl_t interrupts_read(void)
206
{
206
{
207
    ipl_t v;
207
    ipl_t v;
208
    __asm__ volatile (
208
    __asm__ volatile (
209
        "pushf\n\t"
209
        "pushf\n\t"
210
        "popl %0\n"
210
        "popl %0\n"
211
        : "=r" (v)
211
        : "=r" (v)
212
    );
212
    );
213
    return v;
213
    return v;
214
}
214
}
215
 
215
 
216
/** Return base address of current stack
216
/** Return base address of current stack
217
 *
217
 *
218
 * Return the base address of the current stack.
218
 * Return the base address of the current stack.
219
 * The stack is assumed to be STACK_SIZE bytes long.
219
 * The stack is assumed to be STACK_SIZE bytes long.
220
 * The stack must start on page boundary.
220
 * The stack must start on page boundary.
221
 */
221
 */
222
static inline __address get_stack_base(void)
222
static inline uintptr_t get_stack_base(void)
223
{
223
{
224
    __address v;
224
    uintptr_t v;
225
   
225
   
226
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
226
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
227
   
227
   
228
    return v;
228
    return v;
229
}
229
}
230
 
230
 
231
/** Read the CPU time-stamp counter. */
static inline uint64_t rdtsc(void)
{
	uint64_t v;

	/* "=A" binds the 64-bit result to the edx:eax register pair. */
	__asm__ volatile ("rdtsc\n" : "=A" (v));

	return v;
}
239
 
239
 
240
/** Return current IP address.
 *
 * EIP cannot be read directly on ia32 ("mov %%eip, ..." does not
 * assemble); perform a near call and pop the return address instead.
 */
static inline uintptr_t * get_ip(void)
{
	uintptr_t *ip;

	__asm__ volatile (
		"call 0f\n"
		"0:\tpopl %0\n"
		: "=r" (ip)
	);
	return ip;
}
251
 
251
 
252
/** Invalidate TLB Entry.
252
/** Invalidate TLB Entry.
253
 *
253
 *
254
 * @param addr Address on a page whose TLB entry is to be invalidated.
254
 * @param addr Address on a page whose TLB entry is to be invalidated.
255
 */
255
 */
256
static inline void invlpg(__address addr)
256
static inline void invlpg(uintptr_t addr)
257
{
257
{
258
    __asm__ volatile ("invlpg %0\n" :: "m" (*(__native *)addr));
258
    __asm__ volatile ("invlpg %0\n" :: "m" (*(unative_t *)addr));
259
}
259
}
260
 
260
 
261
/** Load GDTR register from memory.
261
/** Load GDTR register from memory.
262
 *
262
 *
263
 * @param gdtr_reg Address of memory from where to load GDTR.
263
 * @param gdtr_reg Address of memory from where to load GDTR.
264
 */
264
 */
265
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
265
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
266
{
266
{
267
    __asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
267
    __asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
268
}
268
}
269
 
269
 
270
/** Store GDTR register to memory.
270
/** Store GDTR register to memory.
271
 *
271
 *
272
 * @param gdtr_reg Address of memory to where to load GDTR.
272
 * @param gdtr_reg Address of memory to where to load GDTR.
273
 */
273
 */
274
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
274
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
275
{
275
{
276
    __asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
276
    __asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
277
}
277
}
278
 
278
 
279
/** Load IDTR register from memory.
279
/** Load IDTR register from memory.
280
 *
280
 *
281
 * @param idtr_reg Address of memory from where to load IDTR.
281
 * @param idtr_reg Address of memory from where to load IDTR.
282
 */
282
 */
283
static inline void idtr_load(ptr_16_32_t *idtr_reg)
283
static inline void idtr_load(ptr_16_32_t *idtr_reg)
284
{
284
{
285
    __asm__ volatile ("lidtl %0\n" : : "m" (*idtr_reg));
285
    __asm__ volatile ("lidtl %0\n" : : "m" (*idtr_reg));
286
}
286
}
287
 
287
 
288
/** Load TR from descriptor table.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 */
static inline void tr_load(uint16_t sel)
{
	__asm__ volatile ("ltr %0" : : "r" (sel));
}
296
 
296
 
297
#endif
297
#endif
298
 
298
 
299
 /** @}
299
 /** @}
300
 */
300
 */
301
 
301
 
302
 
302