Subversion Repositories HelenOS-historic

Rev

Rev 581 | Rev 984 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 581 Rev 597
1
/*
1
/*
2
 * Copyright (C) 2001-2004 Jakub Jermar
2
 * Copyright (C) 2001-2004 Jakub Jermar
3
 * Copyright (C) 2005 Sergey Bondari
3
 * Copyright (C) 2005 Sergey Bondari
4
 * All rights reserved.
4
 * All rights reserved.
5
 *
5
 *
6
 * Redistribution and use in source and binary forms, with or without
6
 * Redistribution and use in source and binary forms, with or without
7
 * modification, are permitted provided that the following conditions
7
 * modification, are permitted provided that the following conditions
8
 * are met:
8
 * are met:
9
 *
9
 *
10
 * - Redistributions of source code must retain the above copyright
10
 * - Redistributions of source code must retain the above copyright
11
 *   notice, this list of conditions and the following disclaimer.
11
 *   notice, this list of conditions and the following disclaimer.
12
 * - Redistributions in binary form must reproduce the above copyright
12
 * - Redistributions in binary form must reproduce the above copyright
13
 *   notice, this list of conditions and the following disclaimer in the
13
 *   notice, this list of conditions and the following disclaimer in the
14
 *   documentation and/or other materials provided with the distribution.
14
 *   documentation and/or other materials provided with the distribution.
15
 * - The name of the author may not be used to endorse or promote products
15
 * - The name of the author may not be used to endorse or promote products
16
 *   derived from this software without specific prior written permission.
16
 *   derived from this software without specific prior written permission.
17
 *
17
 *
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
 */
28
 */
29
 
29
 
30
#ifndef __ia32_ASM_H__
#define __ia32_ASM_H__

#include <arch/types.h>
#include <config.h>

/* Symbols below are defined outside this header (assembly or linker
 * script); only declarations live here. */
extern __u32 interrupt_handler_size;

extern void paging_on(void);

extern void interrupt_handlers(void);

extern void enable_l_apic_in_msr(void);


extern void asm_delay_loop(__u32 t);
extern void asm_fake_loop(__u32 t);
47
 
47
 
48
 
48
 
49
/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 */
static inline void cpu_halt(void) { __asm__("hlt\n"); }
54
/** Sleep CPU
 *
 * Halt the current CPU until the next interrupt arrives.
 */
static inline void cpu_sleep(void) { __asm__("hlt\n"); }
55
 
55
 
56
/** Read CR2
56
/** Read CR2
57
 *
57
 *
58
 * Return value in CR2
58
 * Return value in CR2
59
 *
59
 *
60
 * @return Value read.
60
 * @return Value read.
61
 */
61
 */
62
static inline __u32 read_cr2(void) { __u32 v; __asm__ volatile ("movl %%cr2,%0\n" : "=r" (v)); return v; }
62
static inline __u32 read_cr2(void) { __u32 v; __asm__ volatile ("movl %%cr2,%0\n" : "=r" (v)); return v; }
63
 
63
 
64
/** Write CR3
64
/** Write CR3
65
 *
65
 *
66
 * Write value to CR3.
66
 * Write value to CR3.
67
 *
67
 *
68
 * @param v Value to be written.
68
 * @param v Value to be written.
69
 */
69
 */
70
static inline void write_cr3(__u32 v) { __asm__ volatile ("movl %0,%%cr3\n" : : "r" (v)); }
70
static inline void write_cr3(__u32 v) { __asm__ volatile ("movl %0,%%cr3\n" : : "r" (v)); }
71
 
71
 
72
/** Read CR3
72
/** Read CR3
73
 *
73
 *
74
 * Return value in CR3
74
 * Return value in CR3
75
 *
75
 *
76
 * @return Value read.
76
 * @return Value read.
77
 */
77
 */
78
static inline __u32 read_cr3(void) { __u32 v; __asm__ volatile ("movl %%cr3,%0\n" : "=r" (v)); return v; }
78
static inline __u32 read_cr3(void) { __u32 v; __asm__ volatile ("movl %%cr3,%0\n" : "=r" (v)); return v; }
79
 
79
 
80
/** Byte to port
80
/** Byte to port
81
 *
81
 *
82
 * Output byte to port
82
 * Output byte to port
83
 *
83
 *
84
 * @param port Port to write to
84
 * @param port Port to write to
85
 * @param val Value to write
85
 * @param val Value to write
86
 */
86
 */
87
static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); }
87
static inline void outb(__u16 port, __u8 val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); }
88
 
88
 
89
/** Word to port
89
/** Word to port
90
 *
90
 *
91
 * Output word to port
91
 * Output word to port
92
 *
92
 *
93
 * @param port Port to write to
93
 * @param port Port to write to
94
 * @param val Value to write
94
 * @param val Value to write
95
 */
95
 */
96
static inline void outw(__u16 port, __u16 val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); }
96
static inline void outw(__u16 port, __u16 val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); }
97
 
97
 
98
/** Double word to port
98
/** Double word to port
99
 *
99
 *
100
 * Output double word to port
100
 * Output double word to port
101
 *
101
 *
102
 * @param port Port to write to
102
 * @param port Port to write to
103
 * @param val Value to write
103
 * @param val Value to write
104
 */
104
 */
105
static inline void outl(__u16 port, __u32 val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); }
105
static inline void outl(__u16 port, __u32 val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); }
106
 
106
 
107
/** Byte from port
107
/** Byte from port
108
 *
108
 *
109
 * Get byte from port
109
 * Get byte from port
110
 *
110
 *
111
 * @param port Port to read from
111
 * @param port Port to read from
112
 * @return Value read
112
 * @return Value read
113
 */
113
 */
114
static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; }
114
static inline __u8 inb(__u16 port) { __u8 val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; }
115
 
115
 
116
/** Word from port
116
/** Word from port
117
 *
117
 *
118
 * Get word from port
118
 * Get word from port
119
 *
119
 *
120
 * @param port Port to read from
120
 * @param port Port to read from
121
 * @return Value read
121
 * @return Value read
122
 */
122
 */
123
static inline __u16 inw(__u16 port) { __u16 val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; }
123
static inline __u16 inw(__u16 port) { __u16 val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; }
124
 
124
 
125
/** Double word from port
125
/** Double word from port
126
 *
126
 *
127
 * Get double word from port
127
 * Get double word from port
128
 *
128
 *
129
 * @param port Port to read from
129
 * @param port Port to read from
130
 * @return Value read
130
 * @return Value read
131
 */
131
 */
132
static inline __u32 inl(__u16 port) { __u32 val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; }
132
static inline __u32 inl(__u16 port) { __u32 val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; }
133
 
133
 
134
/** Enable interrupts.
134
/** Enable interrupts.
135
 *
135
 *
136
 * Enable interrupts and return previous
136
 * Enable interrupts and return previous
137
 * value of EFLAGS.
137
 * value of EFLAGS.
138
 *
138
 *
139
 * @return Old interrupt priority level.
139
 * @return Old interrupt priority level.
140
 */
140
 */
141
static inline ipl_t interrupts_enable(void)
141
static inline ipl_t interrupts_enable(void)
142
{
142
{
143
    ipl_t v;
143
    ipl_t v;
144
    __asm__ volatile (
144
    __asm__ volatile (
145
        "pushf\n\t"
145
        "pushf\n\t"
146
        "popl %0\n\t"
146
        "popl %0\n\t"
147
        "sti\n"
147
        "sti\n"
148
        : "=r" (v)
148
        : "=r" (v)
149
    );
149
    );
150
    return v;
150
    return v;
151
}
151
}
152
 
152
 
153
/** Disable interrupts.
153
/** Disable interrupts.
154
 *
154
 *
155
 * Disable interrupts and return previous
155
 * Disable interrupts and return previous
156
 * value of EFLAGS.
156
 * value of EFLAGS.
157
 *
157
 *
158
 * @return Old interrupt priority level.
158
 * @return Old interrupt priority level.
159
 */
159
 */
160
static inline ipl_t interrupts_disable(void)
160
static inline ipl_t interrupts_disable(void)
161
{
161
{
162
    ipl_t v;
162
    ipl_t v;
163
    __asm__ volatile (
163
    __asm__ volatile (
164
        "pushf\n\t"
164
        "pushf\n\t"
165
        "popl %0\n\t"
165
        "popl %0\n\t"
166
        "cli\n"
166
        "cli\n"
167
        : "=r" (v)
167
        : "=r" (v)
168
    );
168
    );
169
    return v;
169
    return v;
170
}
170
}
171
 
171
 
172
/** Restore interrupt priority level.
172
/** Restore interrupt priority level.
173
 *
173
 *
174
 * Restore EFLAGS.
174
 * Restore EFLAGS.
175
 *
175
 *
176
 * @param ipl Saved interrupt priority level.
176
 * @param ipl Saved interrupt priority level.
177
 */
177
 */
178
static inline void interrupts_restore(ipl_t ipl)
178
static inline void interrupts_restore(ipl_t ipl)
179
{
179
{
180
    __asm__ volatile (
180
    __asm__ volatile (
181
        "pushl %0\n\t"
181
        "pushl %0\n\t"
182
        "popf\n"
182
        "popf\n"
183
        : : "r" (ipl)
183
        : : "r" (ipl)
184
    );
184
    );
185
}
185
}
186
 
186
 
187
/** Return interrupt priority level.
187
/** Return interrupt priority level.
188
 *
188
 *
189
 * @return EFLAFS.
189
 * @return EFLAFS.
190
 */
190
 */
191
static inline ipl_t interrupts_read(void)
191
static inline ipl_t interrupts_read(void)
192
{
192
{
193
    ipl_t v;
193
    ipl_t v;
194
    __asm__ volatile (
194
    __asm__ volatile (
195
        "pushf\n\t"
195
        "pushf\n\t"
196
        "popl %0\n"
196
        "popl %0\n"
197
        : "=r" (v)
197
        : "=r" (v)
198
    );
198
    );
199
    return v;
199
    return v;
200
}
200
}
201
 
201
 
202
/** Return base address of current stack
202
/** Return base address of current stack
203
 *
203
 *
204
 * Return the base address of the current stack.
204
 * Return the base address of the current stack.
205
 * The stack is assumed to be STACK_SIZE bytes long.
205
 * The stack is assumed to be STACK_SIZE bytes long.
206
 * The stack must start on page boundary.
206
 * The stack must start on page boundary.
207
 */
207
 */
208
static inline __address get_stack_base(void)
208
static inline __address get_stack_base(void)
209
{
209
{
210
    __address v;
210
    __address v;
211
   
211
   
212
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
212
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
213
   
213
   
214
    return v;
214
    return v;
215
}
215
}
216
 
216
 
217
/** Read the CPU time-stamp counter.
 *
 * @return 64-bit value delivered by RDTSC in EDX:EAX.
 */
static inline __u64 rdtsc(void)
{
    __u64 tsc;

    __asm__ volatile("rdtsc\n" : "=A" (tsc));

    return tsc;
}
225
 
225
 
226
/** Return current IP address */
226
/** Return current IP address */
227
static inline __address * get_ip()
227
static inline __address * get_ip()
228
{
228
{
229
    __address *ip;
229
    __address *ip;
230
 
230
 
231
    __asm__ volatile (
231
    __asm__ volatile (
232
        "mov %%eip, %0"
232
        "mov %%eip, %0"
233
        : "=r" (ip)
233
        : "=r" (ip)
234
        );
234
        );
235
    return ip;
235
    return ip;
236
}
236
}
-
 
237
 
-
 
238
/** Invalidate TLB Entry.
-
 
239
 *
-
 
240
 * @param addr Address on a page whose TLB entry is to be invalidated.
-
 
241
 */
-
 
242
static inline void invlpg(__address addr)
-
 
243
{
-
 
244
    __asm__ volatile ("invlpg %0\n" :: "m" (addr));
-
 
245
}
237
 
246
 
238
#endif
247
#endif
239
 
248