Subversion Repositories HelenOS

Diff from Rev 1829 to Rev 1830. Lines removed in Rev 1830 are prefixed with "-", lines added with "+"; unchanged lines are shown once.

/*
 * Copyright (C) 2001-2004 Jakub Jermar
 * Copyright (C) 2005 Sergey Bondari
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup xen32
 * @{
 */
/** @file
 */

#ifndef __xen32_ASM_H__
#define __xen32_ASM_H__

#include <arch/pm.h>
#include <arch/types.h>
+#include <arch/barrier.h>
#include <config.h>

extern uint32_t interrupt_handler_size;

extern void interrupt_handlers(void);

extern void enable_l_apic_in_msr(void);


extern void asm_delay_loop(uint32_t t);
extern void asm_fake_loop(uint32_t t);


/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 */
#define cpu_halt() ((void) 0)
#define cpu_sleep() ((void) 0)

#define GEN_READ_REG(reg) static inline unative_t read_ ##reg (void) \
    { \
    unative_t res; \
    __asm__ volatile ("movl %%" #reg ", %0" : "=r" (res) ); \
    return res; \
    }

#define GEN_WRITE_REG(reg) static inline void write_ ##reg (unative_t regn) \
    { \
    __asm__ volatile ("movl %0, %%" #reg : : "r" (regn)); \
    }

GEN_READ_REG(cr0);
GEN_READ_REG(cr2);

GEN_READ_REG(dr0);
GEN_READ_REG(dr1);
GEN_READ_REG(dr2);
GEN_READ_REG(dr3);
GEN_READ_REG(dr6);
GEN_READ_REG(dr7);

GEN_WRITE_REG(dr0);
GEN_WRITE_REG(dr1);
GEN_WRITE_REG(dr2);
GEN_WRITE_REG(dr3);
GEN_WRITE_REG(dr6);
GEN_WRITE_REG(dr7);

/** Byte to port
 *
 * Output byte to port
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outb(uint16_t port, uint8_t val) { __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port) ); }

/** Word to port
 *
 * Output word to port
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outw(uint16_t port, uint16_t val) { __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port) ); }

/** Double word to port
 *
 * Output double word to port
 *
 * @param port Port to write to
 * @param val Value to write
 */
static inline void outl(uint16_t port, uint32_t val) { __asm__ volatile ("outl %l0, %w1\n" : : "a" (val), "d" (port) ); }

/** Byte from port
 *
 * Get byte from port
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint8_t inb(uint16_t port) { uint8_t val; __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port) ); return val; }

/** Word from port
 *
 * Get word from port
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint16_t inw(uint16_t port) { uint16_t val; __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port) ); return val; }

/** Double word from port
 *
 * Get double word from port
 *
 * @param port Port to read from
 * @return Value read
 */
static inline uint32_t inl(uint16_t port) { uint32_t val; __asm__ volatile ("inl %w1, %l0 \n" : "=a" (val) : "d" (port) ); return val; }

/** Enable interrupts.
 *
 * Enable interrupts and return previous
 * value of EFLAGS.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void)
{
-    ipl_t v = 0;
-/*  __asm__ volatile (
-        "pushf\n\t"
-        "popl %0\n\t"
-        "sti\n"
-        : "=r" (v)
-    );*/
+    // FIXME SMP
+
+    ipl_t v = shared_info.vcpu_info[0].evtchn_upcall_mask;
+    write_barrier();
+    shared_info.vcpu_info[0].evtchn_upcall_mask = 0;
+    write_barrier();
+    if (shared_info.vcpu_info[0].evtchn_upcall_pending)
+        force_evtchn_callback();
+
    return v;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous
 * value of EFLAGS.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void)
{
-    ipl_t v = 0;
-/*  __asm__ volatile (
-        "pushf\n\t"
-        "popl %0\n\t"
-        "cli\n"
-        : "=r" (v)
-    );*/
+    // FIXME SMP
+
+    ipl_t v = shared_info.vcpu_info[0].evtchn_upcall_mask;
+    shared_info.vcpu_info[0].evtchn_upcall_mask = 1;
+    write_barrier();
+
    return v;
}

/** Restore interrupt priority level.
 *
 * Restore EFLAGS.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl)
{
-/*  __asm__ volatile (
-        "pushl %0\n\t"
-        "popf\n"
-        : : "r" (ipl)
-    );*/
+    if (ipl == 0)
+        interrupts_enable();
+    else
+        interrupts_disable();
}

/** Return interrupt priority level.
 *
 * @return EFLAGS.
 */
static inline ipl_t interrupts_read(void)
{
-    ipl_t v = 0;
-/*  __asm__ volatile (
-        "pushf\n\t"
-        "popl %0\n"
-        : "=r" (v)
-    );*/
-    return v;
+    // FIXME SMP
+
+    return shared_info.vcpu_info[0].evtchn_upcall_mask;
}
211
 
208
 
212
/** Return base address of current stack
209
/** Return base address of current stack
213
 *
210
 *
214
 * Return the base address of the current stack.
211
 * Return the base address of the current stack.
215
 * The stack is assumed to be STACK_SIZE bytes long.
212
 * The stack is assumed to be STACK_SIZE bytes long.
216
 * The stack must start on page boundary.
213
 * The stack must start on page boundary.
217
 */
214
 */
218
static inline uintptr_t get_stack_base(void)
215
static inline uintptr_t get_stack_base(void)
219
{
216
{
220
    uintptr_t v;
217
    uintptr_t v;
221
   
218
   
222
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
219
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
223
   
220
   
224
    return v;
221
    return v;
225
}
222
}
226
 
223
 
227
static inline uint64_t rdtsc(void)
224
static inline uint64_t rdtsc(void)
228
{
225
{
229
    uint64_t v;
226
    uint64_t v;
230
   
227
   
231
    __asm__ volatile("rdtsc\n" : "=A" (v));
228
    __asm__ volatile("rdtsc\n" : "=A" (v));
232
   
229
   
233
    return v;
230
    return v;
234
}
231
}
235
 
232
 
236
/** Return current IP address */
233
/** Return current IP address */
237
static inline uintptr_t * get_ip()
234
static inline uintptr_t * get_ip()
238
{
235
{
239
    uintptr_t *ip;
236
    uintptr_t *ip;
240
 
237
 
241
    __asm__ volatile (
238
    __asm__ volatile (
242
        "mov %%eip, %0"
239
        "mov %%eip, %0"
243
        : "=r" (ip)
240
        : "=r" (ip)
244
        );
241
        );
245
    return ip;
242
    return ip;
246
}
243
}
247
 
244
 
248
/** Invalidate TLB Entry.
245
/** Invalidate TLB Entry.
249
 *
246
 *
250
 * @param addr Address on a page whose TLB entry is to be invalidated.
247
 * @param addr Address on a page whose TLB entry is to be invalidated.
251
 */
248
 */
252
static inline void invlpg(uintptr_t addr)
249
static inline void invlpg(uintptr_t addr)
253
{
250
{
254
    __asm__ volatile ("invlpg %0\n" :: "m" (*(unative_t *)addr));
251
    __asm__ volatile ("invlpg %0\n" :: "m" (*(unative_t *)addr));
255
}
252
}
256
 
253
 
257
/** Load GDTR register from memory.
254
/** Load GDTR register from memory.
258
 *
255
 *
259
 * @param gdtr_reg Address of memory from where to load GDTR.
256
 * @param gdtr_reg Address of memory from where to load GDTR.
260
 */
257
 */
261
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
258
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
262
{
259
{
263
    __asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
260
    __asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
264
}
261
}
265
 
262
 
/** Store GDTR register to memory.
 *
 * @param gdtr_reg Address of memory to where to store GDTR.
 */
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
{
    __asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
}

/** Load TR from descriptor table.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 */
static inline void tr_load(uint16_t sel)
{
    __asm__ volatile ("ltr %0" : : "r" (sel));
}

#endif

/** @}
 */
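
A note on usage: the interrupt-control routines changed in this revision keep the usual save/disable/restore calling pattern. A minimal sketch, assuming a caller that protects some shared state (counter and counter_increment() are hypothetical; ipl_t, interrupts_disable() and interrupts_restore() come from the header above):

static volatile int counter;        /* hypothetical shared state */

static void counter_increment(void)
{
    /* Save the current interrupt state and disable interrupts. */
    ipl_t ipl = interrupts_disable();

    counter++;

    /* Re-enable only if interrupts were enabled on entry (ipl == 0). */
    interrupts_restore(ipl);
}

After Rev 1830 this maps onto the Xen evtchn_upcall_mask in the shared info page instead of the EFLAGS interrupt flag, but callers are unaffected because the signatures stay the same.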
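
Similarly, for illustration only, GEN_READ_REG(cr0) above expands to an inline accessor along these lines (reconstructed from the macro definition, not copied from either revision):

static inline unative_t read_cr0 (void)
    {
    unative_t res;
    /* "movl %%" #reg ", %0" becomes "movl %%cr0, %0" after stringization. */
    __asm__ volatile ("movl %%cr0, %0" : "=r" (res) );
    return res;
    }

so cr0/cr2 and the dr0-dr7 debug registers each get a read_*()/write_*() accessor without repeating the inline assembly by hand.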