Subversion Repositories HelenOS

Rev

Rev 1830 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1830 Rev 1831
1
/*
1
/*
2
 * Copyright (C) 2001-2004 Jakub Jermar
2
 * Copyright (C) 2001-2004 Jakub Jermar
3
 * Copyright (C) 2005 Sergey Bondari
3
 * Copyright (C) 2005 Sergey Bondari
4
 * All rights reserved.
4
 * All rights reserved.
5
 *
5
 *
6
 * Redistribution and use in source and binary forms, with or without
6
 * Redistribution and use in source and binary forms, with or without
7
 * modification, are permitted provided that the following conditions
7
 * modification, are permitted provided that the following conditions
8
 * are met:
8
 * are met:
9
 *
9
 *
10
 * - Redistributions of source code must retain the above copyright
10
 * - Redistributions of source code must retain the above copyright
11
 *   notice, this list of conditions and the following disclaimer.
11
 *   notice, this list of conditions and the following disclaimer.
12
 * - Redistributions in binary form must reproduce the above copyright
12
 * - Redistributions in binary form must reproduce the above copyright
13
 *   notice, this list of conditions and the following disclaimer in the
13
 *   notice, this list of conditions and the following disclaimer in the
14
 *   documentation and/or other materials provided with the distribution.
14
 *   documentation and/or other materials provided with the distribution.
15
 * - The name of the author may not be used to endorse or promote products
15
 * - The name of the author may not be used to endorse or promote products
16
 *   derived from this software without specific prior written permission.
16
 *   derived from this software without specific prior written permission.
17
 *
17
 *
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
 */
28
 */
29
 
29
 
30
/** @addtogroup xen32
30
/** @addtogroup xen32
31
 * @{
31
 * @{
32
 */
32
 */
33
/** @file
33
/** @file
34
 */
34
 */
35
 
35
 
36
#ifndef __xen32_ASM_H__
36
#ifndef __xen32_ASM_H__
37
#define __xen32_ASM_H__
37
#define __xen32_ASM_H__
38
 
38
 
39
#include <arch/pm.h>
39
#include <arch/pm.h>
40
#include <arch/types.h>
40
#include <arch/types.h>
41
#include <arch/barrier.h>
41
#include <arch/barrier.h>
42
#include <config.h>
42
#include <config.h>
43
 
43
 
44
/* Symbols defined outside this header (assembly / APIC code).
 * NOTE(review): interrupt_handler_size and interrupt_handlers are removed
 * in the newer revision of this file — verify no remaining users. */
extern uint32_t interrupt_handler_size;

extern void interrupt_handlers(void);

/* Enables the local APIC through its MSR; presumably only meaningful on
 * bare hardware — confirm relevance for the Xen port. */
extern void enable_l_apic_in_msr(void);

/* Busy-wait loops used for delay (calibrated vs. no-op variants,
 * presumably for delay-loop calibration — verify against callers). */
extern void asm_delay_loop(uint32_t t);
extern void asm_fake_loop(uint32_t t);
53
 
49
 
54
 
50
 
55
/** Halt CPU
 *
 * Halt the current CPU until interrupt event.
 * In this Xen port both cpu_halt() and cpu_sleep() are deliberate no-ops;
 * the hypervisor schedules the virtual CPU.
 */
#define cpu_halt() ((void) 0)
#define cpu_sleep() ((void) 0)
61
 
57
 
62
/** Generate an inline reader for a CPU register.
 *
 * GEN_READ_REG(reg) expands to `static inline unative_t read_reg(void)`
 * that copies %reg into a general-purpose register and returns it.
 */
#define GEN_READ_REG(reg) static inline unative_t read_ ##reg (void) \
    { \
    unative_t res; \
    __asm__ volatile ("movl %%" #reg ", %0" : "=r" (res) ); \
    return res; \
    }

/** Generate an inline writer for a CPU register.
 *
 * GEN_WRITE_REG(reg) expands to `static inline void write_reg(unative_t)`
 * that copies its argument into %reg.
 */
#define GEN_WRITE_REG(reg) static inline void write_ ##reg (unative_t regn) \
    { \
    __asm__ volatile ("movl %0, %%" #reg : : "r" (regn)); \
    }
73
 
69
 
74
/* Readers for control registers CR0 and CR2. */
GEN_READ_REG(cr0);
GEN_READ_REG(cr2);

/* Readers for the debug registers (note: no writers for CR0/CR2 and no
 * accessors for DR4/DR5, which are reserved on IA-32). */
GEN_READ_REG(dr0);
GEN_READ_REG(dr1);
GEN_READ_REG(dr2);
GEN_READ_REG(dr3);
GEN_READ_REG(dr6);
GEN_READ_REG(dr7);

/* Writers for the debug registers. */
GEN_WRITE_REG(dr0);
GEN_WRITE_REG(dr1);
GEN_WRITE_REG(dr2);
GEN_WRITE_REG(dr3);
GEN_WRITE_REG(dr6);
GEN_WRITE_REG(dr7);
90
 
86
 
91
/** Write a byte to an I/O port.
 *
 * @param port Port to write to.
 * @param val  Byte value to write.
 */
static inline void outb(uint16_t port, uint8_t val)
{
    __asm__ volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port));
}
99
 
95
 
100
/** Write a 16-bit word to an I/O port.
 *
 * @param port Port to write to.
 * @param val  Word value to write.
 */
static inline void outw(uint16_t port, uint16_t val)
{
    __asm__ volatile ("outw %w0, %w1\n" : : "a" (val), "d" (port));
}
108
 
104
 
109
/** Write a 32-bit double word to an I/O port.
 *
 * Fix: the original asm template used "%l0"; 'l' is not a valid x86
 * operand modifier in GCC extended asm (it is reserved for labels).
 * A plain "%0" already selects the full 32-bit register name for a
 * uint32_t operand.
 *
 * @param port Port to write to.
 * @param val  Double word value to write.
 */
static inline void outl(uint16_t port, uint32_t val)
{
    __asm__ volatile ("outl %0, %w1\n" : : "a" (val), "d" (port));
}
117
 
113
 
118
/** Read a byte from an I/O port.
 *
 * @param port Port to read from.
 * @return Byte value read.
 */
static inline uint8_t inb(uint16_t port)
{
    uint8_t val;

    __asm__ volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port));
    return val;
}
126
 
122
 
127
/** Read a 16-bit word from an I/O port.
 *
 * @param port Port to read from.
 * @return Word value read.
 */
static inline uint16_t inw(uint16_t port)
{
    uint16_t val;

    __asm__ volatile ("inw %w1, %w0 \n" : "=a" (val) : "d" (port));
    return val;
}
135
 
131
 
136
/** Read a 32-bit double word from an I/O port.
 *
 * Fix: the original asm template used "%l0"; 'l' is not a valid x86
 * operand modifier in GCC extended asm (it is reserved for labels).
 * A plain "%0" already selects the full 32-bit register name for a
 * uint32_t operand.
 *
 * @param port Port to read from.
 * @return Double word value read.
 */
static inline uint32_t inl(uint16_t port)
{
    uint32_t val;

    __asm__ volatile ("inl %w1, %0\n" : "=a" (val) : "d" (port));
    return val;
}
144
 
140
 
145
/** Enable interrupts.
 *
 * Under this Xen port "interrupts" are event-channel upcalls: enabling
 * them clears the per-VCPU upcall mask in the shared info page. Returns
 * the previous mask value, which serves as the interrupt priority level.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void)
{
    // FIXME SMP
    
    /* Save the old mask so the caller can restore it later. */
    ipl_t v = shared_info.vcpu_info[0].evtchn_upcall_mask;
    write_barrier();
    /* Unmask upcalls; the barriers order this store against the
     * surrounding accesses to the hypervisor-shared page. */
    shared_info.vcpu_info[0].evtchn_upcall_mask = 0;
    write_barrier();
    /* An event may have become pending while upcalls were masked;
     * force the callback now so it is not lost. */
    if (shared_info.vcpu_info[0].evtchn_upcall_pending)
        force_evtchn_callback();
    
    return v;
}
165
 
161
 
166
/** Disable interrupts.
 *
 * Set the per-VCPU event-channel upcall mask (disabling upcalls) and
 * return its previous value.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void)
{
    // FIXME SMP
    
    ipl_t v = shared_info.vcpu_info[0].evtchn_upcall_mask;
    shared_info.vcpu_info[0].evtchn_upcall_mask = 1;
    /* Ensure the mask store is visible before any following accesses. */
    write_barrier();
    
    return v;
}
183
 
179
 
184
/** Restore interrupt priority level.
180
/** Restore interrupt priority level.
185
 *
181
 *
186
 * Restore EFLAGS.
182
 * Restore EFLAGS.
187
 *
183
 *
188
 * @param ipl Saved interrupt priority level.
184
 * @param ipl Saved interrupt priority level.
189
 */
185
 */
190
static inline void interrupts_restore(ipl_t ipl)
186
static inline void interrupts_restore(ipl_t ipl)
191
{
187
{
192
    if (ipl == 0)
188
    if (ipl == 0)
193
        interrupts_enable();
189
        interrupts_enable();
194
    else
190
    else
195
        interrupts_disable();
191
        interrupts_disable();
196
}
192
}
197
 
193
 
198
/** Return interrupt priority level.
 *
 * @return Current event-channel upcall mask of VCPU 0 (non-zero means
 *         upcalls/interrupts are disabled).
 */
static inline ipl_t interrupts_read(void)
{
    // FIXME SMP
    
    return shared_info.vcpu_info[0].evtchn_upcall_mask;
}
208
 
204
 
209
/** Return base address of current stack
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t v;
    
    /* Mask the low bits off %esp; correct only because the stack base is
     * STACK_SIZE-aligned (STACK_SIZE assumed to be a power of two).
     * The "0" constraint seeds the output register with the mask. */
    __asm__ volatile ("andl %%esp, %0\n" : "=r" (v) : "0" (~(STACK_SIZE-1)));
    
    return v;
}
223
 
219
 
224
/** Read the CPU time-stamp counter.
 *
 * @return Current 64-bit TSC value (the "=A" constraint collects the
 *         EDX:EAX pair produced by the rdtsc instruction).
 */
static inline uint64_t rdtsc(void)
{
    uint64_t tsc;

    __asm__ volatile("rdtsc\n" : "=A" (tsc));
    return tsc;
}
232
 
228
 
233
/** Return current IP address.
 *
 * Fix: the original used "mov %%eip, %0", which is not a valid IA-32
 * instruction — EIP cannot be addressed directly as a mov operand.
 * Use the standard call/pop idiom instead: CALL pushes the address of
 * the following instruction, which POP then retrieves.
 *
 * @return Address of the instruction following the inline call.
 */
static inline uintptr_t * get_ip()
{
    uintptr_t *ip;

    __asm__ volatile (
        "call 0f\n"
        "0:\n"
        "popl %0"
        : "=r" (ip)
    );
    return ip;
}
244
 
240
 
245
/** Invalidate TLB Entry.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
 */
static inline void invlpg(uintptr_t addr)
{
    /* The "m" constraint makes the compiler emit a memory operand for
     * the given address; invlpg only names the address, it does not
     * actually access the memory. */
    __asm__ volatile ("invlpg %0\n" :: "m" (*(unative_t *)addr));
}
253
 
249
 
254
/** Load GDTR register from memory.
 *
 * NOTE(review): lgdt is a privileged instruction; under Xen a
 * paravirtualized guest does not run at ring 0 — confirm where/how this
 * is actually used in the xen32 port.
 *
 * @param gdtr_reg Address of memory from where to load GDTR.
 */
static inline void gdtr_load(ptr_16_32_t *gdtr_reg)
{
    __asm__ volatile ("lgdtl %0\n" : : "m" (*gdtr_reg));
}
262
 
258
 
263
/** Store GDTR register to memory.
 *
 * @param gdtr_reg Address of memory to where to store GDTR.
 */
static inline void gdtr_store(ptr_16_32_t *gdtr_reg)
{
    __asm__ volatile ("sgdtl %0\n" : : "m" (*gdtr_reg));
}
271
 
267
 
272
/** Load TR from descriptor table.
 *
 * NOTE(review): ltr is privileged; as with gdtr_load, confirm this is
 * usable from the ring the Xen guest kernel runs in.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 */
static inline void tr_load(uint16_t sel)
{
    __asm__ volatile ("ltr %0" : : "r" (sel));
}
280
 
276
 
281
#endif
277
#endif
282
 
278
 
283
/** @}
279
/** @}
284
 */
280
 */
285
 
281