Subversion Repositories HelenOS


Rev 2787 → Rev 4377
Line 24... Line 24...
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup amd64
 * @{
 */
/** @file
 */

#ifndef KERN_amd64_ASM_H_
#define KERN_amd64_ASM_H_

#include <config.h>
#include <arch/types.h>
#include <typedefs.h>

extern void asm_delay_loop(uint32_t t);
extern void asm_fake_loop(uint32_t t);

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 *
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t v;
    
    asm volatile (
        "andq %%rsp, %[v]\n"
        : [v] "=r" (v)
        : "0" (~((uint64_t) STACK_SIZE - 1))
    );
    
    return v;
}
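
/*
 * Usage sketch: STACK_SIZE is a power of two and the stack is page
 * aligned, so masking RSP yields the lowest address of the current
 * kernel stack; e.g. if STACK_SIZE were 0x2000, RSP == 0xffff80000012be70
 * would give 0xffff80000012a000. The generic kernel keeps its per-stack
 * bookkeeping structure there, roughly:
 *
 *     the_t *the = (the_t *) get_stack_base();
 *
 * (the_t and the variable name are illustrative here, not defined in
 * this header.)
 */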

static inline void cpu_sleep(void)
{
    asm volatile ("hlt\n");
}

static inline void cpu_halt(void)
{
    asm volatile (
        "0:\n"
        "   hlt\n"
        "   jmp 0b\n"
    );
}


/** Byte from port
 *
 * Get byte from port
 *
 * @param port Port to read from
 * @return Value read
 *
 */
static inline uint8_t pio_read_8(ioport8_t *port)
{
    uint8_t val;
    
    asm volatile (
        "inb %w[port], %b[val]\n"
        : [val] "=a" (val)
        : [port] "d" (port)
    );
    
    return val;
}

/** Word from port
 *
 * Get word from port
 *
 * @param port Port to read from
 * @return Value read
 *
 */
static inline uint16_t pio_read_16(ioport16_t *port)
{
    uint16_t val;
    
    asm volatile (
        "inw %w[port], %w[val]\n"
        : [val] "=a" (val)
        : [port] "d" (port)
    );
    
    return val;
}

/** Double word from port
 *
 * Get double word from port
 *
 * @param port Port to read from
 * @return Value read
 *
 */
static inline uint32_t pio_read_32(ioport32_t *port)
{
    uint32_t val;
    
    asm volatile (
        "inl %w[port], %[val]\n"
        : [val] "=a" (val)
        : [port] "d" (port)
    );
    
    return val;
}

/** Byte to port
 *
 * Output byte to port
 *
 * @param port Port to write to
 * @param val Value to write
 *
 */
static inline void pio_write_8(ioport8_t *port, uint8_t val)
{
    asm volatile (
        "outb %b[val], %w[port]\n"
        :: [val] "a" (val), [port] "d" (port)
    );
}

/** Word to port
 *
 * Output word to port
 *
 * @param port Port to write to
 * @param val Value to write
 *
 */
static inline void pio_write_16(ioport16_t *port, uint16_t val)
{
    asm volatile (
        "outw %w[val], %w[port]\n"
        :: [val] "a" (val), [port] "d" (port)
    );
}

/** Double word to port
 *
 * Output double word to port
 *
 * @param port Port to write to
 * @param val Value to write
 *
 */
static inline void pio_write_32(ioport32_t *port, uint32_t val)
{
    asm volatile (
        "outl %[val], %w[port]\n"
        :: [val] "a" (val), [port] "d" (port)
    );
}
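
/*
 * Usage sketch for the pio_* accessors (the port numbers below are the
 * standard PC keyboard controller ports; the variable names are
 * illustrative only):
 *
 *     ioport8_t *kbd_status = (ioport8_t *) 0x64;
 *     ioport8_t *kbd_data = (ioport8_t *) 0x60;
 *
 *     while (!(pio_read_8(kbd_status) & 0x01));  // wait for output buffer full
 *     uint8_t scancode = pio_read_8(kbd_data);
 *
 * On amd64 the ioport*_t pointer is never dereferenced; its value is
 * simply passed in DX as the port number of the in/out instruction.
 */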

/** Swap hidden part of GS register with the visible one */
static inline void swapgs(void)
{
    asm volatile ("swapgs");
}

/** Enable interrupts.
 *
 * Enable interrupts and return previous
 * value of EFLAGS.
 *
 * @return Old interrupt priority level.
 *
 */
static inline ipl_t interrupts_enable(void) {
    ipl_t v;
    
    asm volatile (
        "pushfq\n"
        "popq %[v]\n"
        "sti\n"
        : [v] "=r" (v)
    );
    
    return v;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous
 * value of EFLAGS.
 *
 * @return Old interrupt priority level.
 *
 */
static inline ipl_t interrupts_disable(void) {
    ipl_t v;
    
    asm volatile (
        "pushfq\n"
        "popq %[v]\n"
        "cli\n"
        : [v] "=r" (v)
    );
    
    return v;
}

/** Restore interrupt priority level.
 *
 * Restore EFLAGS.
 *
 * @param ipl Saved interrupt priority level.
 *
 */
static inline void interrupts_restore(ipl_t ipl) {
    asm volatile (
        "pushq %[ipl]\n"
        "popfq\n"
        :: [ipl] "r" (ipl)
    );
}
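
/*
 * The three routines above are meant to be used as a pair around short
 * critical sections; sketch (the section body is hypothetical):
 *
 *     ipl_t ipl = interrupts_disable();
 *     // ... touch data shared with interrupt handlers ...
 *     interrupts_restore(ipl);
 *
 * Restoring the saved level rather than unconditionally re-enabling
 * keeps nested critical sections correct: interrupts come back on only
 * if they were on before the outermost disable.
 */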

/** Return interrupt priority level.
 *
 * Return EFLAGS.
 *
 * @return Current interrupt priority level.
 *
 */
static inline ipl_t interrupts_read(void) {
    ipl_t v;
    
    asm volatile (
        "pushfq\n"
        "popq %[v]\n"
        : [v] "=r" (v)
    );
    
    return v;
}
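
/*
 * Sketch: the value returned by interrupts_read() is the RFLAGS image
 * pushed by pushfq, so the interrupt flag (bit 9, mask 0x200) can be
 * tested directly:
 *
 *     bool enabled = (interrupts_read() & 0x200) != 0;
 */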

/** Write to MSR */
static inline void write_msr(uint32_t msr, uint64_t value)
{
    asm volatile (
        "wrmsr\n"
        :: "c" (msr),
           "a" ((uint32_t) (value)),
           "d" ((uint32_t) (value >> 32))
    );
}

static inline unative_t read_msr(uint32_t msr)
{
    uint32_t ax, dx;
    
    asm volatile (
        "rdmsr\n"
        : "=a" (ax), "=d" (dx)
        : "c" (msr)
    );
    
    return ((uint64_t) dx << 32) | ax;
}
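
/*
 * Usage sketch: MSRs are addressed by number in ECX. For instance the
 * IA32_APIC_BASE MSR (0x1b), which enable_l_apic_in_msr() below
 * manipulates, could be updated as:
 *
 *     uint64_t apic_base = read_msr(0x1b);
 *     write_msr(0x1b, apic_base | (1 << 11));  // set the APIC global enable bit
 */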


/** Enable local APIC
 *
 * Enable local APIC in MSR.
 *
 */
static inline void enable_l_apic_in_msr()
{
    asm volatile (
        "movl $0x1b, %%ecx\n"
        "rdmsr\n"
        "orl $(1 << 11),%%eax\n"
        "orl $(0xfee00000),%%eax\n"
        "wrmsr\n"
        ::: "%eax","%ecx","%edx"
    );
}

static inline uintptr_t * get_ip()
{
    uintptr_t *ip;
    
    asm volatile (
        "mov %%rip, %[ip]"
        : [ip] "=r" (ip)
    );
    
    return ip;
}

/** Invalidate TLB Entry.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
 *
 */
static inline void invlpg(uintptr_t addr)
{
    asm volatile (
        "invlpg %[addr]\n"
        :: [addr] "m" (*((unative_t *) addr))
    );
}
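
/*
 * Usage sketch: call invlpg() after changing a page table entry so the
 * stale translation is dropped from the TLB (pte_set() is hypothetical):
 *
 *     pte_set(ptl, vaddr, new_frame, flags);
 *     invlpg(vaddr);
 */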

/** Load GDTR register from memory.
 *
 * @param gdtr_reg Address of memory from where to load GDTR.
 *
 */
static inline void gdtr_load(ptr_16_64_t *gdtr_reg)
{
    asm volatile (
        "lgdtq %[gdtr_reg]\n"
        :: [gdtr_reg] "m" (*gdtr_reg)
    );
}

/** Store GDTR register to memory.
 *
 * @param gdtr_reg Address of memory to where to store GDTR.
 *
 */
static inline void gdtr_store(ptr_16_64_t *gdtr_reg)
{
    asm volatile (
        "sgdtq %[gdtr_reg]\n"
        :: [gdtr_reg] "m" (*gdtr_reg)
    );
}

/** Load IDTR register from memory.
 *
 * @param idtr_reg Address of memory from where to load IDTR.
 *
 */
static inline void idtr_load(ptr_16_64_t *idtr_reg)
{
    asm volatile (
        "lidtq %[idtr_reg]\n"
        :: [idtr_reg] "m" (*idtr_reg)
    );
}

/** Load TR from descriptor table.
 *
 * @param sel Selector specifying descriptor of TSS segment.
 *
 */
static inline void tr_load(uint16_t sel)
{
    asm volatile (
        "ltr %[sel]"
        :: [sel] "r" (sel)
    );
}
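
/*
 * Sketch of how these loaders fit together during CPU setup (assuming
 * ptr_16_64_t has a 16-bit limit and a 64-bit base member; gdt and
 * TSS_DES are illustrative names):
 *
 *     ptr_16_64_t gdtr = {
 *         .limit = sizeof(gdt) - 1,
 *         .base = (uint64_t) gdt
 *     };
 *     gdtr_load(&gdtr);
 *     tr_load(TSS_DES << 3);  // selector = descriptor index * 8
 */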

#define GEN_READ_REG(reg) static inline unative_t read_ ##reg (void) \
    { \
        unative_t res; \
        asm volatile ( \
            "movq %%" #reg ", %[res]" \
            : [res] "=r" (res) \
        ); \
        return res; \
    }

#define GEN_WRITE_REG(reg) static inline void write_ ##reg (unative_t regn) \
    { \
        asm volatile ( \
            "movq %[regn], %%" #reg \
            :: [regn] "r" (regn) \
        ); \
    }

GEN_READ_REG(cr0)
GEN_READ_REG(cr2)
GEN_READ_REG(cr3)
GEN_WRITE_REG(cr3)
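
/*
 * The instantiations above generate read_cr0(), read_cr2(), read_cr3()
 * and write_cr3(). A common idiom (illustrative) is flushing all
 * non-global TLB entries by reloading CR3 with its current value:
 *
 *     write_cr3(read_cr3());
 */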