Subversion Repositories HelenOS

Comparison of Rev 3022 and Rev 4055. Lines prefixed with "-" appear only in Rev 3022, lines prefixed with "+" only in Rev 4055; unprefixed lines are common to both revisions.

Line 24 (Rev 3022) ... Line 24 (Rev 4055)
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup amd64
 * @{
 */
/** @file
 */

#ifndef KERN_amd64_ASM_H_
#define KERN_amd64_ASM_H_

#include <config.h>
+#include <arch/types.h>
+#include <typedefs.h>

extern void asm_delay_loop(uint32_t t);
extern void asm_fake_loop(uint32_t t);

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on page boundary.
+ *
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t v;

-    asm volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
+    asm volatile (
+        "andq %%rsp, %[v]\n"
+        : [v] "=r" (v)
+        : "0" (~((uint64_t) STACK_SIZE-1))
+    );

    return v;
}
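The masking above relies on STACK_SIZE being a power of two and the stack area being STACK_SIZE-aligned; a small C sketch of the same arithmetic (illustration only, not part of either revision, with made-up values):

#include <stdint.h>

/* Illustration only: with a power-of-two stack size and a stack area aligned
 * to that size, clearing the low bits of the stack pointer yields the base
 * of the stack area. */
static inline uint64_t example_stack_base(uint64_t rsp, uint64_t stack_size)
{
    return rsp & ~(stack_size - 1);
}

/* e.g. example_stack_base(0xffff800000041f38, 0x2000) == 0xffff800000040000 */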

static inline void cpu_sleep(void)
Line 70 (Rev 3022) ... Line 77 (Rev 4055)
 *
 * Get byte from port
 *
 * @param port Port to read from
 * @return Value read
+ *
 */
-static inline uint8_t inb(uint16_t port)
+static inline uint8_t pio_read_8(ioport8_t *port)
{
    uint8_t val;

-    asm volatile ("inb %w1, %b0 \n" : "=a" (val) : "d" (port));
+    asm volatile (
+        "inb %w[port], %b[val]\n"
+        : [val] "=a" (val)
+        : [port] "d" (port)
+    );
+
    return val;
}

+/** Word from port
+ *
+ * Get word from port
+ *
+ * @param port Port to read from
+ * @return Value read
+ *
+ */
+static inline uint16_t pio_read_16(ioport16_t *port)
+{
+    uint16_t val;
+
+    asm volatile (
+        "inw %w[port], %w[val]\n"
+        : [val] "=a" (val)
+        : [port] "d" (port)
+    );
+
+    return val;
+}
+
+/** Double word from port
+ *
+ * Get double word from port
+ *
+ * @param port Port to read from
+ * @return Value read
+ *
+ */
+static inline uint32_t pio_read_32(ioport32_t *port)
+{
+    uint32_t val;
+
+    asm volatile (
+        "inl %w[port], %[val]\n"
+        : [val] "=a" (val)
+        : [port] "d" (port)
+    );
+
+    return val;
+}
+
/** Byte to port
 *
 * Output byte to port
 *
 * @param port Port to write to
 * @param val Value to write
+ *
 */
-static inline void outb(uint16_t port, uint8_t val)
+static inline void pio_write_8(ioport8_t *port, uint8_t val)
{
-    asm volatile ("outb %b0, %w1\n" : : "a" (val), "d" (port));
+    asm volatile (
+        "outb %b[val], %w[port]\n"
+        :: [val] "a" (val), [port] "d" (port)
+    );
}

+/** Word to port
+ *
+ * Output word to port
+ *
+ * @param port Port to write to
+ * @param val Value to write
+ *
+ */
+static inline void pio_write_16(ioport16_t *port, uint16_t val)
+{
+    asm volatile (
+        "outw %w[val], %w[port]\n"
+        :: [val] "a" (val), [port] "d" (port)
+    );
+}
+
+/** Double word to port
+ *
+ * Output double word to port
+ *
+ * @param port Port to write to
+ * @param val Value to write
+ *
+ */
+static inline void pio_write_32(ioport32_t *port, uint32_t val)
+{
+    asm volatile (
+        "outl %[val], %w[port]\n"
+        :: [val] "a" (val), [port] "d" (port)
+    );
+}
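The Rev 4055 interface takes typed port handles (ioport8_t * and friends) instead of the bare port numbers that inb()/outb() used. A minimal usage sketch (illustration only; the port number 0x64, the legacy PC keyboard-controller status port, and the cast used to obtain the handle are assumptions of this example):

/* Illustration only: reading a device status register through pio_read_8().
 * On ia32/amd64 an ioport8_t * is assumed here to carry the port number
 * itself, which the inline asm then passes in %dx. */
static inline uint8_t example_read_kbd_status(void)
{
    ioport8_t *status_port = (ioport8_t *) 0x64;   /* assumed port number */
    return pio_read_8(status_port);
}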

/** Swap Hidden part of GS register with visible one */
static inline void swapgs(void)
{
Line 103 (Rev 3022) ... Line 194 (Rev 4055)
 *
 * Enable interrupts and return previous
 * value of EFLAGS.
 *
 * @return Old interrupt priority level.
+ *
 */
static inline ipl_t interrupts_enable(void) {
    ipl_t v;
+
-    __asm__ volatile (
+    asm volatile (
        "pushfq\n"
-        "popq %0\n"
+        "popq %[v]\n"
        "sti\n"
-        : "=r" (v)
+        : [v] "=r" (v)
    );
+
    return v;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous
 * value of EFLAGS.
 *
 * @return Old interrupt priority level.
+ *
 */
static inline ipl_t interrupts_disable(void) {
    ipl_t v;
+
-    __asm__ volatile (
+    asm volatile (
        "pushfq\n"
-        "popq %0\n"
+        "popq %[v]\n"
        "cli\n"
-        : "=r" (v)
+        : [v] "=r" (v)
    );
+
    return v;
}

/** Restore interrupt priority level.
 *
 * Restore EFLAGS.
 *
 * @param ipl Saved interrupt priority level.
+ *
 */
static inline void interrupts_restore(ipl_t ipl) {
-    __asm__ volatile (
+    asm volatile (
-        "pushq %0\n"
+        "pushq %[ipl]\n"
        "popfq\n"
-        : : "r" (ipl)
+        :: [ipl] "r" (ipl)
    );
}

/** Return interrupt priority level.
 *
 * Return EFLAGS.
 *
 * @return Current interrupt priority level.
+ *
 */
static inline ipl_t interrupts_read(void) {
    ipl_t v;
+
-    __asm__ volatile (
+    asm volatile (
        "pushfq\n"
-        "popq %0\n"
+        "popq %[v]\n"
-        : "=r" (v)
+        : [v] "=r" (v)
    );
+
    return v;
}
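A minimal usage sketch of the save/disable/restore pattern these routines support (illustration only, not part of either revision; the protected work is a placeholder):

/* Illustration only: a short critical section on the local CPU.
 * interrupts_disable() returns the previous EFLAGS value as an ipl_t and
 * interrupts_restore() puts it back, so the pattern nests safely. */
static inline void example_critical_section(void)
{
    ipl_t ipl = interrupts_disable();

    /* ... work that must not be interleaved with interrupt handlers ... */

    interrupts_restore(ipl);
}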

/** Write to MSR */
static inline void write_msr(uint32_t msr, uint64_t value)
{
-    __asm__ volatile (
+    asm volatile (
-        "wrmsr;" : : "c" (msr),
-        "a" ((uint32_t)(value)),
-        "d" ((uint32_t)(value >> 32))
+        "wrmsr\n"
+        :: "c" (msr),
+           "a" ((uint32_t) (value)),
+           "d" ((uint32_t) (value >> 32))
    );
}

static inline unative_t read_msr(uint32_t msr)
{
    uint32_t ax, dx;

-    __asm__ volatile (
+    asm volatile (
-        "rdmsr;" : "=a"(ax), "=d"(dx) : "c" (msr)
+        "rdmsr\n"
+        : "=a" (ax), "=d" (dx)
+        : "c" (msr)
    );
+
    return ((uint64_t) dx << 32) | ax;
}


/** Enable local APIC
 *
 * Enable local APIC in MSR.
+ *
 */
static inline void enable_l_apic_in_msr()
{
-    __asm__ volatile (
+    asm volatile (
        "movl $0x1b, %%ecx\n"
        "rdmsr\n"
-        "orl $(1<<11),%%eax\n"
+        "orl $(1 << 11),%%eax\n"
        "orl $(0xfee00000),%%eax\n"
        "wrmsr\n"
-        :
-        :
-        :"%eax","%ecx","%edx"
+        ::: "%eax","%ecx","%edx"
    );
}
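For comparison, the APIC-enable sequence above is roughly what the following C sketch does through read_msr()/write_msr() (illustration only; the constant names are made up, the numbers are the ones used in the asm block):

/* Illustration only: the same read-modify-write of MSR 0x1b expressed with
 * the helpers above. Constant names are hypothetical. */
#define EXAMPLE_IA32_APIC_BASE_MSR  0x1b
#define EXAMPLE_APIC_GLOBAL_ENABLE  (1 << 11)
#define EXAMPLE_APIC_DEFAULT_BASE   0xfee00000

static inline void example_enable_l_apic(void)
{
    uint64_t val = read_msr(EXAMPLE_IA32_APIC_BASE_MSR);
    val |= EXAMPLE_APIC_GLOBAL_ENABLE | EXAMPLE_APIC_DEFAULT_BASE;
    write_msr(EXAMPLE_IA32_APIC_BASE_MSR, val);
}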

static inline uintptr_t * get_ip()
{
    uintptr_t *ip;

-    __asm__ volatile (
+    asm volatile (
-        "mov %%rip, %0"
+        "mov %%rip, %[ip]"
-        : "=r" (ip)
+        : [ip] "=r" (ip)
    );
+
    return ip;
}

/** Invalidate TLB Entry.
 *
 * @param addr Address on a page whose TLB entry is to be invalidated.
+ *
 */
static inline void invlpg(uintptr_t addr)
{
-    __asm__ volatile ("invlpg %0\n" :: "m" (*((unative_t *)addr)));
+    asm volatile (
+        "invlpg %[addr]\n"
+        :: [addr] "m" (*((unative_t *) addr))
+    );
}
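A minimal sketch of where invlpg() typically fits (illustration only, not part of either revision; the page-table update itself is left as a placeholder comment):

/* Illustration only: after changing the mapping of a virtual address, the
 * stale TLB entry covering that page has to be dropped. */
static inline void example_remap(uintptr_t vaddr)
{
    /* ... modify the page-table entry that maps vaddr ... */
    invlpg(vaddr);   /* flush the TLB entry for the page containing vaddr */
}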

/** Load GDTR register from memory.
 *
 * @param gdtr_reg Address of memory from where to load GDTR.
+ *
 */
static inline void gdtr_load(struct ptr_16_64 *gdtr_reg)
{
-    __asm__ volatile ("lgdtq %0\n" : : "m" (*gdtr_reg));
+    asm volatile (
+        "lgdtq %[gdtr_reg]\n"
+        :: [gdtr_reg] "m" (*gdtr_reg)
+    );
}

/** Store GDTR register to memory.
 *
 * @param gdtr_reg Address of memory to where to store GDTR.
+ *
 */
static inline void gdtr_store(struct ptr_16_64 *gdtr_reg)
{
-    __asm__ volatile ("sgdtq %0\n" : : "m" (*gdtr_reg));
+    asm volatile (
+        "sgdtq %[gdtr_reg]\n"
+        :: [gdtr_reg] "m" (*gdtr_reg)
+    );
}

/** Load IDTR register from memory.
 *
 * @param idtr_reg Address of memory from where to load IDTR.
+ *
 */
static inline void idtr_load(struct ptr_16_64 *idtr_reg)
{
-    __asm__ volatile ("lidtq %0\n" : : "m" (*idtr_reg));
+    asm volatile (
+        "lidtq %[idtr_reg]\n"
+        :: [idtr_reg] "m" (*idtr_reg));
}

/** Load TR from descriptor table.
 *
 * @param sel Selector specifying descriptor of TSS segment.
+ *
 */
static inline void tr_load(uint16_t sel)
{
-    __asm__ volatile ("ltr %0" : : "r" (sel));
+    asm volatile (
+        "ltr %[sel]"
+        :: [sel] "r" (sel)
+    );
}

#define GEN_READ_REG(reg) static inline unative_t read_ ##reg (void) \
    { \
-    unative_t res; \
-    __asm__ volatile ("movq %%" #reg ", %0" : "=r" (res) ); \
-    return res; \
+        unative_t res; \
+        asm volatile ( \
+            "movq %%" #reg ", %[res]" \
+            : [res] "=r" (res) \
+        ); \
+        return res; \
    }

#define GEN_WRITE_REG(reg) static inline void write_ ##reg (unative_t regn) \
    { \
-    __asm__ volatile ("movq %0, %%" #reg : : "r" (regn)); \
+        asm volatile ( \
+            "movq %[regn], %%" #reg \
+            :: [regn] "r" (regn) \
+        ); \
    }

GEN_READ_REG(cr0)
GEN_READ_REG(cr2)
GEN_READ_REG(cr3)
GEN_WRITE_REG(cr3)
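To make the macros concrete, this is roughly what GEN_READ_REG(cr3) on the Rev 4055 side expands to (illustration only, whitespace normalized):

/* Illustration only: approximate expansion of GEN_READ_REG(cr3). */
static inline unative_t read_cr3(void)
{
    unative_t res;
    asm volatile (
        "movq %%cr3, %[res]"
        : [res] "=r" (res)
    );
    return res;
}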