/trunk/kernel/arch/amd64/include/memstr.h
---
51,7 → 51,7
 {
 	unative_t d0, d1, d2;
-	__asm__ __volatile__(
+	asm volatile(
 		"rep movsq\n\t"
 		"movq %4, %%rcx\n\t"
 		"andq $7, %%rcx\n\t"
82,7 → 82,7
 	unative_t d0, d1, d2;
 	unative_t ret;
-	__asm__ (
+	asm (
 		"repe cmpsb\n\t"
 		"je 1f\n\t"
 		"movq %3, %0\n\t"
108,7 → 108,7
 {
 	unative_t d0, d1;
-	__asm__ __volatile__ (
+	asm volatile (
 		"rep stosw\n\t"
 		: "=&D" (d0), "=&c" (d1), "=a" (x)
 		: "0" (dst), "1" ((unative_t)cnt), "2" (x)
130,7 → 130,7
 {
 	unative_t d0, d1;
-	__asm__ __volatile__ (
+	asm volatile (
 		"rep stosb\n\t"
 		: "=&D" (d0), "=&c" (d1), "=a" (x)
 		: "0" (dst), "1" ((unative_t)cnt), "2" (x)
/trunk/kernel/arch/amd64/include/atomic.h
---
42,17 → 42,17
 static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
-	__asm__ volatile ("lock incq %0\n" : "=m" (val->count));
+	asm volatile ("lock incq %0\n" : "=m" (val->count));
 #else
-	__asm__ volatile ("incq %0\n" : "=m" (val->count));
+	asm volatile ("incq %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
 static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
-	__asm__ volatile ("lock decq %0\n" : "=m" (val->count));
+	asm volatile ("lock decq %0\n" : "=m" (val->count));
 #else
-	__asm__ volatile ("decq %0\n" : "=m" (val->count));
+	asm volatile ("decq %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
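
`incq`/`decq` on a memory operand is a read-modify-write, so the `lock` prefix is what makes it atomic against other CPUs; the non-SMP build can drop it because a single instruction is already atomic with respect to interrupts on one processor. One nit while touching these lines: `"=m"` declares the operand write-only, and `"+m"` describes the read-modify-write more precisely. Sketch with that constraint (type names are illustrative):

    #include <stdint.h>

    typedef struct { volatile int64_t count; } demo_atomic_t;

    /* SMP-safe increment: lock holds the cache line for the RMW. */
    static inline void demo_atomic_inc(demo_atomic_t *val)
    {
        asm volatile ("lock incq %0\n" : "+m" (val->count));
    }
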
60,7 → 60,7
 {
 	long r = 1;
-	__asm__ volatile (
+	asm volatile (
 		"lock xaddq %1, %0\n"
 		: "=m" (val->count), "+r" (r)
 	);
72,7 → 72,7
 {
 	long r = -1;
-	__asm__ volatile (
+	asm volatile (
 		"lock xaddq %1, %0\n"
 		: "=m" (val->count), "+r" (r)
 	);
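
`xaddq %1, %0` writes the old memory value into the register and the sum back to memory, so `r` comes back holding the pre-update count: `r = 1` gives fetch-and-increment, `r = -1` fetch-and-decrement. Sketch, reusing `demo_atomic_t` from above:

    /* Returns the value the counter had *before* the increment. */
    static inline long demo_atomic_postinc(demo_atomic_t *val)
    {
        long r = 1;
        asm volatile ("lock xaddq %1, %0\n"
                      : "+m" (val->count), "+r" (r));
        return r;
    }
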
86,7 → 86,7
 static inline uint64_t test_and_set(atomic_t *val) {
 	uint64_t v;
-	__asm__ volatile (
+	asm volatile (
 		"movq $1, %0\n"
 		"xchgq %0, %1\n"
 		: "=r" (v),"=m" (val->count)
102,7 → 102,7
 	uint64_t tmp;
 	preemption_disable();
-	__asm__ volatile (
+	asm volatile (
 		"0:;"
 #ifdef CONFIG_HT
 		"pause;"
/trunk/kernel/arch/amd64/include/asm.h
---
52,7 → 52,7
 {
 	uintptr_t v;
-	__asm__ volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
+	asm volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
 	return v;
 }
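
The trick relies on kernel stacks being STACK_SIZE-aligned with STACK_SIZE a power of two: anding RSP with `~(STACK_SIZE - 1)` clears the offset-within-stack bits and leaves the stack base. The `"0"` constraint preloads the mask into the output register before the `andq`. Sketch with an assumed 4 KiB alignment (in user space the result is not meaningful, since the stack carries no such alignment guarantee):

    #include <stdint.h>

    #define DEMO_STACK_SIZE 4096ULL   /* assumption: power-of-two size */

    static inline uintptr_t demo_get_stack_base(void)
    {
        uintptr_t v;
        /* v starts out as the mask (tied via "0"), rsp is anded in. */
        asm volatile ("andq %%rsp, %0\n"
                      : "=r" (v)
                      : "0" (~(DEMO_STACK_SIZE - 1)));
        return v;
    }
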
/trunk/kernel/arch/amd64/src/fpu_context.c
---
40,7 → 40,7
 /** Save FPU (mmx, sse) context using fxsave instruction */
 void fpu_context_save(fpu_context_t *fctx)
 {
-	__asm__ volatile (
+	asm volatile (
 		"fxsave %0"
 		: "=m" (*fctx)
 	);
49,7 → 49,7
 /** Restore FPU (mmx,sse) context using fxrstor instruction */
 void fpu_context_restore(fpu_context_t *fctx)
 {
-	__asm__ volatile (
+	asm volatile (
 		"fxrstor %0"
 		: "=m" (*fctx)
 	);
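
`fxsave`/`fxrstor` move the whole x87/MMX/SSE state through a 512-byte memory image that must be 16-byte aligned, so `fpu_context_t` has to provide that alignment. A small nit: `fxrstor` reads its operand, so an input `"m"` constraint describes the restore more accurately than the `"=m"` used above. User-space sketch:

    #include <stdint.h>

    /* 512-byte, 16-byte-aligned area required by fxsave/fxrstor. */
    typedef struct {
        uint8_t data[512];
    } __attribute__((aligned(16))) demo_fpu_ctx_t;

    static void demo_fpu_save(demo_fpu_ctx_t *fctx)
    {
        asm volatile ("fxsave %0" : "=m" (*fctx));
    }

    static void demo_fpu_restore(demo_fpu_ctx_t *fctx)
    {
        asm volatile ("fxrstor %0" : : "m" (*fctx));
    }
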
58,7 → 58,7
 void fpu_init()
 {
 	/* TODO: Zero all SSE, MMX etc. registers */
-	__asm__ volatile (
+	asm volatile (
 		"fninit;"
 	);
 }
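
As the TODO notes, `fninit` resets only the x87 control, status and tag words; the SSE registers and MXCSR are untouched, so zeroing those would take extra code (an `ldmxcsr` plus clearing each xmm register, for instance).
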
/trunk/kernel/arch/amd64/src/cpu/cpu.c
---
76,7 → 76,7
  */
 void cpu_setup_fpu(void)
 {
-	__asm__ volatile (
+	asm volatile (
 		"movq %%cr0, %%rax;"
 		"btsq $1, %%rax;"	/* cr0.mp */
 		"btrq $2, %%rax;"	/* cr0.em */
99,7 → 99,7
  */
 void fpu_disable(void)
 {
-	__asm__ volatile (
+	asm volatile (
 		"mov %%cr0,%%rax;"
 		"bts $3,%%rax;"
 		"mov %%rax,%%cr0;"
111,7 → 111,7
 void fpu_enable(void)
 {
-	__asm__ volatile (
+	asm volatile (
 		"mov %%cr0,%%rax;"
 		"btr $3,%%rax;"
 		"mov %%rax,%%cr0;"
/trunk/kernel/arch/amd64/src/userspace.c
---
54,7 → 54,7
 	/* Clear CF,PF,AF,ZF,SF,DF,OF */
 	ipl &= ~(0xcd4);
-	__asm__ volatile (""
+	asm volatile (""
 		"pushq %0\n"
 		"pushq %1\n"
 		"pushq %2\n"