Subversion Repositories HelenOS

Compare Revisions

Ignore whitespace Rev 2081 → Rev 2082

/trunk/kernel/arch/amd64/include/memstr.h
51,7 → 51,7
{
unative_t d0, d1, d2;
 
__asm__ __volatile__(
asm volatile(
"rep movsq\n\t"
"movq %4, %%rcx\n\t"
"andq $7, %%rcx\n\t"
82,7 → 82,7
unative_t d0, d1, d2;
unative_t ret;
__asm__ (
asm (
"repe cmpsb\n\t"
"je 1f\n\t"
"movq %3, %0\n\t"
108,7 → 108,7
{
unative_t d0, d1;
__asm__ __volatile__ (
asm volatile (
"rep stosw\n\t"
: "=&D" (d0), "=&c" (d1), "=a" (x)
: "0" (dst), "1" ((unative_t)cnt), "2" (x)
130,7 → 130,7
{
unative_t d0, d1;
__asm__ __volatile__ (
asm volatile (
"rep stosb\n\t"
: "=&D" (d0), "=&c" (d1), "=a" (x)
: "0" (dst), "1" ((unative_t)cnt), "2" (x)
/trunk/kernel/arch/amd64/include/atomic.h
42,17 → 42,17
 
/** Atomically increment val->count by one.
 *
 * On SMP configurations the `lock` prefix makes the increment atomic
 * with respect to other CPUs; on UP kernels a plain incq suffices
 * (a single instruction is not interruptible mid-operation).
 *
 * NOTE(review): the constraint is "=m" (write-only) although incq
 * reads and writes the operand; "+m" would be more precise — kept
 * as-is to preserve the original interface/behavior. TODO confirm.
 */
static inline void atomic_inc(atomic_t *val) {
#ifdef CONFIG_SMP
	asm volatile ("lock incq %0\n" : "=m" (val->count));
#else
	asm volatile ("incq %0\n" : "=m" (val->count));
#endif /* CONFIG_SMP */
}
 
/** Atomically decrement val->count by one.
 *
 * Mirror of atomic_inc(): `lock decq` on SMP for cross-CPU atomicity,
 * plain decq on uniprocessor builds.
 *
 * NOTE(review): decq is a read-modify-write of the operand, so "+m"
 * would be the precise constraint; "=m" kept to match the original.
 */
static inline void atomic_dec(atomic_t *val) {
#ifdef CONFIG_SMP
	asm volatile ("lock decq %0\n" : "=m" (val->count));
#else
	asm volatile ("decq %0\n" : "=m" (val->count));
#endif /* CONFIG_SMP */
}
 
60,7 → 60,7
{
long r = 1;
 
__asm__ volatile (
asm volatile (
"lock xaddq %1, %0\n"
: "=m" (val->count), "+r" (r)
);
72,7 → 72,7
{
long r = -1;
__asm__ volatile (
asm volatile (
"lock xaddq %1, %0\n"
: "=m" (val->count), "+r" (r)
);
86,7 → 86,7
static inline uint64_t test_and_set(atomic_t *val) {
uint64_t v;
__asm__ volatile (
asm volatile (
"movq $1, %0\n"
"xchgq %0, %1\n"
: "=r" (v),"=m" (val->count)
102,7 → 102,7
uint64_t tmp;
 
preemption_disable();
__asm__ volatile (
asm volatile (
"0:;"
#ifdef CONFIG_HT
"pause;"
/trunk/kernel/arch/amd64/include/asm.h
52,7 → 52,7
{
uintptr_t v;
__asm__ volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
asm volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
return v;
}