/trunk/kernel/arch/mips32/include/atomic.h |
---|
55,7 → 55,7 |
{ |
long tmp, v; |
-	__asm__ volatile (
+	asm volatile (
"1:\n" |
" ll %0, %1\n" |
" addiu %0, %0, %3\n" /* same as addi, but never traps on overflow */ |
/trunk/kernel/arch/mips32/include/asm.h |
---|
43,7 → 43,7 |
static inline void cpu_sleep(void) |
{ |
/* Most of the simulators do not support */ |
-	/* __asm__ volatile ("wait"); */
+	/* asm volatile ("wait"); */
} |
/** Return base address of current stack |
56,7 → 56,7 |
{ |
uintptr_t v; |
-	__asm__ volatile ("and %0, $29, %1\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));
+	asm volatile ("and %0, $29, %1\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));
return v; |
} |
/trunk/kernel/arch/mips32/include/mm/tlb.h |
---|
142,7 → 142,7 |
*/ |
static inline void tlbp(void) |
{ |
-	__asm__ volatile ("tlbp\n\t");
+	asm volatile ("tlbp\n\t");
} |
152,7 → 152,7 |
*/ |
static inline void tlbr(void) |
{ |
-	__asm__ volatile ("tlbr\n\t");
+	asm volatile ("tlbr\n\t");
} |
/** Write Indexed TLB Entry |
161,7 → 161,7 |
*/ |
static inline void tlbwi(void) |
{ |
-	__asm__ volatile ("tlbwi\n\t");
+	asm volatile ("tlbwi\n\t");
} |
/** Write Random TLB Entry |
170,7 → 170,7 |
*/ |
static inline void tlbwr(void) |
{ |
-	__asm__ volatile ("tlbwr\n\t");
+	asm volatile ("tlbwr\n\t");
} |
#define tlb_invalidate(asid) tlb_invalidate_asid(asid) |
/trunk/kernel/arch/mips32/include/barrier.h |
---|
38,12 → 38,12 |
/* |
* TODO: implement true MIPS memory barriers for macros below. |
*/ |
-#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory")
-#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory")
+#define CS_ENTER_BARRIER() asm volatile ("" ::: "memory")
+#define CS_LEAVE_BARRIER() asm volatile ("" ::: "memory")
-#define memory_barrier() __asm__ volatile ("" ::: "memory")
-#define read_barrier() __asm__ volatile ("" ::: "memory")
-#define write_barrier() __asm__ volatile ("" ::: "memory")
+#define memory_barrier() asm volatile ("" ::: "memory")
+#define read_barrier() asm volatile ("" ::: "memory")
+#define write_barrier() asm volatile ("" ::: "memory")
#endif |