/kernel/trunk/arch/sparc64/include/trap.h
---
29,6 → 29,16
#ifndef __sparc64_TRAP_H__ |
#define __sparc64_TRAP_H__ |
#include <arch/trap_table.h> |
#include <arch/asm.h> |
/** Switch to in-kernel trap table. */ |
static inline void trap_switch_trap_table(void) |
{ |
/* Point TBA to kernel copy of OFW's trap table. */ |
tba_write((__u64) trap_table); |
} |
extern void trap_init(void); |
#endif |
/kernel/trunk/arch/sparc64/include/atomic.h
---
38,12 → 38,24
* WARNING: the following functions cause the code to be preemption-unsafe !!! |
*/ |
/** Increment the counter.
 *
 * NOTE: plain load/add/store — per the file-level warning these
 * functions are preemption-unsafe, not truly atomic.
 */
static inline void atomic_inc(atomic_t *val)
{
	val->count++;
}
/** Decrement the counter.
 *
 * NOTE: plain load/subtract/store — per the file-level warning
 * these functions are preemption-unsafe, not truly atomic.
 */
static inline void atomic_dec(atomic_t *val)
{
	val->count--;
}
/** Set the counter to the given value (plain store, preemption-unsafe). */
static inline void atomic_set(atomic_t *val, __u64 i)
{
	val->count = i;
}
/** Read the current counter value (plain load, preemption-unsafe). */
static inline __u64 atomic_get(atomic_t *val)
{
	return val->count;
}
#endif |
/kernel/trunk/arch/sparc64/src/sparc64.c
---
35,6 → 35,7
/** Early (pre-MM) architecture initialization.
 *
 * Brings up the OpenFirmware console first, then copies OFW's
 * trap table into the kernel (trap_init()) so that the in-kernel
 * table is ready before memory management initialization.
 */
void arch_pre_mm_init(void)
{
	ofw_sparc64_console_init();
	trap_init();
}
void arch_post_mm_init(void) |
43,7 → 44,6 |
/** Pre-SMP architecture initialization.
 *
 * Nothing to do here: trap_init() is called earlier, from
 * arch_pre_mm_init(); calling it again would redo the trap
 * table copy needlessly.
 */
void arch_pre_smp_init(void)
{
}
void arch_post_smp_init(void) |
/kernel/trunk/arch/sparc64/src/trap.c
---
31,6 → 31,7
#include <arch/asm.h> |
#include <memstr.h> |
/** Initialize trap table. */ |
void trap_init(void) |
{ |
/* |
37,7 → 38,4 |
* Copy OFW's trap table into kernel. |
*/ |
memcpy((void *) trap_table, (void *) tba_read(), TRAP_TABLE_SIZE); |
/* Point TBA to kernel copy of OFW's trap table. */ |
tba_write((__u64) trap_table); |
} |
/kernel/trunk/arch/sparc64/src/mm/tlb.c
---
35,6 → 35,7
#include <arch/types.h> |
#include <typedefs.h> |
#include <config.h> |
#include <arch/trap.h> |
/** Initialize ITLB and DTLB. |
* |
43,6 → 44,9 |
* kernel 4M locked entry can be installed. |
* After TLB is initialized, MMU is enabled |
* again. |
* |
* Switching MMU off imposes the requirement for |
* the kernel to run in identity mapped environment. |
*/ |
void tlb_arch_init(void) |
{ |
81,6 → 85,13 |
itlb_data_in_write(data.value); |
dtlb_data_in_write(data.value); |
/* |
* Register window traps can occur before MMU is enabled again. |
* This ensures that any such traps will be handled from |
* kernel identity mapped trap handler. |
*/ |
trap_switch_trap_table(); |
tlb_invalidate_all(); |
dmmu_enable(); |
/kernel/trunk/arch/ia64/include/atomic.h
---
33,11 → 33,17
typedef struct { volatile __u64 count; } atomic_t; |
static inline atomic_t atomic_add(atomic_t *val, int imm) |
/** Atomic addition. |
* |
* @param val Atomic value. |
* @param imm Value to add. |
* |
* @return Value after addition. |
*/ |
static inline count_t atomic_add(atomic_t *val, int imm) |
{ |
atomic_t v; |
count_t v; |
__asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "+m" (val->count) : "i" (imm)); |
return v; |
56,12 → 62,10 |
static inline void atomic_inc(atomic_t *val) { atomic_add(val, 1); } |
static inline void atomic_dec(atomic_t *val) { atomic_add(val, -1); } |
static inline count_t atomic_inc_pre(atomic_t *val) { return atomic_add(val, 1); } |
static inline count_t atomic_dec_pre(atomic_t *val) { return atomic_add(val, -1); } |
static inline atomic_t atomic_inc_pre(atomic_t *val) { return atomic_add(val, 1); } |
static inline atomic_t atomic_dec_pre(atomic_t *val) { return atomic_add(val, -1); } |
static inline count_t atomic_inc_post(atomic_t *val) { return atomic_add(val, 1) + 1; } |
static inline count_t atomic_dec_post(atomic_t *val) { return atomic_add(val, -1) - 1; } |
static inline atomic_t atomic_inc_post(atomic_t *val) { return atomic_add(val, 1) + 1; } |
static inline atomic_t atomic_dec_post(atomic_t *val) { return atomic_add(val, -1) - 1; } |
#endif |
/kernel/trunk/arch/mips32/include/atomic.h
---
50,9 → 50,9
* |
* @return Value after addition. |
*/ |
static inline atomic_t atomic_add(atomic_t *val, int i) |
static inline count_t atomic_add(atomic_t *val, int i) |
{ |
atomic_t tmp, v; |
count_t tmp, v; |
__asm__ volatile ( |
"1:\n" |
/kernel/trunk/arch/ia32/include/atomic.h
---
59,27 → 59,29
#endif /* CONFIG_SMP */ |
} |
static inline atomic_t atomic_inc_pre(atomic_t *val) |
static inline count_t atomic_inc_pre(atomic_t *val) |
{ |
atomic_t r; |
count_t r; |
__asm__ volatile ( |
"movl $1, %0\n" |
"lock xaddl %0, %1\n" |
: "=r"(r), "=m" (val->count) |
); |
return r; |
} |
/** Atomically decrement the counter.
 *
 * @return Value of the counter before the decrement.
 *
 * NOTE: the memory operand is written as val->count (not *val)
 * for consistency with atomic_inc_pre(); count is the struct's
 * first member, so the address is the same.
 */
static inline count_t atomic_dec_pre(atomic_t *val)
{
	count_t r;

	__asm__ volatile (
		"movl $-1, %0\n"
		"lock xaddl %0, %1\n"
		: "=r"(r), "=m" (val->count)
	);

	return r;
}