/branches/rcu/kernel/arch/sparc64/include/types.h
---
62,6 → 62,7
typedef int64_t native_t;
typedef uint8_t bool;
typedef uint64_t thread_id_t;
typedef uint64_t task_id_t;
typedef uint32_t context_id_t;
/branches/rcu/kernel/arch/sparc64/include/trap/mmu.h
---
129,7 → 129,21
wrpr %g0, 1, %tl
.endif
/*
 * Switch from the MM globals.
 */
wrpr %g0, PSTATE_PRIV_BIT | PSTATE_AG_BIT, %pstate
/*
 * Read the Tag Access register for the higher-level handler.
 * This is necessary to survive nested DTLB misses.
 */
mov VA_DMMU_TAG_ACCESS, %g2
ldxa [%g2] ASI_DMMU, %g2
/*
 * g2 will be passed as an argument to fast_data_access_mmu_miss().
 */
PREEMPTIBLE_HANDLER fast_data_access_mmu_miss
.endm
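The mov/ldxa pair above latches the DTLB Tag Access register into %g2 before a nested miss can overwrite it, so the value survives into the higher-level handler. As a rough illustration only (a hypothetical helper, not part of this change), the same read expressed in C with inline assembly could look like this, assuming VA_DMMU_TAG_ACCESS and ASI_DMMU are the integer constants used above:

/* Hypothetical sketch: read the DTLB Tag Access register, mirroring the
 * "mov VA_DMMU_TAG_ACCESS, %g2; ldxa [%g2] ASI_DMMU, %g2" sequence above.
 * uint64_t is taken from the kernel's fixed-width types. */
static inline uint64_t dtlb_tag_access_read(void)
{
	uint64_t v;

	asm volatile (
		"ldxa [%1] %2, %0\n"
		: "=r" (v)
		: "r" (VA_DMMU_TAG_ACCESS), "i" (ASI_DMMU)
	);
	return v;
}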
142,7 → 156,21
wrpr %g0, 1, %tl
.endif
/*
 * Switch from the MM globals.
 */
wrpr %g0, PSTATE_PRIV_BIT | PSTATE_AG_BIT, %pstate
/*
 * Read the Tag Access register for the higher-level handler.
 * This is necessary to survive nested DTLB misses.
 */
mov VA_DMMU_TAG_ACCESS, %g2
ldxa [%g2] ASI_DMMU, %g2
/*
 * g2 will be passed as an argument to fast_data_access_protection().
 */
PREEMPTIBLE_HANDLER fast_data_access_protection
.endm
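Both handlers pass the raw Tag Access value to their C counterparts. For orientation, on UltraSPARC the DMMU Tag Access register packs the faulting virtual page number (VA bits 63:13) and the 13-bit context into one 64-bit word; a sketch of a matching C view (field layout assumed from the UltraSPARC manuals, not taken from this patch, and the real tlb_tag_access_reg_t may differ in naming) could be:

/* Assumed layout of the DMMU Tag Access register: VA[63:13] in the upper
 * bits, MMU context in the low 13 bits (big-endian bit-field allocation). */
typedef union {
	uint64_t value;
	struct {
		uint64_t vpn : 51;          /* bits 63:13 of the faulting VA */
		unsigned int context : 13;  /* MMU context of the access */
	} __attribute__((packed));
} tag_access_sketch_t;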
/branches/rcu/kernel/arch/sparc64/include/mm/tlb.h
---
428,9 → 428,9
membar();
}
extern void fast_instruction_access_mmu_miss(int n, istate_t *istate);
extern void fast_data_access_mmu_miss(int n, istate_t *istate);
extern void fast_data_access_protection(int n, istate_t *istate);
extern void fast_instruction_access_mmu_miss(unative_t unused, istate_t *istate);
extern void fast_data_access_mmu_miss(tlb_tag_access_reg_t tag, istate_t *istate);
extern void fast_data_access_protection(tlb_tag_access_reg_t tag, istate_t *istate);
extern void dtlb_insert_mapping(uintptr_t page, uintptr_t frame, int pagesize, bool locked, bool cacheable);
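With the prototypes above, both data-access handlers receive the Tag Access value captured in %g2 instead of an unused trap number. A minimal, hypothetical use of that argument (helper name, field names, and the 8 KiB base-page shift are assumptions, not part of this change) might recover the faulting page and context like this:

/* Hypothetical illustration of consuming the new tag argument; not the
 * actual fast_data_access_mmu_miss() implementation. Kernel headers for
 * tlb_tag_access_reg_t, istate_t and context_id_t are assumed. */
static void example_dtlb_miss(tlb_tag_access_reg_t tag, istate_t *istate)
{
	uintptr_t page = (uintptr_t) tag.vpn << 13;  /* page-aligned fault VA */
	context_id_t ctx = tag.context;              /* faulting MMU context */

	/* ... look (ctx, page) up in the page tables and insert a TLB entry,
	 * or fall back to the generic page-fault path using istate ... */
	(void) page;
	(void) ctx;
	(void) istate;
}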
/branches/rcu/kernel/arch/sparc64/include/barrier.h
---
39,12 → 39,12
 * Our critical section barriers are prepared for the weakest RMO memory model.
 */
#define CS_ENTER_BARRIER() \
asm volatile ( \
asm volatile ( \
"membar #LoadLoad | #LoadStore\n" \
::: "memory" \
)
#define CS_LEAVE_BARRIER() \
asm volatile ( \
asm volatile ( \
"membar #StoreStore\n" \
"membar #LoadStore\n" \
::: "memory" \