Subversion Repositories HelenOS

Compare Revisions

Rev 4344 → Rev 4345

/branches/dynload/kernel/arch/arm32/include/regutils.h
26,7 → 26,7
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
 
/** @addtogroup arm32
* @{
*/
/**
57,7 → 57,10
static inline uint32_t nm## _status_reg_read(void) \
{ \
uint32_t retval; \
asm volatile("mrs %0, " #reg : "=r" (retval)); \
asm volatile( \
"mrs %[retval], " #reg \
: [retval] "=r" (retval) \
); \
return retval; \
}
 
64,7 → 67,10
#define GEN_STATUS_WRITE(nm,reg,fieldname, field) \
static inline void nm## _status_reg_ ##fieldname## _write(uint32_t value) \
{ \
asm volatile("msr " #reg "_" #field ", %0" : : "r" (value)); \
asm volatile( \
"msr " #reg "_" #field ", %[value]" \
:: [value] "r" (value) \
); \
}
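For orientation, each generator stamps out one accessor function per status register. A minimal sketch of what an expansion looks like, assuming the header instantiates GEN_STATUS_READ(current, cpsr) somewhere below (the actual instantiations are outside this hunk):

static inline uint32_t current_status_reg_read(void)
{
	uint32_t retval;
	/* read the CPSR into a general-purpose register */
	asm volatile (
		"mrs %[retval], cpsr"
		: [retval] "=r" (retval)
	);
	return retval;
}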
 
 
/branches/dynload/kernel/arch/arm32/include/types.h
64,10 → 64,6
typedef uint32_t unative_t;
typedef int32_t native_t;
 
typedef volatile uint8_t ioport8_t;
typedef volatile uint16_t ioport16_t;
typedef volatile uint32_t ioport32_t;
 
typedef struct {
} fncptr_t;
 
/branches/dynload/kernel/arch/arm32/include/atomic.h
26,10 → 26,10
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
 
/** @addtogroup arm32
* @{
*/
/** @file
* @brief Atomic operations.
*/
 
42,26 → 42,26
* @param i Value to be added.
*
* @return Value after addition.
*
*/
static inline long atomic_add(atomic_t *val, int i)
{
int ret;
volatile long *mem = &(val->count);
 
asm volatile (
"1:\n"
"ldr r2, [%1] \n"
"add r3, r2, %2 \n"
"str r3, %0 \n"
"swp r3, r3, [%1] \n"
"cmp r3, r2 \n"
"bne 1b \n"
 
: "=m" (ret)
: "r" (mem), "r" (i)
"1:\n"
"ldr r2, [%[mem]]\n"
"add r3, r2, %[i]\n"
"str r3, %[ret]\n"
"swp r3, r3, [%[mem]]\n"
"cmp r3, r2\n"
"bne 1b\n"
: [ret] "=m" (ret)
: [mem] "r" (mem), [i] "r" (i)
: "r3", "r2"
);
 
return ret;
}
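In C terms, the loop implements a swap-based read-modify-write: load the counter, compute the sum, then publish it with swp, which atomically stores the new value and returns the previous one; if the value swp hands back is not the one originally loaded, another CPU got in between and the loop retries. A rough C equivalent, for illustration only (atomic_swap is a hypothetical helper standing in for the swp instruction; the asm above is what actually provides atomicity):

static inline long atomic_add_sketch(atomic_t *val, int i)
{
	long old, sum;
	do {
		old = val->count;   /* ldr r2, [mem] */
		sum = old + i;      /* add r3, r2, i */
		/* swp: store sum, get back the value that was in memory */
	} while (atomic_swap(&val->count, sum) != old);   /* cmp + bne 1b */
	return sum;
}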
 
/branches/dynload/kernel/arch/arm32/include/arch.h
39,11 → 39,14
#define TASKMAP_MAX_RECORDS 32
#define CPUMAP_MAX_RECORDS 32
 
#define BOOTINFO_TASK_NAME_BUFLEN 32
 
#include <typedefs.h>
 
typedef struct {
uintptr_t addr;
uint32_t size;
char name[BOOTINFO_TASK_NAME_BUFLEN];
} utask_t;
 
typedef struct {
/branches/dynload/kernel/arch/arm32/include/asm.h
26,10 → 26,10
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
 
/** @addtogroup arm32
* @{
*/
/** @file
* @brief Declarations of functions implemented in assembly.
*/
 
36,6 → 36,7
#ifndef KERN_arm32_ASM_H_
#define KERN_arm32_ASM_H_
 
#include <typedefs.h>
#include <arch/types.h>
#include <arch/stack.h>
#include <config.h>
77,18 → 78,19
}
 
/** Return base address of current stack.
*
* Return the base address of the current stack.
* The stack is assumed to be STACK_SIZE bytes long.
* The stack must start on page boundary.
*
*/
static inline uintptr_t get_stack_base(void)
{
uintptr_t v;
asm volatile (
"and %0, sp, %1\n"
: "=r" (v)
: "r" (~(STACK_SIZE - 1))
"and %[v], sp, %[size]\n"
: [v] "=r" (v)
: [size] "r" (~(STACK_SIZE - 1))
);
return v;
}
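The masking works because STACK_SIZE is a power of two, so ~(STACK_SIZE - 1) clears exactly the offset-within-stack bits of sp. A worked example, assuming STACK_SIZE is 0x2000 (8 KiB; the real value comes from <arch/stack.h>):

uintptr_t sp   = 0x8001f2a4;                 /* some address inside the stack */
uintptr_t mask = ~(uintptr_t) (0x2000 - 1);  /* 0xffffe000 */
uintptr_t base = sp & mask;                  /* 0x8001e000, the stack base */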
/branches/dynload/kernel/arch/arm32/include/mm/page.h
193,9 → 193,8
static inline void set_ptl0_addr(pte_level0_t *pt)
{
asm volatile (
"mcr p15, 0, %0, c2, c0, 0 \n"
:
: "r"(pt)
"mcr p15, 0, %[pt], c2, c0, 0\n"
:: [pt] "r" (pt)
);
}
 
/branches/dynload/kernel/arch/arm32/include/barrier.h
26,7 → 26,7
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
 
/** @addtogroup arm32
* @{
*/
/** @file
39,12 → 39,12
/*
* TODO: implement true ARM memory barriers for macros below.
*/
#define CS_ENTER_BARRIER() asm volatile ("" ::: "memory")
#define CS_LEAVE_BARRIER() asm volatile ("" ::: "memory")
 
#define memory_barrier() asm volatile ("" ::: "memory")
#define read_barrier() asm volatile ("" ::: "memory")
#define write_barrier() asm volatile ("" ::: "memory")
 
#define smc_coherence(a)
#define smc_coherence_block(a, l)
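As the TODO notes, these expand to compiler-only barriers: the empty asm with a "memory" clobber stops the compiler from reordering accesses but emits no instruction, so the CPU itself is still free to reorder. A hedged sketch of what memory_barrier() might become on an ARMv6-class CPU, where barriers are CP15 c7 operations (on ARMv7 the dedicated dmb/dsb/isb instructions would be used instead; this is not part of this revision):

#define memory_barrier() \
	asm volatile ( \
		"mcr p15, 0, %[zero], c7, c10, 5\n"  /* ARMv6 Data Memory Barrier */ \
		:: [zero] "r" (0) \
		: "memory" \
	)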
/branches/dynload/kernel/arch/arm32/src/exception.c
63,57 → 63,60
*
* Temporary exception stack is used to save a few registers
* before stack switch takes place.
*
*/
inline static void setup_stack_and_save_regs()
{
asm volatile(
"ldr r13, =exc_stack \n"
"stmfd r13!, {r0} \n"
"mrs r0, spsr \n"
"and r0, r0, #0x1f \n"
"cmp r0, #0x10 \n"
"bne 1f \n"
 
asm volatile (
"ldr r13, =exc_stack\n"
"stmfd r13!, {r0}\n"
"mrs r0, spsr\n"
"and r0, r0, #0x1f\n"
"cmp r0, #0x10\n"
"bne 1f\n"
/* prev mode was usermode */
"ldmfd r13!, {r0} \n"
"ldr r13, =supervisor_sp \n"
"ldr r13, [r13] \n"
"stmfd r13!, {lr} \n"
"stmfd r13!, {r0-r12} \n"
"stmfd r13!, {r13, lr}^ \n"
"mrs r0, spsr \n"
"stmfd r13!, {r0} \n"
"b 2f \n"
 
"ldmfd r13!, {r0}\n"
"ldr r13, =supervisor_sp\n"
"ldr r13, [r13]\n"
"stmfd r13!, {lr}\n"
"stmfd r13!, {r0-r12}\n"
"stmfd r13!, {r13, lr}^\n"
"mrs r0, spsr\n"
"stmfd r13!, {r0}\n"
"b 2f\n"
/* mode was not usermode */
"1:\n"
"stmfd r13!, {r1, r2, r3} \n"
"mrs r1, cpsr \n"
"mov r2, lr \n"
"bic r1, r1, #0x1f \n"
"orr r1, r1, r0 \n"
"mrs r0, cpsr \n"
"msr cpsr_c, r1 \n"
 
"mov r3, r13 \n"
"stmfd r13!, {r2} \n"
"mov r2, lr \n"
"stmfd r13!, {r4-r12} \n"
"mov r1, r13 \n"
/* the following two lines are for debugging */
"mov sp, #0 \n"
"mov lr, #0 \n"
"msr cpsr_c, r0 \n"
 
"ldmfd r13!, {r4, r5, r6, r7} \n"
"stmfd r1!, {r4, r5, r6} \n"
"stmfd r1!, {r7} \n"
"stmfd r1!, {r2} \n"
"stmfd r1!, {r3} \n"
"mrs r0, spsr \n"
"stmfd r1!, {r0} \n"
"mov r13, r1 \n"
"2:\n"
"1:\n"
"stmfd r13!, {r1, r2, r3}\n"
"mrs r1, cpsr\n"
"mov r2, lr\n"
"bic r1, r1, #0x1f\n"
"orr r1, r1, r0\n"
"mrs r0, cpsr\n"
"msr cpsr_c, r1\n"
"mov r3, r13\n"
"stmfd r13!, {r2}\n"
"mov r2, lr\n"
"stmfd r13!, {r4-r12}\n"
"mov r1, r13\n"
/* the following two lines are for debugging */
"mov sp, #0\n"
"mov lr, #0\n"
"msr cpsr_c, r0\n"
"ldmfd r13!, {r4, r5, r6, r7}\n"
"stmfd r1!, {r4, r5, r6}\n"
"stmfd r1!, {r7}\n"
"stmfd r1!, {r2}\n"
"stmfd r1!, {r3}\n"
"mrs r0, spsr\n"
"stmfd r1!, {r0}\n"
"mov r13, r1\n"
"2:\n"
);
}
 
189,10 → 192,13
}
 
/** Calls exception dispatch routine. */
#define CALL_EXC_DISPATCH(exception) \
asm("mov r0, %0" : : "i" (exception)); \
asm("mov r1, r13"); \
asm("bl exc_dispatch");
#define CALL_EXC_DISPATCH(exception) \
asm volatile ( \
"mov r0, %[exc]\n" \
"mov r1, r13\n" \
"bl exc_dispatch\n" \
:: [exc] "i" (exception) \
	); \
 
/** General exception handler.
*
201,9 → 207,9
*
* @param exception Exception number.
*/
#define PROCESS_EXCEPTION(exception) \
setup_stack_and_save_regs(); \
CALL_EXC_DISPATCH(exception) \
load_regs();
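Taken together, a low-level handler is just these macros in sequence; a hypothetical entry point (the name and exception number are illustrative, the real handlers are defined elsewhere in exception.c):

static void swi_exception_entry(void)
{
	/* switch stacks, save state, dispatch to the kernel, restore state */
	PROCESS_EXCEPTION(EXC_SWI);
}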
 
/** Updates specified exception vector to jump to given handler.
333,17 → 339,23
{
uint32_t control_reg;
asm volatile("mrc p15, 0, %0, c1, c1" : "=r" (control_reg));
asm volatile (
"mrc p15, 0, %[control_reg], c1, c1"
: [control_reg] "=r" (control_reg)
);
/* switch on the high vectors bit */
control_reg |= CP15_R1_HIGH_VECTORS_BIT;
asm volatile("mcr p15, 0, %0, c1, c1" : : "r" (control_reg));
asm volatile (
"mcr p15, 0, %[control_reg], c1, c1"
:: [control_reg] "r" (control_reg)
);
}
#endif
 
/** Initializes exception handling.
*
* Installs low-level exception handlers and then registers
* exceptions and their handlers to kernel exception dispatcher.
*/
/branches/dynload/kernel/arch/arm32/src/arm32.c
48,8 → 48,9
#include <arch/machine.h>
#include <userspace.h>
#include <macros.h>
#include <string.h>
 
/** Performs arm32 specific initialization before main_bsp() is called. */
/** Performs arm32-specific initialization before main_bsp() is called. */
void arch_pre_main(void *entry __attribute__((unused)), bootinfo_t *bootinfo)
{
unsigned int i;
59,6 → 60,8
for (i = 0; i < min3(bootinfo->cnt, TASKMAP_MAX_RECORDS, CONFIG_INIT_TASKS); ++i) {
init.tasks[i].addr = bootinfo->tasks[i].addr;
init.tasks[i].size = bootinfo->tasks[i].size;
strncpy(init.tasks[i].name, bootinfo->tasks[i].name,
CONFIG_TASK_NAME_BUFLEN);
}
}
 
/branches/dynload/kernel/arch/arm32/src/cpu/cpu.c
36,7 → 36,7
#include <arch/cpu.h>
#include <cpu.h>
#include <arch.h>
#include <print.h>
 
/** Number of indexes left out in the #imp_data array */
#define IMP_DATA_START_OFFSET 0x40
82,10 → 82,10
{
uint32_t ident;
asm volatile (
"mrc p15, 0, %0, c0, c0, 0\n"
: "=r" (ident)
"mrc p15, 0, %[ident], c0, c0, 0\n"
: [ident] "=r" (ident)
);
 
cpu->imp_num = ident >> 24;
cpu->variant_num = (ident << 8) >> 28;
cpu->arch_num = (ident << 12) >> 28;
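A worked decode, assuming the Main ID register reads 0x41069264 (an ARM926EJ-S-style value, used here purely as an illustration):

/* ident = 0x41069264                                        */
/* imp_num     = ident >> 24         = 0x41  ('A', ARM Ltd.) */
/* variant_num = (ident << 8) >> 28  = 0x0                   */
/* arch_num    = (ident << 12) >> 28 = 0x6   (ARMv5TEJ)      */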
/branches/dynload/kernel/arch/arm32/src/mm/tlb.c
48,7 → 48,7
asm volatile (
"eor r1, r1\n"
"mcr p15, 0, r1, c8, c7, 0\n"
: : : "r1"
::: "r1"
);
}
 
68,9 → 68,8
static inline void invalidate_page(uintptr_t page)
{
asm volatile (
"mcr p15, 0, %0, c8, c7, 1"
:
: "r" (page)
"mcr p15, 0, %[page], c8, c7, 1\n"
:: [page] "r" (page)
);
}
 
/branches/dynload/kernel/arch/arm32/src/mm/page_fault.c
49,12 → 49,13
static inline fault_status_t read_fault_status_register(void)
{
fault_status_union_t fsu;
 
/* fault status is stored in CP15 register 5 */
asm volatile (
"mrc p15, 0, %0, c5, c0, 0"
: "=r"(fsu.dummy)
"mrc p15, 0, %[dummy], c5, c0, 0"
: [dummy] "=r" (fsu.dummy)
);
return fsu.fs;
}
 
61,17 → 62,18
/** Returns FAR (fault address register) content.
*
* @return FAR (fault address register) content (address that caused a page
* fault)
*/
static inline uintptr_t read_fault_address_register(void)
{
uintptr_t ret;
 
/* fault address is stored in CP15 register 6 */
asm volatile (
"mrc p15, 0, %0, c6, c0, 0"
: "=r"(ret)
"mrc p15, 0, %[ret], c6, c0, 0"
: [ret] "=r" (ret)
);
return ret;
}
 
80,29 → 82,26
* @param instr Instruction
*
* @return true when instruction is load/store, false otherwise
*
*/
static inline bool is_load_store_instruction(instruction_t instr)
{
/* load store immediate offset */
if (instr.type == 0x2) {
if (instr.type == 0x2)
return true;
}
 
/* load store register offset */
if (instr.type == 0x3 && instr.bit4 == 0) {
if ((instr.type == 0x3) && (instr.bit4 == 0))
return true;
}
 
/* load store multiple */
if (instr.type == 0x4) {
if (instr.type == 0x4)
return true;
}
 
/* coprocessor load/store */
if (instr.type == 0x6) {
if (instr.type == 0x6)
return true;
}
 
return false;
}
 
115,12 → 114,11
static inline bool is_swap_instruction(instruction_t instr)
{
/* swap, swapb instruction */
if (instr.type == 0x0 &&
(instr.opcode == 0x8 || instr.opcode == 0xa) &&
instr.access == 0x0 && instr.bits567 == 0x4 && instr.bit4 == 1) {
if ((instr.type == 0x0) &&
((instr.opcode == 0x8) || (instr.opcode == 0xa)) &&
(instr.access == 0x0) && (instr.bits567 == 0x4) && (instr.bit4 == 1))
return true;
}
 
return false;
}
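As a concrete check, the word 0xe1020091 encodes swp r0, r1, [r2]. Assuming instruction_t maps its fields to the standard ARM encoding (type = bits 27:25, opcode = bits 24:21, access = bit 20, bits567 = bits 7:5, bit4 = bit 4; the struct itself is outside this hunk), the predicate accepts it:

/* 0xe1020091: swp r0, r1, [r2]
 * type    = 0x0, opcode = 0x8 (0xa would be swpb),
 * access  = 0x0, bits567 = 0x4, bit4 = 1
 * => is_swap_instruction() returns true
 */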
 
/branches/dynload/kernel/arch/arm32/src/userspace.c
90,12 → 90,11
 
/* set user mode, set registers, jump */
asm volatile (
"mov sp, %0 \n"
"msr spsr_c, %1 \n"
"ldmfd sp!, {r0-r12, sp, lr}^ \n"
"mov sp, %[ustate]\n"
"msr spsr_c, %[user_mode]\n"
"ldmfd sp!, {r0-r12, sp, lr}^\n"
"ldmfd sp!, {pc}^\n"
:
: "r" (&ustate), "r" (user_mode)
:: [ustate] "r" (&ustate), [user_mode] "r" (user_mode)
);
 
/* unreachable */