/SPARTAN/trunk/arch/ppc/include/barrier.h |
---|
32,4 → 32,8 |
#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory") |
#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory") |
#define memory_barrier() |
#define read_barrier() |
#define write_barrier() |
#endif |
/SPARTAN/trunk/arch/ia64/include/barrier.h |
---|
35,4 → 35,8 |
#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory") |
#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory") |
#define memory_barrier() |
#define read_barrier() |
#define write_barrier() |
#endif |
/SPARTAN/trunk/arch/mips/include/mm/page.h |
---|
33,7 → 33,6 |
#include <mm/page.h> |
#include <arch/mm/frame.h> |
#include <arch/types.h> |
#include <arch.h> |
#define PAGE_SIZE FRAME_SIZE |
/SPARTAN/trunk/arch/mips/include/barrier.h |
---|
35,4 → 35,8 |
#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory") |
#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory") |
#define memory_barrier() |
#define read_barrier() |
#define write_barrier() |
#endif |
/SPARTAN/trunk/arch/mips/include/cpu.h |
---|
29,8 → 29,6 |
#ifndef __mips_CPU_H__ |
#define __mips_CPU_H__ |
#include <typedefs.h> |
#define CPU_ID_ARCH 0 |
struct cpu_arch { |
/SPARTAN/trunk/arch/amd64/include/barrier.h |
---|
32,4 → 32,8 |
#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory") |
#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory") |
#define memory_barrier() |
#define read_barrier() |
#define write_barrier() |
#endif |
/SPARTAN/trunk/arch/ia32/include/asm.h |
---|
31,7 → 31,7 |
#include <arch/types.h> |
#include <typedefs.h> |
#include <mm/page.h> |
#include <config.h> |
#include <synch/spinlock.h> |
#include <arch/boot/memmap.h> |
#include <config.h> |
/SPARTAN/trunk/arch/ia32/include/barrier.h |
---|
43,4 → 43,8 |
#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory") |
#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory") |
#define memory_barrier() __asm__ volatile ("mfence\n" ::: "memory") |
#define read_barrier() __asm__ volatile ("lfence\n" ::: "memory") |
#define write_barrier() __asm__ volatile ("sfence\n" ::: "memory") |
#endif |
/SPARTAN/trunk/arch/ia32/src/smp/ap.S |
---|
50,10 → 50,12 |
xorw %ax,%ax |
movw %ax,%ds |
lgdt gdtr |
lgdt gdtr # initialize Global Descriptor Table register |
lidt idtr # initialize Interrupt Descriptor Table register |
movl %cr0,%eax |
orl $1,%eax |
movl %eax,%cr0 |
movl %eax,%cr0 # switch to protected mode |
jmpl $KTEXT,$jump_to_kernel |
jump_to_kernel: |
.code32 |
65,10 → 67,8 |
movl (%eax),%esp |
subl $0x80000000,%esp # KA2PA(ctx.sp) |
lidt idtr |
call map_kernel # map kernel and turn paging on |
call map_kernel |
jmpl $KTEXT,$main_ap |
#endif /* __SMP__ */ |
/SPARTAN/trunk/arch/ia32/src/boot/boot.S |
---|
47,10 → 47,12 |
call memmap_arch_init |
lgdt gdtr |
lgdt gdtr # initialize Global Descriptor Table register |
lidt idtr # initialize Interrupt Descriptor Table register |
movl %cr0,%eax |
orl $0x1,%eax |
movl %eax,%cr0 |
movl %eax,%cr0 # switch to protected mode |
jmpl $8,$meeting_point |
meeting_point: |
.code32 |
62,10 → 64,8 |
movw %ax,%ds # kernel data + stack |
movw %ax,%ss |
lidt idtr |
call map_kernel # map kernel and turn paging on |
call map_kernel |
movl $_hardcoded_ktext_size, hardcoded_ktext_size |
movl $_hardcoded_kdata_size, hardcoded_kdata_size |
movl $_hardcoded_load_address, hardcoded_load_address |
94,7 → 94,7 |
leal page_directory, %eax |
movl %eax, %cr3 |
# turn on paging |
# turn paging on |
movl %cr0, %ebx |
orl $(1<<31), %ebx |
movl %ebx, %cr0 |
/SPARTAN/trunk/arch/ia32/src/mm/frame.c |
---|
34,9 → 34,6 |
#include <print.h> |
/* |
* TODO: use the memory map obtained from BIOS |
*/ |
void frame_arch_init(void) |
{ |
__u8 i; |