/SPARTAN/trunk/test/synch/rwlock1/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
/SPARTAN/trunk/test/synch/rwlock2/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
/SPARTAN/trunk/test/synch/rwlock3/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
/SPARTAN/trunk/test/synch/semaphore1/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
/SPARTAN/trunk/test/synch/rwlock4/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
#include <arch/types.h> |
/SPARTAN/trunk/test/synch/semaphore2/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
#include <arch/types.h> |
/SPARTAN/trunk/test/synch/rwlock5/test.c |
---|
28,7 → 28,7 |
#include <test.h> |
#include <arch.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <print.h> |
#include <proc/thread.h> |
/SPARTAN/trunk/test/fpu/fpu1/test.c |
---|
32,7 → 32,7 |
#include <panic.h> |
#include <test.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <proc/thread.h> |
#include <arch.h> |
/SPARTAN/trunk/include/proc/scheduler.h |
---|
43,7 → 43,6 |
int n; |
}; |
extern spinlock_t nrdylock; |
extern volatile int nrdy; |
static thread_t *find_best_thread(void); |
/SPARTAN/trunk/src/synch/spinlock.c |
---|
28,10 → 28,7 |
#include <arch.h> |
#ifdef __SMP__ |
#include <arch/smp/atomic.h> |
#endif |
#include <arch/atomic.h> |
#include <synch/spinlock.h> |
#ifdef __SMP__ |
/SPARTAN/trunk/src/time/clock.c |
---|
38,11 → 38,8 |
#include <print.h> |
#include <arch.h> |
#include <list.h> |
#include <arch/atomic.h> |
#ifdef __SMP__ |
#include <arch/smp/atomic.h> |
#endif |
/** Clock routine |
* |
* Clock routine executed from clock interrupt handler |
/SPARTAN/trunk/src/proc/scheduler.c |
---|
42,11 → 42,8 |
#include <mm/page.h> |
#include <synch/spinlock.h> |
#include <arch/faddr.h> |
#include <arch/atomic.h> |
#ifdef __SMP__ |
#include <arch/smp/atomic.h> |
#endif /* __SMP__ */ |
/* |
* NOTE ON ATOMIC READS: |
* Some architectures cannot read __u32 atomically. |
53,7 → 50,6 |
* For that reason, all accesses to nrdy and the likes must be protected by spinlock. |
*/ |
spinlock_t nrdylock; |
volatile int nrdy; |
77,7 → 73,6 |
*/ |
void scheduler_init(void) |
{ |
spinlock_initialize(&nrdylock); |
} |
140,9 → 135,7 |
continue; |
} |
spinlock_lock(&nrdylock); |
nrdy--; |
spinlock_unlock(&nrdylock); |
atomic_dec(&nrdy); |
spinlock_lock(&CPU->lock); |
CPU->nrdy--; |
498,9 → 491,7 |
cpu->nrdy--; |
spinlock_unlock(&cpu->lock); |
spinlock_lock(&nrdylock); |
nrdy--; |
spinlock_unlock(&nrdylock); |
atomic_dec(&nrdy); |
r->n--; |
list_remove(&t->rq_link); |
/SPARTAN/trunk/src/proc/thread.c |
---|
49,6 → 49,7 |
#include <arch/interrupt.h> |
#include <smp/ipi.h> |
#include <arch/faddr.h> |
#include <arch/atomic.h> |
char *thread_states[] = {"Invalid", "Running", "Sleeping", "Ready", "Entering", "Exiting"}; /**< Thread states */ |
134,9 → 135,8 |
r->n++; |
spinlock_unlock(&r->lock); |
spinlock_lock(&nrdylock); |
avg = ++nrdy / config.cpu_active; |
spinlock_unlock(&nrdylock); |
atomic_inc(&nrdy); |
avg = nrdy / config.cpu_active; |
spinlock_lock(&cpu->lock); |
if ((++cpu->nrdy) > avg) { |
/SPARTAN/trunk/src/mm/tlb.c |
---|
30,7 → 30,7 |
#include <smp/ipi.h> |
#include <synch/spinlock.h> |
#include <typedefs.h> |
#include <arch/smp/atomic.h> |
#include <arch/atomic.h> |
#include <arch/interrupt.h> |
#include <config.h> |
#include <arch.h> |
/SPARTAN/trunk/src/Makefile |
---|
70,7 → 70,7 |
-$(MAKE) clean |
kernel.bin: $(arch_objects) $(objects) $(test_objects) |
$(LD) $(LFLAGS) $(arch_objects) $(objects) $(test_objects) -o $@ >kernel.map |
$(LD) $(LFLAGS) $(arch_objects) $(objects) $(test_objects) -o $@ -Map kernel.map |
%.s: %.S |
$(CC) $(CPPFLAGS) -E $< >$@ |
/SPARTAN/trunk/arch/mips/include/smp/atomic.h |
---|
File deleted |
/SPARTAN/trunk/arch/mips/include/atomic.h |
---|
0,0 → 1,96 |
/* |
* Copyright (C) 2001-2004 Jakub Jermar |
* All rights reserved. |
* |
* Redistribution and use in source and binary forms, with or without |
* modification, are permitted provided that the following conditions |
* are met: |
* |
* - Redistributions of source code must retain the above copyright |
* notice, this list of conditions and the following disclaimer. |
* - Redistributions in binary form must reproduce the above copyright |
* notice, this list of conditions and the following disclaimer in the |
* documentation and/or other materials provided with the distribution. |
* - The name of the author may not be used to endorse or promote products |
* derived from this software without specific prior written permission. |
* |
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
*/ |
#ifndef __MIPS_ATOMIC_H__ |
#define __MIPS_ATOMIC_H__ |
#define atomic_inc(x) (a_add(x,1)) |
#define atomic_dec(x) (a_sub(x,1)) |
/* |
* Atomic addition |
* |
* This case is harder, and we have to use the special LL and SC operations |
* to achieve atomicity. The instructions are similar to LW (load) and SW |
* (store), except that the LL (load-linked) instruction loads the address |
* of the variable to a special register and if another process writes to |
* the same location, the SC (store-conditional) instruction fails. |
*/ |
static inline int a_add( volatile int *val, int i) |
{ |
int tmp, tmp2; |
asm volatile ( |
" .set push\n" |
" .set noreorder\n" |
" nop\n" |
"1:\n" |
" ll %0, %1\n" |
" addu %0, %0, %2\n" |
" move %3, %0\n" |
" sc %0, %1\n" |
" beq %0, 0x0, 1b\n" |
" move %0, %3\n" |
" .set pop\n" |
: "=&r" (tmp), "=o" (*val) |
: "r" (i), "r" (tmp2) |
); |
return tmp; |
} |
/* |
* Atomic subtraction |
* |
* Implemented in the same manner as a_add, except we substract the value. |
*/ |
static inline int a_sub( volatile int *val, int i) |
{ |
int tmp, tmp2; |
asm volatile ( |
" .set push\n" |
" .set noreorder\n" |
" nop\n" |
"1:\n" |
" ll %0, %1\n" |
" subu %0, %0, %2\n" |
" move %3, %0\n" |
" sc %0, %1\n" |
" beq %0, 0x0, 1b\n" |
" move %0, %3\n" |
" .set pop\n" |
: "=&r" (tmp), "=o" (*val) |
: "r" (i), "r" (tmp2) |
); |
return tmp; |
} |
#endif |
/SPARTAN/trunk/arch/mips/_link.ld |
---|
7,7 → 7,7 |
*/ |
OUTPUT_FORMAT(binary) |
ENTRY(kernel_image_start) |
ENTRY(kernel_image_start) |
SECTIONS { |
.image 0x80000000: AT (0x80000000) { |
/SPARTAN/trunk/arch/ia32/include/smp/atomic.h |
---|
File deleted |
/SPARTAN/trunk/arch/ia32/include/atomic.h |
---|
0,0 → 1,40 |
/* |
* Copyright (C) 2001-2004 Jakub Jermar |
* All rights reserved. |
* |
* Redistribution and use in source and binary forms, with or without |
* modification, are permitted provided that the following conditions |
* are met: |
* |
* - Redistributions of source code must retain the above copyright |
* notice, this list of conditions and the following disclaimer. |
* - Redistributions in binary form must reproduce the above copyright |
* notice, this list of conditions and the following disclaimer in the |
* documentation and/or other materials provided with the distribution. |
* - The name of the author may not be used to endorse or promote products |
* derived from this software without specific prior written permission. |
* |
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
*/ |
#ifndef __ia32_ATOMIC_H__
#define __ia32_ATOMIC_H__
#include <arch/types.h>
/* Atomically increment the integer pointed to by val (implemented in arch/atomic.S). */
extern void atomic_inc(volatile int *val);
/* Atomically decrement the integer pointed to by val (implemented in arch/atomic.S). */
extern void atomic_dec(volatile int *val);
/* Atomically set *val to 1 and return its previous value; SMP builds only. */
extern int test_and_set(int *val);
/* Spin until *val can be atomically taken (set to non-zero); SMP builds only. */
extern void spinlock_arch(int *val);
#endif
/SPARTAN/trunk/arch/ia32/Makefile.inc |
---|
30,7 → 30,7 |
arch/smp/apic.c \ |
arch/smp/mps.c \ |
arch/smp/smp.c \ |
arch/smp/atomic.S \ |
arch/atomic.S \ |
arch/smp/ipi.c \ |
arch/ia32.c \ |
arch/interrupt.c \ |
/SPARTAN/trunk/arch/ia32/src/smp/atomic.S |
---|
File deleted |
/SPARTAN/trunk/arch/ia32/src/atomic.S |
---|
0,0 → 1,98 |
# |
# Copyright (C) 2001-2004 Jakub Jermar |
# All rights reserved. |
# |
# Redistribution and use in source and binary forms, with or without |
# modification, are permitted provided that the following conditions |
# are met: |
# |
# - Redistributions of source code must retain the above copyright |
# notice, this list of conditions and the following disclaimer. |
# - Redistributions in binary form must reproduce the above copyright |
# notice, this list of conditions and the following disclaimer in the |
# documentation and/or other materials provided with the distribution. |
# - The name of the author may not be used to endorse or promote products |
# derived from this software without specific prior written permission. |
# |
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
# |
.text

#
# void atomic_inc(volatile int *val)
#
# Atomically increments the int at the address passed on the stack.
# On SMP builds the LOCK prefix makes the increment atomic across CPUs;
# on UP builds a plain INCL suffices (a single instruction cannot be
# interrupted mid-way).
#
.global atomic_inc
atomic_inc:
	pushl %ebx			# %ebx is callee-saved; preserve it
	movl 8(%esp),%ebx		# fetch val (8 = saved %ebx + return address)
#ifdef __SMP__
	lock incl (%ebx)
#else
	incl (%ebx)
#endif
	popl %ebx
	ret

#
# void atomic_dec(volatile int *val)
#
# Atomic counterpart of atomic_inc; decrements *val.
#
.global atomic_dec
atomic_dec:
	pushl %ebx			# %ebx is callee-saved; preserve it
	movl 8(%esp),%ebx		# fetch val (8 = saved %ebx + return address)
#ifdef __SMP__
	lock decl (%ebx)
#else
	decl (%ebx)
#endif
	popl %ebx
	ret

#ifdef __SMP__
.global test_and_set
.global spinlock_arch

#
# int test_and_set(int *val)
#
# Atomically stores 1 into *val and returns the previous value in %eax.
#
test_and_set:
	pushl %ebx
	movl 8(%esp),%ebx
	movl $1,%eax
	xchgl %eax,(%ebx) # xchg implicitly turns on the LOCK signal
	popl %ebx
	ret

#
# This is a bus-and-hyperthreading-friendly implementation of spinlock
#
# void spinlock_arch(int *val)
#
# Spins with plain (non-locked) reads until *val looks free, then attempts
# the atomic xchg; this avoids hammering the bus with locked cycles while
# the lock is held.
#
spinlock_arch:
	pushl %eax
	pushl %ebx
	movl 12(%esp),%ebx		# fetch val (12 = two saved regs + return address)
0:
#ifdef __HT__
	pause # Pentium 4's with HT love this instruction
#endif
	movl (%ebx),%eax
	testl %eax,%eax
	jnz 0b # lightweight looping while it is locked
	incl %eax			# %eax was 0, so this yields the value 1 to store
	xchgl %eax,(%ebx) # now use the atomic operation
	testl %eax,%eax			# old value non-zero: someone beat us; spin again
	jnz 0b
	popl %ebx
	popl %eax
	ret
#endif