Subversion Repositories HelenOS-historic

Compare Revisions

Ignore whitespace Rev 474 → Rev 475

/SPARTAN/trunk/arch/sparc64/include/atomic.h
29,16 → 29,20
#ifndef __sparc64_ATOMIC_H__
#define __sparc64_ATOMIC_H__
 
#include <arch/types.h>
 
typedef volatile __u64 atomic_t;
 
/*
* TODO: these are just placeholders for real implementations of atomic_inc and atomic_dec.
* WARNING: the following functions cause the code to be preemption-unsafe !!!
*/
 
/** Atomic increment placeholder (see file TODO — not actually atomic,
 *  and not preemption-safe).
 *
 *  @param val Counter to increment.
 *
 *  Fixes: single signature (diff left two), and (*val)++ — the original
 *  *val++ advanced the pointer itself and never touched the counter.
 */
static inline void atomic_inc(atomic_t *val)
{
	(*val)++;
}
 
/** Atomic decrement placeholder (see file TODO — not actually atomic,
 *  and not preemption-safe).
 *
 *  @param val Counter to decrement.
 *
 *  Fixes: single signature (diff left two), and (*val)-- — the original
 *  *val-- moved the pointer backwards instead of decrementing the counter.
 */
static inline void atomic_dec(atomic_t *val)
{
	(*val)--;
}
 
/SPARTAN/trunk/arch/ia64/include/atomic.h
29,17 → 29,23
#ifndef __ia64_ATOMIC_H__
#define __ia64_ATOMIC_H__
 
/*
* TODO: these are just placeholders for real implementations of atomic_inc and atomic_dec.
* WARNING: the following functions cause the code to be preemption-unsafe !!!
*/
#include <arch/types.h>
 
/** Atomic increment placeholder (see file TODO — not actually atomic,
 *  and not preemption-safe).
 *
 *  @param val Counter to increment.
 *
 *  Fix: (*val)++ — the original *val++ incremented the pointer, leaving
 *  the pointed-to counter unchanged. Return type made explicit (void);
 *  implicit int is invalid in modern C.
 */
static inline void atomic_inc(volatile int *val)
{
	(*val)++;
}
typedef volatile __u64 atomic_t;
 
/** Placeholder for an atomic fetch-and-add (fetchadd8.rel on real ia64).
 *
 *  @param val Counter to add to.
 *  @param imm Amount to add (may be negative).
 *  @return Value of *val before the addition — fetchadd returns the old
 *          value. NOTE(review): the original returned v uninitialized
 *          (the asm that would set it is commented out), which is
 *          undefined behavior; v is now captured before the add.
 *
 *  WARNING: not actually atomic and not preemption-safe (file TODO).
 */
static inline atomic_t atomic_add(atomic_t *val, int imm)
{
	atomic_t v;

	/*
	 * Real implementation would be:
	 * __asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "=m" (val) : "i" (imm));
	 */
	v = *val;	/* capture old value so the return is well-defined */
	*val += imm;
	return v;
}
 
/* Atomic increment: delegate to atomic_add() with an immediate of 1. */
static inline atomic_t atomic_inc(atomic_t *val)
{
	return atomic_add(val, 1);
}
/* Atomic decrement: delegate to atomic_add() with an immediate of -1. */
static inline atomic_t atomic_dec(atomic_t *val)
{
	return atomic_add(val, -1);
}
 
#endif
/SPARTAN/trunk/arch/ppc32/include/atomic.h
29,16 → 29,20
#ifndef __ppc32_ATOMIC_H__
#define __ppc32_ATOMIC_H__
 
#include <arch/types.h>
 
typedef volatile __u32 atomic_t;
 
/*
* TODO: these are just placeholders for real implementations of atomic_inc and atomic_dec.
* WARNING: the following functions cause the code to be preemption-unsafe !!!
*/
 
/** Atomic increment placeholder (see file TODO — not actually atomic,
 *  and not preemption-safe).
 *
 *  @param val Counter to increment.
 *
 *  Fixes: single signature (diff left two), and (*val)++ — the original
 *  *val++ advanced the pointer itself and never touched the counter.
 */
static inline void atomic_inc(atomic_t *val)
{
	(*val)++;
}
 
/** Atomic decrement placeholder (see file TODO — not actually atomic,
 *  and not preemption-safe).
 *
 *  @param val Counter to decrement.
 *
 *  Fixes: single signature (diff left two), and (*val)-- — the original
 *  *val-- moved the pointer backwards instead of decrementing the counter.
 */
static inline void atomic_dec(atomic_t *val)
{
	(*val)--;
}
 
/SPARTAN/trunk/arch/mips32/include/atomic.h
29,9 → 29,13
#ifndef __mips32_ATOMIC_H__
#define __mips32_ATOMIC_H__
 
#include <arch/types.h>
 
/* Atomic increment/decrement, built on the LL/SC-based a_add/a_sub helpers
 * defined later in this file. */
#define atomic_inc(x) (a_add(x,1))
#define atomic_dec(x) (a_sub(x,1))
 
typedef volatile __u32 atomic_t;
 
/*
* Atomic addition
*
41,9 → 45,9
* of the variable to a special register and if another process writes to
* the same location, the SC (store-conditional) instruction fails.
*/
static inline int a_add( volatile int *val, int i)
static inline atomic_t a_add(atomic_t *val, int i)
{
int tmp, tmp2;
atomic_t tmp, tmp2;
 
asm volatile (
" .set push\n"
69,10 → 73,10
*
* Implemented in the same manner as a_add, except we subtract the value.
*/
static inline int a_sub( volatile int *val, int i)
static inline atomic_t a_sub(atomic_t *val, int i)
 
{
int tmp, tmp2;
atomic_t tmp, tmp2;
 
asm volatile (
" .set push\n"
/SPARTAN/trunk/arch/ia32/include/atomic.h
31,7 → 31,9
 
#include <arch/types.h>
 
static inline void atomic_inc(volatile int *val) {
typedef volatile __u32 atomic_t;
 
static inline void atomic_inc(atomic_t *val) {
#ifdef CONFIG_SMP
__asm__ volatile ("lock incl %0\n" : "=m" (*val));
#else
39,7 → 41,7
#endif /* CONFIG_SMP */
}
 
static inline void atomic_dec(volatile int *val) {
static inline void atomic_dec(atomic_t *val) {
#ifdef CONFIG_SMP
__asm__ volatile ("lock decl %0\n" : "=m" (*val));
#else