ia32 atomic.h: changes between Rev 557 and Rev 627

--- atomic.h (Rev 557)
+++ atomic.h (Rev 627)
@@ -29,35 +29,45 @@
 #ifndef __ia32_ATOMIC_H__
 #define __ia32_ATOMIC_H__
 
 #include <arch/types.h>
 
-typedef volatile __u32 atomic_t;
+typedef struct { volatile __u32 count; } atomic_t;
+
+static inline void atomic_set(atomic_t *val, __u32 i)
+{
+        val->count = i;
+}
+
+static inline __u32 atomic_get(atomic_t *val)
+{
+        return val->count;
+}
 
 static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
-        __asm__ volatile ("lock incl %0\n" : "=m" (*val));
+        __asm__ volatile ("lock incl %0\n" : "=m" (val->count));
 #else
-        __asm__ volatile ("incl %0\n" : "=m" (*val));
+        __asm__ volatile ("incl %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
 
 static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
-        __asm__ volatile ("lock decl %0\n" : "=m" (*val));
+        __asm__ volatile ("lock decl %0\n" : "=m" (val->count));
 #else
-        __asm__ volatile ("decl %0\n" : "=m" (*val));
+        __asm__ volatile ("decl %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
 
 static inline atomic_t atomic_inc_pre(atomic_t *val)
 {
         atomic_t r;
         __asm__ volatile (
                 "movl $1, %0\n"
                 "lock xaddl %0, %1\n"
-                : "=r"(r), "=m" (*val)
+                : "=r"(r), "=m" (val->count)
         );
         return r;
 }
 
 
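The first hunk turns atomic_t from a bare volatile __u32 into a single-member struct and adds atomic_set()/atomic_get() accessors. The practical effect is type safety: callers can no longer touch the counter with plain assignment or ++, so every access has to go through the atomic interface. Note that lock xaddl exchanges and adds in one step, so %0 in atomic_inc_pre() receives the value the counter held before the increment; the atomic_inc_post()/atomic_dec_post() macros in the next hunk then derive the post-operation value by adding or subtracting 1. With the struct typedef, though, adding an integer to the atomic_t those macros receive would no longer compile as-is, so a follow-up change (e.g. having the _pre functions return __u32) was presumably needed.

A minimal usage sketch of the Rev 627 interface; the include path, the refcount variable, and example() are assumptions for illustration, not part of the revision:

    #include <arch/atomic.h>     /* assumed install path of the header above */

    static atomic_t refcount;    /* hypothetical reference counter */

    static void example(void)
    {
            atomic_set(&refcount, 1);    /* write only through the accessor */
            atomic_inc(&refcount);       /* LOCK-prefixed when CONFIG_SMP is set */
            atomic_dec(&refcount);
            if (atomic_get(&refcount) == 1) {
                    /* counter is back at its initial value */
            }
    }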
@@ -74,17 +84,17 @@
 }
 
 #define atomic_inc_post(val) (atomic_inc_pre(val)+1)
 #define atomic_dec_post(val) (atomic_dec_pre(val)-1)
 
-static inline int test_and_set(volatile int *val) {
+static inline int test_and_set(atomic_t *val) {
         int v;
 
         __asm__ volatile (
                 "movl $1, %0\n"
                 "xchgl %0, %1\n"
-                : "=r" (v),"=m" (*val)
+                : "=r" (v),"=m" (val->count)
         );
 
         return v;
 }
 
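test_and_set() now operates on an atomic_t as well. xchgl atomically swaps the register (preloaded with 1) with the memory word, and the old value comes back in %0, so a return of 0 means the caller is the one who set the flag. That is exactly the primitive a spin lock needs. A minimal sketch built on this header; spinlock_t and the function names are illustrative, not part of the revision, and a real implementation would also want preemption control and a read-only spin before retrying the xchg:

    typedef atomic_t spinlock_t;

    static inline void spinlock_lock(spinlock_t *lock)
    {
            /* Spin until the previous value was 0, i.e. the flag was free. */
            while (test_and_set(lock))
                    ;
    }

    static inline void spinlock_unlock(spinlock_t *lock)
    {
            /* Release by storing 0 through the volatile accessor. */
            atomic_set(lock, 0);
    }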