Diff between Rev 2071 and Rev 2082 (most recent revision: Rev 2089). Lines marked - are from Rev 2071, lines marked + from Rev 2082.
Line 40...
#include <preemption.h>
#include <typedefs.h>

static inline void atomic_inc(atomic_t *val) {
#ifdef CONFIG_SMP
-	__asm__ volatile ("lock incl %0\n" : "=m" (val->count));
+	asm volatile ("lock incl %0\n" : "=m" (val->count));
#else
-	__asm__ volatile ("incl %0\n" : "=m" (val->count));
+	asm volatile ("incl %0\n" : "=m" (val->count));
#endif /* CONFIG_SMP */
}
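Aside, not part of either revision: the only change here is the spelling of the asm keyword, but note that incl both reads and writes its memory operand, while the "=m" constraint declares it write-only to GCC. A minimal sketch of the same helper with a read-write constraint, assuming GCC-style extended asm; the names my_atomic_t and atomic_inc_rmw are illustrative:

	/* Stand-in for the kernel's atomic_t on ia32, where long is 32 bits. */
	typedef struct {
		volatile long count;
	} my_atomic_t;

	static inline void atomic_inc_rmw(my_atomic_t *val)
	{
	#ifdef CONFIG_SMP
		/* "+m" marks count as read-write; "lock" makes the RMW atomic. */
		asm volatile ("lock incl %0" : "+m" (val->count));
	#else
		/* On a uniprocessor, a single incl cannot race with another CPU. */
		asm volatile ("incl %0" : "+m" (val->count));
	#endif
	}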

static inline void atomic_dec(atomic_t *val) {
#ifdef CONFIG_SMP
-	__asm__ volatile ("lock decl %0\n" : "=m" (val->count));
+	asm volatile ("lock decl %0\n" : "=m" (val->count));
#else
-	__asm__ volatile ("decl %0\n" : "=m" (val->count));
+	asm volatile ("decl %0\n" : "=m" (val->count));
#endif /* CONFIG_SMP */
}
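For orientation, a hedged usage sketch: these void-returning helpers fit counters where the new value is never inspected, such as reference counts taken and dropped symmetrically. buffer_t, buffer_get and buffer_put are illustrative names, not from this file:

	typedef struct {
		atomic_t refcount;   /* number of current owners */
	} buffer_t;

	static void buffer_get(buffer_t *b)
	{
		atomic_inc(&b->refcount);   /* one more owner */
	}

	static void buffer_put(buffer_t *b)
	{
		/* atomic_dec() returns nothing, so a caller that must act on
		 * "count reached zero" needs atomic_postdec() instead (below). */
		atomic_dec(&b->refcount);
	}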

static inline long atomic_postinc(atomic_t *val)
{
	long r = 1;

-	__asm__ volatile (
+	asm volatile (
		"lock xaddl %1, %0\n"
		: "=m" (val->count), "+r" (r)
	);

	return r;
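How the return value works: xaddl %1, %0 atomically exchanges the register with the memory word and stores their sum back to memory, so the register comes back holding the previous counter value. Seeding r with 1 therefore yields val++ semantics. A C-level model, deliberately ignoring atomicity, illustrative only:

	/* Non-atomic model of "lock xaddl %1, %0" with r seeded to 1. */
	static long model_postinc(long *count)
	{
		long r = 1;
		long old = *count;   /* xadd: register and memory are swapped... */
		*count = old + r;    /* ...and the sum is written back to memory */
		return old;          /* the caller sees the pre-increment value */
	}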
Line 70...

static inline long atomic_postdec(atomic_t *val)
{
	long r = -1;

-	__asm__ volatile (
+	asm volatile (
		"lock xaddl %1, %0\n"
		: "=m" (val->count), "+r" (r)
	);

	return r;
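Symmetrically, atomic_postdec() seeds r with -1, so the same xaddl subtracts one and still returns the previous value. That previous value is what makes last-reference detection race-free; an illustrative continuation of the buffer sketch above:

	/* Illustrative: the thread that observes 1 before the decrement is
	 * the one that drove the count to 0 and may free the object. */
	if (atomic_postdec(&b->refcount) == 1)
		buffer_destroy(b);   /* hypothetical cleanup helper */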
Line 84...
#define atomic_predec(val) (atomic_postdec(val)-1)

static inline uint32_t test_and_set(atomic_t *val) {
	uint32_t v;

-	__asm__ volatile (
+	asm volatile (
		"movl $1, %0\n"
		"xchgl %0, %1\n"
		: "=r" (v), "=m" (val->count)
	);
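test_and_set() unconditionally writes 1 into the word and returns whatever was there before; on ia32, xchgl with a memory operand asserts the bus lock implicitly, so no lock prefix is needed. A hedged sketch of the classic busy-wait lock built on it; spinlock_sketch_t and its functions are illustrative, not this kernel's spinlock API:

	typedef struct {
		atomic_t locked;   /* 0 = free, 1 = held */
	} spinlock_sketch_t;

	static void spinlock_sketch_lock(spinlock_sketch_t *sl)
	{
		/* We own the lock once we are the thread that flipped 0 -> 1. */
		while (test_and_set(&sl->locked) != 0)
			;   /* spin */
	}

	static void spinlock_sketch_unlock(spinlock_sketch_t *sl)
	{
		/* A plain store releases on ia32, assuming count is volatile. */
		sl->locked.count = 0;
	}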
Line 99...
static inline void atomic_lock_arch(atomic_t *val)
{
	uint32_t tmp;

	preemption_disable();
-	__asm__ volatile (
+	asm volatile (
		"0:;"
#ifdef CONFIG_HT
		"pause;" /* Pentium 4's HT loves this instruction */
#endif
		"mov %0, %1;"