Diff between Rev 2071 and Rev 2082 (changed lines are marked with - for Rev 2071 and + for Rev 2082):
Line 40:
 #include <preemption.h>
 #include <typedefs.h>
 
 static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
-    __asm__ volatile ("lock incq %0\n" : "=m" (val->count));
+    asm volatile ("lock incq %0\n" : "=m" (val->count));
 #else
-    __asm__ volatile ("incq %0\n" : "=m" (val->count));
+    asm volatile ("incq %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
 
 static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
-    __asm__ volatile ("lock decq %0\n" : "=m" (val->count));
+    asm volatile ("lock decq %0\n" : "=m" (val->count));
 #else
-    __asm__ volatile ("decq %0\n" : "=m" (val->count));
+    asm volatile ("decq %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
 
 static inline long atomic_postinc(atomic_t *val)
 {
     long r = 1;
 
-    __asm__ volatile (
+    asm volatile (
         "lock xaddq %1, %0\n"
         : "=m" (val->count), "+r" (r)
     );
 
     return r;
Line 70:
 
 static inline long atomic_postdec(atomic_t *val)
 {
     long r = -1;
 
-    __asm__ volatile (
+    asm volatile (
         "lock xaddq %1, %0\n"
         : "=m" (val->count), "+r" (r)
     );
 
     return r;
Line 84:
 #define atomic_predec(val) (atomic_postdec(val)-1)
 
 static inline uint64_t test_and_set(atomic_t *val) {
     uint64_t v;
 
-    __asm__ volatile (
+    asm volatile (
         "movq $1, %0\n"
         "xchgq %0, %1\n"
         : "=r" (v),"=m" (val->count)
     );
 
Line 100:
 static inline void atomic_lock_arch(atomic_t *val)
 {
     uint64_t tmp;
 
     preemption_disable();
-    __asm__ volatile (
+    asm volatile (
         "0:;"
 #ifdef CONFIG_HT
         "pause;"
 #endif
         "mov %0, %1;"
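
Not part of either revision: a minimal sketch, assuming this header is available as <atomic.h> (that include path, as well as the refcount_up/refcount_down names, are hypothetical), of how the counter primitives shown in the diff are typically used, for example as a reference count.

#include <atomic.h>   /* assumed location of the header diffed above */

/* Hypothetical reference counter built on atomic_inc() and the
 * atomic_predec() macro from this header. */
static inline void refcount_up(atomic_t *rc)
{
    atomic_inc(rc);    /* lock incq on CONFIG_SMP builds */
}

/* Returns non-zero when the last reference has been dropped;
 * atomic_predec() yields the value after the decrement. */
static inline int refcount_down(atomic_t *rc)
{
    return atomic_predec(rc) == 0;
}

On a uniprocessor (non-CONFIG_SMP) build the same code compiles down to the unlocked incq/decq variants shown in the diff.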