Changes between Rev 2787 and Rev 3424:

```diff
--- Rev 2787
+++ Rev 3424
@@ -39,31 +39,31 @@
 #include <arch/barrier.h>
 #include <preemption.h>
 
 static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
-    asm volatile ("lock incq %0\n" : "=m" (val->count));
+    asm volatile ("lock incq %0\n" : "+m" (val->count));
 #else
-    asm volatile ("incq %0\n" : "=m" (val->count));
+    asm volatile ("incq %0\n" : "+m" (val->count));
 #endif /* CONFIG_SMP */
 }
 
 static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
-    asm volatile ("lock decq %0\n" : "=m" (val->count));
+    asm volatile ("lock decq %0\n" : "+m" (val->count));
 #else
-    asm volatile ("decq %0\n" : "=m" (val->count));
+    asm volatile ("decq %0\n" : "+m" (val->count));
 #endif /* CONFIG_SMP */
 }
 
 static inline long atomic_postinc(atomic_t *val)
 {
     long r = 1;
 
     asm volatile (
         "lock xaddq %1, %0\n"
-        : "=m" (val->count), "+r" (r)
+        : "+m" (val->count), "+r" (r)
     );
 
     return r;
 }
 
```
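The substantive change in this hunk is the constraint on the memory operand. `"=m"` declares the operand write-only, which permits GCC to treat the previous contents of `val->count` as dead; but `lock incq`, `lock decq`, and `lock xaddq` all read the memory before modifying it, so the read-write `"+m"` constraint describes what the instructions actually do. Below is a minimal sketch of the same read-modify-write pattern, generalized to an arbitrary addend; the `my_` names and the `"er"` input constraint are illustrative assumptions, not part of this revision:

```c
#include <stdint.h>

typedef struct {
    volatile uint64_t count;
} my_atomic_t;    /* stand-in for the kernel's atomic_t */

static inline void my_atomic_add(my_atomic_t *val, uint64_t n)
{
    /*
     * "+m" marks the memory operand as both read and written, so the
     * compiler must keep the current value of count alive across the
     * asm; "=m" (write-only) would let it discard that value.
     */
    asm volatile (
        "lock addq %1, %0\n"
        : "+m" (val->count)
        : "er" (n)
    );
}
```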
```diff
@@ -71,26 +71,26 @@
 {
     long r = -1;
 
     asm volatile (
         "lock xaddq %1, %0\n"
-        : "=m" (val->count), "+r" (r)
+        : "+m" (val->count), "+r" (r)
     );
 
     return r;
 }
 
-#define atomic_preinc(val) (atomic_postinc(val)+1)
-#define atomic_predec(val) (atomic_postdec(val)-1)
+#define atomic_preinc(val) (atomic_postinc(val) + 1)
+#define atomic_predec(val) (atomic_postdec(val) - 1)
 
 static inline uint64_t test_and_set(atomic_t *val) {
     uint64_t v;
 
     asm volatile (
         "movq $1, %0\n"
         "xchgq %0, %1\n"
-        : "=r" (v),"=m" (val->count)
+        : "=r" (v), "+m" (val->count)
     );
 
     return v;
 }
 
```
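The same fix applies to `test_and_set()`: `xchgq` both reads and writes its memory operand (and is implicitly locked on x86 when one operand is memory, which is why no `lock` prefix is needed), so the operand moves from `"=m"` to `"+m"`. The two macro edits are whitespace-only. A hypothetical usage sketch, not from this revision, showing the conventional way such a primitive is consumed:

```c
/*
 * Hypothetical helper (illustrative only): a minimal acquire loop
 * built on test_and_set(). The primitive atomically stores 1 into
 * val->count and returns the previous value, so a return of 0 means
 * this caller observed the lock free and now owns it.
 */
static inline void my_spinlock_lock(atomic_t *val)
{
    while (test_and_set(val) != 0)
        ;    /* spin: someone else already holds the lock */
}
```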
```diff
@@ -100,24 +100,24 @@
 {
     uint64_t tmp;
 
     preemption_disable();
     asm volatile (
-        "0:;"
+        "0:\n"
 #ifdef CONFIG_HT
-        "pause;"
+        "pause\n"
 #endif
-        "mov %0, %1;"
-        "testq %1, %1;"
-        "jnz 0b;"       /* Lightweight looping on locked spinlock */
+        "mov %0, %1\n"
+        "testq %1, %1\n"
+        "jnz 0b\n"      /* Lightweight looping on locked spinlock */
 
-        "incq %1;"      /* now use the atomic operation */
-        "xchgq %0, %1;"
-        "testq %1, %1;"
-        "jnz 0b;"
-        : "=m"(val->count),"=r"(tmp)
+        "incq %1\n"     /* now use the atomic operation */
+        "xchgq %0, %1\n"
+        "testq %1, %1\n"
+        "jnz 0b\n"
+        : "+m" (val->count), "=r"(tmp)
     );
     /*
      * Prevent critical section code from bleeding out this way up.
     */
     CS_ENTER_BARRIER();
 }
```
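Two independent cleanups in this last hunk. The `;` separators in the asm template become `\n`: both validly separate statements in GNU assembler syntax, but newlines keep the compiler's generated assembly listing readable. And the memory operand again becomes read-write `"+m"`, since the loop both loads `val->count` and exchanges into it. As a rough C-level rendering of what the assembly does (a sketch using GCC's `__atomic` builtins purely for illustration; the `spinlock_lock_sketch` name and the builtins are my assumptions, not the kernel's code):

```c
static inline void spinlock_lock_sketch(atomic_t *val)
{
    preemption_disable();
    do {
        /* Lightweight read-only wait while the lock is held (count != 0). */
        while (__atomic_load_n(&val->count, __ATOMIC_RELAXED) != 0)
            __builtin_ia32_pause();    /* the "pause" hint; unconditional
                                          here, CONFIG_HT-guarded above */
        /* Swap in 1; if the old value was nonzero, another CPU won the
           race, so go back to the lightweight wait. */
    } while (__atomic_exchange_n(&val->count, 1, __ATOMIC_ACQUIRE) != 0);
    CS_ENTER_BARRIER();    /* keep critical-section code from moving up */
}
```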