Rev 625 | Rev 631 | Go to most recent revision | Show entire file | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed
| Rev 625 | Rev 627 | ||
|---|---|---|---|
| Line 57... | Line 57... | ||
| 57 | #else |
57 | #else |
| 58 | __asm__ volatile ("decl %0\n" : "=m" (val->count)); |
58 | __asm__ volatile ("decl %0\n" : "=m" (val->count)); |
| 59 | #endif /* CONFIG_SMP */ |
59 | #endif /* CONFIG_SMP */ |
| 60 | } |
60 | } |
| 61 | 61 | ||
| 62 | static inline atomic_t atomic_inc_pre(atomic_t *val) |
62 | static inline count_t atomic_inc_pre(atomic_t *val) |
| 63 | { |
63 | { |
| 64 | atomic_t r; |
64 | count_t r; |
| - | 65 | ||
| 65 | __asm__ volatile ( |
66 | __asm__ volatile ( |
| 66 | "movl $1, %0\n" |
67 | "movl $1, %0\n" |
| 67 | "lock xaddl %0, %1\n" |
68 | "lock xaddl %0, %1\n" |
| 68 | : "=r"(r), "=m" (val->count) |
69 | : "=r" (r), "=m" (val->count) |
| 69 | ); |
70 | ); |
| - | 71 | ||
| 70 | return r; |
72 | return r; |
| 71 | } |
73 | } |
| 72 | 74 | ||
| 73 | - | ||
| 74 | - | ||
| 75 | static inline atomic_t atomic_dec_pre(atomic_t *val) |
75 | static inline count_t atomic_dec_pre(atomic_t *val) |
| 76 | { |
76 | { |
| 77 | atomic_t r; |
77 | count_t r; |
| - | 78 | ||
| 78 | __asm__ volatile ( |
79 | __asm__ volatile ( |
| 79 | "movl $-1, %0\n" |
80 | "movl $-1, %0\n" |
| 80 | "lock xaddl %0, %1\n" |
81 | "lock xaddl %0, %1\n" |
| 81 | : "=r"(r), "=m" (*val) |
82 | : "=r" (r), "=m" (*val) |
| 82 | ); |
83 | ); |
| - | 84 | ||
| 83 | return r; |
85 | return r; |
| 84 | } |
86 | } |
| 85 | 87 | ||
/*
 * Post-operation flavors: yield the counter value *after* the atomic
 * increment/decrement, derived from the pre-operation primitives above.
 * Each macro expands its argument exactly once.
 */
#define atomic_inc_post(val)  (atomic_inc_pre(val) + 1)
#define atomic_dec_post(val)  (atomic_dec_pre(val) - 1)