--- Rev 1121
+++ Rev 1692
@@ -50,29 +50,27 @@
 #endif /* CONFIG_SMP */
 }
 
 static inline long atomic_postinc(atomic_t *val)
 {
-	long r;
+	long r = 1;
 
 	__asm__ volatile (
-		"movq $1, %0\n"
-		"lock xaddq %0, %1\n"
-		: "=r" (r), "=m" (val->count)
+		"lock xaddq %1, %0\n"
+		: "=m" (val->count) : "r" (r)
 	);
 
 	return r;
 }
 
 static inline long atomic_postdec(atomic_t *val)
 {
-	long r;
+	long r = -1;
 
 	__asm__ volatile (
-		"movq $-1, %0\n"
-		"lock xaddq %0, %1\n"
-		: "=r" (r), "=m" (val->count)
+		"lock xaddq %1, %0\n"
+		: "=m" (val->count) : "r" (r)
 	);
 
 	return r;
 }
 
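For context on the change: `lock xadd` atomically exchanges the register operand with the memory operand and stores their sum to memory, so after the instruction the register holds the counter's value from before the addition, which is exactly what a post-increment must return. Rev 1692 hoists the immediate load (`movq $1, %0` / `movq $-1, %0`) out of the asm template by initializing `r` in C, shrinking the asm to a single locked instruction. One caveat: with `r` listed only as an input operand (`"r" (r)`), at least as the diff renders it, the compiler is not told that `xaddq` overwrites the register, so it may assume `r` still holds 1 (or -1) at `return r`. A read-write constraint expresses the actual data flow. Below is a minimal sketch of that form, assuming `atomic_t` wraps a `long count` as the surrounding code suggests:

/* Sketch only: atomic_t layout assumed from the diff above. */
typedef struct {
	volatile long count;
} atomic_t;

static inline long atomic_postinc(atomic_t *val)
{
	long r = 1;

	/*
	 * LOCK XADD swaps %[r] with the memory operand and stores the
	 * sum back to memory, leaving the old counter value in %[r].
	 * The "+" modifiers tell the compiler that both operands are
	 * read and written by the asm.
	 */
	__asm__ volatile (
		"lock xaddq %[r], %[cnt]\n"
		: [cnt] "+m" (val->count), [r] "+r" (r)
	);

	return r;  /* value of count before the increment */
}

On toolchains that provide it, the GCC builtin `__atomic_fetch_add(&val->count, 1, __ATOMIC_SEQ_CST)` has the same fetch-and-add semantics without hand-written asm; whether that is an option here depends on the kernel's compiler requirements.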