Diff of atomic_add() between Rev 1860 and Rev 1888:

@@ -48,23 +48,18 @@
  * @return Value of the atomic variable as it existed before addition.
  */
 static inline long atomic_add(atomic_t *val, int i)
 {
     uint64_t a, b;
-    volatile uint64_t x = (uint64_t) &val->count;
 
-    __asm__ volatile (
-        "0:\n"
-        "ldx %0, %1\n"
-        "add %1, %3, %2\n"
-        "casx %0, %1, %2\n"
-        "cmp %1, %2\n"
-        "bne 0b\n"    /* The operation failed and must be attempted again if a != b. */
-        "nop\n"
-        : "=m" (*((uint64_t *)x)), "=r" (a), "=r" (b)
-        : "r" (i)
-    );
+    do {
+        volatile uintptr_t x = (uint64_t) &val->count;
+
+        a = *((uint64_t *) x);
+        b = a + i;
+        __asm__ volatile ("casx %0, %1, %2\n" : "+m" (*((uint64_t *)x)), "+r" (a), "+r" (b));
+    } while (a != b);
 
     return a;
 }
 
 static inline long atomic_preinc(atomic_t *val)
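Rev 1888 replaces the fully hand-written retry loop (ldx / add / casx / cmp / bne) with a C do/while loop: the expected and new values are computed in plain C, and only the SPARC V9 `casx` compare-and-swap itself remains inline assembly. If another CPU modifies the counter between the snapshot and the `casx`, the value left in the output operand differs from the computed one and the loop retries. For readers less familiar with `casx`, below is a minimal sketch of the same retry pattern written with GCC's `__sync_val_compare_and_swap()` builtin instead of inline assembly; the `atomic_t` definition and the name `atomic_add_sketch` are assumptions made only to keep the example self-contained and are not part of the revisions shown above.

```c
#include <stdint.h>

/*
 * Illustrative sketch only, not taken from the revisions above: the same
 * compare-and-swap retry pattern that the Rev 1888 atomic_add() builds
 * around the SPARC V9 `casx` instruction, expressed with GCC's
 * __sync_val_compare_and_swap() builtin. The atomic_t layout and the
 * function name are assumed here just so the example compiles on its own.
 */
typedef struct {
    volatile uint64_t count;
} atomic_t;

static inline long atomic_add_sketch(atomic_t *val, int i)
{
    uint64_t old;

    do {
        /* Snapshot the current value of the counter. */
        old = val->count;
        /*
         * Try to install old + i. The builtin returns the value that was
         * actually found in memory: if another CPU changed the counter
         * after the snapshot, the returned value differs from `old` and
         * the loop retries, just like the casx loop in the diff above.
         */
    } while (__sync_val_compare_and_swap(&val->count, old, old + i) != old);

    /* As documented above: return the value as it existed before addition. */
    return (long) old;
}
```

The snapshot is read through the volatile `count` field, so every retry re-reads memory before attempting the swap; whether one prefers a compiler builtin or the explicit `casx` asm, as the port does here, is largely a question of how much the code wants to depend on compiler support.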