| Line | Rev 1891 | Line | Rev 1902 |
|---|---|---|---|
| 34 |  | 34 |  |
| 35 | `#ifndef KERN_sparc64_BARRIER_H_` | 35 | `#ifndef KERN_sparc64_BARRIER_H_` |
| 36 | `#define KERN_sparc64_BARRIER_H_` | 36 | `#define KERN_sparc64_BARRIER_H_` |
| 37 |  | 37 |  |
| 38 | `/*` | 38 | `/*` |
| 39 | `* TODO: Implement true SPARC V9 memory barriers for macros below.` | 39 | `* We assume TSO memory model in which only reads can pass earlier stores` |
| - |  | 40 | `* (but not earlier reads). Therefore, CS_ENTER_BARRIER() and CS_LEAVE_BARRIER()` |
| - |  | 41 | `* can be empty.` |
| 40 | `*/` | 42 | `*/` |
| 41 | `#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory")` | 43 | `#define CS_ENTER_BARRIER() __asm__ volatile ("" ::: "memory")` |
| 42 | `#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory")` | 44 | `#define CS_LEAVE_BARRIER() __asm__ volatile ("" ::: "memory")` |
| 43 |  | 45 |  |
| 44 | `#define memory_barrier() __asm__ volatile ("membar #LoadLoad \| #StoreStore\n" ::: "memory")` | 46 | `#define memory_barrier() __asm__ volatile ("membar #LoadLoad \| #StoreStore\n" ::: "memory")` |
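
For readability, here is the Rev 1902 side of the hunk reassembled as plain code. This is a sketch, not the verbatim committed file: the per-macro comments are editorial additions, and the closing `#endif` lies outside the hunk shown above and is assumed from the include guard.

```c
#ifndef KERN_sparc64_BARRIER_H_
#define KERN_sparc64_BARRIER_H_

/*
 * We assume TSO memory model in which only reads can pass earlier stores
 * (but not earlier reads). Therefore, CS_ENTER_BARRIER() and CS_LEAVE_BARRIER()
 * can be empty.
 */

/*
 * Compiler-only barriers (editorial comment): the empty asm emits no
 * instruction, but its "memory" clobber stops the compiler from moving
 * memory accesses across it. Under TSO, no CPU instruction is needed here.
 */
#define CS_ENTER_BARRIER()  __asm__ volatile ("" ::: "memory")
#define CS_LEAVE_BARRIER()  __asm__ volatile ("" ::: "memory")

/*
 * Hardware barrier (editorial comment): #LoadLoad orders loads before the
 * membar with loads after it; #StoreStore does the same for stores. The
 * mask combines both orderings in one membar instruction.
 */
#define memory_barrier()  __asm__ volatile ("membar #LoadLoad | #StoreStore\n" ::: "memory")

#endif  /* assumed include-guard close; outside the displayed hunk */
```

The design point the new comment records: TSO preserves read-read, write-write, and read-write order and permits only a read to complete ahead of an earlier write, so entering and leaving a critical section needs to restrain only the compiler, hence the empty asm bodies. memory_barrier() still issues an explicit membar whose #LoadLoad and #StoreStore bits request the read-read and write-write ordering directly.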