--- Rev 4343
+++ Rev 4345
@@ -24,11 +24,11 @@
  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 /** @addtogroup ia32
  * @{
  */
 /** @file
  */
 
@@ -44,12 +44,12 @@
 
 /*
  * Provisions are made to prevent the compiler itself from reordering instructions.
  */
 
 #define CS_ENTER_BARRIER() asm volatile ("" ::: "memory")
 #define CS_LEAVE_BARRIER() asm volatile ("" ::: "memory")
 
 static inline void cpuid_serialization(void)
 {
 #ifndef __IN_SHARED_LIBC__
 	asm volatile (
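
Note: CS_ENTER_BARRIER() and CS_LEAVE_BARRIER() above are compiler-only barriers: an empty asm statement with a "memory" clobber keeps the compiler from caching shared data in registers or moving memory accesses across the macro, but emits no CPU fence instruction. A minimal usage sketch follows, assuming a flag-guarded critical section; lock_flag, shared_counter, and the busy-wait are hypothetical illustrations, not code from this header.

/* Sketch only: the barriers pin the protected accesses inside the
 * critical section. A real lock would use an atomic test-and-set;
 * the naive flag below merely illustrates barrier placement. */
extern volatile int lock_flag;
extern int shared_counter;

static inline void counter_increment(void)
{
	while (lock_flag)
		;                    /* naive wait, illustration only */
	lock_flag = 1;
	CS_ENTER_BARRIER();          /* nothing may be hoisted above this */
	shared_counter++;
	CS_LEAVE_BARRIER();          /* nothing may be sunk below this */
	lock_flag = 0;
}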
@@ -68,4 +68,4 @@
 	);
 #endif
 }
 
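Note: the body of cpuid_serialization() is elided between these hunks and stays elided here. On IA-32, CPUID is an architecturally serializing instruction, which is why it can substitute for a fence on CPUs that lack mfence/lfence. The sketch below shows how such a sequence is commonly written; it is an assumption, not the file's actual body. The #ifndef __IN_SHARED_LIBC__ guard presumably exists because CPUID clobbers %ebx, which position-independent code on ia32 reserves for the GOT pointer.

#include <stdint.h>

/* Sketch only (non-PIC): serialize execution with CPUID(eax = 0).
 * CPUID overwrites all four registers, so they are listed as outputs;
 * the "memory" clobber doubles as a compiler barrier. */
static inline void cpuid_serialization_sketch(void)
{
	uint32_t eax = 0, ebx, ecx, edx;

	asm volatile (
		"cpuid\n"
		: "+a" (eax), "=b" (ebx), "=c" (ecx), "=d" (edx)
		:
		: "memory"
	);
}
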
@@ -72,26 +72,26 @@
 #if defined(CONFIG_FENCES_P4)
-# define memory_barrier() asm volatile ("mfence\n" ::: "memory")
-# define read_barrier() asm volatile ("lfence\n" ::: "memory")
-# ifdef CONFIG_WEAK_MEMORY
-# define write_barrier() asm volatile ("sfence\n" ::: "memory")
-# else
-# define write_barrier() asm volatile( "" ::: "memory");
-# endif
+#define memory_barrier() asm volatile ("mfence\n" ::: "memory")
+#define read_barrier() asm volatile ("lfence\n" ::: "memory")
+#ifdef CONFIG_WEAK_MEMORY
+#define write_barrier() asm volatile ("sfence\n" ::: "memory")
+#else
+#define write_barrier() asm volatile ("" ::: "memory");
+#endif
 #elif defined(CONFIG_FENCES_P3)
-# define memory_barrier() cpuid_serialization()
-# define read_barrier() cpuid_serialization()
-# ifdef CONFIG_WEAK_MEMORY
-# define write_barrier() asm volatile ("sfence\n" ::: "memory")
-# else
-# define write_barrier() asm volatile( "" ::: "memory");
-# endif
+#define memory_barrier() cpuid_serialization()
+#define read_barrier() cpuid_serialization()
+#ifdef CONFIG_WEAK_MEMORY
+#define write_barrier() asm volatile ("sfence\n" ::: "memory")
+#else
+#define write_barrier() asm volatile ("" ::: "memory");
+#endif
 #else
-# define memory_barrier() cpuid_serialization()
-# define read_barrier() cpuid_serialization()
-# ifdef CONFIG_WEAK_MEMORY
-# define write_barrier() cpuid_serialization()
-# else
-# define write_barrier() asm volatile( "" ::: "memory");
-# endif
+#define memory_barrier() cpuid_serialization()
+#define read_barrier() cpuid_serialization()
+#ifdef CONFIG_WEAK_MEMORY
+#define write_barrier() cpuid_serialization()
+#else
+#define write_barrier() asm volatile ("" ::: "memory");
+#endif
 #endif
 
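Note: the three branches pick the cheapest serialization the CPU supports: CONFIG_FENCES_P4 assumes SSE2 (mfence/lfence/sfence), CONFIG_FENCES_P3 assumes SSE only (sfence, with CPUID standing in for the full and read barriers), and the fallback uses CPUID throughout. Without CONFIG_WEAK_MEMORY, write_barrier() degrades to a pure compiler barrier; that variant carries a trailing semicolon in both revisions, so write_barrier(); expands to a double semicolon, which would break an unbraced if/else. A hedged usage sketch follows, with payload and ready as illustrative names, not identifiers from this file.

/* Hypothetical producer/consumer pairing for the barriers above. */
extern int payload;
extern volatile int ready;

void producer(void)
{
	payload = 42;
	write_barrier();        /* publish the payload before the flag */
	ready = 1;
}

int consumer(void)
{
	while (!ready)
		;               /* spin until the producer sets the flag */
	read_barrier();         /* the payload read must not pass the flag read */
	return payload;
}
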
@@ -98,13 +98,13 @@
 /*
  * On ia32, the hardware takes care of instruction and data cache coherence,
  * even on SMP systems. We issue a write barrier to make sure that writes
  * queued in the store buffer drain to memory (even though it would be
  * sufficient for them to drain to the D-cache).
  */
 #define smc_coherence(a) write_barrier()
 #define smc_coherence_block(a, l) write_barrier()
 
 #endif
 
 /** @}
  */
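
Note: smc_coherence() and smc_coherence_block() exist for self-modifying code. As the comment says, ia32 hardware keeps the instruction and data caches coherent even on SMP, so draining the store buffer with a write barrier is all that is needed before jumping to freshly written instructions. A sketch, assuming a hypothetical patch_site buffer that is both writable and executable:

#include <stdint.h>

/* Illustration only: write an instruction, make it coherent, run it.
 * Casting a data pointer to a function pointer is not ISO C, but is
 * the usual idiom in this kind of kernel-level code. */
extern uint8_t patch_site[];

void patch_and_run(void)
{
	patch_site[0] = 0xc3;            /* x86 RET opcode */
	smc_coherence(patch_site);       /* drain stores before executing */
	((void (*)(void)) patch_site)(); /* jump to the patched byte */
}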