Subversion Repositories HelenOS

Rev 3903 → Rev 4016 ("-" marks rev 3903 lines, "+" marks rev 4016 lines)

Lines 24-34:
  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-/** @addtogroup ia32   
+/** @addtogroup ia32
  * @{
  */
 /** @file
  */
 

(The only change in this hunk is the removal of trailing whitespace after "ia32".)
Lines 44-51 (unchanged context):
 
 /*
  * Provisions are made to prevent the compiler from reordering instructions on its own.
  */
 
 #define CS_ENTER_BARRIER()  asm volatile ("" ::: "memory")
 #define CS_LEAVE_BARRIER()  asm volatile ("" ::: "memory")
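
For illustration, a minimal usage sketch of these compiler-only barriers (not from the HelenOS tree; lock_held and shared_counter are hypothetical names). The empty asm body emits no machine instructions; the "memory" clobber merely forbids the compiler from caching shared values in registers or moving memory accesses across the barrier:

	static volatile int lock_held;
	static int shared_counter;

	static void critical_increment(void)
	{
		/* Spin until the lock is acquired (GCC atomic builtin). */
		while (__sync_lock_test_and_set(&lock_held, 1))
			;
		CS_ENTER_BARRIER();   /* accesses below must not be hoisted above */
		shared_counter++;     /* the protected operation */
		CS_LEAVE_BARRIER();   /* accesses above must not sink below */
		lock_held = 0;        /* release the lock */
	}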
Lines 52-60 (unchanged context; line 56 is not shown in this view):

 static inline void cpuid_serialization(void)
 {
     asm volatile (
         "xorl %%eax, %%eax\n"
         ...
         ::: "eax", "ebx", "ecx", "edx", "memory"
     );
 }
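
The elided line 56 presumably issues the cpuid instruction, which is architecturally serializing on IA-32: all prior instructions, including buffered stores, must complete before execution continues. That is why it can stand in for mfence on processors that lack the SSE2 fence instructions. A self-contained sketch under that assumption:

	/* Assumed reconstruction for illustration only; the elided line is
	 * taken to be "cpuid\n". eax is zeroed to select cpuid leaf 0. */
	static inline void cpuid_serialization_sketch(void)
	{
		asm volatile (
			"xorl %%eax, %%eax\n"
			"cpuid\n"
			::: "eax", "ebx", "ecx", "edx", "memory"
		);
	}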
Lines 61-85:

 #if defined(CONFIG_FENCES_P4)
-#   define memory_barrier()     asm volatile ("mfence\n" ::: "memory")
-#   define read_barrier()       asm volatile ("lfence\n" ::: "memory")
-#   ifdef CONFIG_WEAK_MEMORY
-#       define write_barrier()  asm volatile ("sfence\n" ::: "memory")
-#   else
-#       define write_barrier()  asm volatile( "" ::: "memory");
-#   endif
+    #define memory_barrier()  asm volatile ("mfence\n" ::: "memory")
+    #define read_barrier()    asm volatile ("lfence\n" ::: "memory")
+    #ifdef CONFIG_WEAK_MEMORY
+        #define write_barrier()  asm volatile ("sfence\n" ::: "memory")
+    #else
+        #define write_barrier()  asm volatile ("" ::: "memory");
+    #endif
 #elif defined(CONFIG_FENCES_P3)
-#   define memory_barrier()     cpuid_serialization()
-#   define read_barrier()       cpuid_serialization()
-#   ifdef CONFIG_WEAK_MEMORY
-#       define write_barrier()  asm volatile ("sfence\n" ::: "memory")
-#   else
-#       define write_barrier()  asm volatile( "" ::: "memory");
-#   endif
+    #define memory_barrier()  cpuid_serialization()
+    #define read_barrier()    cpuid_serialization()
+    #ifdef CONFIG_WEAK_MEMORY
+        #define write_barrier()  asm volatile ("sfence\n" ::: "memory")
+    #else
+        #define write_barrier()  asm volatile ("" ::: "memory");
+    #endif
 #else
-#   define memory_barrier()     cpuid_serialization()
-#   define read_barrier()       cpuid_serialization()
-#   ifdef CONFIG_WEAK_MEMORY
-#       define write_barrier()  cpuid_serialization()
-#   else
-#       define write_barrier()  asm volatile( "" ::: "memory");
-#   endif
+    #define memory_barrier()  cpuid_serialization()
+    #define read_barrier()    cpuid_serialization()
+    #ifdef CONFIG_WEAK_MEMORY
+        #define write_barrier()  cpuid_serialization()
+    #else
+        #define write_barrier()  asm volatile ("" ::: "memory");
+    #endif
 #endif
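
Whichever branch the configuration selects, the resulting API is identical. A hedged sketch of the publish/consume pattern these macros exist to support (data and ready are illustrative variables, not HelenOS identifiers):

	static int data;
	static volatile int ready;

	static void producer(void)
	{
		data = 42;          /* write the payload first */
		write_barrier();    /* payload must be globally visible ... */
		ready = 1;          /* ... before the flag is raised */
	}

	static void consumer(void)
	{
		while (!ready)
			;               /* wait for the flag */
		read_barrier();     /* payload read must not pass the flag read */
		/* ... safe to use data here ... */
	}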
Lines 86-94:

 
 /*
  * On ia32, the hardware takes care of instruction and data cache coherence,
  * even on SMP systems.  We issue a write barrier to be sure that writes
  * queueing in the store buffer drain to memory (even though it would be
  * sufficient for them to drain to the D-cache).
  */
-#define smc_coherence(a)        write_barrier()
-#define smc_coherence_block(a, l)   write_barrier()
+#define smc_coherence(a)           write_barrier()
+#define smc_coherence_block(a, l)  write_barrier()
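
A hypothetical caller (illustrative only, not part of this file): code that patches an instruction in place and then invokes smc_coherence() so the store drains before the CPU can fetch the modified bytes:

	#include <stdint.h>   /* for uint8_t; the kernel provides its own types */

	/* Write the IA-32 breakpoint opcode (0xCC) over the first byte of an
	 * instruction, then make the write visible to instruction fetch. */
	static void insert_breakpoint(uint8_t *address)
	{
		*address = 0xCC;
		smc_coherence(address);
	}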
Lines 95-99 (unchanged):

 
 #endif
 
 /** @}
  */