Subversion Repositories HelenOS

Rev

Rev 2071 | Rev 2272 | Go to most recent revision | Show entire file | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 2071 Rev 2082
Line 37... Line 37...
37
 
37
 
/*
 * Our critical section barriers are prepared for the weakest RMO memory model.
 */

/** Enter a critical section (acquire barrier).
 *
 * membar #LoadLoad | #LoadStore orders all loads issued before the
 * barrier ahead of any load or store issued after it, which is what
 * lock-acquire needs under the SPARC V9 RMO memory model.
 *
 * The "memory" clobber additionally forbids the compiler from moving
 * memory accesses across the barrier.
 *
 * Note: __asm__ is used rather than asm because the latter is not a
 * keyword under strict ISO C modes (e.g. -std=c11).
 */
#define CS_ENTER_BARRIER()				\
	__asm__ volatile (				\
		"membar #LoadLoad | #LoadStore\n"	\
		::: "memory"				\
	)
/** Leave a critical section (release barrier).
 *
 * membar #StoreStore orders all stores issued before the barrier ahead
 * of any store issued after it; membar #LoadStore orders all prior
 * loads ahead of subsequent stores. Together they give lock-release
 * semantics under the SPARC V9 RMO memory model.
 *
 * The "memory" clobber additionally forbids the compiler from moving
 * memory accesses across the barrier.
 */
#define CS_LEAVE_BARRIER()				\
	__asm__ volatile (				\
		"membar #StoreStore\n"			\
		"membar #LoadStore\n"			\
		::: "memory"				\
	)
52
 
52
 
/** Full load/store ordering barrier.
 *
 * Orders prior loads before subsequent loads and prior stores before
 * subsequent stores; the "memory" clobber is a compiler barrier.
 */
#define memory_barrier()	\
	__asm__ volatile ("membar #LoadLoad | #StoreStore\n" ::: "memory")
/** Read (load-load) ordering barrier.
 *
 * Orders all loads issued before the barrier ahead of any load issued
 * after it; the "memory" clobber is a compiler barrier.
 */
#define read_barrier()		\
	__asm__ volatile ("membar #LoadLoad\n" ::: "memory")
/** Write (store-store) ordering barrier.
 *
 * Orders all stores issued before the barrier ahead of any store issued
 * after it; the "memory" clobber is a compiler barrier.
 */
#define write_barrier()		\
	__asm__ volatile ("membar #StoreStore\n" ::: "memory")
59
 
59
 
/** Flush Instruction Memory instruction. */
static inline void flush(void)
{
	/*
	 * NOTE(review): the first lines of the original rationale comment
	 * are not visible in this diff view. The visible remainder states
	 * that the entire kernel text is mapped by locked ITLB and DTLB
	 * entries; therefore, when this function is called, the address
	 * held in %o7 (the return address left there by the call) is
	 * always in the range mapped by the DTLB, so FLUSH cannot trap
	 * on a missing translation — confirm against the full source.
	 */
	__asm__ volatile ("flush %o7\n");
}
75
 
75
 
76
/** Memory Barrier instruction. */
76
/** Memory Barrier instruction. */
77
static inline void membar(void)
77
static inline void membar(void)
78
{
78
{
79
    __asm__ volatile ("membar #Sync\n");
79
    asm volatile ("membar #Sync\n");
80
}
80
}
81
 
81
 
82
#endif
82
#endif
83
 
83
 
84
/** @}
84
/** @}