Subversion Repositories HelenOS

Rev

Rev 3022 | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 3022 Rev 4055
1
/*
1
/*
2
 * Copyright (c) 2005 Jakub Jermar
2
 * Copyright (c) 2005 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/** @addtogroup sparc64
29
/** @addtogroup sparc64
30
 * @{
30
 * @{
31
 */
31
 */
32
/** @file
32
/** @file
33
 */
33
 */
34
 
34
 
35
#ifndef KERN_sparc64_BARRIER_H_
35
#ifndef KERN_sparc64_BARRIER_H_
36
#define KERN_sparc64_BARRIER_H_
36
#define KERN_sparc64_BARRIER_H_
37
 
37
 
38
/*
38
/*
39
 * Our critical section barriers are prepared for the weakest RMO memory model.
39
 * Our critical section barriers are prepared for the weakest RMO memory model.
40
 */
40
 */
41
#define CS_ENTER_BARRIER()              \
41
#define CS_ENTER_BARRIER()              \
42
    asm volatile (                  \
42
    asm volatile (                  \
43
        "membar #LoadLoad | #LoadStore\n"   \
43
        "membar #LoadLoad | #LoadStore\n"   \
44
        ::: "memory"                \
44
        ::: "memory"                \
45
    )
45
    )
46
#define CS_LEAVE_BARRIER()              \
46
#define CS_LEAVE_BARRIER()              \
47
    asm volatile (                  \
47
    asm volatile (                  \
48
        "membar #StoreStore\n"          \
48
        "membar #StoreStore\n"          \
49
        "membar #LoadStore\n"           \
49
        "membar #LoadStore\n"           \
50
        ::: "memory"                \
50
        ::: "memory"                \
51
    )
51
    )
52
 
52
 
53
#define memory_barrier()    \
53
#define memory_barrier()    \
54
    asm volatile ("membar #LoadLoad | #StoreStore\n" ::: "memory")
54
    asm volatile ("membar #LoadLoad | #StoreStore\n" ::: "memory")
55
#define read_barrier()      \
55
#define read_barrier()      \
56
    asm volatile ("membar #LoadLoad\n" ::: "memory")
56
    asm volatile ("membar #LoadLoad\n" ::: "memory")
57
#define write_barrier()     \
57
#define write_barrier()     \
58
    asm volatile ("membar #StoreStore\n" ::: "memory")
58
    asm volatile ("membar #StoreStore\n" ::: "memory")
59
 
59
 
-
 
60
#define flush(a)        \
-
 
61
    asm volatile ("flush %0\n" :: "r" ((a)) : "memory")
-
 
62
 
60
/** Flush Instruction Memory instruction. */
63
/** Flush Instruction pipeline. */
61
static inline void flush(void)
64
static inline void flush_pipeline(void)
62
{
65
{
63
    /*
66
    /*
64
     * The FLUSH instruction takes address parameter.
67
     * The FLUSH instruction takes address parameter.
65
     * As such, it may trap if the address is not found in DTLB.
68
     * As such, it may trap if the address is not found in DTLB.
66
     *
69
     *
67
     * The entire kernel text is mapped by a locked ITLB and
70
     * The entire kernel text is mapped by a locked ITLB and
68
     * DTLB entries. Therefore, when this function is called,
71
     * DTLB entries. Therefore, when this function is called,
69
     * the %o7 register will always be in the range mapped by
72
     * the %o7 register will always be in the range mapped by
70
     * DTLB.
73
     * DTLB.
71
     */
74
     */
72
     
75
     
73
        asm volatile ("flush %o7\n");
76
        asm volatile ("flush %o7\n");
74
}
77
}
75
 
78
 
76
/** Memory Barrier instruction. */
79
/** Memory Barrier instruction. */
77
static inline void membar(void)
80
static inline void membar(void)
78
{
81
{
79
    asm volatile ("membar #Sync\n");
82
    asm volatile ("membar #Sync\n");
80
}
83
}
81
 
84
 
-
 
85
#if defined (US)

/*
 * Enforce self-modifying code coherence on UltraSPARC I/II:
 * publish the stores with a write barrier, then FLUSH the
 * modified address so stale instructions are discarded.
 *
 * Wrapped in do { } while (0) so the macros behave as single
 * statements (safe in unbraced if/else bodies) — bare { } blocks
 * would break on the trailing semicolon (CERT PRE10-C).
 */
#define smc_coherence(a)    \
do {                        \
	write_barrier();    \
	flush((a));         \
} while (0)

/* Minimal granularity (in bytes) covered by a single FLUSH. */
#define FLUSH_INVAL_MIN     4

/*
 * Flush an 'l'-byte range starting at 'a', one FLUSH per
 * FLUSH_INVAL_MIN bytes.  Byte arithmetic is done on char *
 * rather than void * (void pointer arithmetic is a GCC extension).
 */
#define smc_coherence_block(a, l)           \
do {                                        \
	unsigned long i;                    \
	write_barrier();                    \
	for (i = 0; i < (l); i += FLUSH_INVAL_MIN)  \
		flush((char *)(a) + i);     \
} while (0)

#elif defined (US3)

/*
 * On UltraSPARC III no per-address FLUSH is needed: a pipeline
 * flush after the write barrier is sufficient, so both macros
 * ignore their address/length arguments.
 */
#define smc_coherence(a)    \
do {                        \
	write_barrier();    \
	flush_pipeline();   \
} while (0)

#define smc_coherence_block(a, l)   \
do {                                \
	write_barrier();            \
	flush_pipeline();           \
} while (0)

#endif  /* defined(US3) */
-
 
117
 
82
#endif
118
#endif
83
 
119
 
84
/** @}
120
/** @}
85
 */
121
 */
86
 
122