Subversion Repositories HelenOS — diff view of the ia32 barrier header, rev 4343 → rev 4345 (repository-browser navigation chrome removed).
/*
 * Copyright (c) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/** @addtogroup ia32
 * @{
 */
/** @file
 */
#ifndef KERN_ia32_BARRIER_H_
#define KERN_ia32_BARRIER_H_

/*
 * NOTE:
 * No barriers for critical section (i.e. spinlock) on IA-32 are needed:
 * - spinlock_lock() and spinlock_trylock() use serializing XCHG instruction
 * - writes cannot pass reads on IA-32 => spinlock_unlock() needs no barriers
 */

/*
 * Provisions are made to prevent compiler from reordering instructions itself.
 */

/*
 * Compiler-only barriers: the empty asm with a "memory" clobber forbids the
 * compiler from moving memory accesses across the barrier; no instruction
 * is emitted.  (__asm__ is used instead of the GNU `asm` keyword so the
 * header also compiles in strict -std=c* modes, where `asm` is disabled.)
 */
#define CS_ENTER_BARRIER()  __asm__ volatile ("" ::: "memory")
#define CS_LEAVE_BARRIER()  __asm__ volatile ("" ::: "memory")

/** Serialize the instruction stream using CPUID.
 *
 * Executes CPUID with EAX = 0; CPUID is a serializing instruction on IA-32,
 * so this acts as a full barrier on processors lacking *FENCE instructions.
 * The shared-libc variant saves/restores EBX because EBX holds the PIC
 * register and must not appear in the clobber list there.
 */
static inline void cpuid_serialization(void)
{
#ifndef __IN_SHARED_LIBC__
	__asm__ volatile (
		"xorl %%eax, %%eax\n"
		"cpuid\n"
		::: "eax", "ebx", "ecx", "edx", "memory"
	);
#else
	/* Must not clobber PIC register ebx */
	__asm__ volatile (
		"movl %%ebx, %%esi\n"
		"xorl %%eax, %%eax\n"
		"cpuid\n"
		"movl %%esi, %%ebx\n"
		::: "eax", "ecx", "edx", "esi", "memory"
	);
#endif
}

/*
 * Barrier selection:
 * - CONFIG_FENCES_P4: MFENCE/LFENCE available (Pentium 4 and later).
 * - CONFIG_FENCES_P3: only SFENCE available (Pentium III).
 * - otherwise: no fence instructions; fall back to CPUID serialization.
 *
 * Note: the compiler-barrier variants of write_barrier() deliberately have
 * no trailing semicolon, so that `write_barrier();` is a single statement
 * and works correctly in `if`/`else` bodies (the original definitions
 * carried a stray `;` inside the macro).
 */
#if defined(CONFIG_FENCES_P4)
	#define memory_barrier()  __asm__ volatile ("mfence\n" ::: "memory")
	#define read_barrier()    __asm__ volatile ("lfence\n" ::: "memory")
	#ifdef CONFIG_WEAK_MEMORY
		#define write_barrier()  __asm__ volatile ("sfence\n" ::: "memory")
	#else
		#define write_barrier()  __asm__ volatile ("" ::: "memory")
	#endif
#elif defined(CONFIG_FENCES_P3)
	#define memory_barrier()  cpuid_serialization()
	#define read_barrier()    cpuid_serialization()
	#ifdef CONFIG_WEAK_MEMORY
		#define write_barrier()  __asm__ volatile ("sfence\n" ::: "memory")
	#else
		#define write_barrier()  __asm__ volatile ("" ::: "memory")
	#endif
#else
	#define memory_barrier()  cpuid_serialization()
	#define read_barrier()    cpuid_serialization()
	#ifdef CONFIG_WEAK_MEMORY
		#define write_barrier()  cpuid_serialization()
	#else
		#define write_barrier()  __asm__ volatile ("" ::: "memory")
	#endif
#endif

/*
 * On ia32, the hardware takes care about instruction and data cache coherence,
 * even on SMP systems.  We issue a write barrier to be sure that writes
 * queueing in the store buffer drain to the memory (even though it would be
 * sufficient for them to drain to the D-cache).
 */
#define smc_coherence(a)           write_barrier()
#define smc_coherence_block(a, l)  write_barrier()

#endif
108
 
108
 
109
/** @}
109
/** @}
110
 */
110
 */
111
 
111