Subversion Repositories HelenOS-historic

Rev

Rev 1702 | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1702 Rev 1780
1
/*
1
/*
2
 * Copyright (C) 2005 Martin Decky
2
 * Copyright (C) 2005 Martin Decky
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
 /** @addtogroup ppc64 
29
 /** @addtogroup ppc64 
30
 * @{
30
 * @{
31
 */
31
 */
32
/** @file
32
/** @file
33
 */
33
 */
34
 
34
 
35
#ifndef __ppc64_ASM_H__
35
#ifndef __ppc64_ASM_H__
36
#define __ppc64_ASM_H__
36
#define __ppc64_ASM_H__
37
 
37
 
38
#include <arch/types.h>
38
#include <arch/types.h>
39
#include <config.h>
39
#include <config.h>
40
 
40
 
41
/** Enable interrupts.
41
/** Enable interrupts.
42
 *
42
 *
43
 * Enable interrupts and return previous
43
 * Enable interrupts and return previous
44
 * value of EE.
44
 * value of EE.
45
 *
45
 *
46
 * @return Old interrupt priority level.
46
 * @return Old interrupt priority level.
47
 */
47
 */
48
static inline ipl_t interrupts_enable(void)
48
static inline ipl_t interrupts_enable(void)
49
{
49
{
50
    ipl_t v;
50
    ipl_t v;
51
    ipl_t tmp;
51
    ipl_t tmp;
52
   
52
   
53
    asm volatile (
53
    asm volatile (
54
        "mfmsr %0\n"
54
        "mfmsr %0\n"
55
        "mfmsr %1\n"
55
        "mfmsr %1\n"
56
        "ori %1, %1, 1 << 15\n"
56
        "ori %1, %1, 1 << 15\n"
57
        "mtmsr %1\n"
57
        "mtmsr %1\n"
58
        : "=r" (v), "=r" (tmp)
58
        : "=r" (v), "=r" (tmp)
59
    );
59
    );
60
    return v;
60
    return v;
61
}
61
}
62
 
62
 
63
/** Disable interrupts.
63
/** Disable interrupts.
64
 *
64
 *
65
 * Disable interrupts and return previous
65
 * Disable interrupts and return previous
66
 * value of EE.
66
 * value of EE.
67
 *
67
 *
68
 * @return Old interrupt priority level.
68
 * @return Old interrupt priority level.
69
 */
69
 */
70
static inline ipl_t interrupts_disable(void)
70
static inline ipl_t interrupts_disable(void)
71
{
71
{
72
    ipl_t v;
72
    ipl_t v;
73
    ipl_t tmp;
73
    ipl_t tmp;
74
   
74
   
75
    asm volatile (
75
    asm volatile (
76
        "mfmsr %0\n"
76
        "mfmsr %0\n"
77
        "mfmsr %1\n"
77
        "mfmsr %1\n"
78
        "rlwinm %1, %1, 0, 17, 15\n"
78
        "rlwinm %1, %1, 0, 17, 15\n"
79
        "mtmsr %1\n"
79
        "mtmsr %1\n"
80
        : "=r" (v), "=r" (tmp)
80
        : "=r" (v), "=r" (tmp)
81
    );
81
    );
82
    return v;
82
    return v;
83
}
83
}
84
 
84
 
85
/** Restore interrupt priority level.
 *
 * Restore EE.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl)
{
    ipl_t tmp;
    
    asm volatile (
        /* tmp = current MSR */
        "mfmsr %1\n"
        /* Insert every current MSR bit except bit 16 (EE) into ipl:
         * mask 17..15 wraps around, so only the saved EE bit survives
         * from ipl while all other bits come from the live MSR. */
        "rlwimi  %0, %1, 0, 17, 15\n"
        /* Skip the mtmsr entirely when the merged value equals the
         * current MSR, i.e. when EE already has the saved state. */
        "cmpw 0, %0, %1\n"
        "beq 0f\n"
        "mtmsr %0\n"
        "0:\n"
        /* ipl is both output %0 and (via matching constraint "0") the
         * input; tmp is a scratch output; cmpw clobbers cr0. */
        : "=r" (ipl), "=r" (tmp)
        : "0" (ipl)
        : "cr0"
    );
}
107
 
107
 
108
/** Return interrupt priority level.
108
/** Return interrupt priority level.
109
 *
109
 *
110
 * Return EE.
110
 * Return EE.
111
 *
111
 *
112
 * @return Current interrupt priority level.
112
 * @return Current interrupt priority level.
113
 */
113
 */
114
static inline ipl_t interrupts_read(void)
114
static inline ipl_t interrupts_read(void)
115
{
115
{
116
    ipl_t v;
116
    ipl_t v;
117
   
117
   
118
    asm volatile (
118
    asm volatile (
119
        "mfmsr %0\n"
119
        "mfmsr %0\n"
120
        : "=r" (v)
120
        : "=r" (v)
121
    );
121
    );
122
    return v;
122
    return v;
123
}
123
}
124
 
124
 
125
/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t v;
    
    asm volatile (
        /* Mask off the low bits of the stack pointer to obtain the
         * STACK_SIZE-aligned base (assumes STACK_SIZE is a power of two).
         * NOTE(review): "%sp" is not a numeric PowerPC register name --
         * the PowerPC stack pointer is r1. This looks copied from the
         * sparc64 port; verify the assembler accepts this symbolic form. */
        "and %0, %%sp, %1\n"
        : "=r" (v)
        : "r" (~(STACK_SIZE - 1))
    );
    return v;
}
142
 
142
 
143
/* Idle the CPU until the next interrupt; intentionally a no-op on this port. */
static inline void cpu_sleep(void)
{
}
146
 
146
 
147
static inline void cpu_halt(void)
147
static inline void cpu_halt(void)
148
{
148
{
149
    asm volatile (
149
    asm volatile (
150
        "b 0\n"
150
        "b 0\n"
151
    );
151
    );
152
}
152
}
153
 
153
 
154
void asm_delay_loop(__u32 t);
154
void asm_delay_loop(uint32_t t);
155
 
155
 
156
extern void userspace_asm(__address uspace_uarg, __address stack, __address entry);
156
extern void userspace_asm(uintptr_t uspace_uarg, uintptr_t stack, uintptr_t entry);
157
 
157
 
158
#endif
158
#endif
159
 
159
 
160
 /** @}
160
 /** @}
161
 */
161
 */
162
 
162
 
163
 
163