Subversion Repositories HelenOS-historic


Diff of the sparc64 asm.h header between Rev 640 and Rev 650.

--- asm.h (Rev 640)
+++ asm.h (Rev 650)
@@ -1,43 +1,77 @@
 /*
  * Copyright (C) 2005 Jakub Jermar
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  *
  * - Redistributions of source code must retain the above copyright
  *   notice, this list of conditions and the following disclaimer.
  * - Redistributions in binary form must reproduce the above copyright
  *   notice, this list of conditions and the following disclaimer in the
  *   documentation and/or other materials provided with the distribution.
  * - The name of the author may not be used to endorse or promote products
  *   derived from this software without specific prior written permission.
  *
  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #ifndef __sparc64_ASM_H__
 #define __sparc64_ASM_H__
 
+#include <typedefs.h>
 #include <arch/types.h>
+#include <arch/register.h>
 #include <config.h>
 
+/** Read Processor State register.
+ *
+ * @return Value of PSTATE register.
+ */
+static inline __u64 pstate_read(void)
+{
+    __u64 v;
+
+    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));
+
+    return v;
+}
+
+/** Write Processor State register.
+ *
+ * @param New value of PSTATE register.
+ */
+static inline void pstate_write(__u64 v)
+{
+    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
+}
+
+
 /** Enable interrupts.
  *
  * Enable interrupts and return previous
  * value of IPL.
  *
  * @return Old interrupt priority level.
  */
 static inline ipl_t interrupts_enable(void) {
+    pstate_reg_t pstate;
+    __u64 value;
+
+    value = pstate_read();
+    pstate.value = value;
+    pstate.ie = true;
+    pstate_write(pstate.value);
+
+    return (ipl_t) value;
 }
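
The heart of the change is the new pstate_read()/pstate_write() pair: interrupt control becomes a read-modify-write of the privileged PSTATE register. Each interrupts_* routine reads PSTATE, toggles the IE (interrupt enable) bit through a pstate_reg_t overlay, writes the result back, and returns the previous raw value as an ipl_t token. (The wrpr instruction writes the XOR of its two source operands, so XORing with the immediate 0 stores v unchanged.) pstate_reg_t itself comes from the newly included <arch/register.h> and is not shown in this diff; a minimal sketch of such an overlay, assuming the SPARC V9 PSTATE bit layout rather than copying the HelenOS declaration, might look like this:

/* Sketch of a PSTATE overlay type in the spirit of pstate_reg_t.
 * The real definition lives in <arch/register.h>; the layout below
 * follows the SPARC V9 PSTATE format (IE is bit 1). On a big-endian
 * target, GCC allocates bitfields from the most significant bit down,
 * so listing the padding first puts ag at bit 0. */
typedef union {
    __u64 value;            /* raw 64-bit register contents */
    struct {
        __u64 : 54;         /* reserved / implementation-defined bits */
        __u64 cle : 1;      /* current little-endian */
        __u64 tle : 1;      /* trap little-endian */
        __u64 mm : 2;       /* memory model */
        __u64 red : 1;      /* RED state */
        __u64 pef : 1;      /* floating-point enable */
        __u64 am : 1;       /* 32-bit address mask */
        __u64 priv : 1;     /* privileged mode */
        __u64 ie : 1;       /* interrupt enable -- the bit toggled above */
        __u64 ag : 1;       /* alternate globals */
    } __attribute__ ((packed));
} pstate_reg_example_t;

Returning the whole register as the ipl_t token keeps interrupts_restore() trivial: it copies only the saved IE bit back, leaving every other PSTATE field at its current value.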

@@ -44,28 +78,43 @@
 
 /** Disable interrupts.
  *
  * Disable interrupts and return previous
  * value of IPL.
  *
  * @return Old interrupt priority level.
  */
 static inline ipl_t interrupts_disable(void) {
+    pstate_reg_t pstate;
+    __u64 value;
+
+    value = pstate_read();
+    pstate.value = value;
+    pstate.ie = false;
+    pstate_write(pstate.value);
+
+    return (ipl_t) value;
 }
 
 /** Restore interrupt priority level.
  *
  * Restore IPL.
  *
  * @param ipl Saved interrupt priority level.
  */
 static inline void interrupts_restore(ipl_t ipl) {
+    pstate_reg_t pstate;
+
+    pstate.value = pstate_read();
+    pstate.ie = ((pstate_reg_t) ipl).ie;
+    pstate_write(pstate.value);
 }
 
 /** Return interrupt priority level.
  *
  * Return IPL.
  *
  * @return Current interrupt priority level.
  */
 static inline ipl_t interrupts_read(void) {
+    return (ipl_t) pstate_read();
 }
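
With the bodies filled in, the routines compose into the usual save/disable/restore idiom for per-CPU critical sections: interrupts_disable() returns the previous state, and interrupts_restore() reinstates only its IE bit, so the pattern nests correctly whether or not interrupts were enabled on entry. A minimal sketch of a caller follows; the function and the counter are hypothetical, not HelenOS code:

/* Hypothetical caller illustrating the save/disable/restore idiom. */
static volatile int shared_counter;

static void counter_increment(void)
{
    ipl_t ipl;

    ipl = interrupts_disable();   /* save previous state, mask interrupts */
    shared_counter++;             /* critical section: cannot be preempted
                                   * by an interrupt handler on this CPU */
    interrupts_restore(ipl);      /* re-enable only if enabled on entry */
}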

@@ -72,83 +121,83 @@
 
 /** Return base address of current stack.
  *
  * Return the base address of the current stack.
  * The stack is assumed to be STACK_SIZE bytes long.
  * The stack must start on page boundary.
  */
 static inline __address get_stack_base(void)
 {
     __address v;
 
-    __asm__ volatile ("and %%o6, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));
+    __asm__ volatile ("and %%sp, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));
 
     return v;
 }
 
 /** Read Version Register.
  *
  * @return Value of VER register.
  */
 static inline __u64 ver_read(void)
 {
     __u64 v;
 
     __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));
 
     return v;
 }
 
 /** Read Trap Base Address register.
  *
  * @return Current value in TBA.
  */
 static inline __u64 tba_read(void)
 {
     __u64 v;
 
     __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));
 
     return v;
 }
 
 /** Write Trap Base Address register.
  *
  * @param New value of TBA.
  */
 static inline void tba_write(__u64 v)
 {
     __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
 }
 
 /** Load __u64 from alternate space.
  *
  * @param asi ASI determining the alternate space.
  * @param va Virtual address within the ASI.
  *
  * @return Value read from the virtual address in the specified address space.
  */
 static inline __u64 asi_u64_read(asi_t asi, __address va)
 {
     __u64 v;
 
     __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));
 
     return v;
 }
 
 /** Store __u64 to alternate space.
  *
  * @param asi ASI determining the alternate space.
  * @param va Virtual address within the ASI.
  * @param v Value to be written.
  */
 static inline void asi_u64_write(asi_t asi, __address va, __u64 v)
 {
     __asm__ volatile ("stxa %0, [%1] %2\n" : :  "r" (v), "r" (va), "i" (asi) : "memory");
 }
 
 void cpu_halt(void);
 void cpu_sleep(void);
 void asm_delay_loop(__u32 t);
 
 #endif
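
The only change in the remainder of the file is in get_stack_base(), where %%o6 is replaced by %%sp. The two names refer to the same register: in the SPARC ABI, output register %o6 doubles as the stack pointer, and %sp is its standard assembler alias, so this is a pure readability fix. The masking trick itself works because stacks are assumed to be STACK_SIZE-aligned, so clearing the low bits of the stack pointer yields the stack base.

One subtlety in the unchanged asi_u64_read()/asi_u64_write() helpers is the "i" (immediate) constraint on the asi operand: the ASI number is encoded directly into the ldxa/stxa instruction, so these inlines compile only when the ASI is a compile-time constant. A sketch, with ASI_EXAMPLE as a placeholder value rather than a real HelenOS constant:

/* Placeholder ASI number for illustration only; real callers pass one
 * of the architecture-defined ASI constants. It must be a compile-time
 * constant because the "i" constraint embeds it in the instruction. */
#define ASI_EXAMPLE 0x14

static void asi_roundtrip(__address va)
{
    __u64 v;

    v = asi_u64_read(ASI_EXAMPLE, va);   /* ldxa [va] ASI_EXAMPLE, v */
    asi_u64_write(ASI_EXAMPLE, va, v);   /* stxa v, [va] ASI_EXAMPLE */
}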