Subversion Repositories HelenOS-historic

Rev

Rev 873 | Rev 1702 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 873 → Rev 883

/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __sparc64_ASM_H__
#define __sparc64_ASM_H__

#include <typedefs.h>
#include <arch/types.h>
#include <arch/register.h>
#include <config.h>

/** Read Processor State register.
 *
 * @return Value of PSTATE register.
 */
static inline __u64 pstate_read(void)
{
    __u64 v;

    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));

    return v;
}

/** Write Processor State register.
 *
 * @param v New value of PSTATE register.
 */
static inline void pstate_write(__u64 v)
{
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
}

/** Read TICK_compare Register.
 *
 * @return Value of TICK_compare register.
 */
static inline __u64 tick_compare_read(void)
{
    __u64 v;

    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

    return v;
}

/** Write TICK_compare Register.
 *
 * @param v New value of TICK_compare register.
 */
static inline void tick_compare_write(__u64 v)
{
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
}
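
/*
 * Minimal usage sketch: a tick timer is typically armed by programming
 * TICK_compare relative to the current TICK value; `delta' below is a
 * hypothetical cycle count, not something defined in this header. When TICK
 * reaches TICK_compare, a tick interrupt is requested, provided the
 * interrupt-disable bit of TICK_compare is clear.
 *
 *     tick_compare_write(tick_read() + delta);
 */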

/** Read TICK Register.
 *
 * @return Value of TICK register.
 */
static inline __u64 tick_read(void)
{
    __u64 v;

    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));

    return v;
}

/** Write TICK Register.
 *
 * @param v New value of TICK register.
 */
static inline void tick_write(__u64 v)
{
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
}

/** Read SOFTINT Register.
 *
 * @return Value of SOFTINT register.
 */
static inline __u64 softint_read(void)
{
    __u64 v;

    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));

    return v;
}

/** Write SOFTINT Register.
 *
 * @param v New value of SOFTINT register.
 */
static inline void softint_write(__u64 v)
{
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
}

/** Write CLEAR_SOFTINT Register.
 *
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
 *
 * @param v New value of CLEAR_SOFTINT register.
 */
static inline void clear_softint_write(__u64 v)
{
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
}
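
/*
 * Minimal usage sketch: as described above, bits written as 1 to
 * CLEAR_SOFTINT are cleared in SOFTINT, so acknowledging a single soft
 * interrupt level `n' (a hypothetical variable) comes down to:
 *
 *     clear_softint_write(((__u64) 1) << n);
 */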

/** Enable interrupts.
 *
 * Enable interrupts and return previous value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void) {
    pstate_reg_t pstate;
    __u64 value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = true;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void) {
    pstate_reg_t pstate;
    __u64 value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = false;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Restore interrupt priority level.
 *
 * Restore IPL.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl) {
    pstate_reg_t pstate;

    pstate.value = pstate_read();
    pstate.ie = ((pstate_reg_t) ipl).ie;
    pstate_write(pstate.value);
}
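
/*
 * Minimal usage sketch of the IPL interface above: a critical section saves
 * the previous interrupt state, disables interrupts and later restores the
 * saved state, so the pattern nests correctly regardless of whether
 * interrupts were enabled on entry.
 *
 *     ipl_t ipl = interrupts_disable();
 *     ... critical section ...
 *     interrupts_restore(ipl);
 */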

/** Return interrupt priority level.
 *
 * Return IPL.
 *
 * @return Current interrupt priority level.
 */
static inline ipl_t interrupts_read(void) {
    return (ipl_t) pstate_read();
}

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 */
static inline __address get_stack_base(void)
{
    __address v;

    __asm__ volatile ("and %%sp, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));

    return v;
}
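
/*
 * Note on the arithmetic: STACK_SIZE is a power of two and the stack is
 * assumed to be STACK_SIZE-aligned, so masking %sp with ~(STACK_SIZE - 1)
 * clears the offset bits and leaves the stack base. As a worked example
 * under these assumptions, with a hypothetical STACK_SIZE of 0x2000,
 * %sp == 0x12345678 yields 0x12344000.
 */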

/** Read Version Register.
 *
 * @return Value of VER register.
 */
static inline __u64 ver_read(void)
{
    __u64 v;

    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Base Address register.
 *
 * @return Current value in TBA.
 */
static inline __u64 tba_read(void)
{
    __u64 v;

    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Program Counter register.
 *
 * @return Current value in TPC.
 */
static inline __u64 tpc_read(void)
{
    __u64 v;

    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Level register.
 *
 * @return Current value in TL.
 */
static inline __u64 tl_read(void)
{
    __u64 v;

    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));

    return v;
}

/** Write Trap Base Address register.
 *
 * @param v New value of TBA.
 */
static inline void tba_write(__u64 v)
{
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
}

/** Load __u64 from alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 *
 * @return Value read from the virtual address in the specified address space.
 */
static inline __u64 asi_u64_read(asi_t asi, __address va)
{
    __u64 v;

    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));

    return v;
}

/** Store __u64 to alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 * @param v Value to be written.
 */
static inline void asi_u64_write(asi_t asi, __address va, __u64 v)
{
    __asm__ volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" (asi) : "memory");
}
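
/*
 * Minimal usage sketch: the ASI accessors above are meant for registers and
 * memory reachable only through alternate address spaces. Note that `asi'
 * must be a compile-time constant because of the "i" inline assembly
 * constraint. ASI_EXAMPLE and VA_EXAMPLE are hypothetical placeholders, not
 * constants defined by this header.
 *
 *     __u64 v = asi_u64_read(ASI_EXAMPLE, VA_EXAMPLE);
 *     asi_u64_write(ASI_EXAMPLE, VA_EXAMPLE, v | 1);
 */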


void cpu_halt(void);
void cpu_sleep(void);
void asm_delay_loop(__u32 t);

#endif