Subversion Repositories HelenOS-historic

Rev

Rev 883 | Rev 1708 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 883 Rev 1702
1
/*
1
/*
2
 * Copyright (C) 2005 Jakub Jermar
2
 * Copyright (C) 2005 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
-
 
29
/** @addtogroup sparc64
 * @{
 */
/** @file
 */
-
 
34
 
29
#ifndef __sparc64_ASM_H__
35
#ifndef __sparc64_ASM_H__
30
#define __sparc64_ASM_H__
36
#define __sparc64_ASM_H__
31
 
37
 
32
#include <typedefs.h>
38
#include <typedefs.h>
33
#include <arch/types.h>
39
#include <arch/types.h>
34
#include <arch/register.h>
40
#include <arch/register.h>
35
#include <config.h>
41
#include <config.h>
36
 
42
 
37
/** Read Processor State register.
43
/** Read Processor State register.
38
 *
44
 *
39
 * @return Value of PSTATE register.
45
 * @return Value of PSTATE register.
40
 */
46
 */
41
static inline __u64 pstate_read(void)
47
static inline __u64 pstate_read(void)
42
{
48
{
43
    __u64 v;
49
    __u64 v;
44
   
50
   
45
    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));
51
    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));
46
   
52
   
47
    return v;
53
    return v;
48
}
54
}
49
 
55
 
50
/** Write Processor State register.
56
/** Write Processor State register.
51
 *
57
 *
52
 * @param New value of PSTATE register.
58
 * @param New value of PSTATE register.
53
 */
59
 */
54
static inline void pstate_write(__u64 v)
60
static inline void pstate_write(__u64 v)
55
{
61
{
56
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
62
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
57
}
63
}
58
 
64
 
59
/** Read TICK_compare Register.
65
/** Read TICK_compare Register.
60
 *
66
 *
61
 * @return Value of TICK_comapre register.
67
 * @return Value of TICK_comapre register.
62
 */
68
 */
63
static inline __u64 tick_compare_read(void)
69
static inline __u64 tick_compare_read(void)
64
{
70
{
65
    __u64 v;
71
    __u64 v;
66
   
72
   
67
    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));
73
    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));
68
   
74
   
69
    return v;
75
    return v;
70
}
76
}
71
 
77
 
72
/** Write TICK_compare Register.
78
/** Write TICK_compare Register.
73
 *
79
 *
74
 * @param New value of TICK_comapre register.
80
 * @param New value of TICK_comapre register.
75
 */
81
 */
76
static inline void tick_compare_write(__u64 v)
82
static inline void tick_compare_write(__u64 v)
77
{
83
{
78
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
84
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
79
}
85
}
80
 
86
 
81
/** Read TICK Register.
87
/** Read TICK Register.
82
 *
88
 *
83
 * @return Value of TICK register.
89
 * @return Value of TICK register.
84
 */
90
 */
85
static inline __u64 tick_read(void)
91
static inline __u64 tick_read(void)
86
{
92
{
87
    __u64 v;
93
    __u64 v;
88
   
94
   
89
    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));
95
    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));
90
   
96
   
91
    return v;
97
    return v;
92
}
98
}
93
 
99
 
94
/** Write TICK Register.
100
/** Write TICK Register.
95
 *
101
 *
96
 * @param New value of TICK register.
102
 * @param New value of TICK register.
97
 */
103
 */
98
static inline void tick_write(__u64 v)
104
static inline void tick_write(__u64 v)
99
{
105
{
100
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
106
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
101
}
107
}
102
 
108
 
103
/** Read SOFTINT Register.
109
/** Read SOFTINT Register.
104
 *
110
 *
105
 * @return Value of SOFTINT register.
111
 * @return Value of SOFTINT register.
106
 */
112
 */
107
static inline __u64 softint_read(void)
113
static inline __u64 softint_read(void)
108
{
114
{
109
    __u64 v;
115
    __u64 v;
110
 
116
 
111
    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));
117
    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));
112
 
118
 
113
    return v;
119
    return v;
114
}
120
}
115
 
121
 
116
/** Write SOFTINT Register.
122
/** Write SOFTINT Register.
117
 *
123
 *
118
 * @param New value of SOFTINT register.
124
 * @param New value of SOFTINT register.
119
 */
125
 */
120
static inline void softint_write(__u64 v)
126
static inline void softint_write(__u64 v)
121
{
127
{
122
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
128
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
123
}
129
}
124
 
130
 
125
/** Write CLEAR_SOFTINT Register.
131
/** Write CLEAR_SOFTINT Register.
126
 *
132
 *
127
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
133
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
128
 *
134
 *
129
 * @param New value of CLEAR_SOFTINT register.
135
 * @param New value of CLEAR_SOFTINT register.
130
 */
136
 */
131
static inline void clear_softint_write(__u64 v)
137
static inline void clear_softint_write(__u64 v)
132
{
138
{
133
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
139
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
134
}
140
}
135
 
141
 
136
/** Enable interrupts.
142
/** Enable interrupts.
137
 *
143
 *
138
 * Enable interrupts and return previous
144
 * Enable interrupts and return previous
139
 * value of IPL.
145
 * value of IPL.
140
 *
146
 *
141
 * @return Old interrupt priority level.
147
 * @return Old interrupt priority level.
142
 */
148
 */
143
static inline ipl_t interrupts_enable(void) {
149
static inline ipl_t interrupts_enable(void) {
144
    pstate_reg_t pstate;
150
    pstate_reg_t pstate;
145
    __u64 value;
151
    __u64 value;
146
   
152
   
147
    value = pstate_read();
153
    value = pstate_read();
148
    pstate.value = value;
154
    pstate.value = value;
149
    pstate.ie = true;
155
    pstate.ie = true;
150
    pstate_write(pstate.value);
156
    pstate_write(pstate.value);
151
   
157
   
152
    return (ipl_t) value;
158
    return (ipl_t) value;
153
}
159
}
154
 
160
 
155
/** Disable interrupts.
161
/** Disable interrupts.
156
 *
162
 *
157
 * Disable interrupts and return previous
163
 * Disable interrupts and return previous
158
 * value of IPL.
164
 * value of IPL.
159
 *
165
 *
160
 * @return Old interrupt priority level.
166
 * @return Old interrupt priority level.
161
 */
167
 */
162
static inline ipl_t interrupts_disable(void) {
168
static inline ipl_t interrupts_disable(void) {
163
    pstate_reg_t pstate;
169
    pstate_reg_t pstate;
164
    __u64 value;
170
    __u64 value;
165
   
171
   
166
    value = pstate_read();
172
    value = pstate_read();
167
    pstate.value = value;
173
    pstate.value = value;
168
    pstate.ie = false;
174
    pstate.ie = false;
169
    pstate_write(pstate.value);
175
    pstate_write(pstate.value);
170
   
176
   
171
    return (ipl_t) value;
177
    return (ipl_t) value;
172
}
178
}
173
 
179
 
174
/** Restore interrupt priority level.
180
/** Restore interrupt priority level.
175
 *
181
 *
176
 * Restore IPL.
182
 * Restore IPL.
177
 *
183
 *
178
 * @param ipl Saved interrupt priority level.
184
 * @param ipl Saved interrupt priority level.
179
 */
185
 */
180
static inline void interrupts_restore(ipl_t ipl) {
186
static inline void interrupts_restore(ipl_t ipl) {
181
    pstate_reg_t pstate;
187
    pstate_reg_t pstate;
182
   
188
   
183
    pstate.value = pstate_read();
189
    pstate.value = pstate_read();
184
    pstate.ie = ((pstate_reg_t) ipl).ie;
190
    pstate.ie = ((pstate_reg_t) ipl).ie;
185
    pstate_write(pstate.value);
191
    pstate_write(pstate.value);
186
}
192
}
187
 
193
 
188
/** Return interrupt priority level.
194
/** Return interrupt priority level.
189
 *
195
 *
190
 * Return IPL.
196
 * Return IPL.
191
 *
197
 *
192
 * @return Current interrupt priority level.
198
 * @return Current interrupt priority level.
193
 */
199
 */
194
static inline ipl_t interrupts_read(void) {
200
static inline ipl_t interrupts_read(void) {
195
    return (ipl_t) pstate_read();
201
    return (ipl_t) pstate_read();
196
}
202
}
197
 
203
 
198
/** Return base address of current stack.
204
/** Return base address of current stack.
199
 *
205
 *
200
 * Return the base address of the current stack.
206
 * Return the base address of the current stack.
201
 * The stack is assumed to be STACK_SIZE bytes long.
207
 * The stack is assumed to be STACK_SIZE bytes long.
202
 * The stack must start on page boundary.
208
 * The stack must start on page boundary.
203
 */
209
 */
204
static inline __address get_stack_base(void)
210
static inline __address get_stack_base(void)
205
{
211
{
206
    __address v;
212
    __address v;
207
   
213
   
208
    __asm__ volatile ("and %%sp, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));
214
    __asm__ volatile ("and %%sp, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));
209
   
215
   
210
    return v;
216
    return v;
211
}
217
}
212
 
218
 
213
/** Read Version Register.
219
/** Read Version Register.
214
 *
220
 *
215
 * @return Value of VER register.
221
 * @return Value of VER register.
216
 */
222
 */
217
static inline __u64 ver_read(void)
223
static inline __u64 ver_read(void)
218
{
224
{
219
    __u64 v;
225
    __u64 v;
220
   
226
   
221
    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));
227
    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));
222
   
228
   
223
    return v;
229
    return v;
224
}
230
}
225
 
231
 
226
/** Read Trap Base Address register.
232
/** Read Trap Base Address register.
227
 *
233
 *
228
 * @return Current value in TBA.
234
 * @return Current value in TBA.
229
 */
235
 */
230
static inline __u64 tba_read(void)
236
static inline __u64 tba_read(void)
231
{
237
{
232
    __u64 v;
238
    __u64 v;
233
   
239
   
234
    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));
240
    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));
235
   
241
   
236
    return v;
242
    return v;
237
}
243
}
238
 
244
 
239
/** Read Trap Program Counter register.
245
/** Read Trap Program Counter register.
240
 *
246
 *
241
 * @return Current value in TPC.
247
 * @return Current value in TPC.
242
 */
248
 */
243
static inline __u64 tpc_read(void)
249
static inline __u64 tpc_read(void)
244
{
250
{
245
    __u64 v;
251
    __u64 v;
246
   
252
   
247
    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));
253
    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));
248
   
254
   
249
    return v;
255
    return v;
250
}
256
}
251
 
257
 
252
/** Read Trap Level register.
258
/** Read Trap Level register.
253
 *
259
 *
254
 * @return Current value in TL.
260
 * @return Current value in TL.
255
 */
261
 */
256
static inline __u64 tl_read(void)
262
static inline __u64 tl_read(void)
257
{
263
{
258
    __u64 v;
264
    __u64 v;
259
   
265
   
260
    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));
266
    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));
261
   
267
   
262
    return v;
268
    return v;
263
}
269
}
264
 
270
 
265
/** Write Trap Base Address register.
271
/** Write Trap Base Address register.
266
 *
272
 *
267
 * @param New value of TBA.
273
 * @param New value of TBA.
268
 */
274
 */
269
static inline void tba_write(__u64 v)
275
static inline void tba_write(__u64 v)
270
{
276
{
271
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
277
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
272
}
278
}
273
 
279
 
274
/** Load __u64 from alternate space.
280
/** Load __u64 from alternate space.
275
 *
281
 *
276
 * @param asi ASI determining the alternate space.
282
 * @param asi ASI determining the alternate space.
277
 * @param va Virtual address within the ASI.
283
 * @param va Virtual address within the ASI.
278
 *
284
 *
279
 * @return Value read from the virtual address in the specified address space.
285
 * @return Value read from the virtual address in the specified address space.
280
 */
286
 */
281
static inline __u64 asi_u64_read(asi_t asi, __address va)
287
static inline __u64 asi_u64_read(asi_t asi, __address va)
282
{
288
{
283
    __u64 v;
289
    __u64 v;
284
   
290
   
285
    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));
291
    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));
286
   
292
   
287
    return v;
293
    return v;
288
}
294
}
289
 
295
 
290
/** Store __u64 to alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 * @param v Value to be written.
 */
static inline void asi_u64_write(asi_t asi, __address va, __u64 v)
{
    /* "memory" clobber: the store may alias any memory object. */
    __asm__ volatile ("stxa %0, [%1] %2\n" : :  "r" (v), "r" (va), "i" (asi) : "memory");
}
300
 
306
 
301
 
307
 
302
 
308
 
303
/* Implemented outside this header (in arch-specific sources). */
void cpu_halt(void);
void cpu_sleep(void);
void asm_delay_loop(__u32 t);
306
 
312
 
307
#endif
313
#endif
-
 
314
 
-
 
315
/** @}
 */
-
 
317
 
308
 
318