Subversion Repositories HelenOS-historic

Rev 1780 → Rev 1784
/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup sparc64
 * @{
 */
/** @file
 */

#ifndef KERN_sparc64_ASM_H_
#define KERN_sparc64_ASM_H_

#include <typedefs.h>
#include <arch/types.h>
#include <arch/register.h>
#include <config.h>

/** Read Processor State register.
 *
 * @return Value of PSTATE register.
 */
static inline uint64_t pstate_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));

    return v;
}

/** Write Processor State register.
 *
 * @param v New value of PSTATE register.
 */
static inline void pstate_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
}

/** Read TICK_compare Register.
 *
 * @return Value of TICK_compare register.
 */
static inline uint64_t tick_compare_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

    return v;
}

/** Write TICK_compare Register.
 *
 * @param v New value of TICK_compare register.
 */
static inline void tick_compare_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
}

/** Read TICK Register.
 *
 * @return Value of TICK register.
 */
static inline uint64_t tick_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));

    return v;
}

/** Write TICK Register.
 *
 * @param v New value of TICK register.
 */
static inline void tick_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
}
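
/*
 * Illustrative usage sketch, not part of the original header: arming the
 * next timer interrupt with the TICK/TICK_compare accessors above. The
 * cycle delta is an arbitrary caller-supplied value; handling of the
 * interrupt-disable bit in TICK_compare is deliberately omitted.
 */
#if 0
static inline void example_schedule_next_tick(uint64_t delta)
{
    /* Request a tick interrupt roughly delta cycles from now. */
    tick_compare_write(tick_read() + delta);
}
#endif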

/** Read SOFTINT Register.
 *
 * @return Value of SOFTINT register.
 */
static inline uint64_t softint_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));

    return v;
}

/** Write SOFTINT Register.
 *
 * @param v New value of SOFTINT register.
 */
static inline void softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
}

/** Write CLEAR_SOFTINT Register.
 *
 * Bits set in the CLEAR_SOFTINT register will be cleared
 * in the SOFTINT register.
 *
 * @param v New value of CLEAR_SOFTINT register.
 */
static inline void clear_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
}
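
/*
 * Illustrative usage sketch, not part of the original header: acknowledging
 * one pending soft interrupt source. Which SOFTINT bit maps to which source
 * is CPU-specific; the bit index here is just an example parameter.
 */
#if 0
static inline void example_ack_softint(int bit)
{
    /* A 1 written to CLEAR_SOFTINT clears the same bit in SOFTINT. */
    clear_softint_write((uint64_t) 1 << bit);
}
#endif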

/** Enable interrupts.
 *
 * Enable interrupts and return the previous
 * value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void) {
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = true;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Disable interrupts.
 *
 * Disable interrupts and return the previous
 * value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void) {
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = false;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Restore interrupt priority level.
 *
 * Restore IPL.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl) {
    pstate_reg_t pstate;

    pstate.value = pstate_read();
    pstate.ie = ((pstate_reg_t) ipl).ie;
    pstate_write(pstate.value);
}
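
/*
 * Illustrative usage sketch, not part of the original header: the usual
 * pattern for protecting a short critical section with the IPL primitives
 * defined above.
 */
#if 0
static inline void example_critical_section(void)
{
    ipl_t ipl;

    ipl = interrupts_disable();    /* clear PSTATE.IE, remember old state */
    /* ... access data shared with interrupt handlers ... */
    interrupts_restore(ipl);       /* put PSTATE.IE back as it was */
}
#endif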

/** Return interrupt priority level.
 *
 * Return IPL.
 *
 * @return Current interrupt priority level.
 */
static inline ipl_t interrupts_read(void) {
    return (ipl_t) pstate_read();
}

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t v;

    __asm__ volatile ("and %%sp, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));

    return v;
}
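
/*
 * Illustrative usage sketch, not part of the original header: get_stack_base()
 * rounds the stack pointer down to the start of the STACK_SIZE-aligned block
 * the current stack occupies, so, under the assumptions documented above, a
 * local variable's address falls inside [base, base + STACK_SIZE).
 */
#if 0
static inline int example_on_current_stack(void *ptr)
{
    uintptr_t base = get_stack_base();

    return (uintptr_t) ptr >= base && (uintptr_t) ptr < base + STACK_SIZE;
}
#endif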

/** Read Version Register.
 *
 * @return Value of VER register.
 */
static inline uint64_t ver_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));

    return v;
}
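
/*
 * Illustrative usage sketch, not part of the original header: extracting two
 * VER fields by hand. The bit positions (impl in bits 47:32, maxwin in bits
 * 4:0) follow the SPARC V9 register layout; <arch/register.h> may already
 * provide a structured type for this.
 */
#if 0
static inline void example_decode_ver(uint16_t *impl, uint8_t *maxwin)
{
    uint64_t ver = ver_read();

    *impl = (ver >> 32) & 0xffff;    /* CPU implementation number */
    *maxwin = ver & 0x1f;            /* number of register windows minus 1 */
}
#endif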

/** Read Trap Base Address register.
 *
 * @return Current value in TBA.
 */
static inline uint64_t tba_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Program Counter register.
 *
 * @return Current value in TPC.
 */
static inline uint64_t tpc_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Level register.
 *
 * @return Current value in TL.
 */
static inline uint64_t tl_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));

    return v;
}

/** Write Trap Base Address register.
 *
 * @param v New value of TBA.
 */
static inline void tba_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
}
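
/*
 * Illustrative usage sketch, not part of the original header: installing a
 * trap table by writing its base address to TBA. The trap_table symbol is a
 * hypothetical, suitably aligned table provided elsewhere by the kernel.
 */
#if 0
extern char trap_table[];

static inline void example_install_trap_table(void)
{
    tba_write((uint64_t) (uintptr_t) trap_table);
}
#endif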

/** Load uint64_t from alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 *
 * @return Value read from the virtual address in the specified address space.
 */
static inline uint64_t asi_u64_read(asi_t asi, uintptr_t va)
{
    uint64_t v;

    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));

    return v;
}

/** Store uint64_t to alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 * @param v Value to be written.
 */
static inline void asi_u64_write(asi_t asi, uintptr_t va, uint64_t v)
{
    __asm__ volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" (asi) : "memory");
}
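
/*
 * Illustrative usage sketch, not part of the original header: a read-modify-
 * write of a 64-bit register living in an alternate address space. Both
 * EXAMPLE_ASI and EXAMPLE_VA are made-up placeholders; real ASI numbers and
 * register addresses come from the CPU manual. Note that the "i" constraint
 * in asi_u64_read()/asi_u64_write() requires the ASI to be a compile-time
 * constant.
 */
#if 0
#define EXAMPLE_ASI  0x50    /* placeholder ASI number */
#define EXAMPLE_VA   0x30    /* placeholder register address within the ASI */

static inline void example_asi_set_bits(uint64_t bits)
{
    uint64_t val = asi_u64_read(EXAMPLE_ASI, EXAMPLE_VA);

    asi_u64_write(EXAMPLE_ASI, EXAMPLE_VA, val | bits);
}
#endif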

void cpu_halt(void);
void cpu_sleep(void);
void asm_delay_loop(uint32_t t);

#endif

/** @}
 */