Subversion Repositories HelenOS

Diff of the sparc64 kernel asm.h header (include guard KERN_sparc64_ASM_H_) between Rev 1855 and Rev 1856. Rev 1856 adds the extern keyword to the cpu_halt(), cpu_sleep() and asm_delay_loop() declarations and introduces four new declarations: read_from_ag_g7(), write_to_ag_g6(), write_to_ag_g7() and write_to_ig_g6(). The file is shown below as of Rev 1856.

/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup sparc64
 * @{
 */
/** @file
 */

#ifndef KERN_sparc64_ASM_H_
#define KERN_sparc64_ASM_H_

#include <typedefs.h>
#include <arch/types.h>
#include <arch/register.h>
#include <config.h>

/** Read Processor State register.
 *
 * @return Value of PSTATE register.
 */
static inline uint64_t pstate_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));

    return v;
}

/** Write Processor State register.
 *
 * @param v New value of PSTATE register.
 */
static inline void pstate_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
}

/** Read TICK_compare Register.
 *
 * @return Value of TICK_compare register.
 */
static inline uint64_t tick_compare_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

    return v;
}

/** Write TICK_compare Register.
 *
 * @param v New value of TICK_compare register.
 */
static inline void tick_compare_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
}

/** Read TICK Register.
 *
 * @return Value of TICK register.
 */
static inline uint64_t tick_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));

    return v;
}

/** Write TICK Register.
 *
 * @param v New value of TICK register.
 */
static inline void tick_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
}
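
/*
 * Usage sketch: TICK and TICK_compare are typically used together to arm a
 * timer interrupt that fires once TICK reaches the programmed value. The
 * delay below is an arbitrary illustrative constant, not a value taken from
 * the kernel configuration.
 *
 *    tick_compare_write(tick_read() + 10000);
 */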

/** Read SOFTINT Register.
 *
 * @return Value of SOFTINT register.
 */
static inline uint64_t softint_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));

    return v;
}

/** Write SOFTINT Register.
 *
 * @param v New value of SOFTINT register.
 */
static inline void softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
}

/** Write CLEAR_SOFTINT Register.
 *
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
 *
 * @param v New value of CLEAR_SOFTINT register.
 */
static inline void clear_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
}

/** Write SET_SOFTINT Register.
 *
 * Bits set in SET_SOFTINT register will be set in SOFTINT register.
 *
 * @param v New value of SET_SOFTINT register.
 */
static inline void set_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%set_softint\n" : : "r" (v), "i" (0));
}
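
/*
 * Usage sketch: SET_SOFTINT and CLEAR_SOFTINT behave as write-only set/clear
 * views of SOFTINT, so a single bit can be raised or acknowledged without a
 * read-modify-write sequence. The bit position below is illustrative only.
 *
 *    set_softint_write(1 << 1);      request a soft interrupt
 *    clear_softint_write(1 << 1);    later, acknowledge the same bit
 */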

/** Enable interrupts.
 *
 * Enable interrupts and return previous value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void) {
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = true;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void) {
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = false;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Restore interrupt priority level.
 *
 * Restore IPL.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl) {
    pstate_reg_t pstate;

    pstate.value = pstate_read();
    pstate.ie = ((pstate_reg_t) ipl).ie;
    pstate_write(pstate.value);
}
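
/*
 * Usage sketch: interrupts_disable() and interrupts_restore() bracket a
 * critical section; the saved ipl_t carries the previous interrupt-enable
 * state, so nested critical sections restore correctly.
 *
 *    ipl_t ipl = interrupts_disable();
 *    ... code that must not be preempted by an interrupt ...
 *    interrupts_restore(ipl);
 */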

/** Return interrupt priority level.
 *
 * Return IPL.
 *
 * @return Current interrupt priority level.
 */
static inline ipl_t interrupts_read(void) {
    return (ipl_t) pstate_read();
}

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t v;

    __asm__ volatile ("and %%sp, %1, %0\n" : "=r" (v) : "r" (~(STACK_SIZE-1)));

    return v;
}
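
/*
 * Worked example: masking the stack pointer with ~(STACK_SIZE - 1) rounds it
 * down to the start of the current stack. Assuming, purely for illustration,
 * STACK_SIZE == 0x2000 and %sp == 0x0000000001234abc, the mask is
 * 0xffffffffffffe000 and get_stack_base() returns 0x0000000001234000.
 */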

/** Read Version Register.
 *
 * @return Value of VER register.
 */
static inline uint64_t ver_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Base Address register.
 *
 * @return Current value in TBA.
 */
static inline uint64_t tba_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Program Counter register.
 *
 * @return Current value in TPC.
 */
static inline uint64_t tpc_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Level register.
 *
 * @return Current value in TL.
 */
static inline uint64_t tl_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));

    return v;
}

/** Write Trap Base Address register.
 *
 * @param v New value of TBA.
 */
static inline void tba_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
}
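
/*
 * Usage sketch: tba_write() is what lets the kernel point the CPU at its trap
 * table during initialization. The symbol name below is an assumption made
 * for illustration only.
 *
 *    extern uint8_t trap_table[];
 *    tba_write((uint64_t) trap_table);
 */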

/** Load uint64_t from alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 *
 * @return Value read from the virtual address in the specified address space.
 */
static inline uint64_t asi_u64_read(asi_t asi, uintptr_t va)
{
    uint64_t v;

    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));

    return v;
}

/** Store uint64_t to alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 * @param v Value to be written.
 */
static inline void asi_u64_write(asi_t asi, uintptr_t va, uint64_t v)
{
    __asm__ volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" (asi) : "memory");
}
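
/*
 * Usage sketch: the ASI accessors wrap the ldxa/stxa instructions. Note that
 * the asi argument is passed through an "i" (immediate) asm constraint, so it
 * must be a compile-time constant. The ASI number and virtual address below
 * are hypothetical placeholders chosen only for illustration.
 *
 *    #define ASI_EXAMPLE    0x58
 *    uint64_t data = asi_u64_read(ASI_EXAMPLE, 0x30);
 *    asi_u64_write(ASI_EXAMPLE, 0x30, data | 1);
 */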

/** Flush all valid register windows to memory. */
static inline void flushw(void)
{
    __asm__ volatile ("flushw\n");
}

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(uint32_t t);

extern uint64_t read_from_ag_g7(void);
extern void write_to_ag_g6(uint64_t val);
extern void write_to_ag_g7(uint64_t val);
extern void write_to_ig_g6(uint64_t val);

#endif

/** @}
 */