Subversion Repositories HelenOS

Rev

Rev 1881 | Rev 1885 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 1881 Rev 1882
/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/** @addtogroup sparc64
 * @{
 */
/** @file
 */

#ifndef KERN_sparc64_ASM_H_
#define KERN_sparc64_ASM_H_

#include <arch.h>
#include <typedefs.h>
#include <arch/types.h>
#include <arch/register.h>
#include <config.h>
#include <time/clock.h>

/** Read Processor State register.
 *
 * @return Value of PSTATE register.
 */
static inline uint64_t pstate_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));

    return v;
}

/** Write Processor State register.
 *
 * @param v New value of PSTATE register.
 */
static inline void pstate_write(uint64_t v)
{
    /* wrpr takes rs1 xor simm; the immediate 0 makes this a plain write. */
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
}

/** Read TICK_compare Register.
 *
 * @return Value of TICK_compare register.
 */
static inline uint64_t tick_compare_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

    return v;
}

/** Write TICK_compare Register.
 *
 * @param v New value of TICK_compare register.
 */
static inline void tick_compare_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
}

/** Read TICK Register.
 *
 * @return Value of TICK register.
 */
static inline uint64_t tick_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));

    return v;
}

/** Write TICK Register.
 *
 * @param v New value of TICK register.
 */
static inline void tick_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
}

/** Read FPRS Register.
 *
 * @return Value of FPRS register.
 */
static inline uint64_t fprs_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%fprs, %0\n" : "=r" (v));

    return v;
}

/** Write FPRS Register.
 *
 * @param v New value of FPRS register.
 */
static inline void fprs_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%fprs\n" : : "r" (v), "i" (0));
}

/** Read SOFTINT Register.
 *
 * @return Value of SOFTINT register.
 */
static inline uint64_t softint_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));

    return v;
}

/** Write SOFTINT Register.
 *
 * @param v New value of SOFTINT register.
 */
static inline void softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
}

/** Write CLEAR_SOFTINT Register.
 *
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
 *
 * @param v New value of CLEAR_SOFTINT register.
 */
static inline void clear_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
}

/** Write SET_SOFTINT Register.
 *
 * Bits set in SET_SOFTINT register will be set in SOFTINT register.
 *
 * @param v New value of SET_SOFTINT register.
 */
static inline void set_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%set_softint\n" : : "r" (v), "i" (0));
}

/** Enable interrupts.
177
/** Enable interrupts.
156
 *
178
 *
157
 * Enable interrupts and return previous
179
 * Enable interrupts and return previous
158
 * value of IPL.
180
 * value of IPL.
159
 *
181
 *
160
 * @return Old interrupt priority level.
182
 * @return Old interrupt priority level.
161
 */
183
 */
162
static inline ipl_t interrupts_enable(void) {
184
static inline ipl_t interrupts_enable(void) {
163
    pstate_reg_t pstate;
185
    pstate_reg_t pstate;
164
    uint64_t value;
186
    uint64_t value;
165
   
187
   
166
    value = pstate_read();
188
    value = pstate_read();
167
    pstate.value = value;
189
    pstate.value = value;
168
    pstate.ie = true;
190
    pstate.ie = true;
169
    pstate_write(pstate.value);
191
    pstate_write(pstate.value);
170
   
192
   
171
    return (ipl_t) value;
193
    return (ipl_t) value;
172
}
194
}
173
 
195
 
174
/** Disable interrupts.
196
/** Disable interrupts.
175
 *
197
 *
176
 * Disable interrupts and return previous
198
 * Disable interrupts and return previous
177
 * value of IPL.
199
 * value of IPL.
178
 *
200
 *
179
 * @return Old interrupt priority level.
201
 * @return Old interrupt priority level.
180
 */
202
 */
181
static inline ipl_t interrupts_disable(void) {
203
static inline ipl_t interrupts_disable(void) {
182
    pstate_reg_t pstate;
204
    pstate_reg_t pstate;
183
    uint64_t value;
205
    uint64_t value;
184
   
206
   
185
    value = pstate_read();
207
    value = pstate_read();
186
    pstate.value = value;
208
    pstate.value = value;
187
    pstate.ie = false;
209
    pstate.ie = false;
188
    pstate_write(pstate.value);
210
    pstate_write(pstate.value);
189
   
211
   
190
    return (ipl_t) value;
212
    return (ipl_t) value;
191
}
213
}
192
 
214
 
193
/** Restore interrupt priority level.
215
/** Restore interrupt priority level.
194
 *
216
 *
195
 * Restore IPL.
217
 * Restore IPL.
196
 *
218
 *
197
 * @param ipl Saved interrupt priority level.
219
 * @param ipl Saved interrupt priority level.
198
 */
220
 */
199
static inline void interrupts_restore(ipl_t ipl) {
221
static inline void interrupts_restore(ipl_t ipl) {
200
    pstate_reg_t pstate;
222
    pstate_reg_t pstate;
201
   
223
   
202
    pstate.value = pstate_read();
224
    pstate.value = pstate_read();
203
    pstate.ie = ((pstate_reg_t) ipl).ie;
225
    pstate.ie = ((pstate_reg_t) ipl).ie;
204
    pstate_write(pstate.value);
226
    pstate_write(pstate.value);
205
}
227
}
206
 
228
 
207
/** Return interrupt priority level.
229
/** Return interrupt priority level.
208
 *
230
 *
209
 * Return IPL.
231
 * Return IPL.
210
 *
232
 *
211
 * @return Current interrupt priority level.
233
 * @return Current interrupt priority level.
212
 */
234
 */
213
static inline ipl_t interrupts_read(void) {
235
static inline ipl_t interrupts_read(void) {
214
    return (ipl_t) pstate_read();
236
    return (ipl_t) pstate_read();
215
}
237
}
216
 
238
 
217
/** Return base address of current stack.
239
/** Return base address of current stack.
218
 *
240
 *
219
 * Return the base address of the current stack.
241
 * Return the base address of the current stack.
220
 * The stack is assumed to be STACK_SIZE bytes long.
242
 * The stack is assumed to be STACK_SIZE bytes long.
221
 * The stack must start on page boundary.
243
 * The stack must start on page boundary.
222
 */
244
 */
223
static inline uintptr_t get_stack_base(void)
245
static inline uintptr_t get_stack_base(void)
224
{
246
{
225
    uintptr_t v;
247
    uintptr_t v;
226
   
248
   
227
    __asm__ volatile ("andn %%sp, %1, %0\n" : "=r" (v) : "r" (STACK_SIZE-1));
249
    __asm__ volatile ("andn %%sp, %1, %0\n" : "=r" (v) : "r" (STACK_SIZE-1));
228
   
250
   
229
    return v;
251
    return v;
230
}
252
}
231
 
253
 
232
/** Read Version Register.
 *
 * @return Value of VER register.
 */
static inline uint64_t ver_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Base Address register.
 *
 * @return Current value in TBA.
 */
static inline uint64_t tba_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Program Counter register.
 *
 * @return Current value in TPC.
 */
static inline uint64_t tpc_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Level register.
 *
 * @return Current value in TL.
 */
static inline uint64_t tl_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));

    return v;
}

/** Write Trap Base Address register.
 *
 * @param v New value of TBA.
 */
static inline void tba_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
}

/** Load uint64_t from alternate space.
315
/** Load uint64_t from alternate space.
294
 *
316
 *
295
 * @param asi ASI determining the alternate space.
317
 * @param asi ASI determining the alternate space.
296
 * @param va Virtual address within the ASI.
318
 * @param va Virtual address within the ASI.
297
 *
319
 *
298
 * @return Value read from the virtual address in the specified address space.
320
 * @return Value read from the virtual address in the specified address space.
299
 */
321
 */
300
static inline uint64_t asi_u64_read(asi_t asi, uintptr_t va)
322
static inline uint64_t asi_u64_read(asi_t asi, uintptr_t va)
301
{
323
{
302
    uint64_t v;
324
    uint64_t v;
303
   
325
   
304
    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));
326
    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" (asi));
305
   
327
   
306
    return v;
328
    return v;
307
}
329
}
308
 
330
 
309
/** Store uint64_t to alternate space.
331
/** Store uint64_t to alternate space.
310
 *
332
 *
311
 * @param asi ASI determining the alternate space.
333
 * @param asi ASI determining the alternate space.
312
 * @param va Virtual address within the ASI.
334
 * @param va Virtual address within the ASI.
313
 * @param v Value to be written.
335
 * @param v Value to be written.
314
 */
336
 */
315
static inline void asi_u64_write(asi_t asi, uintptr_t va, uint64_t v)
337
static inline void asi_u64_write(asi_t asi, uintptr_t va, uint64_t v)
316
{
338
{
317
    __asm__ volatile ("stxa %0, [%1] %2\n" : :  "r" (v), "r" (va), "i" (asi) : "memory");
339
    __asm__ volatile ("stxa %0, [%1] %2\n" : :  "r" (v), "r" (va), "i" (asi) : "memory");
318
}
340
}
319
 
341
 
320
/** Flush all valid register windows to memory. */
static inline void flushw(void)
{
    __asm__ volatile ("flushw\n");
}

/** Switch to nucleus by setting TL to 1. */
static inline void nucleus_enter(void)
{
    __asm__ volatile ("wrpr %g0, 1, %tl\n");
}

/** Switch from nucleus by setting TL to 0. */
static inline void nucleus_leave(void)
{
    __asm__ volatile ("wrpr %g0, %g0, %tl\n");
}

/* Routines implemented in assembly (see arch-specific .S files). */
extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(const uint32_t usec);

/* Accessors for global registers in the alternate/interrupt register sets. */
extern uint64_t read_from_ag_g7(void);
extern void write_to_ag_g6(uint64_t val);
extern void write_to_ag_g7(uint64_t val);
extern void write_to_ig_g6(uint64_t val);

extern void switch_to_userspace(uint64_t pc, uint64_t sp, uint64_t uarg);

#endif

/** @}
 */