Subversion Repositories HelenOS

Rev 2068 → Rev 2071
/*
 * Copyright (c) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup sparc64
 * @{
 */
/** @file
 */

#ifndef KERN_sparc64_ASM_H_
#define KERN_sparc64_ASM_H_

#include <arch.h>
#include <typedefs.h>
#include <arch/types.h>
#include <arch/register.h>
#include <config.h>
#include <time/clock.h>
#include <arch/stack.h>

/** Read Processor State register.
 *
 * @return Value of PSTATE register.
 */
static inline uint64_t pstate_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));

    return v;
}

/** Write Processor State register.
 *
 * @param v New value of PSTATE register.
 */
static inline void pstate_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
}

/** Read TICK_compare Register.
 *
 * @return Value of TICK_compare register.
 */
static inline uint64_t tick_compare_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

    return v;
}

/** Write TICK_compare Register.
 *
 * @param v New value of TICK_compare register.
 */
static inline void tick_compare_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
}

/** Read TICK Register.
 *
 * @return Value of TICK register.
 */
static inline uint64_t tick_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));

    return v;
}

/** Write TICK Register.
 *
 * @param v New value of TICK register.
 */
static inline void tick_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
}
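/* Illustrative sketch only (assumption, not part of the original header):
 * bit 63 of TICK_compare is assumed to be its interrupt-disable bit, so it
 * is kept clear while the next timer interrupt is scheduled delta cycles
 * from now. */
static inline void tick_timer_sketch(uint64_t delta)
{
    tick_compare_write((tick_read() + delta) & ~(1ULL << 63));
}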

/** Read FPRS Register.
 *
 * @return Value of FPRS register.
 */
static inline uint64_t fprs_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%fprs, %0\n" : "=r" (v));

    return v;
}

/** Write FPRS Register.
 *
 * @param v New value of FPRS register.
 */
static inline void fprs_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%fprs\n" : : "r" (v), "i" (0));
}
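/* Illustrative sketch only (assumption, not part of the original header):
 * FPRS.FEF is taken to be bit 2; setting it enables the floating-point unit
 * without touching the dirty-upper/dirty-lower bits. */
static inline void fpu_enable_sketch(void)
{
    fprs_write(fprs_read() | (1 << 2));    /* set FEF */
}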

/** Read SOFTINT Register.
 *
 * @return Value of SOFTINT register.
 */
static inline uint64_t softint_read(void)
{
    uint64_t v;

    __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));

    return v;
}

/** Write SOFTINT Register.
 *
 * @param v New value of SOFTINT register.
 */
static inline void softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
}

/** Write CLEAR_SOFTINT Register.
 *
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
 *
 * @param v New value of CLEAR_SOFTINT register.
 */
static inline void clear_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
}

/** Write SET_SOFTINT Register.
 *
 * Bits set in SET_SOFTINT register will be set in SOFTINT register.
 *
 * @param v New value of SET_SOFTINT register.
 */
static inline void set_softint_write(uint64_t v)
{
    __asm__ volatile ("wr %0, %1, %%set_softint\n" : : "r" (v), "i" (0));
}
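/* Illustrative sketch only (assumption, not part of the original header):
 * bits 1-15 of SOFTINT are taken to correspond to software interrupt levels
 * 1-15; a level-14 soft interrupt is posted and then dismissed. */
static inline void softint_sketch(void)
{
    set_softint_write(1 << 14);      /* request a level 14 interrupt */
    clear_softint_write(1 << 14);    /* acknowledge and clear it */
}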

/** Enable interrupts.
 *
 * Enable interrupts and return previous
 * value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void) {
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = true;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous
 * value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void) {
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = false;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Restore interrupt priority level.
 *
 * Restore IPL.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl) {
    pstate_reg_t pstate;

    pstate.value = pstate_read();
    pstate.ie = ((pstate_reg_t) ipl).ie;
    pstate_write(pstate.value);
}

/** Return interrupt priority level.
 *
 * Return IPL.
 *
 * @return Current interrupt priority level.
 */
static inline ipl_t interrupts_read(void) {
    return (ipl_t) pstate_read();
}
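/* Illustrative sketch only (not part of the original header): the typical
 * pattern pairs interrupts_disable() with interrupts_restore() around a
 * short critical section; the body below is just a placeholder. */
static inline void critical_section_sketch(void)
{
    ipl_t ipl = interrupts_disable();
    /* ... work that must not be interrupted would go here ... */
    interrupts_restore(ipl);
}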

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t unbiased_sp;

    __asm__ volatile ("add %%sp, %1, %0\n" : "=r" (unbiased_sp) : "i" (STACK_BIAS));

    return ALIGN_DOWN(unbiased_sp, STACK_SIZE);
}
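/* Worked example (illustrative; assumes STACK_BIAS is 2047 and STACK_SIZE is
 * 8 KiB, which is an assumption about the surrounding headers): for a biased
 * %sp of 0x400167f1, the unbiased stack pointer is 0x400167f1 + 2047 =
 * 0x40016ff0, and aligning down to 8 KiB gives the stack base 0x40016000. */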

/** Read Version Register.
 *
 * @return Value of VER register.
 */
static inline uint64_t ver_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Program Counter register.
 *
 * @return Current value in TPC.
 */
static inline uint64_t tpc_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Level register.
 *
 * @return Current value in TL.
 */
static inline uint64_t tl_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Base Address register.
 *
 * @return Current value in TBA.
 */
static inline uint64_t tba_read(void)
{
    uint64_t v;

    __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));

    return v;
}

/** Write Trap Base Address register.
 *
 * @param v New value of TBA.
 */
static inline void tba_write(uint64_t v)
{
    __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
}

/** Load uint64_t from alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 *
 * @return Value read from the virtual address in the specified address space.
 */
static inline uint64_t asi_u64_read(asi_t asi, uintptr_t va)
{
    uint64_t v;

    __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" ((unsigned) asi));

    return v;
}

/** Store uint64_t to alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 * @param v Value to be written.
 */
static inline void asi_u64_write(asi_t asi, uintptr_t va, uint64_t v)
{
    __asm__ volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" ((unsigned) asi) : "memory");
}

/** Flush all valid register windows to memory. */
static inline void flushw(void)
{
    __asm__ volatile ("flushw\n");
}

/** Switch to nucleus by setting TL to 1. */
static inline void nucleus_enter(void)
{
    __asm__ volatile ("wrpr %g0, 1, %tl\n");
}

/** Switch from nucleus by setting TL to 0. */
static inline void nucleus_leave(void)
{
    __asm__ volatile ("wrpr %g0, %g0, %tl\n");
}

/** Read UPA_CONFIG register.
 *
 * @return Value of the UPA_CONFIG register.
 */
static inline uint64_t upa_config_read(void)
{
    return asi_u64_read(ASI_UPA_CONFIG, 0);
}

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(const uint32_t usec);

extern uint64_t read_from_ag_g7(void);
extern void write_to_ag_g6(uint64_t val);
extern void write_to_ag_g7(uint64_t val);
extern void write_to_ig_g6(uint64_t val);

extern void switch_to_userspace(uint64_t pc, uint64_t sp, uint64_t uarg);

#endif

/** @}
 */