Subversion Repositories HelenOS

Rev 2787 → Rev 3536: relative to Rev 2787, the char * casts in outb() and inb() were changed to uint8_t *, and the outw(), outl(), inw() and inl() accessors were added.
/*
 * Copyright (c) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup ia64
 * @{
 */
/** @file
 */

#ifndef KERN_ia64_ASM_H_
#define KERN_ia64_ASM_H_

#include <config.h>
#include <arch/types.h>
#include <arch/register.h>

#define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL

static inline void outb(uint64_t port, uint8_t v)
{
    *((uint8_t *)(IA64_IOSPACE_ADDRESS + ((port & 0xfff) | ((port >> 2) << 12)))) = v;

    asm volatile ("mf\n" ::: "memory");
}

static inline void outw(uint64_t port, uint16_t v)
{
    *((uint16_t *)(IA64_IOSPACE_ADDRESS + ((port & 0xfff) | ((port >> 2) << 12)))) = v;

    asm volatile ("mf\n" ::: "memory");
}

static inline void outl(uint64_t port, uint32_t v)
{
    *((uint32_t *)(IA64_IOSPACE_ADDRESS + ((port & 0xfff) | ((port >> 2) << 12)))) = v;

    asm volatile ("mf\n" ::: "memory");
}

static inline uint8_t inb(uint64_t port)
{
    asm volatile ("mf\n" ::: "memory");

    return *((uint8_t *)(IA64_IOSPACE_ADDRESS + ((port & 0xfff) | ((port >> 2) << 12))));
}

static inline uint16_t inw(uint64_t port)
{
    asm volatile ("mf\n" ::: "memory");

    return *((uint16_t *)(IA64_IOSPACE_ADDRESS + ((port & 0xffE) | ((port >> 2) << 12))));
}

static inline uint32_t inl(uint64_t port)
{
    asm volatile ("mf\n" ::: "memory");

    return *((uint32_t *)(IA64_IOSPACE_ADDRESS + ((port & 0xfff) | ((port >> 2) << 12))));
}
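
/*
 * The accessors above map a legacy I/O port number into the ia64
 * memory-mapped I/O port space: the low 12 bits of the port give the offset
 * within a 4 KiB page and (port >> 2) selects the page.  For example, port
 * 0x3f8 ends up at IA64_IOSPACE_ADDRESS + 0xfe3f8, since
 * (0x3f8 & 0xfff) | ((0x3f8 >> 2) << 12) == 0x3f8 | 0xfe000.  A minimal
 * sketch of the translation as a standalone helper (illustrative only,
 * not a HelenOS function):
 */
static inline uintptr_t ia64_iospace_translate(uint64_t port)
{
    /* Same encoding that outb()/inb() and friends open-code above. */
    return (uintptr_t) (IA64_IOSPACE_ADDRESS + ((port & 0xfff) | ((port >> 2) << 12)));
}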

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE long.
 * The stack must start on a page boundary.
 */
static inline uintptr_t get_stack_base(void)
{
    uint64_t v;

    asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));

    return v;
}
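
/*
 * r12 is the ia64 stack pointer.  Because the stack is STACK_SIZE bytes long
 * and starts on a suitably aligned boundary, masking the low bits off the
 * stack pointer rounds it down to the stack base.  A worked example, assuming
 * STACK_SIZE == 0x4000 (the real value comes from <config.h>):
 *
 *     sp   = 0xe000000001234f30
 *     base = sp & ~(0x4000 - 1) = 0xe000000001234000
 */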

/** Return Processor State Register.
 *
 * @return PSR.
 */
static inline uint64_t psr_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = psr\n" : "=r" (v));

    return v;
}

/** Read IVA (Interruption Vector Address).
 *
 * @return Location of the interruption vector table.
 */
static inline uint64_t iva_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = cr.iva\n" : "=r" (v));

    return v;
}

/** Write IVA (Interruption Vector Address) register.
 *
 * @param v New location of the interruption vector table.
 */
static inline void iva_write(uint64_t v)
{
    asm volatile ("mov cr.iva = %0\n" : : "r" (v));
}

/** Read IVR (External Interrupt Vector Register).
 *
 * @return Highest priority, pending, unmasked external interrupt vector.
 */
static inline uint64_t ivr_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = cr.ivr\n" : "=r" (v));

    return v;
}

/** Write ITC (Interval Timer Counter) register.
 *
 * @param v New counter value.
 */
static inline void itc_write(uint64_t v)
{
    asm volatile ("mov ar.itc = %0\n" : : "r" (v));
}

/** Read ITC (Interval Timer Counter) register.
 *
 * @return Current counter value.
 */
static inline uint64_t itc_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = ar.itc\n" : "=r" (v));

    return v;
}

/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(uint64_t v)
{
    asm volatile ("mov cr.itm = %0\n" : : "r" (v));
}

/** Read ITM (Interval Timer Match) register.
 *
 * @return Match value.
 */
static inline uint64_t itm_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = cr.itm\n" : "=r" (v));

    return v;
}

/** Read ITV (Interval Timer Vector) register.
 *
 * @return Current vector and mask bit.
 */
static inline uint64_t itv_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = cr.itv\n" : "=r" (v));

    return v;
}

/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(uint64_t v)
{
    asm volatile ("mov cr.itv = %0\n" : : "r" (v));
}
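
/*
 * How the interval-timer accessors fit together: the CPU raises the timer
 * interrupt when ITC reaches the value programmed into ITM, and ITV selects
 * the vector it is delivered through (plus a mask bit).  A minimal sketch,
 * assuming a caller that wants a tick "ticks" ITC cycles from now; the
 * function name is illustrative, not a HelenOS interface:
 */
static inline void timer_rearm_sketch(uint64_t ticks, uint64_t vector)
{
    /* Match value: current counter plus the desired delta. */
    itm_write(itc_read() + ticks);
    /* Deliver through the given vector; leaving the mask bit clear keeps it enabled. */
    itv_write(vector);
}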

/** Write EOI (End Of Interrupt) register.
 *
 * @param v This value is ignored.
 */
static inline void eoi_write(uint64_t v)
{
    asm volatile ("mov cr.eoi = %0\n" : : "r" (v));
}
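
/*
 * Sketch of an external-interrupt acknowledge cycle built from the accessors
 * above (the dispatch step is a placeholder, not HelenOS code):
 *
 *     uint64_t vector = ivr_read();   // highest-priority pending vector
 *     // ... dispatch the handler registered for "vector" ...
 *     eoi_write(0);                   // written value is ignored
 */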

/** Read TPR (Task Priority Register).
 *
 * @return Current value of TPR.
 */
static inline uint64_t tpr_read(void)
{
    uint64_t v;

    asm volatile ("mov %0 = cr.tpr\n" : "=r" (v));

    return v;
}

/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(uint64_t v)
{
    asm volatile ("mov cr.tpr = %0\n" : : "r" (v));
}

/** Disable interrupts.
 *
 * Disable interrupts and return the previous value of PSR.
 *
 * @return Old interrupt priority level.
 */
static ipl_t interrupts_disable(void)
{
    uint64_t v;

    asm volatile (
        "mov %0 = psr\n"
        "rsm %1\n"
        : "=r" (v)
        : "i" (PSR_I_MASK)
    );

    return (ipl_t) v;
}

/** Enable interrupts.
 *
 * Enable interrupts and return the previous value of PSR.
 *
 * @return Old interrupt priority level.
 */
static ipl_t interrupts_enable(void)
{
    uint64_t v;

    asm volatile (
        "mov %0 = psr\n"
        "ssm %1\n"
        ";;\n"
        "srlz.d\n"
        : "=r" (v)
        : "i" (PSR_I_MASK)
    );

    return (ipl_t) v;
}

/** Restore interrupt priority level.
 *
 * Restore PSR.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl)
{
    if (ipl & PSR_I_MASK)
        (void) interrupts_enable();
    else
        (void) interrupts_disable();
}
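
/*
 * Typical save/restore pattern built on the functions above (a sketch; the
 * critical section is a placeholder):
 *
 *     ipl_t ipl = interrupts_disable();
 *     // ... touch state shared with interrupt context ...
 *     interrupts_restore(ipl);
 *
 * interrupts_restore() re-enables interrupts only if PSR.i was set in the
 * saved value, so nested critical sections compose correctly.
 */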

/** Return interrupt priority level.
 *
 * @return PSR.
 */
static inline ipl_t interrupts_read(void)
{
    return (ipl_t) psr_read();
}

/** Disable protection key checking. */
static inline void pk_disable(void)
{
    asm volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(uint32_t t);

extern void switch_to_userspace(uintptr_t entry, uintptr_t sp, uintptr_t bsp,
    uintptr_t uspace_uarg, uint64_t ipsr, uint64_t rsc);
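
/*
 * A gloss on the switch_to_userspace() parameters, inferred from ia64
 * conventions rather than stated in this header: entry is the userspace entry
 * point, sp the initial memory stack pointer, bsp the initial backing store
 * pointer for the register stack engine, uspace_uarg the argument passed to
 * the new userspace thread, ipsr the PSR image to load on the transition, and
 * rsc the initial Register Stack Configuration value.
 */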

#endif

/** @}
 */