Subversion Repositories HelenOS

Rev

Rev 3577 | Rev 3779 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 3577 Rev 3774
1
/*
1
/*
2
 * Copyright (c) 2005 Jakub Jermar
2
 * Copyright (c) 2005 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/** @addtogroup ia64
 * @{
 */
/** @file
 */

#ifndef KERN_ia64_ASM_H_
#define KERN_ia64_ASM_H_

#include <config.h>
#include <arch/types.h>
#include <arch/register.h>

/** I/O port number type for the legacy-port accessors below. */
typedef uint64_t ioport_t;

/** Base virtual address of the memory-mapped legacy I/O space.
 *  Port numbers are translated to offsets from this base (see inb/outb). */
#define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL
46
/** Write one byte to an I/O port.
 *
 * The port number is translated into an address within the
 * memory-mapped I/O space and written through a byte store.
 *
 * @param port Port number.
 * @param v    Value to write.
 */
static inline void  outb(ioport_t port, uint8_t v)
{
    *((uint8_t *)(IA64_IOSPACE_ADDRESS +
        ((port & 0xfff) | ((port >> 2) << 12)))) = v;

    /* mf: order the store before subsequent memory accesses. */
    asm volatile ("mf\n" ::: "memory");
}
52
 
53
 
53
/** Write two bytes to an I/O port.
 *
 * The port number is translated into an address within the
 * memory-mapped I/O space and written through a 16-bit store.
 *
 * @param port Port number.
 * @param v    Value to write.
 */
static inline void  outw(ioport_t port, uint16_t v)
{
    *((uint16_t *)(IA64_IOSPACE_ADDRESS +
        ((port & 0xfff) | ((port >> 2) << 12)))) = v;

    /* mf: order the store before subsequent memory accesses. */
    asm volatile ("mf\n" ::: "memory");
}
59
 
61
 
60
/** Write four bytes to an I/O port.
 *
 * The port number is translated into an address within the
 * memory-mapped I/O space and written through a 32-bit store.
 *
 * @param port Port number.
 * @param v    Value to write.
 */
static inline void  outl(ioport_t port, uint32_t v)
{
    *((uint32_t *)(IA64_IOSPACE_ADDRESS +
        ((port & 0xfff) | ((port >> 2) << 12)))) = v;

    /* mf: order the store before subsequent memory accesses. */
    asm volatile ("mf\n" ::: "memory");
}
66
 
69
 
67
 
-
 
68
 
-
 
69
/** Read one byte from an I/O port.
 *
 * @param port Port number.
 *
 * @return Value read.
 */
static inline uint8_t inb(ioport_t port)
{
    /* mf: order this load after all preceding memory accesses. */
    asm volatile ("mf\n" ::: "memory");

    return *((uint8_t *)(IA64_IOSPACE_ADDRESS +
        ((port & 0xfff) | ((port >> 2) << 12))));
}
75
 
77
 
76
/** Read two bytes from an I/O port.
 *
 * @param port Port number.
 *
 * @return Value read.
 */
static inline uint16_t inw(ioport_t port)
{
    /* mf: order this load after all preceding memory accesses. */
    asm volatile ("mf\n" ::: "memory");

    /*
     * NOTE(review): the mask here is 0xffE (clears bit 0), unlike the
     * 0xfff used by every other accessor in this file — presumably to
     * force 2-byte alignment of the 16-bit load, but outw() does not do
     * the same. Verify whether this asymmetry is intentional.
     */
    return *((uint16_t *)(IA64_IOSPACE_ADDRESS +
        ((port & 0xffE) | ((port >> 2) << 12))));
}
82
 
85
 
83
/** Read four bytes from an I/O port.
 *
 * @param port Port number.
 *
 * @return Value read.
 */
static inline uint32_t inl(ioport_t port)
{
    /* mf: order this load after all preceding memory accesses. */
    asm volatile ("mf\n" ::: "memory");

    return *((uint32_t *)(IA64_IOSPACE_ADDRESS +
        ((port & 0xfff) | ((port >> 2) << 12))));
}
89
 
93
 
90
 
-
 
91
 
-
 
92
/** Return base address of current stack
94
/** Return base address of current stack
93
 *
95
 *
94
 * Return the base address of the current stack.
96
 * Return the base address of the current stack.
95
 * The stack is assumed to be STACK_SIZE long.
97
 * The stack is assumed to be STACK_SIZE long.
96
 * The stack must start on page boundary.
98
 * The stack must start on page boundary.
97
 */
99
 */
98
static inline uintptr_t get_stack_base(void)
100
static inline uintptr_t get_stack_base(void)
99
{
101
{
100
    uint64_t v;
102
    uint64_t v;
101
 
103
 
102
    //I'm not sure why but this code bad inlines in scheduler, 
104
    //I'm not sure why but this code bad inlines in scheduler, 
103
    //so THE shifts about 16B and causes kernel panic
105
    //so THE shifts about 16B and causes kernel panic
104
    //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
106
    //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
105
    //return v;
107
    //return v;
106
   
108
   
107
    //this code have the same meaning but inlines well
109
    //this code have the same meaning but inlines well
108
    asm volatile ("mov %0 = r12" : "=r" (v)  );
110
    asm volatile ("mov %0 = r12" : "=r" (v)  );
109
    return v & (~(STACK_SIZE-1));
111
    return v & (~(STACK_SIZE-1));
110
}
112
}
111
 
113
 
112
/** Return Processor State Register.
 *
 * @return PSR.
 */
static inline uint64_t psr_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = psr\n" : "=r" (v));
   
    return v;
}
124
 
126
 
125
/** Read IVA (Interruption Vector Address).
 *
 * @return Return location of interruption vector table.
 */
static inline uint64_t iva_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = cr.iva\n" : "=r" (v));
   
    return v;
}
137
 
139
 
138
/** Write IVA (Interruption Vector Address) register.
 *
 * @param v New location of interruption vector table.
 */
static inline void iva_write(uint64_t v)
{
    asm volatile ("mov cr.iva = %0\n" : : "r" (v));
}
146
 
148
 
147
 
149
 
148
/** Read IVR (External Interrupt Vector Register).
 *
 * @return Highest priority, pending, unmasked external interrupt vector.
 */
static inline uint64_t ivr_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = cr.ivr\n" : "=r" (v));
   
    return v;
}
160
 
162
 
161
/** Read control register cr64.
 *
 * NOTE(review): cr64 is addressed by number rather than by mnemonic —
 * in the ia64 control-register file this slot is presumably cr.lid
 * (Local Interrupt ID); confirm the intended register.
 *
 * @return Raw value of cr64.
 */
static inline uint64_t cr64_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = cr64\n" : "=r" (v));
   
    return v;
}
169
 
171
 
170
 
172
 
171
/** Write ITC (Interval Timer Counter) register.
 *
 * @param v New counter value.
 */
static inline void itc_write(uint64_t v)
{
    asm volatile ("mov ar.itc = %0\n" : : "r" (v));
}
179
 
181
 
180
/** Read ITC (Interval Timer Counter) register.
 *
 * @return Current counter value.
 */
static inline uint64_t itc_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = ar.itc\n" : "=r" (v));
   
    return v;
}
192
 
194
 
193
/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(uint64_t v)
{
    asm volatile ("mov cr.itm = %0\n" : : "r" (v));
}
201
 
203
 
202
/** Read ITM (Interval Timer Match) register.
 *
 * @return Match value.
 */
static inline uint64_t itm_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = cr.itm\n" : "=r" (v));
   
    return v;
}
214
 
216
 
215
/** Read ITV (Interval Timer Vector) register.
 *
 * @return Current vector and mask bit.
 */
static inline uint64_t itv_read(void)
{
    uint64_t v;
   
    asm volatile ("mov %0 = cr.itv\n" : "=r" (v));
   
    return v;
}
227
 
229
 
228
/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(uint64_t v)
{
    asm volatile ("mov cr.itv = %0\n" : : "r" (v));
}
236
 
238
 
237
/** Write EOI (End Of Interrupt) register.
 *
 * Signals completion of the interrupt currently being serviced.
 *
 * @param v This value is ignored.
 */
static inline void eoi_write(uint64_t v)
{
    asm volatile ("mov cr.eoi = %0\n" : : "r" (v));
}
245
 
247
 
246
/** Read TPR (Task Priority Register).
 *
 * @return Current value of TPR.
 */
static inline uint64_t tpr_read(void)
{
    uint64_t v;
 
    asm volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
   
    return v;
}
258
 
260
 
259
/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(uint64_t v)
{
    asm volatile ("mov cr.tpr = %0\n" : : "r" (v));
}
267
 
269
 
268
/** Disable interrupts.
270
/** Disable interrupts.
269
 *
271
 *
270
 * Disable interrupts and return previous
272
 * Disable interrupts and return previous
271
 * value of PSR.
273
 * value of PSR.
272
 *
274
 *
273
 * @return Old interrupt priority level.
275
 * @return Old interrupt priority level.
274
 */
276
 */
275
static ipl_t interrupts_disable(void)
277
static ipl_t interrupts_disable(void)
276
{
278
{
277
    uint64_t v;
279
    uint64_t v;
278
   
280
   
279
    asm volatile (
281
    asm volatile (
280
        "mov %0 = psr\n"
282
        "mov %0 = psr\n"
281
        "rsm %1\n"
283
        "rsm %1\n"
282
        : "=r" (v)
284
        : "=r" (v)
283
        : "i" (PSR_I_MASK)
285
        : "i" (PSR_I_MASK)
284
    );
286
    );
285
   
287
   
286
    return (ipl_t) v;
288
    return (ipl_t) v;
287
}
289
}
288
 
290
 
289
/** Enable interrupts.
291
/** Enable interrupts.
290
 *
292
 *
291
 * Enable interrupts and return previous
293
 * Enable interrupts and return previous
292
 * value of PSR.
294
 * value of PSR.
293
 *
295
 *
294
 * @return Old interrupt priority level.
296
 * @return Old interrupt priority level.
295
 */
297
 */
296
static ipl_t interrupts_enable(void)
298
static ipl_t interrupts_enable(void)
297
{
299
{
298
    uint64_t v;
300
    uint64_t v;
299
   
301
   
300
    asm volatile (
302
    asm volatile (
301
        "mov %0 = psr\n"
303
        "mov %0 = psr\n"
302
        "ssm %1\n"
304
        "ssm %1\n"
303
        ";;\n"
305
        ";;\n"
304
        "srlz.d\n"
306
        "srlz.d\n"
305
        : "=r" (v)
307
        : "=r" (v)
306
        : "i" (PSR_I_MASK)
308
        : "i" (PSR_I_MASK)
307
    );
309
    );
308
   
310
   
309
    return (ipl_t) v;
311
    return (ipl_t) v;
310
}
312
}
311
 
313
 
312
/** Restore interrupt priority level.
314
/** Restore interrupt priority level.
313
 *
315
 *
314
 * Restore PSR.
316
 * Restore PSR.
315
 *
317
 *
316
 * @param ipl Saved interrupt priority level.
318
 * @param ipl Saved interrupt priority level.
317
 */
319
 */
318
static inline void interrupts_restore(ipl_t ipl)
320
static inline void interrupts_restore(ipl_t ipl)
319
{
321
{
320
    if (ipl & PSR_I_MASK)
322
    if (ipl & PSR_I_MASK)
321
        (void) interrupts_enable();
323
        (void) interrupts_enable();
322
    else
324
    else
323
        (void) interrupts_disable();
325
        (void) interrupts_disable();
324
}
326
}
325
 
327
 
326
/** Return interrupt priority level.
328
/** Return interrupt priority level.
327
 *
329
 *
328
 * @return PSR.
330
 * @return PSR.
329
 */
331
 */
330
static inline ipl_t interrupts_read(void)
332
static inline ipl_t interrupts_read(void)
331
{
333
{
332
    return (ipl_t) psr_read();
334
    return (ipl_t) psr_read();
333
}
335
}
334
 
336
 
335
/** Disable protection key checking.
 *
 * Clears the PSR protection-key bit via rsm.
 */
static inline void pk_disable(void)
{
    asm volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}
340
 
342
 
341
/* Implemented outside this header (assembly/C sources elsewhere in the tree). */
extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(uint32_t t);

/* Parameters (in order): entry, sp, bsp, uspace_uarg, ipsr, rsc. */
extern void switch_to_userspace(uintptr_t, uintptr_t, uintptr_t, uintptr_t,
    uint64_t, uint64_t);

#endif

/** @}
 */
351
 
354