/*
 * Exported from the HelenOS Subversion repository
 * (WebSVN diff view of rev 4028 vs rev 4272).
 */
1
/*
1
/*
2
 * Copyright (c) 2005 Jakub Jermar
2
 * Copyright (c) 2005 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/** @addtogroup ia64   
29
/** @addtogroup ia64   
30
 * @{
30
 * @{
31
 */
31
 */
32
/** @file
32
/** @file
33
 */
33
 */
34
 
34
 
35
#ifndef KERN_ia64_ASM_H_
35
#ifndef KERN_ia64_ASM_H_
36
#define KERN_ia64_ASM_H_
36
#define KERN_ia64_ASM_H_
37
 
37
 
38
#include <config.h>
38
#include <config.h>
39
#include <typedefs.h>
39
#include <typedefs.h>
40
#include <arch/types.h>
40
#include <arch/types.h>
41
#include <arch/register.h>
41
#include <arch/register.h>
42
 
42
 
43
#define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL
43
#define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL
44
 
44
 
45
/** Write one byte to an I/O port.
 *
 * The legacy port number is translated into the ia64 memory-mapped
 * I/O space window based at IA64_IOSPACE_ADDRESS.
 *
 * @param port I/O port address.
 * @param v    Value to be written.
 */
static inline void pio_write_8(ioport8_t *port, uint8_t v)
{
    uintptr_t prt = (uintptr_t) port;

    /*
     * Access through ioport8_t rather than plain uint8_t so the store
     * keeps the pointee's qualifiers (presumably volatile — TODO confirm
     * against the ioport8_t typedef) and cannot be elided or reordered
     * by the compiler.
     * Low 12 bits select the offset within a page; bits above bit 2 of
     * the port number select the page — assumed to follow the ia64 I/O
     * port space encoding, verify against the Itanium SDM.
     */
    *((ioport8_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12)))) = v;

    /* Memory fence: make sure the store is ordered with other accesses. */
    asm volatile ("mf\n" ::: "memory");
}
54
 
54
 
55
/** Write one half-word (16 bits) to an I/O port.
 *
 * The legacy port number is translated into the ia64 memory-mapped
 * I/O space window based at IA64_IOSPACE_ADDRESS.
 *
 * @param port I/O port address.
 * @param v    Value to be written.
 */
static inline void pio_write_16(ioport16_t *port, uint16_t v)
{
    uintptr_t prt = (uintptr_t) port;

    /*
     * Access through ioport16_t rather than plain uint16_t so the store
     * keeps the pointee's qualifiers (presumably volatile — TODO confirm)
     * and cannot be elided or reordered by the compiler.
     */
    *((ioport16_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12)))) = v;

    /* Memory fence: make sure the store is ordered with other accesses. */
    asm volatile ("mf\n" ::: "memory");
}
64
 
64
 
65
/** Write one word (32 bits) to an I/O port.
 *
 * The legacy port number is translated into the ia64 memory-mapped
 * I/O space window based at IA64_IOSPACE_ADDRESS.
 *
 * @param port I/O port address.
 * @param v    Value to be written.
 */
static inline void pio_write_32(ioport32_t *port, uint32_t v)
{
    uintptr_t prt = (uintptr_t) port;

    /*
     * Access through ioport32_t rather than plain uint32_t so the store
     * keeps the pointee's qualifiers (presumably volatile — TODO confirm)
     * and cannot be elided or reordered by the compiler.
     */
    *((ioport32_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12)))) = v;

    /* Memory fence: make sure the store is ordered with other accesses. */
    asm volatile ("mf\n" ::: "memory");
}
74
 
74
 
75
/** Read one byte from an I/O port.
 *
 * The legacy port number is translated into the ia64 memory-mapped
 * I/O space window based at IA64_IOSPACE_ADDRESS.
 *
 * @param port I/O port address.
 *
 * @return Value read from the port.
 */
static inline uint8_t pio_read_8(ioport8_t *port)
{
    uintptr_t prt = (uintptr_t) port;

    /* Memory fence: order the load after preceding accesses. */
    asm volatile ("mf\n" ::: "memory");

    /*
     * Access through ioport8_t rather than plain uint8_t so the load
     * keeps the pointee's qualifiers (presumably volatile — TODO confirm)
     * and cannot be elided by the compiler.
     */
    return *((ioport8_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12))));
}
84
 
84
 
85
/** Read one half-word (16 bits) from an I/O port.
 *
 * The legacy port number is translated into the ia64 memory-mapped
 * I/O space window based at IA64_IOSPACE_ADDRESS.
 *
 * @param port I/O port address.
 *
 * @return Value read from the port.
 */
static inline uint16_t pio_read_16(ioport16_t *port)
{
    uintptr_t prt = (uintptr_t) port;

    /* Memory fence: order the load after preceding accesses. */
    asm volatile ("mf\n" ::: "memory");

    /*
     * Access through ioport16_t rather than plain uint16_t so the load
     * keeps the pointee's qualifiers (presumably volatile — TODO confirm)
     * and cannot be elided by the compiler.
     */
    return *((ioport16_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12))));
}
94
 
94
 
95
/** Read one word (32 bits) from an I/O port.
 *
 * The legacy port number is translated into the ia64 memory-mapped
 * I/O space window based at IA64_IOSPACE_ADDRESS.
 *
 * @param port I/O port address.
 *
 * @return Value read from the port.
 */
static inline uint32_t pio_read_32(ioport32_t *port)
{
    uintptr_t prt = (uintptr_t) port;

    /* Memory fence: order the load after preceding accesses. */
    asm volatile ("mf\n" ::: "memory");

    /*
     * Access through ioport32_t rather than plain uint32_t so the load
     * keeps the pointee's qualifiers (presumably volatile — TODO confirm)
     * and cannot be elided by the compiler.
     */
    return *((ioport32_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12))));
}
104
 
104
 
105
/** Return base address of current stack
105
/** Return base address of current stack
106
 *
106
 *
107
 * Return the base address of the current stack.
107
 * Return the base address of the current stack.
108
 * The stack is assumed to be STACK_SIZE long.
108
 * The stack is assumed to be STACK_SIZE long.
109
 * The stack must start on page boundary.
109
 * The stack must start on page boundary.
110
 */
110
 */
111
static inline uintptr_t get_stack_base(void)
111
static inline uintptr_t get_stack_base(void)
112
{
112
{
113
    uint64_t v;
113
    uint64_t v;
114
 
114
 
115
    //I'm not sure why but this code bad inlines in scheduler, 
115
    //I'm not sure why but this code bad inlines in scheduler, 
116
    //so THE shifts about 16B and causes kernel panic
116
    //so THE shifts about 16B and causes kernel panic
117
    //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
117
    //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
118
    //return v;
118
    //return v;
119
   
119
   
120
    //this code have the same meaning but inlines well
120
    //this code have the same meaning but inlines well
121
    asm volatile ("mov %0 = r12" : "=r" (v)  );
121
    asm volatile ("mov %0 = r12" : "=r" (v)  );
122
    return v & (~(STACK_SIZE-1));
122
    return v & (~(STACK_SIZE-1));
123
}
123
}
124
 
124
 
125
/** Return Processor State Register.
125
/** Return Processor State Register.
126
 *
126
 *
127
 * @return PSR.
127
 * @return PSR.
128
 */
128
 */
129
static inline uint64_t psr_read(void)
129
static inline uint64_t psr_read(void)
130
{
130
{
131
    uint64_t v;
131
    uint64_t v;
132
   
132
   
133
    asm volatile ("mov %0 = psr\n" : "=r" (v));
133
    asm volatile ("mov %0 = psr\n" : "=r" (v));
134
   
134
   
135
    return v;
135
    return v;
136
}
136
}
137
 
137
 
138
/** Read IVA (Interruption Vector Address).
138
/** Read IVA (Interruption Vector Address).
139
 *
139
 *
140
 * @return Return location of interruption vector table.
140
 * @return Return location of interruption vector table.
141
 */
141
 */
142
static inline uint64_t iva_read(void)
142
static inline uint64_t iva_read(void)
143
{
143
{
144
    uint64_t v;
144
    uint64_t v;
145
   
145
   
146
    asm volatile ("mov %0 = cr.iva\n" : "=r" (v));
146
    asm volatile ("mov %0 = cr.iva\n" : "=r" (v));
147
   
147
   
148
    return v;
148
    return v;
149
}
149
}
150
 
150
 
151
/** Write IVA (Interruption Vector Address) register.
 *
 * @param v New location of the interruption vector table.
 */
static inline void iva_write(uint64_t v)
{
    /* Move v into control register cr.iva. */
    asm volatile ("mov cr.iva = %0\n" : : "r" (v));
}
159
 
159
 
160
 
160
 
161
/** Read IVR (External Interrupt Vector Register).
161
/** Read IVR (External Interrupt Vector Register).
162
 *
162
 *
163
 * @return Highest priority, pending, unmasked external interrupt vector.
163
 * @return Highest priority, pending, unmasked external interrupt vector.
164
 */
164
 */
165
static inline uint64_t ivr_read(void)
165
static inline uint64_t ivr_read(void)
166
{
166
{
167
    uint64_t v;
167
    uint64_t v;
168
   
168
   
169
    asm volatile ("mov %0 = cr.ivr\n" : "=r" (v));
169
    asm volatile ("mov %0 = cr.ivr\n" : "=r" (v));
170
   
170
   
171
    return v;
171
    return v;
172
}
172
}
173
 
173
 
174
static inline uint64_t cr64_read(void)
174
static inline uint64_t cr64_read(void)
175
{
175
{
176
    uint64_t v;
176
    uint64_t v;
177
   
177
   
178
    asm volatile ("mov %0 = cr64\n" : "=r" (v));
178
    asm volatile ("mov %0 = cr64\n" : "=r" (v));
179
   
179
   
180
    return v;
180
    return v;
181
}
181
}
182
 
182
 
183
 
183
 
184
/** Write ITC (Interval Timer Counter) register.
 *
 * @param v New counter value.
 */
static inline void itc_write(uint64_t v)
{
    /* Move v into application register ar.itc. */
    asm volatile ("mov ar.itc = %0\n" : : "r" (v));
}
192
 
192
 
193
/** Read ITC (Interval Timer Counter) register.
193
/** Read ITC (Interval Timer Counter) register.
194
 *
194
 *
195
 * @return Current counter value.
195
 * @return Current counter value.
196
 */
196
 */
197
static inline uint64_t itc_read(void)
197
static inline uint64_t itc_read(void)
198
{
198
{
199
    uint64_t v;
199
    uint64_t v;
200
   
200
   
201
    asm volatile ("mov %0 = ar.itc\n" : "=r" (v));
201
    asm volatile ("mov %0 = ar.itc\n" : "=r" (v));
202
   
202
   
203
    return v;
203
    return v;
204
}
204
}
205
 
205
 
206
/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(uint64_t v)
{
    /* Move v into control register cr.itm. */
    asm volatile ("mov cr.itm = %0\n" : : "r" (v));
}
214
 
214
 
215
/** Read ITM (Interval Timer Match) register.
215
/** Read ITM (Interval Timer Match) register.
216
 *
216
 *
217
 * @return Match value.
217
 * @return Match value.
218
 */
218
 */
219
static inline uint64_t itm_read(void)
219
static inline uint64_t itm_read(void)
220
{
220
{
221
    uint64_t v;
221
    uint64_t v;
222
   
222
   
223
    asm volatile ("mov %0 = cr.itm\n" : "=r" (v));
223
    asm volatile ("mov %0 = cr.itm\n" : "=r" (v));
224
   
224
   
225
    return v;
225
    return v;
226
}
226
}
227
 
227
 
228
/** Read ITV (Interval Timer Vector) register.
228
/** Read ITV (Interval Timer Vector) register.
229
 *
229
 *
230
 * @return Current vector and mask bit.
230
 * @return Current vector and mask bit.
231
 */
231
 */
232
static inline uint64_t itv_read(void)
232
static inline uint64_t itv_read(void)
233
{
233
{
234
    uint64_t v;
234
    uint64_t v;
235
   
235
   
236
    asm volatile ("mov %0 = cr.itv\n" : "=r" (v));
236
    asm volatile ("mov %0 = cr.itv\n" : "=r" (v));
237
   
237
   
238
    return v;
238
    return v;
239
}
239
}
240
 
240
 
241
/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(uint64_t v)
{
    /* Move v into control register cr.itv. */
    asm volatile ("mov cr.itv = %0\n" : : "r" (v));
}
249
 
249
 
250
/** Write EOI (End Of Interrupt) register.
 *
 * Writing cr.eoi signals completion of the current interrupt.
 *
 * @param v This value is ignored by the hardware; only the write matters.
 */
static inline void eoi_write(uint64_t v)
{
    asm volatile ("mov cr.eoi = %0\n" : : "r" (v));
}
258
 
258
 
259
/** Read TPR (Task Priority Register).
259
/** Read TPR (Task Priority Register).
260
 *
260
 *
261
 * @return Current value of TPR.
261
 * @return Current value of TPR.
262
 */
262
 */
263
static inline uint64_t tpr_read(void)
263
static inline uint64_t tpr_read(void)
264
{
264
{
265
    uint64_t v;
265
    uint64_t v;
266
 
266
 
267
    asm volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
267
    asm volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
268
   
268
   
269
    return v;
269
    return v;
270
}
270
}
271
 
271
 
272
/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(uint64_t v)
{
    /* Move v into control register cr.tpr. */
    asm volatile ("mov cr.tpr = %0\n" : : "r" (v));
}
280
 
280
 
281
/** Disable interrupts.
281
/** Disable interrupts.
282
 *
282
 *
283
 * Disable interrupts and return previous
283
 * Disable interrupts and return previous
284
 * value of PSR.
284
 * value of PSR.
285
 *
285
 *
286
 * @return Old interrupt priority level.
286
 * @return Old interrupt priority level.
287
 */
287
 */
288
static ipl_t interrupts_disable(void)
288
static ipl_t interrupts_disable(void)
289
{
289
{
290
    uint64_t v;
290
    uint64_t v;
291
   
291
   
292
    asm volatile (
292
    asm volatile (
293
        "mov %0 = psr\n"
293
        "mov %0 = psr\n"
294
        "rsm %1\n"
294
        "rsm %1\n"
295
        : "=r" (v)
295
        : "=r" (v)
296
        : "i" (PSR_I_MASK)
296
        : "i" (PSR_I_MASK)
297
    );
297
    );
298
   
298
   
299
    return (ipl_t) v;
299
    return (ipl_t) v;
300
}
300
}
301
 
301
 
302
/** Enable interrupts.
302
/** Enable interrupts.
303
 *
303
 *
304
 * Enable interrupts and return previous
304
 * Enable interrupts and return previous
305
 * value of PSR.
305
 * value of PSR.
306
 *
306
 *
307
 * @return Old interrupt priority level.
307
 * @return Old interrupt priority level.
308
 */
308
 */
309
static ipl_t interrupts_enable(void)
309
static ipl_t interrupts_enable(void)
310
{
310
{
311
    uint64_t v;
311
    uint64_t v;
312
   
312
   
313
    asm volatile (
313
    asm volatile (
314
        "mov %0 = psr\n"
314
        "mov %0 = psr\n"
315
        "ssm %1\n"
315
        "ssm %1\n"
316
        ";;\n"
316
        ";;\n"
317
        "srlz.d\n"
317
        "srlz.d\n"
318
        : "=r" (v)
318
        : "=r" (v)
319
        : "i" (PSR_I_MASK)
319
        : "i" (PSR_I_MASK)
320
    );
320
    );
321
   
321
   
322
    return (ipl_t) v;
322
    return (ipl_t) v;
323
}
323
}
324
 
324
 
325
/** Restore interrupt priority level.
325
/** Restore interrupt priority level.
326
 *
326
 *
327
 * Restore PSR.
327
 * Restore PSR.
328
 *
328
 *
329
 * @param ipl Saved interrupt priority level.
329
 * @param ipl Saved interrupt priority level.
330
 */
330
 */
331
static inline void interrupts_restore(ipl_t ipl)
331
static inline void interrupts_restore(ipl_t ipl)
332
{
332
{
333
    if (ipl & PSR_I_MASK)
333
    if (ipl & PSR_I_MASK)
334
        (void) interrupts_enable();
334
        (void) interrupts_enable();
335
    else
335
    else
336
        (void) interrupts_disable();
336
        (void) interrupts_disable();
337
}
337
}
338
 
338
 
339
/** Return interrupt priority level.
339
/** Return interrupt priority level.
340
 *
340
 *
341
 * @return PSR.
341
 * @return PSR.
342
 */
342
 */
343
static inline ipl_t interrupts_read(void)
343
static inline ipl_t interrupts_read(void)
344
{
344
{
345
    return (ipl_t) psr_read();
345
    return (ipl_t) psr_read();
346
}
346
}
347
 
347
 
348
/** Disable protection key checking.
 *
 * Clears the PSR bits selected by PSR_PK_MASK via the rsm instruction.
 */
static inline void pk_disable(void)
{
    asm volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}
353
 
353
 
354
extern void cpu_halt(void);
354
extern void cpu_halt(void);
355
extern void cpu_sleep(void);
355
extern void cpu_sleep(void);
356
extern void asm_delay_loop(uint32_t t);
356
extern void asm_delay_loop(uint32_t t);
357
 
357
 
358
extern void switch_to_userspace(uintptr_t, uintptr_t, uintptr_t, uintptr_t,
358
extern void switch_to_userspace(uintptr_t, uintptr_t, uintptr_t, uintptr_t,
359
    uint64_t, uint64_t);
359
    uint64_t, uint64_t);
360
 
360
 
361
#endif
361
#endif
362
 
362
 
363
/** @}
363
/** @}
364
 */
364
 */
365
 
365