Subversion Repositories HelenOS

Rev

Rev 4344 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 4344 Rev 4345
1
/*
1
/*
2
 * Copyright (c) 2005 Jakub Jermar
2
 * Copyright (c) 2005 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
/** @addtogroup ia64   
29
/** @addtogroup ia64   
30
 * @{
30
 * @{
31
 */
31
 */
32
/** @file
32
/** @file
33
 */
33
 */
34
 
34
 
35
#ifndef KERN_ia64_ASM_H_
35
#ifndef KERN_ia64_ASM_H_
36
#define KERN_ia64_ASM_H_
36
#define KERN_ia64_ASM_H_
37
 
37
 
38
#include <config.h>
38
#include <config.h>
-
 
39
#include <typedefs.h>
39
#include <arch/types.h>
40
#include <arch/types.h>
40
#include <arch/register.h>
41
#include <arch/register.h>
41
 
42
 
42
#define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL
43
#define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL
43
 
44
 
44
/** Write one byte to an I/O port.
 *
 * Legacy I/O ports are memory-mapped on ia64: the port number is
 * scattered into the uncacheable region at IA64_IOSPACE_ADDRESS
 * (low 12 bits select the offset within a page, the remaining port
 * bits select the page).
 *
 * @param port I/O port address.
 * @param v    Value to write.
 */
static inline void pio_write_8(ioport8_t *port, uint8_t v)
{
    uintptr_t addr = (uintptr_t) port;
    uintptr_t offs = (addr & 0xfff) | ((addr >> 2) << 12);

    *((uint8_t *) (IA64_IOSPACE_ADDRESS + offs)) = v;

    /* Memory fence orders the store with respect to later accesses. */
    asm volatile ("mf\n" ::: "memory");
}
53
 
54
 
54
/** Write one half-word to an I/O port.
 *
 * See pio_write_8() for the port-number-to-address mapping.
 *
 * @param port I/O port address.
 * @param v    Value to write.
 */
static inline void pio_write_16(ioport16_t *port, uint16_t v)
{
    uintptr_t addr = (uintptr_t) port;
    uintptr_t offs = (addr & 0xfff) | ((addr >> 2) << 12);

    *((uint16_t *) (IA64_IOSPACE_ADDRESS + offs)) = v;

    /* Memory fence orders the store with respect to later accesses. */
    asm volatile ("mf\n" ::: "memory");
}
63
 
64
 
64
/** Write one word to an I/O port.
 *
 * See pio_write_8() for the port-number-to-address mapping.
 *
 * @param port I/O port address.
 * @param v    Value to write.
 */
static inline void pio_write_32(ioport32_t *port, uint32_t v)
{
    uintptr_t addr = (uintptr_t) port;
    uintptr_t offs = (addr & 0xfff) | ((addr >> 2) << 12);

    *((uint32_t *) (IA64_IOSPACE_ADDRESS + offs)) = v;

    /* Memory fence orders the store with respect to later accesses. */
    asm volatile ("mf\n" ::: "memory");
}
73
 
74
 
74
/** Read one byte from an I/O port.
 *
 * See pio_write_8() for the port-number-to-address mapping.
 *
 * @param port I/O port address.
 *
 * @return Value read from the port.
 */
static inline uint8_t pio_read_8(ioport8_t *port)
{
    uintptr_t addr = (uintptr_t) port;
    uintptr_t offs = (addr & 0xfff) | ((addr >> 2) << 12);

    /* Fence first so the load is not satisfied by an older access. */
    asm volatile ("mf\n" ::: "memory");

    return *((uint8_t *) (IA64_IOSPACE_ADDRESS + offs));
}
83
 
84
 
84
/** Read one half-word from an I/O port.
 *
 * See pio_write_8() for the port-number-to-address mapping.
 *
 * @param port I/O port address.
 *
 * @return Value read from the port.
 */
static inline uint16_t pio_read_16(ioport16_t *port)
{
    uintptr_t prt = (uintptr_t) port;

    asm volatile ("mf\n" ::: "memory");

    /*
     * The offset mask is 0xfff, consistent with every other accessor
     * in this file. (An earlier revision used 0xffE here, silently
     * clearing bit 0 of the port offset.)
     */
    return *((uint16_t *)(IA64_IOSPACE_ADDRESS +
        ((prt & 0xfff) | ((prt >> 2) << 12))));
}
93
 
94
 
94
/** Read one word from an I/O port.
 *
 * See pio_write_8() for the port-number-to-address mapping.
 *
 * @param port I/O port address.
 *
 * @return Value read from the port.
 */
static inline uint32_t pio_read_32(ioport32_t *port)
{
    uintptr_t addr = (uintptr_t) port;
    uintptr_t offs = (addr & 0xfff) | ((addr >> 2) << 12);

    /* Fence first so the load is not satisfied by an older access. */
    asm volatile ("mf\n" ::: "memory");

    return *((uint32_t *) (IA64_IOSPACE_ADDRESS + offs));
}
103
 
104
 
104
/** Return base address of current stack
105
/** Return base address of current stack
105
 *
106
 *
106
 * Return the base address of the current stack.
107
 * Return the base address of the current stack.
107
 * The stack is assumed to be STACK_SIZE long.
108
 * The stack is assumed to be STACK_SIZE long.
108
 * The stack must start on page boundary.
109
 * The stack must start on page boundary.
109
 */
110
 */
110
static inline uintptr_t get_stack_base(void)
111
static inline uintptr_t get_stack_base(void)
111
{
112
{
112
    uint64_t v;
113
    uint64_t v;
113
 
114
 
114
    //I'm not sure why but this code bad inlines in scheduler, 
115
    //I'm not sure why but this code bad inlines in scheduler, 
115
    //so THE shifts about 16B and causes kernel panic
116
    //so THE shifts about 16B and causes kernel panic
116
    //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
117
    //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
117
    //return v;
118
    //return v;
118
   
119
   
119
    //this code have the same meaning but inlines well
120
    //this code have the same meaning but inlines well
120
    asm volatile ("mov %0 = r12" : "=r" (v)  );
121
    asm volatile ("mov %0 = r12" : "=r" (v)  );
121
    return v & (~(STACK_SIZE-1));
122
    return v & (~(STACK_SIZE-1));
122
}
123
}
123
 
124
 
124
/** Return Processor State Register.
125
/** Return Processor State Register.
125
 *
126
 *
126
 * @return PSR.
127
 * @return PSR.
127
 */
128
 */
128
static inline uint64_t psr_read(void)
129
static inline uint64_t psr_read(void)
129
{
130
{
130
    uint64_t v;
131
    uint64_t v;
131
   
132
   
132
    asm volatile ("mov %0 = psr\n" : "=r" (v));
133
    asm volatile ("mov %0 = psr\n" : "=r" (v));
133
   
134
   
134
    return v;
135
    return v;
135
}
136
}
136
 
137
 
137
/** Read IVA (Interruption Vector Address).
138
/** Read IVA (Interruption Vector Address).
138
 *
139
 *
139
 * @return Return location of interruption vector table.
140
 * @return Return location of interruption vector table.
140
 */
141
 */
141
static inline uint64_t iva_read(void)
142
static inline uint64_t iva_read(void)
142
{
143
{
143
    uint64_t v;
144
    uint64_t v;
144
   
145
   
145
    asm volatile ("mov %0 = cr.iva\n" : "=r" (v));
146
    asm volatile ("mov %0 = cr.iva\n" : "=r" (v));
146
   
147
   
147
    return v;
148
    return v;
148
}
149
}
149
 
150
 
150
/** Write IVA (Interruption Vector Address) register.
 *
 * Relocates the interruption vector table.
 *
 * @param v New location of interruption vector table.
 */
static inline void iva_write(uint64_t v)
{
    asm volatile ("mov cr.iva = %0\n" : : "r" (v));
}
158
 
159
 
159
 
160
 
160
/** Read IVR (External Interrupt Vector Register).
161
/** Read IVR (External Interrupt Vector Register).
161
 *
162
 *
162
 * @return Highest priority, pending, unmasked external interrupt vector.
163
 * @return Highest priority, pending, unmasked external interrupt vector.
163
 */
164
 */
164
static inline uint64_t ivr_read(void)
165
static inline uint64_t ivr_read(void)
165
{
166
{
166
    uint64_t v;
167
    uint64_t v;
167
   
168
   
168
    asm volatile ("mov %0 = cr.ivr\n" : "=r" (v));
169
    asm volatile ("mov %0 = cr.ivr\n" : "=r" (v));
169
   
170
   
170
    return v;
171
    return v;
171
}
172
}
172
 
173
 
173
static inline uint64_t cr64_read(void)
174
static inline uint64_t cr64_read(void)
174
{
175
{
175
    uint64_t v;
176
    uint64_t v;
176
   
177
   
177
    asm volatile ("mov %0 = cr64\n" : "=r" (v));
178
    asm volatile ("mov %0 = cr64\n" : "=r" (v));
178
   
179
   
179
    return v;
180
    return v;
180
}
181
}
181
 
182
 
182
 
183
 
183
/** Write ITC (Interval Timer Counter) register.
 *
 * @param v New counter value.
 */
static inline void itc_write(uint64_t v)
{
    asm volatile ("mov ar.itc = %0\n" : : "r" (v));
}
191
 
192
 
192
/** Read ITC (Interval Timer Counter) register.
193
/** Read ITC (Interval Timer Counter) register.
193
 *
194
 *
194
 * @return Current counter value.
195
 * @return Current counter value.
195
 */
196
 */
196
static inline uint64_t itc_read(void)
197
static inline uint64_t itc_read(void)
197
{
198
{
198
    uint64_t v;
199
    uint64_t v;
199
   
200
   
200
    asm volatile ("mov %0 = ar.itc\n" : "=r" (v));
201
    asm volatile ("mov %0 = ar.itc\n" : "=r" (v));
201
   
202
   
202
    return v;
203
    return v;
203
}
204
}
204
 
205
 
205
/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(uint64_t v)
{
    asm volatile ("mov cr.itm = %0\n" : : "r" (v));
}
213
 
214
 
214
/** Read ITM (Interval Timer Match) register.
215
/** Read ITM (Interval Timer Match) register.
215
 *
216
 *
216
 * @return Match value.
217
 * @return Match value.
217
 */
218
 */
218
static inline uint64_t itm_read(void)
219
static inline uint64_t itm_read(void)
219
{
220
{
220
    uint64_t v;
221
    uint64_t v;
221
   
222
   
222
    asm volatile ("mov %0 = cr.itm\n" : "=r" (v));
223
    asm volatile ("mov %0 = cr.itm\n" : "=r" (v));
223
   
224
   
224
    return v;
225
    return v;
225
}
226
}
226
 
227
 
227
/** Read ITV (Interval Timer Vector) register.
228
/** Read ITV (Interval Timer Vector) register.
228
 *
229
 *
229
 * @return Current vector and mask bit.
230
 * @return Current vector and mask bit.
230
 */
231
 */
231
static inline uint64_t itv_read(void)
232
static inline uint64_t itv_read(void)
232
{
233
{
233
    uint64_t v;
234
    uint64_t v;
234
   
235
   
235
    asm volatile ("mov %0 = cr.itv\n" : "=r" (v));
236
    asm volatile ("mov %0 = cr.itv\n" : "=r" (v));
236
   
237
   
237
    return v;
238
    return v;
238
}
239
}
239
 
240
 
240
/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(uint64_t v)
{
    asm volatile ("mov cr.itv = %0\n" : : "r" (v));
}
248
 
249
 
249
/** Write EOI (End Of Interrupt) register.
 *
 * Signals completion of the in-service interrupt; the written
 * value itself is ignored by the hardware.
 *
 * @param v This value is ignored.
 */
static inline void eoi_write(uint64_t v)
{
    asm volatile ("mov cr.eoi = %0\n" : : "r" (v));
}
257
 
258
 
258
/** Read TPR (Task Priority Register).
259
/** Read TPR (Task Priority Register).
259
 *
260
 *
260
 * @return Current value of TPR.
261
 * @return Current value of TPR.
261
 */
262
 */
262
static inline uint64_t tpr_read(void)
263
static inline uint64_t tpr_read(void)
263
{
264
{
264
    uint64_t v;
265
    uint64_t v;
265
 
266
 
266
    asm volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
267
    asm volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
267
   
268
   
268
    return v;
269
    return v;
269
}
270
}
270
 
271
 
271
/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(uint64_t v)
{
    asm volatile ("mov cr.tpr = %0\n" : : "r" (v));
}
279
 
280
 
280
/** Disable interrupts.
281
/** Disable interrupts.
281
 *
282
 *
282
 * Disable interrupts and return previous
283
 * Disable interrupts and return previous
283
 * value of PSR.
284
 * value of PSR.
284
 *
285
 *
285
 * @return Old interrupt priority level.
286
 * @return Old interrupt priority level.
286
 */
287
 */
287
static ipl_t interrupts_disable(void)
288
static ipl_t interrupts_disable(void)
288
{
289
{
289
    uint64_t v;
290
    uint64_t v;
290
   
291
   
291
    asm volatile (
292
    asm volatile (
292
        "mov %0 = psr\n"
293
        "mov %0 = psr\n"
293
        "rsm %1\n"
294
        "rsm %1\n"
294
        : "=r" (v)
295
        : "=r" (v)
295
        : "i" (PSR_I_MASK)
296
        : "i" (PSR_I_MASK)
296
    );
297
    );
297
   
298
   
298
    return (ipl_t) v;
299
    return (ipl_t) v;
299
}
300
}
300
 
301
 
301
/** Enable interrupts.
302
/** Enable interrupts.
302
 *
303
 *
303
 * Enable interrupts and return previous
304
 * Enable interrupts and return previous
304
 * value of PSR.
305
 * value of PSR.
305
 *
306
 *
306
 * @return Old interrupt priority level.
307
 * @return Old interrupt priority level.
307
 */
308
 */
308
static ipl_t interrupts_enable(void)
309
static ipl_t interrupts_enable(void)
309
{
310
{
310
    uint64_t v;
311
    uint64_t v;
311
   
312
   
312
    asm volatile (
313
    asm volatile (
313
        "mov %0 = psr\n"
314
        "mov %0 = psr\n"
314
        "ssm %1\n"
315
        "ssm %1\n"
315
        ";;\n"
316
        ";;\n"
316
        "srlz.d\n"
317
        "srlz.d\n"
317
        : "=r" (v)
318
        : "=r" (v)
318
        : "i" (PSR_I_MASK)
319
        : "i" (PSR_I_MASK)
319
    );
320
    );
320
   
321
   
321
    return (ipl_t) v;
322
    return (ipl_t) v;
322
}
323
}
323
 
324
 
324
/** Restore interrupt priority level.
325
/** Restore interrupt priority level.
325
 *
326
 *
326
 * Restore PSR.
327
 * Restore PSR.
327
 *
328
 *
328
 * @param ipl Saved interrupt priority level.
329
 * @param ipl Saved interrupt priority level.
329
 */
330
 */
330
static inline void interrupts_restore(ipl_t ipl)
331
static inline void interrupts_restore(ipl_t ipl)
331
{
332
{
332
    if (ipl & PSR_I_MASK)
333
    if (ipl & PSR_I_MASK)
333
        (void) interrupts_enable();
334
        (void) interrupts_enable();
334
    else
335
    else
335
        (void) interrupts_disable();
336
        (void) interrupts_disable();
336
}
337
}
337
 
338
 
338
/** Return interrupt priority level.
339
/** Return interrupt priority level.
339
 *
340
 *
340
 * @return PSR.
341
 * @return PSR.
341
 */
342
 */
342
static inline ipl_t interrupts_read(void)
343
static inline ipl_t interrupts_read(void)
343
{
344
{
344
    return (ipl_t) psr_read();
345
    return (ipl_t) psr_read();
345
}
346
}
346
 
347
 
347
/** Disable protection key checking.
 *
 * Clears PSR.pk via rsm.
 */
static inline void pk_disable(void)
{
    asm volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}
352
 
353
 
353
extern void cpu_halt(void);
354
extern void cpu_halt(void);
354
extern void cpu_sleep(void);
355
extern void cpu_sleep(void);
355
extern void asm_delay_loop(uint32_t t);
356
extern void asm_delay_loop(uint32_t t);
356
 
357
 
357
extern void switch_to_userspace(uintptr_t, uintptr_t, uintptr_t, uintptr_t,
358
extern void switch_to_userspace(uintptr_t, uintptr_t, uintptr_t, uintptr_t,
358
    uint64_t, uint64_t);
359
    uint64_t, uint64_t);
359
 
360
 
360
#endif
361
#endif
361
 
362
 
362
/** @}
363
/** @}
363
 */
364
 */
364
 
365