/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup ia64
 * @{
 */
/** @file
 */

#ifndef __ia64_ASM_H__
#define __ia64_ASM_H__

#include <config.h>
#include <arch/types.h>
#include <arch/register.h>

/** Return base address of current stack
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long
 * and must start on a page boundary.
 */
static inline __address get_stack_base(void)
{
    __u64 v;

    /* r12 is the ia64 stack pointer; clearing its low-order bits
     * yields the base of the STACK_SIZE-aligned stack. */
    __asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));

    return v;
}
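
/*
 * Usage sketch (illustrative, not part of the original header): because
 * the stack is STACK_SIZE-aligned, per-stack bookkeeping kept at its base
 * can be reached from any point of execution, e.g.:
 *
 *     the_t *the = (the_t *) get_stack_base();
 *
 * the_t here stands for the kernel's per-stack data type; treat the name
 * as an assumption, not something this header defines.
 */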

/** Return Processor State Register.
 *
 * @return PSR.
 */
static inline __u64 psr_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = psr\n" : "=r" (v));

    return v;
}

/** Read IVA (Interruption Vector Address).
 *
 * @return Location of the interruption vector table.
 */
static inline __u64 iva_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.iva\n" : "=r" (v));

    return v;
}

/** Write IVA (Interruption Vector Address) register.
 *
 * @param v New location of the interruption vector table.
 */
static inline void iva_write(__u64 v)
{
    __asm__ volatile ("mov cr.iva = %0\n" : : "r" (v));
}

/** Read IVR (External Interrupt Vector Register).
 *
 * @return Highest priority, pending, unmasked external interrupt vector.
 */
static inline __u64 ivr_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));

    return v;
}
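
/*
 * Sketch of the external interrupt acknowledge cycle these accessors
 * support (illustrative only): the handler reads cr.ivr to learn which
 * vector is being delivered and, once the interrupt has been serviced,
 * writes cr.eoi:
 *
 *     __u64 vector = ivr_read();
 *     // ... dispatch and service the interrupt ...
 *     eoi_write(0);    // written value is ignored, see eoi_write() below
 */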

/** Write ITC (Interval Timer Counter) register.
 *
 * @param v New counter value.
 */
static inline void itc_write(__u64 v)
{
    __asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
}

/** Read ITC (Interval Timer Counter) register.
 *
 * @return Current counter value.
 */
static inline __u64 itc_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));

    return v;
}

/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(__u64 v)
{
    __asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
}

/** Read ITM (Interval Timer Match) register.
 *
 * @return Match value.
 */
static inline __u64 itm_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.itm\n" : "=r" (v));

    return v;
}

/** Read ITV (Interval Timer Vector) register.
 *
 * @return Current vector and mask bit.
 */
static inline __u64 itv_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.itv\n" : "=r" (v));

    return v;
}

/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(__u64 v)
{
    __asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
}
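
/*
 * Sketch of interval timer programming with the ITC/ITM/ITV accessors
 * (illustrative; IT_VECTOR and DELTA are hypothetical names): the timer
 * interruption fires when the running counter ITC reaches the match
 * value in ITM, and is delivered on the vector programmed into ITV:
 *
 *     itv_write(IT_VECTOR);           // route timer to a vector, unmasked
 *     itm_write(itc_read() + DELTA);  // next interruption DELTA ticks away
 */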

/** Write EOI (End Of Interrupt) register.
 *
 * @param v This value is ignored.
 */
static inline void eoi_write(__u64 v)
{
    __asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
}

/** Read TPR (Task Priority Register).
 *
 * @return Current value of TPR.
 */
static inline __u64 tpr_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.tpr\n" : "=r" (v));

    return v;
}

/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(__u64 v)
{
    __asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
}

/** Disable interrupts.
 *
 * Disable interrupts and return the previous
 * value of PSR.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void)
{
    __u64 v;

    __asm__ volatile (
        "mov %0 = psr\n"
        "rsm %1\n"
        : "=r" (v)
        : "i" (PSR_I_MASK)
    );

    return (ipl_t) v;
}

/** Enable interrupts.
 *
 * Enable interrupts and return the previous
 * value of PSR.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void)
{
    __u64 v;

    __asm__ volatile (
        "mov %0 = psr\n"
        "ssm %1\n"
        ";;\n"
        "srlz.d\n"    /* serialize so the PSR.i change takes effect */
        : "=r" (v)
        : "i" (PSR_I_MASK)
    );

    return (ipl_t) v;
}

/** Restore interrupt priority level.
 *
 * Restore PSR.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl)
{
    if (ipl & PSR_I_MASK)
        (void) interrupts_enable();
    else
        (void) interrupts_disable();
}
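
/*
 * The usual critical-section pattern built from the calls above
 * (illustrative sketch):
 *
 *     ipl_t ipl = interrupts_disable();
 *     // ... code that must not be preempted by interrupts ...
 *     interrupts_restore(ipl);
 */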

/** Return interrupt priority level.
 *
 * @return PSR.
 */
static inline ipl_t interrupts_read(void)
{
    return (ipl_t) psr_read();
}

/** Disable protection key checking. */
static inline void pk_disable(void)
{
    __asm__ volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(__u32 t);

extern void switch_to_userspace(__address entry, __address sp, __address bsp, __address uspace_uarg, __u64 ipsr, __u64 rsc);

#endif

/** @}
 */