Subversion Repositories HelenOS

Rev

Rev 433 | Rev 472 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 433 Rev 470
1
/*
1
/*
2
 * Copyright (C) 2005 Jakub Jermar
2
 * Copyright (C) 2005 Jakub Jermar
3
 * All rights reserved.
3
 * All rights reserved.
4
 *
4
 *
5
 * Redistribution and use in source and binary forms, with or without
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
6
 * modification, are permitted provided that the following conditions
7
 * are met:
7
 * are met:
8
 *
8
 *
9
 * - Redistributions of source code must retain the above copyright
9
 * - Redistributions of source code must retain the above copyright
10
 *   notice, this list of conditions and the following disclaimer.
10
 *   notice, this list of conditions and the following disclaimer.
11
 * - Redistributions in binary form must reproduce the above copyright
11
 * - Redistributions in binary form must reproduce the above copyright
12
 *   notice, this list of conditions and the following disclaimer in the
12
 *   notice, this list of conditions and the following disclaimer in the
13
 *   documentation and/or other materials provided with the distribution.
13
 *   documentation and/or other materials provided with the distribution.
14
 * - The name of the author may not be used to endorse or promote products
14
 * - The name of the author may not be used to endorse or promote products
15
 *   derived from this software without specific prior written permission.
15
 *   derived from this software without specific prior written permission.
16
 *
16
 *
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
21
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
22
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
25
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
 */
27
 */
28
 
28
 
29
#ifndef __ia64_ASM_H__
29
#ifndef __ia64_ASM_H__
30
#define __ia64_ASM_H__
30
#define __ia64_ASM_H__
31
 
31
 
32
#include <arch/types.h>
32
#include <arch/types.h>
33
#include <config.h>
33
#include <config.h>
34
#include <arch/register.h>
34
#include <arch/register.h>
35
 
35
 
36
/** Return base address of current stack
36
/** Return base address of current stack
37
 *
37
 *
38
 * Return the base address of the current stack.
38
 * Return the base address of the current stack.
39
 * The stack is assumed to be STACK_SIZE long.
39
 * The stack is assumed to be STACK_SIZE long.
40
 * The stack must start on page boundary.
40
 * The stack must start on page boundary.
41
 */
41
 */
42
static inline __address get_stack_base(void)
42
static inline __address get_stack_base(void)
43
{
43
{
44
    __u64 v;
44
    __u64 v;
45
 
45
 
46
    __asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
46
    __asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
47
   
47
   
48
    return v;
48
    return v;
49
}
49
}
50
 
50
 
-
 
51
/** Read IVA (Interruption Vector Address).
-
 
52
 *
-
 
53
 * @return Return location of interruption vector table.
-
 
54
 */
-
 
55
static inline __u64 iva_read(void)
-
 
56
{
-
 
57
    __u64 v;
-
 
58
   
-
 
59
    __asm__ volatile ("mov %0 = cr.iva\n" : "=r" (v));
-
 
60
   
-
 
61
    return v;
-
 
62
}
-
 
63
 
-
 
64
/** Write IVA (Interruption Vector Address) register.
-
 
65
 *
-
 
66
 * @param New location of interruption vector table.
-
 
67
 */
-
 
68
static inline void iva_write(__u64 v)
-
 
69
{
-
 
70
    __asm__ volatile ("mov cr.iva = %0\n" : : "r" (v));
-
 
71
}
-
 
72
 
-
 
73
 
51
/** Read IVR (External Interrupt Vector Register).
74
/** Read IVR (External Interrupt Vector Register).
52
 *
75
 *
53
 * @return Highest priority, pending, unmasked external interrupt vector.
76
 * @return Highest priority, pending, unmasked external interrupt vector.
54
 */
77
 */
55
static inline __u64 ivr_read(void)
78
static inline __u64 ivr_read(void)
56
{
79
{
57
    __u64 v;
80
    __u64 v;
58
   
81
   
59
    __asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));
82
    __asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));
60
   
83
   
61
    return v;
84
    return v;
62
}
85
}
63
 
86
 
64
/** Write ITC (Interval Timer Counter) register.
87
/** Write ITC (Interval Timer Counter) register.
65
 *
88
 *
66
 * @param New counter value.
89
 * @param New counter value.
67
 */
90
 */
68
static inline void itc_write(__u64 v)
91
static inline void itc_write(__u64 v)
69
{
92
{
70
    __asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
93
    __asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
71
}
94
}
72
 
95
 
73
/** Read ITC (Interval Timer Counter) register.
96
/** Read ITC (Interval Timer Counter) register.
74
 *
97
 *
75
 * @return Current counter value.
98
 * @return Current counter value.
76
 */
99
 */
77
static inline __u64 itc_read(void)
100
static inline __u64 itc_read(void)
78
{
101
{
79
    __u64 v;
102
    __u64 v;
80
   
103
   
81
    __asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));
104
    __asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));
82
   
105
   
83
    return v;
106
    return v;
84
}
107
}
85
 
108
 
86
/** Write ITM (Interval Timer Match) register.
109
/** Write ITM (Interval Timer Match) register.
87
 *
110
 *
88
 * @param New match value.
111
 * @param New match value.
89
 */
112
 */
90
static inline void itm_write(__u64 v)
113
static inline void itm_write(__u64 v)
91
{
114
{
92
    __asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
115
    __asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
93
}
116
}
94
 
117
 
95
/** Read ITV (Interval Timer Vector) register.
118
/** Read ITV (Interval Timer Vector) register.
96
 *
119
 *
97
 * @return Current vector and mask bit.
120
 * @return Current vector and mask bit.
98
 */
121
 */
99
static inline __u64 itv_read(void)
122
static inline __u64 itv_read(void)
100
{
123
{
101
    __u64 v;
124
    __u64 v;
102
   
125
   
103
    __asm__ volatile ("mov %0 = cr.itv\n" : "=r" (v));
126
    __asm__ volatile ("mov %0 = cr.itv\n" : "=r" (v));
104
   
127
   
105
    return v;
128
    return v;
106
}
129
}
107
 
130
 
108
/** Write ITV (Interval Timer Vector) register.
131
/** Write ITV (Interval Timer Vector) register.
109
 *
132
 *
110
 * @param New vector and mask bit.
133
 * @param New vector and mask bit.
111
 */
134
 */
112
static inline void itv_write(__u64 v)
135
static inline void itv_write(__u64 v)
113
{
136
{
114
    __asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
137
    __asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
115
}
138
}
116
 
139
 
117
/** Write EOI (End Of Interrupt) register.
140
/** Write EOI (End Of Interrupt) register.
118
 *
141
 *
119
 * @param This value is ignored.
142
 * @param This value is ignored.
120
 */
143
 */
121
static inline void eoi_write(__u64 v)
144
static inline void eoi_write(__u64 v)
122
{
145
{
123
    __asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
146
    __asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
124
}
147
}
125
 
148
 
126
/** Read TPR (Task Priority Register).
149
/** Read TPR (Task Priority Register).
127
 *
150
 *
128
 * @return Current value of TPR.
151
 * @return Current value of TPR.
129
 */
152
 */
130
static inline __u64 tpr_read(void)
153
static inline __u64 tpr_read(void)
131
{
154
{
132
    __u64 v;
155
    __u64 v;
133
 
156
 
134
    __asm__ volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
157
    __asm__ volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
135
   
158
   
136
    return v;
159
    return v;
137
}
160
}
138
 
161
 
139
/** Write TPR (Task Priority Register).
162
/** Write TPR (Task Priority Register).
140
 *
163
 *
141
 * @param New value of TPR.
164
 * @param New value of TPR.
142
 */
165
 */
143
static inline void tpr_write(__u64 v)
166
static inline void tpr_write(__u64 v)
144
{
167
{
145
    __asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
168
    __asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
146
}
169
}
147
 
170
 
148
/** Disable interrupts.
171
/** Disable interrupts.
149
 *
172
 *
150
 * Disable interrupts and return previous
173
 * Disable interrupts and return previous
151
 * value of PSR.
174
 * value of PSR.
152
 *
175
 *
153
 * @return Old interrupt priority level.
176
 * @return Old interrupt priority level.
154
 */
177
 */
155
static ipl_t interrupts_disable(void)
178
static ipl_t interrupts_disable(void)
156
{
179
{
157
    __u64 v;
180
    __u64 v;
158
   
181
   
159
    __asm__ volatile (
182
    __asm__ volatile (
160
        "mov %0 = psr\n"
183
        "mov %0 = psr\n"
161
        "rsm %1\n"
184
        "rsm %1\n"
162
        : "=r" (v)
185
        : "=r" (v)
163
        : "i" (PSR_I_MASK)
186
        : "i" (PSR_I_MASK)
164
    );
187
    );
165
   
188
   
166
    return (ipl_t) v;
189
    return (ipl_t) v;
167
}
190
}
168
 
191
 
169
/** Enable interrupts.
192
/** Enable interrupts.
170
 *
193
 *
171
 * Enable interrupts and return previous
194
 * Enable interrupts and return previous
172
 * value of PSR.
195
 * value of PSR.
173
 *
196
 *
174
 * @return Old interrupt priority level.
197
 * @return Old interrupt priority level.
175
 */
198
 */
176
static ipl_t interrupts_enable(void)
199
static ipl_t interrupts_enable(void)
177
{
200
{
178
    __u64 v;
201
    __u64 v;
179
   
202
   
180
    __asm__ volatile (
203
    __asm__ volatile (
181
        "mov %0 = psr\n"
204
        "mov %0 = psr\n"
182
        "ssm %1\n"
205
        "ssm %1\n"
183
        ";;\n"
206
        ";;\n"
184
        "srlz.d\n"
207
        "srlz.d\n"
185
        : "=r" (v)
208
        : "=r" (v)
186
        : "i" (PSR_I_MASK)
209
        : "i" (PSR_I_MASK)
187
    );
210
    );
188
   
211
   
189
    return (ipl_t) v;
212
    return (ipl_t) v;
190
}
213
}
191
 
214
 
192
/** Restore interrupt priority level.
215
/** Restore interrupt priority level.
193
 *
216
 *
194
 * Restore PSR.
217
 * Restore PSR.
195
 *
218
 *
196
 * @param ipl Saved interrupt priority level.
219
 * @param ipl Saved interrupt priority level.
197
 */
220
 */
198
static inline void interrupts_restore(ipl_t ipl)
221
static inline void interrupts_restore(ipl_t ipl)
199
{
222
{
200
    __asm__ volatile (
223
    __asm__ volatile (
201
        "mov psr.l = %0\n"
224
        "mov psr.l = %0\n"
202
        ";;\n"
225
        ";;\n"
203
        "srlz.d\n"
226
        "srlz.d\n"
204
        : : "r" ((__u64) ipl)
227
        : : "r" ((__u64) ipl)
205
    );
228
    );
206
}
229
}
207
 
230
 
208
/** Return interrupt priority level.
231
/** Return interrupt priority level.
209
 *
232
 *
210
 * @return PSR.
233
 * @return PSR.
211
 */
234
 */
212
static inline ipl_t interrupts_read(void)
235
static inline ipl_t interrupts_read(void)
213
{
236
{
214
    __u64 v;
237
    __u64 v;
215
   
238
   
216
    __asm__ volatile ("mov %0 = psr\n" : "=r" (v));
239
    __asm__ volatile ("mov %0 = psr\n" : "=r" (v));
217
   
240
   
218
    return (ipl_t) v;
241
    return (ipl_t) v;
219
}
242
}
220
 
243
 
221
/*
 * Raw register accessor macros. Each goes through r15 as a scratch
 * register (hence the "r15" clobber); the shadow-register variants
 * switch register banks with bsw.0/bsw.1 around the access.
 *
 * Wrapped in do { ... } while (0) so each expands to a single statement
 * and is safe in unbraced if/else bodies.
 *
 * NOTE(review): "get_aplication_register" keeps its historical spelling
 * ("aplication") for source compatibility with existing callers.
 */
#define set_shadow_register(reg,val) do {__u64 v = val; __asm__  volatile("mov r15 = %0;;\n""bsw.0;;\n""mov "   #reg   " = r15;;\n""bsw.1;;\n" : : "r" (v) : "r15" ); } while (0)
#define get_shadow_register(reg,val) do {__u64 v ; __asm__  volatile("bsw.0;;\n" "mov r15 = r" #reg ";;\n" "bsw.1;;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; } while (0)

#define get_control_register(reg,val) do {__u64 v ; __asm__  volatile("mov r15 = cr" #reg ";;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; } while (0)
#define get_aplication_register(reg,val) do {__u64 v ; __asm__  volatile("mov r15 = ar" #reg ";;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; } while (0)
#define get_psr(val) do {__u64 v ; __asm__  volatile("mov r15 = psr;;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; } while (0)
-
 
228
extern void cpu_halt(void);
244
extern void cpu_halt(void);
229
extern void cpu_sleep(void);
245
extern void cpu_sleep(void);
230
extern void asm_delay_loop(__u32 t);
246
extern void asm_delay_loop(__u32 t);
231
 
247
 
232
#endif
248
#endif
233
 
249