Subversion Repositories HelenOS

Rev 433

/*
 * Copyright (C) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __ia64_ASM_H__
#define __ia64_ASM_H__

#include <arch/types.h>
#include <config.h>
#include <arch/register.h>

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 */
static inline __address get_stack_base(void)
{
    __u64 v;

    __asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));

    return v;
}
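
/*
 * Illustrative example (not part of the original header): with a
 * hypothetical STACK_SIZE of 0x4000, a stack pointer (r12) value of
 * 0xe0000000f0123abc is masked with ~(STACK_SIZE - 1) == ~0x3fff,
 * which yields the stack base 0xe0000000f0120000.
 */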

/** Read IVR (External Interrupt Vector Register).
 *
 * @return Highest priority, pending, unmasked external interrupt vector.
 */
static inline __u64 ivr_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));

    return v;
}

/** Write ITC (Interval Timer Counter) register.
 *
 * @param v New counter value.
 */
static inline void itc_write(__u64 v)
{
    __asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
}

/** Read ITC (Interval Timer Counter) register.
 *
 * @return Current counter value.
 */
static inline __u64 itc_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));

    return v;
}

/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(__u64 v)
{
    __asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
}
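
/*
 * Usage sketch (an assumption, not code from this revision): the interval
 * timer interrupt fires when ITC reaches ITM, so the next tick can be
 * scheduled relative to the current counter. IT_DELTA is a hypothetical
 * tick length in ITC cycles.
 *
 *     itm_write(itc_read() + IT_DELTA);
 */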

/** Read ITV (Interval Timer Vector) register.
 *
 * @return Current vector and mask bit.
 */
static inline __u64 itv_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.itv\n" : "=r" (v));

    return v;
}

/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(__u64 v)
{
    __asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
}

/** Write EOI (End Of Interrupt) register.
 *
 * @param v This value is ignored.
 */
static inline void eoi_write(__u64 v)
{
    __asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
}
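
/*
 * Illustrative acknowledge sequence (a sketch, not code from this file):
 * read the highest-priority pending vector from IVR, dispatch it, then
 * signal completion by writing EOI. The dispatch_vector() helper is
 * hypothetical.
 *
 *     __u64 vector = ivr_read();
 *     dispatch_vector(vector);
 *     eoi_write(0);    (the written value is ignored)
 */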

/** Read TPR (Task Priority Register).
 *
 * @return Current value of TPR.
 */
static inline __u64 tpr_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = cr.tpr\n" : "=r" (v));

    return v;
}

/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(__u64 v)
{
    __asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
}

/** Disable interrupts.
 *
 * Disable interrupts and return the previous
 * value of PSR.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void)
{
    __u64 v;

    __asm__ volatile (
        "mov %0 = psr\n"
        "rsm %1\n"
        : "=r" (v)
        : "i" (PSR_I_MASK)
    );

    return (ipl_t) v;
}

/** Enable interrupts.
 *
 * Enable interrupts and return the previous
 * value of PSR.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void)
{
    __u64 v;

    __asm__ volatile (
        "mov %0 = psr\n"
        "ssm %1\n"
        ";;\n"
        "srlz.d\n"
        : "=r" (v)
        : "i" (PSR_I_MASK)
    );

    return (ipl_t) v;
}

/** Restore interrupt priority level.
 *
 * Restore PSR.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl)
{
    __asm__ volatile (
        "mov psr.l = %0\n"
        ";;\n"
        "srlz.d\n"
        : : "r" ((__u64) ipl)
    );
}
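
/*
 * Typical save/restore pattern (illustrative sketch): bracket a short
 * critical section with interrupts_disable() and interrupts_restore().
 * The do_critical_work() call is a hypothetical placeholder.
 *
 *     ipl_t ipl = interrupts_disable();
 *     do_critical_work();
 *     interrupts_restore(ipl);
 */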

/** Return interrupt priority level.
 *
 * @return PSR.
 */
static inline ipl_t interrupts_read(void)
{
    __u64 v;

    __asm__ volatile ("mov %0 = psr\n" : "=r" (v));

    return (ipl_t) v;
}

#define set_shadow_register(reg, val) \
{ \
    __u64 v = val; \
    __asm__ volatile ( \
        "mov r15 = %0;;\n" \
        "bsw.0;;\n" \
        "mov " #reg " = r15;;\n" \
        "bsw.1;;\n" \
        : : "r" (v) : "r15" \
    ); \
}

#define get_shadow_register(reg, val) \
{ \
    __u64 v; \
    __asm__ volatile ( \
        "bsw.0;;\n" \
        "mov r15 = r" #reg ";;\n" \
        "bsw.1;;\n" \
        "mov %0 = r15;;\n" \
        : "=r" (v) : : "r15" \
    ); \
    val = v; \
}

#define get_control_register(reg, val) \
{ \
    __u64 v; \
    __asm__ volatile ( \
        "mov r15 = cr" #reg ";;\n" \
        "mov %0 = r15;;\n" \
        : "=r" (v) : : "r15" \
    ); \
    val = v; \
}

#define get_aplication_register(reg, val) \
{ \
    __u64 v; \
    __asm__ volatile ( \
        "mov r15 = ar" #reg ";;\n" \
        "mov %0 = r15;;\n" \
        : "=r" (v) : : "r15" \
    ); \
    val = v; \
}

#define get_psr(val) \
{ \
    __u64 v; \
    __asm__ volatile ( \
        "mov r15 = psr;;\n" \
        "mov %0 = r15;;\n" \
        : "=r" (v) : : "r15" \
    ); \
    val = v; \
}
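
/*
 * Example use of the macros above (illustrative): each macro expands to a
 * statement block, so it is invoked like a statement. The local variable
 * names below are arbitrary, and the register number is only an example.
 *
 *     __u64 psr_val, lid;
 *     get_psr(psr_val);
 *     get_control_register(64, lid);    (cr64 is the LID register)
 */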

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(__u32 t);

#endif
233