Rev 3386 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 3386 | Rev 4153 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright (c) 2005 Jakub Jermar |
2 | * Copyright (c) 2005 Jakub Jermar |
3 | * All rights reserved. |
3 | * All rights reserved. |
4 | * |
4 | * |
5 | * Redistribution and use in source and binary forms, with or without |
5 | * Redistribution and use in source and binary forms, with or without |
6 | * modification, are permitted provided that the following conditions |
6 | * modification, are permitted provided that the following conditions |
7 | * are met: |
7 | * are met: |
8 | * |
8 | * |
9 | * - Redistributions of source code must retain the above copyright |
9 | * - Redistributions of source code must retain the above copyright |
10 | * notice, this list of conditions and the following disclaimer. |
10 | * notice, this list of conditions and the following disclaimer. |
11 | * - Redistributions in binary form must reproduce the above copyright |
11 | * - Redistributions in binary form must reproduce the above copyright |
12 | * notice, this list of conditions and the following disclaimer in the |
12 | * notice, this list of conditions and the following disclaimer in the |
13 | * documentation and/or other materials provided with the distribution. |
13 | * documentation and/or other materials provided with the distribution. |
14 | * - The name of the author may not be used to endorse or promote products |
14 | * - The name of the author may not be used to endorse or promote products |
15 | * derived from this software without specific prior written permission. |
15 | * derived from this software without specific prior written permission. |
16 | * |
16 | * |
17 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
17 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
18 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
18 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
19 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
19 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
20 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
20 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
21 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
21 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
22 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
22 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
23 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
24 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | */ |
27 | */ |
28 | 28 | ||
29 | /** @addtogroup ia64 |
29 | /** @addtogroup ia64 |
30 | * @{ |
30 | * @{ |
31 | */ |
31 | */ |
32 | /** @file |
32 | /** @file |
33 | */ |
33 | */ |
34 | 34 | ||
35 | #ifndef KERN_ia64_ASM_H_ |
35 | #ifndef KERN_ia64_ASM_H_ |
36 | #define KERN_ia64_ASM_H_ |
36 | #define KERN_ia64_ASM_H_ |
37 | 37 | ||
38 | #include <config.h> |
38 | #include <config.h> |
- | 39 | #include <typedefs.h> |
|
39 | #include <arch/types.h> |
40 | #include <arch/types.h> |
40 | #include <arch/register.h> |
41 | #include <arch/register.h> |
41 | 42 | ||
42 | - | ||
43 | #define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL |
43 | #define IA64_IOSPACE_ADDRESS 0xE001000000000000ULL |
44 | 44 | ||
45 | static inline void outb(uint64_t port,uint8_t v) |
45 | static inline void pio_write_8(ioport8_t *port, uint8_t v) |
46 | { |
46 | { |
- | 47 | uintptr_t prt = (uintptr_t) port; |
|
- | 48 | ||
- | 49 | *((uint8_t *)(IA64_IOSPACE_ADDRESS + |
|
47 | *((char *)(IA64_IOSPACE_ADDRESS + ( (port & 0xfff) | ( (port >> 2) << 12 )))) = v; |
50 | ((prt & 0xfff) | ((prt >> 2) << 12)))) = v; |
48 | 51 | ||
49 | asm volatile ("mf\n" ::: "memory"); |
52 | asm volatile ("mf\n" ::: "memory"); |
50 | } |
53 | } |
51 | 54 | ||
- | 55 | static inline void pio_write_16(ioport16_t *port, uint16_t v) |
|
- | 56 | { |
|
- | 57 | uintptr_t prt = (uintptr_t) port; |
|
- | 58 | ||
- | 59 | *((uint16_t *)(IA64_IOSPACE_ADDRESS + |
|
- | 60 | ((prt & 0xfff) | ((prt >> 2) << 12)))) = v; |
|
- | 61 | ||
- | 62 | asm volatile ("mf\n" ::: "memory"); |
|
- | 63 | } |
|
- | 64 | ||
- | 65 | static inline void pio_write_32(ioport32_t *port, uint32_t v) |
|
- | 66 | { |
|
- | 67 | uintptr_t prt = (uintptr_t) port; |
|
- | 68 | ||
- | 69 | *((uint32_t *)(IA64_IOSPACE_ADDRESS + |
|
- | 70 | ((prt & 0xfff) | ((prt >> 2) << 12)))) = v; |
|
- | 71 | ||
- | 72 | asm volatile ("mf\n" ::: "memory"); |
|
- | 73 | } |
|
- | 74 | ||
- | 75 | static inline uint8_t pio_read_8(ioport8_t *port) |
|
- | 76 | { |
|
- | 77 | uintptr_t prt = (uintptr_t) port; |
|
- | 78 | ||
- | 79 | asm volatile ("mf\n" ::: "memory"); |
|
- | 80 | ||
- | 81 | return *((uint8_t *)(IA64_IOSPACE_ADDRESS + |
|
- | 82 | ((prt & 0xfff) | ((prt >> 2) << 12)))); |
|
- | 83 | } |
|
52 | 84 | ||
53 | static inline uint8_t inb(uint64_t port) |
85 | static inline uint16_t pio_read_16(ioport16_t *port) |
54 | { |
86 | { |
- | 87 | uintptr_t prt = (uintptr_t) port; |
|
- | 88 | ||
55 | asm volatile ("mf\n" ::: "memory"); |
89 | asm volatile ("mf\n" ::: "memory"); |
56 | 90 | ||
- | 91 | return *((uint16_t *)(IA64_IOSPACE_ADDRESS + |
|
57 | return *((char *)(IA64_IOSPACE_ADDRESS + ( (port & 0xfff) | ( (port >> 2) << 12 )))); |
92 | ((prt & 0xfff) | ((prt >> 2) << 12)))); |
58 | } |
93 | } |
59 | 94 | ||
- | 95 | static inline uint32_t pio_read_32(ioport32_t *port) |
|
- | 96 | { |
|
- | 97 | uintptr_t prt = (uintptr_t) port; |
|
60 | 98 | ||
- | 99 | asm volatile ("mf\n" ::: "memory"); |
|
- | 100 | ||
- | 101 | return *((uint32_t *)(IA64_IOSPACE_ADDRESS + |
|
- | 102 | ((prt & 0xfff) | ((prt >> 2) << 12)))); |
|
- | 103 | } |
|
61 | 104 | ||
62 | /** Return base address of current stack |
105 | /** Return base address of current stack |
63 | * |
106 | * |
64 | * Return the base address of the current stack. |
107 | * Return the base address of the current stack. |
65 | * The stack is assumed to be STACK_SIZE long. |
108 | * The stack is assumed to be STACK_SIZE long. |
66 | * The stack must start on page boundary. |
109 | * The stack must start on page boundary. |
67 | */ |
110 | */ |
68 | static inline uintptr_t get_stack_base(void) |
111 | static inline uintptr_t get_stack_base(void) |
69 | { |
112 | { |
70 | uint64_t v; |
113 | uint64_t v; |
71 | 114 | ||
- | 115 | //I'm not sure why but this code bad inlines in scheduler, |
|
- | 116 | //so THE shifts about 16B and causes kernel panic |
|
72 | asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1))); |
117 | //asm volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1))); |
- | 118 | //return v; |
|
73 | 119 | ||
- | 120 | //this code have the same meaning but inlines well |
|
- | 121 | asm volatile ("mov %0 = r12" : "=r" (v) ); |
|
74 | return v; |
122 | return v & (~(STACK_SIZE-1)); |
75 | } |
123 | } |
76 | 124 | ||
77 | /** Return Processor State Register. |
125 | /** Return Processor State Register. |
78 | * |
126 | * |
79 | * @return PSR. |
127 | * @return PSR. |
80 | */ |
128 | */ |
81 | static inline uint64_t psr_read(void) |
129 | static inline uint64_t psr_read(void) |
82 | { |
130 | { |
83 | uint64_t v; |
131 | uint64_t v; |
84 | 132 | ||
85 | asm volatile ("mov %0 = psr\n" : "=r" (v)); |
133 | asm volatile ("mov %0 = psr\n" : "=r" (v)); |
86 | 134 | ||
87 | return v; |
135 | return v; |
88 | } |
136 | } |
89 | 137 | ||
90 | /** Read IVA (Interruption Vector Address). |
138 | /** Read IVA (Interruption Vector Address). |
91 | * |
139 | * |
92 | * @return Return location of interruption vector table. |
140 | * @return Return location of interruption vector table. |
93 | */ |
141 | */ |
94 | static inline uint64_t iva_read(void) |
142 | static inline uint64_t iva_read(void) |
95 | { |
143 | { |
96 | uint64_t v; |
144 | uint64_t v; |
97 | 145 | ||
98 | asm volatile ("mov %0 = cr.iva\n" : "=r" (v)); |
146 | asm volatile ("mov %0 = cr.iva\n" : "=r" (v)); |
99 | 147 | ||
100 | return v; |
148 | return v; |
101 | } |
149 | } |
102 | 150 | ||
103 | /** Write IVA (Interruption Vector Address) register. |
151 | /** Write IVA (Interruption Vector Address) register. |
104 | * |
152 | * |
105 | * @param v New location of interruption vector table. |
153 | * @param v New location of interruption vector table. |
106 | */ |
154 | */ |
107 | static inline void iva_write(uint64_t v) |
155 | static inline void iva_write(uint64_t v) |
108 | { |
156 | { |
109 | asm volatile ("mov cr.iva = %0\n" : : "r" (v)); |
157 | asm volatile ("mov cr.iva = %0\n" : : "r" (v)); |
110 | } |
158 | } |
111 | 159 | ||
112 | 160 | ||
113 | /** Read IVR (External Interrupt Vector Register). |
161 | /** Read IVR (External Interrupt Vector Register). |
114 | * |
162 | * |
115 | * @return Highest priority, pending, unmasked external interrupt vector. |
163 | * @return Highest priority, pending, unmasked external interrupt vector. |
116 | */ |
164 | */ |
117 | static inline uint64_t ivr_read(void) |
165 | static inline uint64_t ivr_read(void) |
118 | { |
166 | { |
119 | uint64_t v; |
167 | uint64_t v; |
120 | 168 | ||
121 | asm volatile ("mov %0 = cr.ivr\n" : "=r" (v)); |
169 | asm volatile ("mov %0 = cr.ivr\n" : "=r" (v)); |
122 | 170 | ||
123 | return v; |
171 | return v; |
124 | } |
172 | } |
125 | 173 | ||
- | 174 | static inline uint64_t cr64_read(void) |
|
- | 175 | { |
|
- | 176 | uint64_t v; |
|
- | 177 | ||
- | 178 | asm volatile ("mov %0 = cr64\n" : "=r" (v)); |
|
- | 179 | ||
- | 180 | return v; |
|
- | 181 | } |
|
- | 182 | ||
- | 183 | ||
126 | /** Write ITC (Interval Timer Counter) register. |
184 | /** Write ITC (Interval Timer Counter) register. |
127 | * |
185 | * |
128 | * @param v New counter value. |
186 | * @param v New counter value. |
129 | */ |
187 | */ |
130 | static inline void itc_write(uint64_t v) |
188 | static inline void itc_write(uint64_t v) |
131 | { |
189 | { |
132 | asm volatile ("mov ar.itc = %0\n" : : "r" (v)); |
190 | asm volatile ("mov ar.itc = %0\n" : : "r" (v)); |
133 | } |
191 | } |
134 | 192 | ||
135 | /** Read ITC (Interval Timer Counter) register. |
193 | /** Read ITC (Interval Timer Counter) register. |
136 | * |
194 | * |
137 | * @return Current counter value. |
195 | * @return Current counter value. |
138 | */ |
196 | */ |
139 | static inline uint64_t itc_read(void) |
197 | static inline uint64_t itc_read(void) |
140 | { |
198 | { |
141 | uint64_t v; |
199 | uint64_t v; |
142 | 200 | ||
143 | asm volatile ("mov %0 = ar.itc\n" : "=r" (v)); |
201 | asm volatile ("mov %0 = ar.itc\n" : "=r" (v)); |
144 | 202 | ||
145 | return v; |
203 | return v; |
146 | } |
204 | } |
147 | 205 | ||
148 | /** Write ITM (Interval Timer Match) register. |
206 | /** Write ITM (Interval Timer Match) register. |
149 | * |
207 | * |
150 | * @param v New match value. |
208 | * @param v New match value. |
151 | */ |
209 | */ |
152 | static inline void itm_write(uint64_t v) |
210 | static inline void itm_write(uint64_t v) |
153 | { |
211 | { |
154 | asm volatile ("mov cr.itm = %0\n" : : "r" (v)); |
212 | asm volatile ("mov cr.itm = %0\n" : : "r" (v)); |
155 | } |
213 | } |
156 | 214 | ||
157 | /** Read ITM (Interval Timer Match) register. |
215 | /** Read ITM (Interval Timer Match) register. |
158 | * |
216 | * |
159 | * @return Match value. |
217 | * @return Match value. |
160 | */ |
218 | */ |
161 | static inline uint64_t itm_read(void) |
219 | static inline uint64_t itm_read(void) |
162 | { |
220 | { |
163 | uint64_t v; |
221 | uint64_t v; |
164 | 222 | ||
165 | asm volatile ("mov %0 = cr.itm\n" : "=r" (v)); |
223 | asm volatile ("mov %0 = cr.itm\n" : "=r" (v)); |
166 | 224 | ||
167 | return v; |
225 | return v; |
168 | } |
226 | } |
169 | 227 | ||
170 | /** Read ITV (Interval Timer Vector) register. |
228 | /** Read ITV (Interval Timer Vector) register. |
171 | * |
229 | * |
172 | * @return Current vector and mask bit. |
230 | * @return Current vector and mask bit. |
173 | */ |
231 | */ |
174 | static inline uint64_t itv_read(void) |
232 | static inline uint64_t itv_read(void) |
175 | { |
233 | { |
176 | uint64_t v; |
234 | uint64_t v; |
177 | 235 | ||
178 | asm volatile ("mov %0 = cr.itv\n" : "=r" (v)); |
236 | asm volatile ("mov %0 = cr.itv\n" : "=r" (v)); |
179 | 237 | ||
180 | return v; |
238 | return v; |
181 | } |
239 | } |
182 | 240 | ||
183 | /** Write ITV (Interval Timer Vector) register. |
241 | /** Write ITV (Interval Timer Vector) register. |
184 | * |
242 | * |
185 | * @param v New vector and mask bit. |
243 | * @param v New vector and mask bit. |
186 | */ |
244 | */ |
187 | static inline void itv_write(uint64_t v) |
245 | static inline void itv_write(uint64_t v) |
188 | { |
246 | { |
189 | asm volatile ("mov cr.itv = %0\n" : : "r" (v)); |
247 | asm volatile ("mov cr.itv = %0\n" : : "r" (v)); |
190 | } |
248 | } |
191 | 249 | ||
192 | /** Write EOI (End Of Interrupt) register. |
250 | /** Write EOI (End Of Interrupt) register. |
193 | * |
251 | * |
194 | * @param v This value is ignored. |
252 | * @param v This value is ignored. |
195 | */ |
253 | */ |
196 | static inline void eoi_write(uint64_t v) |
254 | static inline void eoi_write(uint64_t v) |
197 | { |
255 | { |
198 | asm volatile ("mov cr.eoi = %0\n" : : "r" (v)); |
256 | asm volatile ("mov cr.eoi = %0\n" : : "r" (v)); |
199 | } |
257 | } |
200 | 258 | ||
201 | /** Read TPR (Task Priority Register). |
259 | /** Read TPR (Task Priority Register). |
202 | * |
260 | * |
203 | * @return Current value of TPR. |
261 | * @return Current value of TPR. |
204 | */ |
262 | */ |
205 | static inline uint64_t tpr_read(void) |
263 | static inline uint64_t tpr_read(void) |
206 | { |
264 | { |
207 | uint64_t v; |
265 | uint64_t v; |
208 | 266 | ||
209 | asm volatile ("mov %0 = cr.tpr\n" : "=r" (v)); |
267 | asm volatile ("mov %0 = cr.tpr\n" : "=r" (v)); |
210 | 268 | ||
211 | return v; |
269 | return v; |
212 | } |
270 | } |
213 | 271 | ||
214 | /** Write TPR (Task Priority Register). |
272 | /** Write TPR (Task Priority Register). |
215 | * |
273 | * |
216 | * @param v New value of TPR. |
274 | * @param v New value of TPR. |
217 | */ |
275 | */ |
218 | static inline void tpr_write(uint64_t v) |
276 | static inline void tpr_write(uint64_t v) |
219 | { |
277 | { |
220 | asm volatile ("mov cr.tpr = %0\n" : : "r" (v)); |
278 | asm volatile ("mov cr.tpr = %0\n" : : "r" (v)); |
221 | } |
279 | } |
222 | 280 | ||
223 | /** Disable interrupts. |
281 | /** Disable interrupts. |
224 | * |
282 | * |
225 | * Disable interrupts and return previous |
283 | * Disable interrupts and return previous |
226 | * value of PSR. |
284 | * value of PSR. |
227 | * |
285 | * |
228 | * @return Old interrupt priority level. |
286 | * @return Old interrupt priority level. |
229 | */ |
287 | */ |
230 | static ipl_t interrupts_disable(void) |
288 | static ipl_t interrupts_disable(void) |
231 | { |
289 | { |
232 | uint64_t v; |
290 | uint64_t v; |
233 | 291 | ||
234 | asm volatile ( |
292 | asm volatile ( |
235 | "mov %0 = psr\n" |
293 | "mov %0 = psr\n" |
236 | "rsm %1\n" |
294 | "rsm %1\n" |
237 | : "=r" (v) |
295 | : "=r" (v) |
238 | : "i" (PSR_I_MASK) |
296 | : "i" (PSR_I_MASK) |
239 | ); |
297 | ); |
240 | 298 | ||
241 | return (ipl_t) v; |
299 | return (ipl_t) v; |
242 | } |
300 | } |
243 | 301 | ||
244 | /** Enable interrupts. |
302 | /** Enable interrupts. |
245 | * |
303 | * |
246 | * Enable interrupts and return previous |
304 | * Enable interrupts and return previous |
247 | * value of PSR. |
305 | * value of PSR. |
248 | * |
306 | * |
249 | * @return Old interrupt priority level. |
307 | * @return Old interrupt priority level. |
250 | */ |
308 | */ |
251 | static ipl_t interrupts_enable(void) |
309 | static ipl_t interrupts_enable(void) |
252 | { |
310 | { |
253 | uint64_t v; |
311 | uint64_t v; |
254 | 312 | ||
255 | asm volatile ( |
313 | asm volatile ( |
256 | "mov %0 = psr\n" |
314 | "mov %0 = psr\n" |
257 | "ssm %1\n" |
315 | "ssm %1\n" |
258 | ";;\n" |
316 | ";;\n" |
259 | "srlz.d\n" |
317 | "srlz.d\n" |
260 | : "=r" (v) |
318 | : "=r" (v) |
261 | : "i" (PSR_I_MASK) |
319 | : "i" (PSR_I_MASK) |
262 | ); |
320 | ); |
263 | 321 | ||
264 | return (ipl_t) v; |
322 | return (ipl_t) v; |
265 | } |
323 | } |
266 | 324 | ||
267 | /** Restore interrupt priority level. |
325 | /** Restore interrupt priority level. |
268 | * |
326 | * |
269 | * Restore PSR. |
327 | * Restore PSR. |
270 | * |
328 | * |
271 | * @param ipl Saved interrupt priority level. |
329 | * @param ipl Saved interrupt priority level. |
272 | */ |
330 | */ |
273 | static inline void interrupts_restore(ipl_t ipl) |
331 | static inline void interrupts_restore(ipl_t ipl) |
274 | { |
332 | { |
275 | if (ipl & PSR_I_MASK) |
333 | if (ipl & PSR_I_MASK) |
276 | (void) interrupts_enable(); |
334 | (void) interrupts_enable(); |
277 | else |
335 | else |
278 | (void) interrupts_disable(); |
336 | (void) interrupts_disable(); |
279 | } |
337 | } |
280 | 338 | ||
281 | /** Return interrupt priority level. |
339 | /** Return interrupt priority level. |
282 | * |
340 | * |
283 | * @return PSR. |
341 | * @return PSR. |
284 | */ |
342 | */ |
285 | static inline ipl_t interrupts_read(void) |
343 | static inline ipl_t interrupts_read(void) |
286 | { |
344 | { |
287 | return (ipl_t) psr_read(); |
345 | return (ipl_t) psr_read(); |
288 | } |
346 | } |
289 | 347 | ||
290 | /** Disable protection key checking. */ |
348 | /** Disable protection key checking. */ |
291 | static inline void pk_disable(void) |
349 | static inline void pk_disable(void) |
292 | { |
350 | { |
293 | asm volatile ("rsm %0\n" : : "i" (PSR_PK_MASK)); |
351 | asm volatile ("rsm %0\n" : : "i" (PSR_PK_MASK)); |
294 | } |
352 | } |
295 | 353 | ||
/* Implemented in assembly / separate compilation units. */
extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(uint32_t t);

extern void switch_to_userspace(uintptr_t, uintptr_t, uintptr_t, uintptr_t,
    uint64_t, uint64_t);
301 | 360 | ||
302 | #endif |
361 | #endif |
303 | 362 | ||
304 | /** @} |
363 | /** @} |
305 | */ |
364 | */ |
306 | 365 |