Subversion Repositories HelenOS

Rev

Rev 431 | Go to most recent revision | Blame | Last modification | View Log | Download | RSS feed

  1. /*
  2.  * Copyright (C) 2005 Jakub Jermar
  3.  * All rights reserved.
  4.  *
  5.  * Redistribution and use in source and binary forms, with or without
  6.  * modification, are permitted provided that the following conditions
  7.  * are met:
  8.  *
  9.  * - Redistributions of source code must retain the above copyright
  10.  *   notice, this list of conditions and the following disclaimer.
  11.  * - Redistributions in binary form must reproduce the above copyright
  12.  *   notice, this list of conditions and the following disclaimer in the
  13.  *   documentation and/or other materials provided with the distribution.
  14.  * - The name of the author may not be used to endorse or promote products
  15.  *   derived from this software without specific prior written permission.
  16.  *
  17.  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
  18.  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
  19.  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  20.  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
  21.  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  22.  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  23.  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  24.  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  25.  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  26.  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  27.  */
  28.  
  29. #ifndef __ia64_ASM_H__
  30. #define __ia64_ASM_H__
  31.  
  32. #include <arch/types.h>
  33. #include <config.h>
  34. #include <arch/register.h>
  35.  
  36. /** Return base address of current stack
  37.  *
  38.  * Return the base address of the current stack.
  39.  * The stack is assumed to be STACK_SIZE long.
  40.  * The stack must start on page boundary.
  41.  */
  42. static inline __address get_stack_base(void)
  43. {
  44.     __u64 v;
  45.  
  46.     __asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
  47.    
  48.     return v;
  49. }
  50.  
  51. /** Read IVR (External Interrupt Vector Register).
  52.  *
  53.  * @return Highest priority, pending, unmasked external interrupt vector.
  54.  */
  55. static inline __u64 ivr_read(void)
  56. {
  57.     __u64 v;
  58.    
  59.     __asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));
  60.    
  61.     return v;
  62. }
  63.  
  64. /** Write ITC (Interval Timer Counter) register.
  65.  *
  66.  * @param New counter value.
  67.  */
  68. static inline void itc_write(__u64 v)
  69. {
  70.     __asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
  71. }
  72.  
  73. /** Read ITC (Interval Timer Counter) register.
  74.  *
  75.  * @return Current counter value.
  76.  */
  77. static inline __u64 itc_read(void)
  78. {
  79.     __u64 v;
  80.    
  81.     __asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));
  82.    
  83.     return v;
  84. }
  85.  
  86. /** Write ITM (Interval Timer Match) register.
  87.  *
  88.  * @param New match value.
  89.  */
  90. static inline void itm_write(__u64 v)
  91. {
  92.     __asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
  93. }
  94.  
  95. /** Write ITV (Interval Timer Vector) register.
  96.  *
  97.  * @param New vector and masked bit.
  98.  */
  99. static inline void itv_write(__u64 v)
  100. {
  101.     __asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
  102. }
  103.  
  104. /** Write EOI (End Of Interrupt) register.
  105.  *
  106.  * @param This value is ignored.
  107.  */
  108. static inline void eoi_write(__u64 v)
  109. {
  110.     __asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
  111. }
  112.  
  113. /** Read TPR (Task Priority Register).
  114.  *
  115.  * @return Current value of TPR.
  116.  */
  117. static inline __u64 tpr_read(void)
  118. {
  119.     __u64 v;
  120.  
  121.     __asm__ volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
  122.    
  123.     return v;
  124. }
  125.  
  126. /** Write TPR (Task Priority Register).
  127.  *
  128.  * @param New value of TPR.
  129.  */
  130. static inline void tpr_write(__u64 v)
  131. {
  132.     __asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
  133. }
  134.  
  135. /** Disable interrupts.
  136.  *
  137.  * Disable interrupts and return previous
  138.  * value of PSR.
  139.  *
  140.  * @return Old interrupt priority level.
  141.  */
  142. static ipl_t interrupts_disable(void)
  143. {
  144.     __u64 v;
  145.    
  146.     __asm__ volatile (
  147.         "mov %0 = psr\n"
  148.         "rsm %1\n"
  149.         : "=r" (v)
  150.         : "i" (PSR_I_MASK)
  151.     );
  152.    
  153.     return (ipl_t) v;
  154. }
  155.  
  156. /** Enable interrupts.
  157.  *
  158.  * Enable interrupts and return previous
  159.  * value of PSR.
  160.  *
  161.  * @return Old interrupt priority level.
  162.  */
  163. static ipl_t interrupts_enable(void)
  164. {
  165.     __u64 v;
  166.    
  167.     __asm__ volatile (
  168.         "mov %0 = psr\n"
  169.         "ssm %1\n"
  170.         ";;\n"
  171.         "srlz.d\n"
  172.         : "=r" (v)
  173.         : "i" (PSR_I_MASK)
  174.     );
  175.    
  176.     return (ipl_t) v;
  177. }
  178.  
  179. /** Restore interrupt priority level.
  180.  *
  181.  * Restore PSR.
  182.  *
  183.  * @param ipl Saved interrupt priority level.
  184.  */
  185. static inline void interrupts_restore(ipl_t ipl)
  186. {
  187.     __asm__ volatile (
  188.         "mov psr.l = %0\n"
  189.         ";;\n"
  190.         "srlz.d\n"
  191.         : : "r" ((__u64) ipl)
  192.     );
  193. }
  194.  
  195. /** Return interrupt priority level.
  196.  *
  197.  * @return PSR.
  198.  */
  199. static inline ipl_t interrupts_read(void)
  200. {
  201.     __u64 v;
  202.    
  203.     __asm__ volatile ("mov %0 = psr\n" : "=r" (v));
  204.    
  205.     return (ipl_t) v;
  206. }
  207.  
  208. #define set_shadow_register(reg,val) {__u64 v = val; __asm__  volatile("mov r15 = %0;;\n""bsw.0;;\n""mov "   #reg   " = r15;;\n""bsw.1;;\n" : : "r" (v) : "r15" ); }
  209. #define get_shadow_register(reg,val) {__u64 v ; __asm__  volatile("bsw.0;;\n" "mov r15 = r" #reg ";;\n" "bsw.1;;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; }
  210.  
  211. #define get_control_register(reg,val) {__u64 v ; __asm__  volatile("mov r15 = cr" #reg ";;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; }
  212. #define get_aplication_register(reg,val) {__u64 v ; __asm__  volatile("mov r15 = ar" #reg ";;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; }
  213. #define get_psr(val) {__u64 v ; __asm__  volatile("mov r15 = psr;;\n" "mov %0 = r15;;\n" : "=r" (v) : : "r15" ); val=v; }
  214.  
/* Implemented elsewhere (architecture assembly/support code). */
extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(__u32 t);
  218.  
  219. #endif
  220.