Subversion Repositories HelenOS

Rev

Rev 1488 | Rev 1708 | Go to most recent revision | Blame | Compare with Previous | Last modification | View Log | Download | RSS feed

  1. /*
  2.  * Copyright (C) 2005 Jakub Jermar
  3.  * All rights reserved.
  4.  *
  5.  * Redistribution and use in source and binary forms, with or without
  6.  * modification, are permitted provided that the following conditions
  7.  * are met:
  8.  *
  9.  * - Redistributions of source code must retain the above copyright
  10.  *   notice, this list of conditions and the following disclaimer.
  11.  * - Redistributions in binary form must reproduce the above copyright
  12.  *   notice, this list of conditions and the following disclaimer in the
  13.  *   documentation and/or other materials provided with the distribution.
  14.  * - The name of the author may not be used to endorse or promote products
  15.  *   derived from this software without specific prior written permission.
  16.  *
  17.  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
  18.  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
  19.  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  20.  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
  21.  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  22.  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  23.  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  24.  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  25.  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  26.  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  27.  */
  28.  
  29.  /** @addtogroup ia64  
  30.  * @{
  31.  */
  32. /** @file
  33.  */
  34.  
  35. #ifndef __ia64_ASM_H__
  36. #define __ia64_ASM_H__
  37.  
  38. #include <config.h>
  39. #include <arch/types.h>
  40. #include <arch/register.h>
  41.  
  42. /** Return base address of current stack
  43.  *
  44.  * Return the base address of the current stack.
  45.  * The stack is assumed to be STACK_SIZE long.
  46.  * The stack must start on page boundary.
  47.  */
  48. static inline __address get_stack_base(void)
  49. {
  50.     __u64 v;
  51.  
  52.     __asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));
  53.    
  54.     return v;
  55. }
  56.  
  57. /** Return Processor State Register.
  58.  *
  59.  * @return PSR.
  60.  */
  61. static inline __u64 psr_read(void)
  62. {
  63.     __u64 v;
  64.    
  65.     __asm__ volatile ("mov %0 = psr\n" : "=r" (v));
  66.    
  67.     return v;
  68. }
  69.  
  70. /** Read IVA (Interruption Vector Address).
  71.  *
  72.  * @return Return location of interruption vector table.
  73.  */
  74. static inline __u64 iva_read(void)
  75. {
  76.     __u64 v;
  77.    
  78.     __asm__ volatile ("mov %0 = cr.iva\n" : "=r" (v));
  79.    
  80.     return v;
  81. }
  82.  
/** Write IVA (Interruption Vector Address) register.
 *
 * @param v New location of the interruption vector table.
 */
static inline void iva_write(__u64 v)
{
    __asm__ volatile ("mov cr.iva = %0\n" : : "r" (v));
}
  91.  
  92.  
  93. /** Read IVR (External Interrupt Vector Register).
  94.  *
  95.  * @return Highest priority, pending, unmasked external interrupt vector.
  96.  */
  97. static inline __u64 ivr_read(void)
  98. {
  99.     __u64 v;
  100.    
  101.     __asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));
  102.    
  103.     return v;
  104. }
  105.  
/** Write ITC (Interval Timer Counter) application register.
 *
 * @param v New counter value.
 */
static inline void itc_write(__u64 v)
{
    __asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
}
  114.  
  115. /** Read ITC (Interval Timer Counter) register.
  116.  *
  117.  * @return Current counter value.
  118.  */
  119. static inline __u64 itc_read(void)
  120. {
  121.     __u64 v;
  122.    
  123.     __asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));
  124.    
  125.     return v;
  126. }
  127.  
/** Write ITM (Interval Timer Match) register.
 *
 * @param v New match value.
 */
static inline void itm_write(__u64 v)
{
    __asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
}
  136.  
  137. /** Read ITM (Interval Timer Match) register.
  138.  *
  139.  * @return Match value.
  140.  */
  141. static inline __u64 itm_read(void)
  142. {
  143.     __u64 v;
  144.    
  145.     __asm__ volatile ("mov %0 = cr.itm\n" : "=r" (v));
  146.    
  147.     return v;
  148. }
  149.  
  150. /** Read ITV (Interval Timer Vector) register.
  151.  *
  152.  * @return Current vector and mask bit.
  153.  */
  154. static inline __u64 itv_read(void)
  155. {
  156.     __u64 v;
  157.    
  158.     __asm__ volatile ("mov %0 = cr.itv\n" : "=r" (v));
  159.    
  160.     return v;
  161. }
  162.  
/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit.
 */
static inline void itv_write(__u64 v)
{
    __asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
}
  171.  
/** Write EOI (End Of Interrupt) register.
 *
 * The write itself signals end of interrupt.
 *
 * @param v This value is ignored.
 */
static inline void eoi_write(__u64 v)
{
    __asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
}
  180.  
  181. /** Read TPR (Task Priority Register).
  182.  *
  183.  * @return Current value of TPR.
  184.  */
  185. static inline __u64 tpr_read(void)
  186. {
  187.     __u64 v;
  188.  
  189.     __asm__ volatile ("mov %0 = cr.tpr\n"  : "=r" (v));
  190.    
  191.     return v;
  192. }
  193.  
/** Write TPR (Task Priority Register).
 *
 * @param v New value of TPR.
 */
static inline void tpr_write(__u64 v)
{
    __asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
}
  202.  
  203. /** Disable interrupts.
  204.  *
  205.  * Disable interrupts and return previous
  206.  * value of PSR.
  207.  *
  208.  * @return Old interrupt priority level.
  209.  */
  210. static ipl_t interrupts_disable(void)
  211. {
  212.     __u64 v;
  213.    
  214.     __asm__ volatile (
  215.         "mov %0 = psr\n"
  216.         "rsm %1\n"
  217.         : "=r" (v)
  218.         : "i" (PSR_I_MASK)
  219.     );
  220.    
  221.     return (ipl_t) v;
  222. }
  223.  
  224. /** Enable interrupts.
  225.  *
  226.  * Enable interrupts and return previous
  227.  * value of PSR.
  228.  *
  229.  * @return Old interrupt priority level.
  230.  */
  231. static ipl_t interrupts_enable(void)
  232. {
  233.     __u64 v;
  234.    
  235.     __asm__ volatile (
  236.         "mov %0 = psr\n"
  237.         "ssm %1\n"
  238.         ";;\n"
  239.         "srlz.d\n"
  240.         : "=r" (v)
  241.         : "i" (PSR_I_MASK)
  242.     );
  243.    
  244.     return (ipl_t) v;
  245. }
  246.  
  247. /** Restore interrupt priority level.
  248.  *
  249.  * Restore PSR.
  250.  *
  251.  * @param ipl Saved interrupt priority level.
  252.  */
  253. static inline void interrupts_restore(ipl_t ipl)
  254. {
  255.     if (ipl & PSR_I_MASK)
  256.         (void) interrupts_enable();
  257.     else
  258.         (void) interrupts_disable();
  259. }
  260.  
  261. /** Return interrupt priority level.
  262.  *
  263.  * @return PSR.
  264.  */
  265. static inline ipl_t interrupts_read(void)
  266. {
  267.     return (ipl_t) psr_read();
  268. }
  269.  
/** Disable protection key checking.
 *
 * Clears the protection-key bit in PSR via rsm.
 */
static inline void pk_disable(void)
{
    __asm__ volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}
  275.  
/* The following routines are defined outside this header. */
extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(__u32 t);

/*
 * Transfer control to userspace.
 * NOTE(review): presumably entry is the userspace entry point, sp the
 * stack pointer, bsp the register stack backing store pointer, and
 * ipsr/rsc the new PSR and RSC values — confirm against implementation.
 */
extern void switch_to_userspace(__address entry, __address sp, __address bsp, __address uspace_uarg, __u64 ipsr, __u64 rsc);
  281.  
  282. #endif
  283.  
  284.  /** @}
  285.  */
  286.  
  287.