/*
 * Copyright (c) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup sparc64
 * @{
 */
/** @file
 */

#ifndef KERN_sparc64_ASM_H_
#define KERN_sparc64_ASM_H_

#include <arch/arch.h>
#include <arch/types.h>
#include <typedefs.h>
#include <align.h>
#include <arch/register.h>
#include <config.h>
#include <arch/stack.h>
#include <arch/barrier.h>

/** Write one byte to a memory-mapped I/O port. */
static inline void outb(ioport_t port, uint8_t v)
{
    *((volatile uint8_t *)(port)) = v;
    memory_barrier();
}

/** Write one 16-bit word to a memory-mapped I/O port. */
static inline void outw(ioport_t port, uint16_t v)
{
    *((volatile uint16_t *)(port)) = v;
    memory_barrier();
}

/** Write one 32-bit word to a memory-mapped I/O port. */
static inline void outl(ioport_t port, uint32_t v)
{
    *((volatile uint32_t *)(port)) = v;
    memory_barrier();
}

/** Read one byte from a memory-mapped I/O port. */
static inline uint8_t inb(ioport_t port)
{
    uint8_t rv;

    rv = *((volatile uint8_t *)(port));
    memory_barrier();

    return rv;
}

/** Read one 16-bit word from a memory-mapped I/O port. */
static inline uint16_t inw(ioport_t port)
{
    uint16_t rv;

    rv = *((volatile uint16_t *)(port));
    memory_barrier();

    return rv;
}

/** Read one 32-bit word from a memory-mapped I/O port. */
static inline uint32_t inl(ioport_t port)
{
    uint32_t rv;

    rv = *((volatile uint32_t *)(port));
    memory_barrier();

    return rv;
}
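
/*
 * Usage sketch: on sparc64 the I/O ports are memory mapped, so the port
 * argument is simply the virtual address of the device register. The
 * address below is a made-up placeholder, not a real device.
 *
 *     ioport_t status_port = (ioport_t) 0x1fe02000030ULL;
 *     uint8_t status = inb(status_port);
 *     if (status & 0x01)
 *         outb(status_port, status);
 */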

/** Read Processor State register.
 *
 * @return Value of PSTATE register.
 */
static inline uint64_t pstate_read(void)
{
    uint64_t v;

    asm volatile ("rdpr %%pstate, %0\n" : "=r" (v));

    return v;
}

/** Write Processor State register.
 *
 * @param v New value of PSTATE register.
 */
static inline void pstate_write(uint64_t v)
{
    asm volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
}

/** Read TICK_compare Register.
 *
 * @return Value of TICK_compare register.
 */
static inline uint64_t tick_compare_read(void)
{
    uint64_t v;

    asm volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

    return v;
}

/** Write TICK_compare Register.
 *
 * @param v New value of TICK_compare register.
 */
static inline void tick_compare_write(uint64_t v)
{
    asm volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
}

/** Read STICK_compare Register.
 *
 * @return Value of STICK_compare register.
 */
static inline uint64_t stick_compare_read(void)
{
    uint64_t v;

    asm volatile ("rd %%asr25, %0\n" : "=r" (v));

    return v;
}

/** Write STICK_compare Register.
 *
 * @param v New value of STICK_compare register.
 */
static inline void stick_compare_write(uint64_t v)
{
    asm volatile ("wr %0, %1, %%asr25\n" : : "r" (v), "i" (0));
}

/** Read TICK Register.
 *
 * @return Value of TICK register.
 */
static inline uint64_t tick_read(void)
{
    uint64_t v;

    asm volatile ("rdpr %%tick, %0\n" : "=r" (v));

    return v;
}
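
/*
 * Usage sketch: a timer interrupt is typically programmed by setting the
 * compare register a fixed number of cycles ahead of the current TICK
 * value. The interval below is an arbitrary placeholder.
 *
 *     tick_compare_write(tick_read() + 10000);
 */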

/** Read FPRS Register.
 *
 * @return Value of FPRS register.
 */
static inline uint64_t fprs_read(void)
{
    uint64_t v;

    asm volatile ("rd %%fprs, %0\n" : "=r" (v));

    return v;
}

/** Write FPRS Register.
 *
 * @param v New value of FPRS register.
 */
static inline void fprs_write(uint64_t v)
{
    asm volatile ("wr %0, %1, %%fprs\n" : : "r" (v), "i" (0));
}

/** Read SOFTINT Register.
 *
 * @return Value of SOFTINT register.
 */
static inline uint64_t softint_read(void)
{
    uint64_t v;

    asm volatile ("rd %%softint, %0\n" : "=r" (v));

    return v;
}

/** Write SOFTINT Register.
 *
 * @param v New value of SOFTINT register.
 */
static inline void softint_write(uint64_t v)
{
    asm volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
}

/** Write CLEAR_SOFTINT Register.
 *
 * Bits set in CLEAR_SOFTINT register will be cleared in SOFTINT register.
 *
 * @param v New value of CLEAR_SOFTINT register.
 */
static inline void clear_softint_write(uint64_t v)
{
    asm volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
}

/** Write SET_SOFTINT Register.
 *
 * Bits set in SET_SOFTINT register will be set in SOFTINT register.
 *
 * @param v New value of SET_SOFTINT register.
 */
static inline void set_softint_write(uint64_t v)
{
    asm volatile ("wr %0, %1, %%set_softint\n" : : "r" (v), "i" (0));
}
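
/*
 * Usage sketch: bits 1-15 of SOFTINT correspond to soft interrupt levels
 * 1-15. The level used below is an arbitrary placeholder.
 *
 *     set_softint_write(1 << 14);      // post a soft interrupt at level 14
 *     clear_softint_write(1 << 14);    // clear it again
 */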

/** Enable interrupts.
 *
 * Enable interrupts and return previous value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_enable(void)
{
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = true;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Disable interrupts.
 *
 * Disable interrupts and return previous value of IPL.
 *
 * @return Old interrupt priority level.
 */
static inline ipl_t interrupts_disable(void)
{
    pstate_reg_t pstate;
    uint64_t value;

    value = pstate_read();
    pstate.value = value;
    pstate.ie = false;
    pstate_write(pstate.value);

    return (ipl_t) value;
}

/** Restore interrupt priority level.
 *
 * Restore IPL.
 *
 * @param ipl Saved interrupt priority level.
 */
static inline void interrupts_restore(ipl_t ipl)
{
    pstate_reg_t pstate;

    pstate.value = pstate_read();
    pstate.ie = ((pstate_reg_t) ipl).ie;
    pstate_write(pstate.value);
}

/** Return interrupt priority level.
 *
 * Return IPL.
 *
 * @return Current interrupt priority level.
 */
static inline ipl_t interrupts_read(void)
{
    return (ipl_t) pstate_read();
}
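
/*
 * Usage sketch: the usual pattern for a short critical section is to save
 * the previous interrupt state, disable interrupts, do the work and then
 * restore the saved state, so that nesting works correctly.
 *
 *     ipl_t ipl = interrupts_disable();
 *     // ... code that must not be preempted by an interrupt ...
 *     interrupts_restore(ipl);
 */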

/** Return base address of current stack.
 *
 * Return the base address of the current stack.
 * The stack is assumed to be STACK_SIZE bytes long.
 * The stack must start on a page boundary.
 *
 * @return Base address of the current stack.
 */
static inline uintptr_t get_stack_base(void)
{
    uintptr_t unbiased_sp;

    asm volatile ("add %%sp, %1, %0\n" : "=r" (unbiased_sp) : "i" (STACK_BIAS));

    return ALIGN_DOWN(unbiased_sp, STACK_SIZE);
}
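
/*
 * Usage sketch: because the stack is STACK_SIZE bytes long and aligned,
 * per-stack data placed at its base can be located from any stack pointer
 * value. The structure name below is only an illustration, not a type
 * defined by this header.
 *
 *     struct stack_info *info = (struct stack_info *) get_stack_base();
 */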

/** Read Trap Program Counter register.
 *
 * @return Current value in TPC.
 */
static inline uint64_t tpc_read(void)
{
    uint64_t v;

    asm volatile ("rdpr %%tpc, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Level register.
 *
 * @return Current value in TL.
 */
static inline uint64_t tl_read(void)
{
    uint64_t v;

    asm volatile ("rdpr %%tl, %0\n" : "=r" (v));

    return v;
}

/** Read Trap Base Address register.
 *
 * @return Current value in TBA.
 */
static inline uint64_t tba_read(void)
{
    uint64_t v;

    asm volatile ("rdpr %%tba, %0\n" : "=r" (v));

    return v;
}

/** Write Trap Base Address register.
 *
 * @param v New value of TBA.
 */
static inline void tba_write(uint64_t v)
{
    asm volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
}
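
/*
 * Usage sketch: TBA is normally pointed at the kernel's trap table during
 * early CPU initialization. The symbol name below is only an illustration.
 *
 *     extern uint8_t trap_table[];
 *     tba_write((uint64_t) trap_table);
 */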

/** Load uint64_t from alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 *
 * @return Value read from the virtual address in the specified address space.
 */
static inline uint64_t asi_u64_read(asi_t asi, uintptr_t va)
{
    uint64_t v;

    asm volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" ((unsigned) asi));

    return v;
}

/** Store uint64_t to alternate space.
 *
 * @param asi ASI determining the alternate space.
 * @param va Virtual address within the ASI.
 * @param v Value to be written.
 */
static inline void asi_u64_write(asi_t asi, uintptr_t va, uint64_t v)
{
    asm volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" ((unsigned) asi) : "memory");
}
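
/*
 * Usage sketch: the ASI value selects which alternate address space the
 * load or store targets (for example an MMU register set). It must be a
 * compile-time constant because of the "i" constraint; the constant below
 * is a made-up placeholder, not one defined by this header.
 *
 *     #define ASI_EXAMPLE  0x58
 *     uint64_t reg = asi_u64_read(ASI_EXAMPLE, 0);
 *     asi_u64_write(ASI_EXAMPLE, 0, reg | 1);
 */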

/** Flush all valid register windows to memory. */
static inline void flushw(void)
{
    asm volatile ("flushw\n");
}

/** Switch to nucleus by setting TL to 1. */
static inline void nucleus_enter(void)
{
    asm volatile ("wrpr %g0, 1, %tl\n");
}

/** Switch from nucleus by setting TL to 0. */
static inline void nucleus_leave(void)
{
    asm volatile ("wrpr %g0, %g0, %tl\n");
}
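
/*
 * Usage sketch: code that needs to run in the nucleus context brackets its
 * work between the two calls.
 *
 *     nucleus_enter();
 *     // ... privileged work performed at TL = 1 ...
 *     nucleus_leave();
 */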

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(const uint32_t usec);

extern void switch_to_userspace(uint64_t pc, uint64_t sp, uint64_t uarg);

#if defined(SUN4U)
#include <arch/sun4u/asm.h>
#elif defined(SUN4V)
#include <arch/sun4v/asm.h>
#endif

#endif  /* KERN_sparc64_ASM_H_ */

/** @}
 */