/*
 * Copyright (c) 2005 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup sparc64
 * @{
 */
/** @file
 */

#ifndef KERN_sparc64_ATOMIC_H_
#define KERN_sparc64_ATOMIC_H_

#include <arch/barrier.h>
#include <arch/types.h>
#include <preemption.h>

/** Atomic add operation.
 *
 * Use the atomic compare-and-swap (CASX) operation to atomically add a
 * signed value.
 *
 * @param val Atomic variable.
 * @param i   Signed value to be added.
 *
 * @return Value of the atomic variable as it existed before the addition.
 */
static inline long atomic_add(atomic_t *val, int i)
{
    uint64_t a, b;

    do {
        volatile uintptr_t x = (uint64_t) &val->count;

        a = *((uint64_t *) x);
        b = a + i;
        /*
         * CASX compares the value at the address with 'a'; on a match it
         * stores 'b' there and returns the old value (i.e. 'a') in 'b',
         * otherwise it returns the current value in 'b' and leaves memory
         * untouched. The loop thus retries until no other CPU modified the
         * variable between the load and the CASX.
         */
        asm volatile ("casx %0, %2, %1\n"
            : "+m" (*((uint64_t *) x)), "+r" (b)
            : "r" (a));
    } while (a != b);

    return a;
}

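/** Atomically increment the variable and return its new value. */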
static inline long atomic_preinc(atomic_t *val)
{
    return atomic_add(val, 1) + 1;
}

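/** Atomically increment the variable and return its original value. */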
static inline long atomic_postinc(atomic_t *val)
{
    return atomic_add(val, 1);
}

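/** Atomically decrement the variable and return its new value. */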
static inline long atomic_predec(atomic_t *val)
{
    return atomic_add(val, -1) - 1;
}

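/** Atomically decrement the variable and return its original value. */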
static inline long atomic_postdec(atomic_t *val)
{
    return atomic_add(val, -1);
}

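/** Atomically increment the variable, discarding the result. */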
static inline void atomic_inc(atomic_t *val)
{
    (void) atomic_add(val, 1);
}

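/** Atomically decrement the variable, discarding the result. */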
static inline void atomic_dec(atomic_t *val)
{
    (void) atomic_add(val, -1);
}

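/** Atomically set the variable to 1 if it contains 0.
 *
 * @param val Atomic variable.
 *
 * @return Original value of the variable; zero means the write succeeded.
 */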
static inline long test_and_set(atomic_t *val)
{
    uint64_t v = 1;
    volatile uintptr_t x = (uint64_t) &val->count;

    /* Write 1 only if the variable currently holds 0; 'v' receives the old value. */
    asm volatile ("casx %0, %2, %1\n"
        : "+m" (*((uint64_t *) x)), "+r" (v)
        : "r" (0));

    return v;
}

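/** Spin until the variable can be atomically changed from 0 to 1.
 *
 * Preemption is disabled before the lock is acquired and an entry barrier
 * keeps the critical section from being reordered before the acquisition.
 *
 * @param val Atomic variable serving as the lock.
 */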
static inline void atomic_lock_arch(atomic_t *val)
{
    uint64_t tmp1 = 1;
    uint64_t tmp2 = 0;

    volatile uintptr_t x = (uint64_t) &val->count;

    preemption_disable();

    /*
     * Try to change the lock from 0 to 1 with CASX. If the attempt fails,
     * spin on ordinary loads until the lock appears free and only then
     * retry the CASX, avoiding repeated atomic operations while the lock
     * is held by another CPU.
     */
    asm volatile (
    "0:\n"
        "casx %0, %3, %1\n"
        "brz %1, 2f\n"
        "nop\n"
    "1:\n"
        "ldx %0, %2\n"
        "brz %2, 0b\n"
        "nop\n"
        "ba %%xcc, 1b\n"
        "nop\n"
    "2:\n"
        : "+m" (*((uint64_t *) x)), "+r" (tmp1), "+r" (tmp2)
        : "r" (0)
    );

    /*
     * Prevent the critical section code from leaking upwards past the
     * lock acquisition.
     */
    CS_ENTER_BARRIER();
}

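/*
 * Illustrative usage sketch only; the lock name and the release sequence
 * below are assumptions, not part of this header. A caller pairs
 * atomic_lock_arch() with a leave barrier and a plain store to release:
 *
 *     static atomic_t my_lock;       // hypothetical lock, assumed to start at 0
 *
 *     atomic_lock_arch(&my_lock);    // spin until the lock is ours
 *     // ... critical section ...
 *     CS_LEAVE_BARRIER();            // assumed counterpart of CS_ENTER_BARRIER()
 *     my_lock.count = 0;             // release the lock
 *     preemption_enable();           // undo preemption_disable()
 */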
#endif

/** @}
 */