
/*
 * Copyright (c) 2001-2004 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup amd64
 * @{
 */
/** @file
 */

#ifndef KERN_amd64_ATOMIC_H_
#define KERN_amd64_ATOMIC_H_

#include <arch/types.h>
#include <arch/barrier.h>
#include <preemption.h>

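/** Atomic increment.
 *
 * The lock prefix makes the read-modify-write of incq atomic across
 * CPUs; on uniprocessor builds it is omitted, as a single instruction
 * cannot be interrupted halfway through.
 *
 * @param val Variable to be incremented.
 */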
static inline void atomic_inc(atomic_t *val)
{
#ifdef CONFIG_SMP
    asm volatile ("lock incq %0\n" : "+m" (val->count));
#else
    asm volatile ("incq %0\n" : "+m" (val->count));
#endif /* CONFIG_SMP */
}

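/** Atomic decrement.
 *
 * Same locking considerations as atomic_inc().
 *
 * @param val Variable to be decremented.
 */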
static inline void atomic_dec(atomic_t *val)
{
#ifdef CONFIG_SMP
    asm volatile ("lock decq %0\n" : "+m" (val->count));
#else
    asm volatile ("decq %0\n" : "+m" (val->count));
#endif /* CONFIG_SMP */
}

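/** Atomic post-increment.
 *
 * The xaddq instruction exchanges its operands and then adds them,
 * so the addend register ends up holding the old counter value.
 *
 * @param val Variable to be incremented.
 * @return Value before the increment.
 */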
static inline long atomic_postinc(atomic_t *val)
{
    long r = 1;

    asm volatile (
        "lock xaddq %1, %0\n"
        : "+m" (val->count), "+r" (r)
    );

    return r;
}

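/** Atomic post-decrement.
 *
 * @param val Variable to be decremented.
 * @return Value before the decrement.
 */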
static inline long atomic_postdec(atomic_t *val)
{
    long r = -1;

    asm volatile (
        "lock xaddq %1, %0\n"
        : "+m" (val->count), "+r" (r)
    );

    return r;
}

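/* Pre-increment/pre-decrement derive the new value from the returned
 * old one. */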
#define atomic_preinc(val) (atomic_postinc(val) + 1)
#define atomic_predec(val) (atomic_postdec(val) - 1)

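/** Atomically set a variable to one and return its previous value.
 *
 * No lock prefix is needed here: xchgq with a memory operand is
 * implicitly locked.
 *
 * @param val Variable to be set.
 * @return Value before the exchange.
 */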
static inline uint64_t test_and_set(atomic_t *val)
{
    uint64_t v;

    asm volatile (
        "movq $1, %0\n"
        "xchgq %0, %1\n"
        : "=r" (v), "+m" (val->count)
    );

    return v;
}

/** amd64 specific fast spinlock.
 *
 * A test-and-test-and-set loop: spin with plain reads while the lock
 * is held and attempt the atomic exchange only once it appears free.
 */
static inline void atomic_lock_arch(atomic_t *val)
{
    uint64_t tmp;

    preemption_disable();
    asm volatile (
        "0:\n"
#ifdef CONFIG_HT
        "pause\n"        /* relax the pipeline on hyper-threaded CPUs */
#endif
        "mov %0, %1\n"
        "testq %1, %1\n"
        "jnz 0b\n"       /* lightweight looping on locked spinlock */

        "incq %1\n"      /* now use the atomic operation */
        "xchgq %0, %1\n"
        "testq %1, %1\n"
        "jnz 0b\n"
        : "+m" (val->count), "=&r" (tmp)
    );
    /*
     * Prevent the critical section code from bleeding out above the
     * lock acquisition.
     */
    CS_ENTER_BARRIER();
}

#endif

/** @}
 */