/*
 * Copyright (C) 2001-2004 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __amd64_ATOMIC_H__
#define __amd64_ATOMIC_H__

#include <arch/types.h>
#include <arch/barrier.h>
#include <preemption.h>
#include <typedefs.h>

static inline void atomic_inc(atomic_t *val) {
#ifdef CONFIG_SMP
    /*
     * The lock prefix makes the read-modify-write atomic across CPUs.
     * incq both reads and writes the operand, so the constraint must be
     * "+m" rather than the write-only "=m".
     */
    __asm__ volatile ("lock incq %0\n" : "+m" (val->count));
#else
    __asm__ volatile ("incq %0\n" : "+m" (val->count));
#endif /* CONFIG_SMP */
}

static inline void atomic_dec(atomic_t *val) {
#ifdef CONFIG_SMP
    __asm__ volatile ("lock decq %0\n" : "+m" (val->count));
#else
    __asm__ volatile ("decq %0\n" : "+m" (val->count));
#endif /* CONFIG_SMP */
}
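
/*
 * Usage sketch (illustrative, not part of the original header): counting
 * events from multiple CPUs with atomic_inc(). The identifiers nr_events
 * and event_record() are hypothetical.
 *
 *  static atomic_t nr_events;
 *
 *  static inline void event_record(void)
 *  {
 *      atomic_inc(&nr_events);     // safe without any extra locking
 *  }
 */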

static inline long atomic_postinc(atomic_t *val)
{
    long r = 1;

    /*
     * xaddq leaves the old value of val->count in r, hence "+r": the
     * register is both the addend (input) and the returned old value
     * (output). Both operands are read and written, so neither may be
     * declared write-only.
     */
    __asm__ volatile (
        "lock xaddq %1, %0\n"
        : "+m" (val->count), "+r" (r)
    );

    return r;
}

static inline long atomic_postdec(atomic_t *val)
{
    long r = -1;

    __asm__ volatile (
        "lock xaddq %1, %0\n"
        : "+m" (val->count), "+r" (r)
    );

    return r;
}

#define atomic_preinc(val) (atomic_postinc(val)+1)
#define atomic_predec(val) (atomic_postdec(val)-1)
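
/*
 * Semantics sketch (illustrative, not part of the original header):
 * atomic_postinc() returns the value *before* the increment, so it can
 * hand out unique tickets; atomic_preinc() returns the value *after*.
 * The identifiers next_id and alloc_id() are hypothetical.
 *
 *  static atomic_t next_id;
 *
 *  static inline long alloc_id(void)
 *  {
 *      return atomic_postinc(&next_id);    // 0, 1, 2, ... unique per call
 *  }
 */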

static inline __u64 test_and_set(atomic_t *val) {
    __u64 v;

    /*
     * xchgq with a memory operand is implicitly locked: it atomically
     * stores 1 into val->count and returns the previous value in v.
     * The memory operand is read as well as written, hence "+m".
     */
    __asm__ volatile (
        "movq $1, %0\n"
        "xchgq %0, %1\n"
        : "=r" (v), "+m" (val->count)
    );

    return v;
}
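
/*
 * Usage sketch (illustrative, not part of the original header): a minimal
 * trylock built on test_and_set(). The previous value is 0 exactly when the
 * lock was free, in which case the caller now holds it. The identifier
 * spin_trylock_sketch() is hypothetical.
 *
 *  static inline int spin_trylock_sketch(atomic_t *lock)
 *  {
 *      return test_and_set(lock) == 0;     // 1 on success, 0 if held
 *  }
 */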

/** amd64 specific fast spinlock */
static inline void atomic_lock_arch(atomic_t *val)
{
    __u64 tmp;

    preemption_disable();
    __asm__ volatile (
        "0:;"
#ifdef CONFIG_HT
        "pause;"        /* relax the CPU inside the spin-wait loop */
#endif
        "mov %0, %1;"
        "testq %1, %1;"
        "jnz 0b;"       /* lightweight looping on locked spinlock */

        "incq %1;"      /* now use the atomic operation */
        "xchgq %0, %1;"
        "testq %1, %1;"
        "jnz 0b;"
        : "+m" (val->count), "=&r" (tmp)
    );
    /*
     * Prevent critical section code from bleeding out this way up.
     */
    CS_ENTER_BARRIER();
}
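
/*
 * Illustrative counterpart (not part of the original header): a matching
 * unlock would release the lock and re-enable preemption in the reverse
 * order of atomic_lock_arch(). The identifier atomic_unlock_sketch() is
 * hypothetical, and CS_LEAVE_BARRIER() is assumed to be provided by
 * <arch/barrier.h> alongside CS_ENTER_BARRIER().
 *
 *  static inline void atomic_unlock_sketch(atomic_t *val)
 *  {
 *      CS_LEAVE_BARRIER();     // keep the critical section above the store
 *      val->count = 0;         // plain store releases the lock
 *      preemption_enable();
 *  }
 */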

#endif /* __amd64_ATOMIC_H__ */