/*
 * Copyright (c) 2001-2004 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup libcamd64 amd64
 * @ingroup lc
 * @brief   amd64 architecture-dependent parts of libc
 * @{
 */
/** @file
 */

#ifndef LIBC_amd64_ATOMIC_H_
#define LIBC_amd64_ATOMIC_H_

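/* atomic_t is not declared in this file; it is assumed to be provided
 * by the generic <atomic.h> header that pulls in this
 * architecture-specific one.
 */

/** Atomically increment val.
 *
 * The LOCK prefix makes the read-modify-write cycle of INCQ atomic
 * with respect to other processors. The counter is a read-write
 * operand ("+m"), as its old value is consumed by the increment.
 */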
static inline void atomic_inc(atomic_t *val)
{
    __asm__ volatile ("lock incq %0\n" : "+m" (val->count));
}

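/** Atomically decrement val.
 *
 * Same pattern as atomic_inc(), using DECQ instead of INCQ.
 */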
static inline void atomic_dec(atomic_t *val)
{
    __asm__ volatile ("lock decq %0\n" : "+m" (val->count));
}

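/** Atomically increment val and return the value it held before the
 * increment.
 */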
static inline long atomic_postinc(atomic_t *val)
{
    /* Seed the register with the addend; XADDQ swaps it with the old
     * counter value while storing the sum back to memory. */
    long r = 1;

    __asm__ volatile (
        "lock xaddq %0, %1\n"
        : "+r" (r), "+m" (val->count)
    );

    return r;
}

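/** Atomically decrement val and return the value it held before the
 * decrement.
 */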
static inline long atomic_postdec(atomic_t *val)
{
    long r = -1;

    __asm__ volatile (
        "lock xaddq %0, %1\n"
        : "+r" (r), "+m" (val->count)
    );

    return r;
}

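/** Pre-increment and pre-decrement, built on the post-variants:
 * adjusting the returned pre-operation value by one yields the new
 * value of the counter.
 */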
#define atomic_preinc(val)  (atomic_postinc(val) + 1)
#define atomic_predec(val)  (atomic_postdec(val) - 1)

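/* Illustrative usage sketch, not part of the original header. The
 * direct initialization of the count field is an assumption; the real
 * initializer lives in the generic <atomic.h>, not here.
 *
 *   atomic_t cnt;
 *   cnt.count = 5;
 *
 *   atomic_inc(&cnt);                    // cnt.count == 6
 *   long old = atomic_postdec(&cnt);     // old == 6, cnt.count == 5
 *   long cur = atomic_preinc(&cnt);      // cur == 6, cnt.count == 6
 */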
#endif

/** @}
 */