/*
 * Copyright (C) 2001-2004 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __amd64_ATOMIC_H__
#define __amd64_ATOMIC_H__

/*
 * atomic_t (a struct with a volatile long 'count' member) is assumed to be
 * provided by the architecture type definitions.
 */
#include <arch/types.h>

/* Atomically increment *val by one. */
static inline void atomic_inc(atomic_t *val)
{
	/* "+m": incq both reads and writes the memory operand. */
	__asm__ volatile ("lock incq %0\n" : "+m" (val->count));
}

/* Atomically decrement *val by one. */
static inline void atomic_dec(atomic_t *val)
{
	__asm__ volatile ("lock decq %0\n" : "+m" (val->count));
}

/* Atomically increment *val and return its value before the increment. */
static inline long atomic_postinc(atomic_t *val)
{
	long r;

	/*
	 * xaddq exchanges the register with the memory operand and adds the
	 * old register value to memory, so r receives the previous count.
	 * The register is early-clobbered ("=&r") because it is written
	 * before the memory operand is used; the memory operand is "+m"
	 * because it is both read and written.
	 */
	__asm__ volatile (
		"movq $1, %0\n"
		"lock xaddq %0, %1\n"
		: "=&r" (r), "+m" (val->count)
	);

	return r;
}

/* Atomically decrement *val and return its value before the decrement. */
static inline long atomic_postdec(atomic_t *val)
{
	long r;

	__asm__ volatile (
		"movq $-1, %0\n"
		"lock xaddq %0, %1\n"
		: "=&r" (r), "+m" (val->count)
	);

	return r;
}

/* Pre-increment/pre-decrement: return the value after the update. */
#define atomic_preinc(val) (atomic_postinc(val) + 1)
#define atomic_predec(val) (atomic_postdec(val) - 1)

#endif /* __amd64_ATOMIC_H__ */
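/*
 * Usage sketch (not part of the original header): a hedged example of how
 * these primitives might be used for a simple reference count, assuming
 * atomic_t is a struct with a volatile long 'count' member as noted above.
 *
 *	atomic_t refcount = { 0 };
 *
 *	atomic_inc(&refcount);                  // refcount.count == 1
 *	long old = atomic_postinc(&refcount);   // old == 1, count == 2
 *	long cur = atomic_predec(&refcount);    // cur == 1, count == 1
 *	atomic_dec(&refcount);                  // refcount.count == 0
 */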