Changes in kernel/arch/sparc64/include/atomic.h [986c24c:7a0359b] in mainline
Files changed: 1 edited
Legend:
- Unmodified
- Added
- Removed
kernel/arch/sparc64/include/atomic.h (diff r986c24c → r7a0359b; "-" = removed in old revision, "+" = added in new revision)

@@ old lines 27-31 / new lines 27-31 @@
  */
 
-/** @addtogroup sparc64 
+/** @addtogroup sparc64
  * @{
  */
@@ old lines 37-41 / new lines 37-42 @@
 
 #include <arch/barrier.h>
-#include <arch/types.h>
+#include <typedefs.h>
 #include <preemption.h>
+#include <trace.h>
 
@@ old lines 42-65 / new lines 43-74 @@
 /** Atomic add operation.
  *
  * @param val Atomic variable.
- * @param i Signed value to be added.
+ * @param i   Signed value to be added.
  *
  * @return Value of the atomic variable as it existed before addition.
+ *
  */
-static inline long atomic_add(atomic_t *val, int i)
+NO_TRACE static inline atomic_count_t atomic_add(atomic_t *val,
+    atomic_count_t i)
 {
-	uint64_t a, b;
+	atomic_count_t a;
+	atomic_count_t b;
 	
 	do {
-		volatile uintptr_t x = (uint64_t) &val->count;
+		volatile uintptr_t ptr = (uintptr_t) &val->count;
 		
-		a = *((uint64_t *) x);
+		a = *((atomic_count_t *) ptr);
 		b = a + i;
-		asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *)x)),
-		    "+r" (b) : "r" (a));
+		
+		asm volatile (
+			"casx %0, %2, %1\n"
+			: "+m" (*((atomic_count_t *) ptr)),
+			  "+r" (b)
+			: "r" (a)
+		);
 	} while (a != b);
 	
 	return a;
 }
 
-static inline long atomic_preinc(atomic_t *val)
+NO_TRACE static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
 	return atomic_add(val, 1) + 1;
 }
 
-static inline long atomic_postinc(atomic_t *val)
+NO_TRACE static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
 	return atomic_add(val, 1);
 }
 
-static inline long atomic_predec(atomic_t *val)
+NO_TRACE static inline atomic_count_t atomic_predec(atomic_t *val)
 {
 	return atomic_add(val, -1) - 1;
 }
 
-static inline long atomic_postdec(atomic_t *val)
+NO_TRACE static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
 	return atomic_add(val, -1);
 }
 
-static inline void atomic_inc(atomic_t *val)
+NO_TRACE static inline void atomic_inc(atomic_t *val)
 {
 	(void) atomic_add(val, 1);
 }
 
-static inline void atomic_dec(atomic_t *val)
+NO_TRACE static inline void atomic_dec(atomic_t *val)
 {
 	(void) atomic_add(val, -1);
 }
 
-static inline long test_and_set(atomic_t *val)
+NO_TRACE static inline atomic_count_t test_and_set(atomic_t *val)
 {
-	uint64_t v = 1;
-	volatile uintptr_t x = (uint64_t) &val->count;
-	
-	asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *) x)),
-	    "+r" (v) : "r" (0));
+	atomic_count_t v = 1;
+	volatile uintptr_t ptr = (uintptr_t) &val->count;
+	
+	asm volatile (
+		"casx %0, %2, %1\n"
+		: "+m" (*((atomic_count_t *) ptr)),
+		  "+r" (v)
+		: "r" (0)
+	);
 	
 	return v;
 }
 
-static inline void atomic_lock_arch(atomic_t *val)
+NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
 {
-	uint64_t tmp1 = 1;
-	uint64_t tmp2 = 0;
-	
-	volatile uintptr_t x = (uint64_t) &val->count;
+	atomic_count_t tmp1 = 1;
+	atomic_count_t tmp2 = 0;
+	
+	volatile uintptr_t ptr = (uintptr_t) &val->count;
 	
 	preemption_disable();
 	
 	asm volatile (
 		"0:\n"
 		"casx %0, %3, %1\n"
 		"brz %1, 2f\n"
 		"nop\n"
 		"1:\n"
 		"ldx %0, %2\n"
 		"brz %2, 0b\n"
 		"nop\n"
-		"ba %%xcc, 1b\n"
-		"nop\n"
+		"ba,a %%xcc, 1b\n"
 		"2:\n"
-		: "+m" (*((uint64_t *) x)), "+r" (tmp1), "+r" (tmp2) : "r" (0)
+		: "+m" (*((atomic_count_t *) ptr)),
+		  "+r" (tmp1),
+		  "+r" (tmp2)
+		: "r" (0)
 	);
Note:
See TracChangeset
for help on using the changeset viewer.