Changes in kernel/arch/amd64/include/atomic.h [c00589d:7a0359b] in mainline
File: 1 edited
Legend:
  (no marker)  Unmodified
  +            Added
  -            Removed
kernel/arch/amd64/include/atomic.h
--- kernel/arch/amd64/include/atomic.h (rc00589d)
+++ kernel/arch/amd64/include/atomic.h (r7a0359b)

  #define KERN_amd64_ATOMIC_H_

- #include <arch/types.h>
+ #include <typedefs.h>
  #include <arch/barrier.h>
  #include <preemption.h>
+ #include <trace.h>

- static inline void atomic_inc(atomic_t *val) {
+ NO_TRACE static inline void atomic_inc(atomic_t *val)
+ {
  #ifdef CONFIG_SMP
      asm volatile (
…
  }

- static inline void atomic_dec(atomic_t *val) {
+ NO_TRACE static inline void atomic_dec(atomic_t *val)
+ {
  #ifdef CONFIG_SMP
      asm volatile (
…
  }

- static inline long atomic_postinc(atomic_t *val)
+ NO_TRACE static inline atomic_count_t atomic_postinc(atomic_t *val)
  {
-     long r = 1;
+     atomic_count_t r = 1;

      asm volatile (
          "lock xaddq %[r], %[count]\n"
-         : [count] "+m" (val->count), [r] "+r" (r)
+         : [count] "+m" (val->count),
+           [r] "+r" (r)
      );

…
  }

- static inline long atomic_postdec(atomic_t *val)
+ NO_TRACE static inline atomic_count_t atomic_postdec(atomic_t *val)
  {
-     long r = -1;
+     atomic_count_t r = -1;

      asm volatile (
          "lock xaddq %[r], %[count]\n"
-         : [count] "+m" (val->count), [r] "+r" (r)
+         : [count] "+m" (val->count),
+           [r] "+r" (r)
      );

…
  #define atomic_predec(val)  (atomic_postdec(val) - 1)

- static inline uint64_t test_and_set(atomic_t *val) {
-     uint64_t v;
+ NO_TRACE static inline atomic_count_t test_and_set(atomic_t *val)
+ {
+     atomic_count_t v = 1;

      asm volatile (
-         "movq $1, %[v]\n"
          "xchgq %[v], %[count]\n"
-         : [v] "=r" (v), [count] "+m" (val->count)
+         : [v] "+r" (v),
+           [count] "+m" (val->count)
      );

…
  }

-
  /** amd64 specific fast spinlock */
- static inline void atomic_lock_arch(atomic_t *val)
+ NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
  {
-     uint64_t tmp;
+     atomic_count_t tmp;

      preemption_disable();
      asm volatile (
          "0:\n"
-         "pause\n"
-         "mov %[count], %[tmp]\n"
-         "testq %[tmp], %[tmp]\n"
-         "jnz 0b\n"       /* lightweight looping on locked spinlock */
+         " pause\n"
+         " mov %[count], %[tmp]\n"
+         " testq %[tmp], %[tmp]\n"
+         " jnz 0b\n"       /* lightweight looping on locked spinlock */

-         "incq %[tmp]\n"  /* now use the atomic operation */
-         "xchgq %[count], %[tmp]\n"
-         "testq %[tmp], %[tmp]\n"
-         "jnz 0b\n"
-         : [count] "+m" (val->count), [tmp] "=&r" (tmp)
+         " incq %[tmp]\n"  /* now use the atomic operation */
+         " xchgq %[count], %[tmp]\n"
+         " testq %[tmp], %[tmp]\n"
+         " jnz 0b\n"
+         : [count] "+m" (val->count),
+           [tmp] "=&r" (tmp)
      );
+
      /*
       * Prevent critical section code from bleeding out this way up.
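
The patched test_and_set() above drops the explicit "movq $1, %[v]" instruction: the register operand now starts out at 1, and xchgq both stores that 1 into the lock word and returns the previous value in the same operand. As a minimal sketch of that behaviour outside the kernel, the following user-space program reimplements the exchange with GCC-style inline assembly on amd64; the atomic_t and atomic_count_t definitions here are simplified stand-ins for the kernel's typedefs, not the actual HelenOS headers.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the kernel types (assumption, not the HelenOS definitions). */
typedef uint64_t atomic_count_t;

typedef struct {
    volatile atomic_count_t count;
} atomic_t;

/*
 * Same pattern as the patched test_and_set(): v starts at 1, xchgq
 * atomically swaps it with the lock word, and the old value comes
 * back in v, so no separate "movq $1, %[v]" is needed.
 */
static inline atomic_count_t test_and_set(atomic_t *val)
{
    atomic_count_t v = 1;

    asm volatile (
        "xchgq %[v], %[count]\n"
        : [v] "+r" (v),
          [count] "+m" (val->count)
    );

    return v;
}

int main(void)
{
    atomic_t lock = { .count = 0 };

    /* First caller sees 0 (lock was free), second sees 1 (already taken). */
    printf("first:  %lu\n", (unsigned long) test_and_set(&lock));
    printf("second: %lu\n", (unsigned long) test_and_set(&lock));

    return 0;
}

Built on this primitive, a try-lock is simply test_and_set(&lock) == 0, and the spin in atomic_lock_arch() follows the same pattern: loop with pause while the word reads non-zero, then attempt the xchgq-based acquisition and retry if another CPU won the race.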