Changeset 228666c in mainline
- Timestamp: 2010-02-20T18:41:53Z
- Branches: lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
- Children: b03a666
- Parents: bc9da2a
- Files: 40 edited
Legend: lines prefixed with '-' were removed, lines prefixed with '+' were added; unprefixed lines are unchanged context.
kernel/arch/amd64/include/atomic.h
The operations are retyped from long/uint64_t to the new atomic_count_t, opening braces move onto their own line, and multi-operand asm constraint lists are split one operand per line:

-static inline void atomic_inc(atomic_t *val) {
+static inline void atomic_inc(atomic_t *val)
+{
 #ifdef CONFIG_SMP

-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-	long r = 1;
+	atomic_count_t r = 1;

 	asm volatile (
 		"lock xaddq %[r], %[count]\n"
-		: [count] "+m" (val->count), [r] "+r" (r)
+		: [count] "+m" (val->count),
+		  [r] "+r" (r)
 	);

-static inline uint64_t test_and_set(atomic_t *val) {
-	uint64_t v;
+static inline atomic_count_t test_and_set(atomic_t *val)
+{
+	atomic_count_t v;

-	uint64_t tmp;
+	atomic_count_t tmp;

atomic_dec(), atomic_postdec() and atomic_lock_arch() receive the analogous type and formatting changes, and a blank line is added before the "prevent critical section code from bleeding out" comment.
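For readers who want to try the pattern outside the kernel: lock xaddq is an atomic fetch-and-add that exchanges the register with the memory operand while adding them, so r comes back holding the value before the increment. A stand-alone user-space sketch (hypothetical demo, not HelenOS code):

#include <stdint.h>
#include <stdio.h>

/* Mirrors the kernel's atomic_postinc(): returns the pre-increment value. */
static inline uint64_t xadd_postinc(volatile uint64_t *count)
{
	uint64_t r = 1;
	asm volatile (
		"lock xaddq %[r], %[count]\n"
		: [count] "+m" (*count), [r] "+r" (r)
	);
	return r;
}

int main(void)
{
	volatile uint64_t count = 41;
	uint64_t before = xadd_postinc(&count);
	printf("before=%llu after=%llu\n",
	    (unsigned long long) before, (unsigned long long) count);
	return 0;
}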
kernel/arch/amd64/include/types.h
A per-architecture atomic counter type is introduced:

 typedef uint64_t unative_t;
 typedef int64_t native_t;
+typedef uint64_t atomic_count_t;
kernel/arch/arm32/include/atomic.h
atomic_add() and the pre/post increment/decrement helpers are retyped to atomic_count_t (the addend changes from int to atomic_count_t as well), the local ret moves to its point of initialization, and the doxygen comments gain a trailing blank comment line:

-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-	long ret;
-
 	/*
 	 * This implementation is for UP pre-ARMv6 systems where we do not have
 	...
 	ipl_t ipl = interrupts_disable();
 	val->count += i;
-	ret = val->count;
+	atomic_count_t ret = val->count;
 	interrupts_restore(ipl);

-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)

atomic_predec(), atomic_postinc() and atomic_postdec() change in the same way.
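The pattern the comment describes, making a read-modify-write atomic on a uniprocessor simply by masking interrupts around it, in a compilable sketch; interrupts_disable()/interrupts_restore() are the kernel's own primitives, stubbed here only so the example builds:

#include <stdint.h>

typedef uint32_t atomic_count_t;
typedef unsigned int ipl_t;

typedef struct {
	volatile atomic_count_t count;
} atomic_t;

/* Stubs standing in for the kernel primitives. */
static ipl_t interrupts_disable(void) { return 0; }
static void interrupts_restore(ipl_t ipl) { (void) ipl; }

static atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
{
	/* With interrupts masked on a UP system, nothing can run between
	   the load, the add and the store, so the sequence is atomic. */
	ipl_t ipl = interrupts_disable();
	val->count += i;
	atomic_count_t ret = val->count;
	interrupts_restore(ipl);

	return ret;
}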
kernel/arch/arm32/include/types.h
Trailing whitespace is trimmed from the @addtogroup line, the ATTRIBUTE_PACKED definitions are re-indented, and the counter type is added:

 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
kernel/arch/ia32/include/atomic.h
The same changes as in the amd64 header, with 32-bit types and the l-suffixed instructions:

-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-	long r = 1;
+	atomic_count_t r = 1;

 	asm volatile (
 		"lock xaddl %[r], %[count]\n"
-		: [count] "+m" (val->count), [r] "+r" (r)
+		: [count] "+m" (val->count),
+		  [r] "+r" (r)
 	);

-static inline uint32_t test_and_set(atomic_t *val) {
-	uint32_t v;
+static inline atomic_count_t test_and_set(atomic_t *val)
+{
+	atomic_count_t v;

atomic_inc(), atomic_dec(), atomic_postdec() and atomic_lock_arch() (uint32_t tmp becomes atomic_count_t tmp) receive the analogous changes.
kernel/arch/ia32/include/types.h
 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
kernel/arch/ia64/include/atomic.h
test_and_set() is retyped from uint64_t to atomic_count_t, the busy-wait in atomic_lock_arch() is joined onto one line, and the local v in the increment/decrement helpers changes from long to atomic_count_t:

-static inline uint64_t test_and_set(atomic_t *val)
+static inline atomic_count_t test_and_set(atomic_t *val)
 {
-	uint64_t v;
+	atomic_count_t v;

 	do {
-		while (val->count)
-			;
+		while (val->count);
 	} while (test_and_set(val));

-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
-	long v;
+	atomic_count_t v;

atomic_inc(), atomic_dec(), atomic_predec(), atomic_postinc() and atomic_postdec() change identically.
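The lock loop above is the classic test-and-test-and-set idiom: spin on plain loads (which stay in the local cache) and attempt the expensive atomic exchange only once the lock looks free. A portable sketch of the same shape, using GCC builtins rather than the ia64 instructions:

/* Test-and-test-and-set spinlock skeleton (user-space sketch). */
static volatile unsigned long lock_word = 0;

static void spin_lock(volatile unsigned long *lock)
{
	do {
		/* Passive phase: read-only spin, no bus traffic on a cache hit. */
		while (*lock);
	} while (__sync_lock_test_and_set(lock, 1));  /* returns the old value */
}

static void spin_unlock(volatile unsigned long *lock)
{
	__sync_lock_release(lock);  /* store 0 with release semantics */
}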
kernel/arch/ia64/include/types.h
Trailing whitespace is trimmed from the @addtogroup line and the counter type is added:

 typedef uint64_t unative_t;
 typedef int64_t native_t;
+typedef uint64_t atomic_count_t;
kernel/arch/mips32/include/atomic.h
atomic_add() takes and returns atomic_count_t, test_and_set() and atomic_lock_arch() gain the new brace style, the busy-wait loop is joined onto one line, and the asm operand lists are split one per line:

-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-	long tmp, v;
+	atomic_count_t tmp;
+	atomic_count_t v;

 	asm volatile (
 	...
 	"	beq %0, %4, 1b\n"	/* if the atomic operation failed, try again */
 	"	nop\n"
-		: "=&r" (tmp), "+m" (val->count), "=&r" (v)
-		: "r" (i), "i" (0)
+		: "=&r" (tmp),
+		  "+m" (val->count),
+		  "=&r" (v)
+		: "r" (i),
+		  "i" (0)
 	);

-static inline uint32_t test_and_set(atomic_t *val) {
-	uint32_t tmp, v;
+static inline atomic_count_t test_and_set(atomic_t *val)
+{
+	atomic_count_t tmp;
+	atomic_count_t v;

-static inline void atomic_lock_arch(atomic_t *val) {
+static inline void atomic_lock_arch(atomic_t *val)
+{
 	do {
-		while (val->count)
-			;
+		while (val->count);
 	} while (test_and_set(val));
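The ll/sc pair in atomic_add() implements an optimistic retry loop: ll loads the counter and arms a reservation, sc stores the sum only if nothing else touched the location in between, and the branch retries on failure. The same loop shape written portably against a compare-and-swap (a sketch, not the MIPS code):

#include <stdint.h>

typedef uint32_t atomic_count_t;

static atomic_count_t cas_atomic_add(volatile atomic_count_t *count,
    atomic_count_t i)
{
	atomic_count_t old;

	do {
		old = *count;  /* analogous to "ll" */
		/* The swap succeeds only if *count still equals old,
		   mirroring the conditional store "sc". */
	} while (!__sync_bool_compare_and_swap(count, old, old + i));

	return old + i;  /* value after addition, like atomic_add() */
}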
kernel/arch/mips32/include/types.h
 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
kernel/arch/ppc32/include/atomic.h
The temporaries change from long to atomic_count_t, the return types of the pre/post helpers follow, the asm operand lists are split one per line, and the misaligned stwcx. in atomic_dec() is re-tabbed:

 static inline void atomic_inc(atomic_t *val)
 {
-	long tmp;
+	atomic_count_t tmp;

 	asm volatile (
 		"1:\n"
 		"lwarx %0, 0, %2\n"
 		"addic %0, %0, 1\n"
 		"stwcx. %0, 0, %2\n"
 		"bne- 1b"
-		: "=&r" (tmp), "=m" (val->count)
-		: "r" (&val->count), "m" (val->count)
+		: "=&r" (tmp),
+		  "=m" (val->count)
+		: "r" (&val->count),
+		  "m" (val->count)
 		: "cc"
 	);

-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)

atomic_dec(), atomic_postdec(), atomic_preinc() and atomic_predec() change in the same way.
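Note that the ppc32 pre/post helpers derive their return value from a separate read of val->count after the increment, so under contention the value returned may not be the one this CPU's increment produced; the xadd- and casx-based ports return it from the same atomic operation. A sketch of a fetch-and-add that returns both atomically (GCC builtin, not this changeset's code):

#include <stdint.h>

typedef uint32_t atomic_count_t;

static atomic_count_t fetch_postinc(volatile atomic_count_t *count)
{
	/* One atomic read-modify-write yields the pre-increment value. */
	return __sync_fetch_and_add(count, 1);
}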
kernel/arch/ppc32/include/types.h
 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
kernel/arch/sparc64/include/atomic.h
Besides the atomic_count_t retyping, the pointer handling in atomic_add(), test_and_set() and atomic_lock_arch() is cleaned up: the local x holding the counter address is renamed to ptr, its cast is corrected from (uint64_t) to (uintptr_t), and the single-line casx asm statements are expanded into the multi-line form:

-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-	uint64_t a, b;
+	atomic_count_t a;
+	atomic_count_t b;

 	do {
-		volatile uintptr_t x = (uint64_t) &val->count;
-
-		a = *((uint64_t *) x);
+		volatile uintptr_t ptr = (uintptr_t) &val->count;
+
+		a = *((atomic_count_t *) ptr);
 		b = a + i;
-		asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *) x)),
-		    "+r" (b) : "r" (a));
+
+		asm volatile (
+			"casx %0, %2, %1\n"
+			: "+m" (*((atomic_count_t *) ptr)),
+			  "+r" (b)
+			: "r" (a)
+		);
 	} while (a != b);

test_and_set() and atomic_lock_arch() (uint64_t tmp1/tmp2 become atomic_count_t) are reworked in the same style, and the pre/post helpers are retyped to atomic_count_t.
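casx compares the memory word with one register and, on a match, swaps in the other, leaving the old memory value behind in that register; the loop retries until the counter was unchanged between the read and the swap. The same logic with a portable builtin (sketch):

#include <stdint.h>

typedef uint64_t atomic_count_t;

static atomic_count_t casx_style_add(volatile atomic_count_t *count,
    atomic_count_t i)
{
	atomic_count_t a;
	atomic_count_t b;

	do {
		a = *count;
		/* Returns the value found in memory; equals a on success. */
		b = __sync_val_compare_and_swap(count, a, a + i);
	} while (a != b);

	return a;  /* value before addition, like sparc64 atomic_add() */
}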
kernel/arch/sparc64/include/types.h
 typedef uint64_t unative_t;
 typedef int64_t native_t;
+typedef uint64_t atomic_count_t;
kernel/generic/include/atomic.h
The generic wrapper now pulls in the per-architecture types and stores the counter as atomic_count_t:

+#include <arch/types.h>
+
 typedef struct atomic {
-	volatile long count;
+	volatile atomic_count_t count;
 } atomic_t;

 #include <arch/atomic.h>

-static inline void atomic_set(atomic_t *val, long i)
+static inline void atomic_set(atomic_t *val, atomic_count_t i)
 {
 	val->count = i;
 }

-static inline long atomic_get(atomic_t *val)
+static inline atomic_count_t atomic_get(atomic_t *val)
 {
 	return val->count;
 }
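The include split matters because the generic code needs atomic_t's layout before the architecture header defines the operations on it. Typical use of the resulting API, with the typedefs inlined so the sketch compiles stand-alone:

#include <stdint.h>

typedef uint64_t atomic_count_t;  /* normally from <arch/types.h> */

typedef struct atomic {
	volatile atomic_count_t count;
} atomic_t;

static inline void atomic_set(atomic_t *val, atomic_count_t i)
{
	val->count = i;
}

static inline atomic_count_t atomic_get(atomic_t *val)
{
	return val->count;
}

int main(void)
{
	atomic_t threads_ok;

	atomic_set(&threads_ok, 0);
	/* ... worker threads would atomic_inc(&threads_ok) here ... */

	return atomic_get(&threads_ok) == 0 ? 0 : 1;
}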
kernel/generic/src/proc/scheduler.c
The comma-declared locals are split up and average becomes atomic_count_t:

 	thread_t *t;
-	int count, average, j, k = 0;
+	int count;
+	atomic_count_t average;
 	unsigned int i;
+	int j;
+	int k = 0;
 	ipl_t ipl;
kernel/test/fpu/fpu1_ia64.c
The thread counter becomes atomic_count_t, making the cast in the comparison unnecessary:

 char *test_fpu1(void)
 {
-	unsigned int i, total = 0;
+	unsigned int i;
+	atomic_count_t total = 0;

-	while (atomic_get(&threads_ok) != (long) total) {
+	while (atomic_get(&threads_ok) != total) {
 		TPRINTF("Threads left: %d\n", total - atomic_get(&threads_ok));
kernel/test/fpu/fpu1_x86.c
The same change as in fpu1_ia64.c:

-	unsigned int i, total = 0;
+	unsigned int i;
+	atomic_count_t total = 0;

-	while (atomic_get(&threads_ok) != (long) total) {
+	while (atomic_get(&threads_ok) != total) {
kernel/test/fpu/mips2.c
The same retyping (plus a trailing-whitespace fix after the TPRINTF):

-	unsigned int i, total = 0;
+	unsigned int i;
+	atomic_count_t total = 0;

-	while (atomic_get(&threads_ok) != (long) total) {
+	while (atomic_get(&threads_ok) != total) {
kernel/test/fpu/sse1.c
The same retyping again:

-	unsigned int i, total = 0;
+	unsigned int i;
+	atomic_count_t total = 0;

-	while (atomic_get(&threads_ok) != (long) total) {
+	while (atomic_get(&threads_ok) != total) {
kernel/test/synch/rwlock5.c
 	int i, j, k;
-	long readers, writers;
+	atomic_count_t readers;
+	atomic_count_t writers;
kernel/test/synch/semaphore1.c
 	int i, j, k;
-	int consumers, producers;
+	atomic_count_t consumers;
+	atomic_count_t producers;
kernel/test/thread/thread1.c
The counter becomes atomic_count_t, the cast is dropped, and a trailing-whitespace fix lands on the for line:

 char *test_thread1(void)
 {
-	unsigned int i, total = 0;
+	unsigned int i;
+	atomic_count_t total = 0;

 	atomic_set(&finish, 0);
-	while (atomic_get(&threads_finished) < ((long) total)) {
+	while (atomic_get(&threads_finished) < total) {
 		TPRINTF("Threads left: %d\n", total - atomic_get(&threads_finished));
uspace/app/tester/thread/thread1.c
 	unsigned int i;
-	int total = 0;
+	atomic_count_t total = 0;
uspace/lib/libc/arch/amd64/include/atomic.h
atomic_inc()/atomic_dec() move to multi-line asm with named operands, atomic_postinc()/atomic_postdec() are retyped to atomic_count_t and initialize r in C instead of with a movq, and the preinc/predec macros are re-tabbed:

-static inline void atomic_inc(atomic_t *val) {
-	asm volatile ("lock incq %0\n" : "+m" (val->count));
+static inline void atomic_inc(atomic_t *val)
+{
+	asm volatile (
+		"lock incq %[count]\n"
+		: [count] "+m" (val->count)
+	);
 }

-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-	long r;
-
-	asm volatile (
-		"movq $1, %0\n"
-		"lock xaddq %0, %1\n"
-		: "=r" (r), "+m" (val->count)
-	);
+	atomic_count_t r = 1;
+
+	asm volatile (
+		"lock xaddq %[r], %[count]\n"
+		: [count] "+m" (val->count),
+		  [r] "+r" (r)
+	);

 	return r;
 }

atomic_postdec() changes analogously, with r = -1.
uspace/lib/libc/arch/amd64/include/types.h
User space also gains a signed counterpart, used below in futex.c:

 typedef uint64_t uintptr_t;
+typedef uint64_t atomic_count_t;
+typedef int64_t atomic_signed_t;
uspace/lib/libc/arch/arm32/include/atomic.h
cas() and atomic_add() are retyped to atomic_count_t, the compiler barriers around the restartable-atomic-sequence (RAS) window are expanded to the multi-line asm form, and the doxygen comments gain trailing blank comment lines:

-static inline bool cas(atomic_t *val, long ov, long nv)
-{
-	long ret = 0;
+static inline bool cas(atomic_t *val, atomic_count_t ov, atomic_count_t nv)
+{
+	atomic_count_t ret = 0;

 	ras_page[0] = 0;
-	asm volatile ("" ::: "memory");
+	asm volatile (
+		"" ::: "memory"
+	);
 	ras_page[1] = 0xffffffff;

-static inline long atomic_add(atomic_t *val, int i)
-{
-	long ret = 0;
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
+{
+	atomic_count_t ret = 0;

The preinc/predec/postinc/postdec wrappers are retyped to atomic_count_t as on the other architectures.
uspace/lib/libc/arch/arm32/include/types.h
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
uspace/lib/libc/arch/ia32/include/atomic.h
The same rework as the libc amd64 header, with the l-suffixed instructions:

-static inline void atomic_inc(atomic_t *val) {
-	asm volatile ("lock incl %0\n" : "+m" (val->count));
+static inline void atomic_inc(atomic_t *val)
+{
+	asm volatile (
+		"lock incl %[count]\n"
+		: [count] "+m" (val->count)
+	);
 }

-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-	long r;
-
-	asm volatile (
-		"movl $1, %0\n"
-		"lock xaddl %0, %1\n"
-		: "=r" (r), "+m" (val->count)
-	);
+	atomic_count_t r = 1;
+
+	asm volatile (
+		"lock xaddl %[r], %[count]\n"
+		: [count] "+m" (val->count),
+		  [r] "+r" (r)
+	);

atomic_dec() and atomic_postdec() change analogously.
uspace/lib/libc/arch/ia32/include/types.h
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
uspace/lib/libc/arch/ia64/include/atomic.h
The local v in all six helpers changes from long to atomic_count_t, and the pre/post variants are retyped accordingly:

 static inline void atomic_inc(atomic_t *val)
 {
-	long v;
+	atomic_count_t v;

-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
-	long v;
+	atomic_count_t v;

atomic_dec(), atomic_predec(), atomic_postinc() and atomic_postdec() change identically.
uspace/lib/libc/arch/ia64/include/types.h
 typedef uint64_t uintptr_t;
+typedef uint64_t atomic_count_t;
+typedef int64_t atomic_signed_t;
uspace/lib/libc/arch/mips32/include/atomic.h
The whitespace in the convenience macros is normalized, atomic_add() gets the atomic_count_t signature, and the asm operand lists are split one per line:

-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-	long tmp, v;
+	atomic_count_t tmp;
+	atomic_count_t v;

-		: "=&r" (tmp), "+m" (val->count), "=&r" (v)
-		: "r" (i), "i" (0)
+		: "=&r" (tmp),
+		  "+m" (val->count),
+		  "=&r" (v)
+		: "r" (i),
+		  "i" (0)
 	);
uspace/lib/libc/arch/mips32/include/types.h
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
uspace/lib/libc/arch/ppc32/include/atomic.h
The same rework as the kernel ppc32 header: long temporaries become atomic_count_t, operand lists are split, the closing parenthesis of each asm statement moves to its own line, and the pre/post helpers are retyped:

 static inline void atomic_inc(atomic_t *val)
 {
-	long tmp;
+	atomic_count_t tmp;

-		: "=&r" (tmp), "=m" (val->count)
-		: "r" (&val->count), "m" (val->count)
-		: "cc");
+		: "=&r" (tmp),
+		  "=m" (val->count)
+		: "r" (&val->count),
+		  "m" (val->count)
+		: "cc"
+	);

-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)

atomic_dec(), atomic_postdec(), atomic_preinc() and atomic_predec() change in the same way.
uspace/lib/libc/arch/ppc32/include/types.h
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
uspace/lib/libc/arch/sparc64/include/atomic.h
The same cleanup as the kernel sparc64 header: atomic_add() is retyped, x becomes ptr with a corrected (uintptr_t) cast, the casx asm is expanded to the multi-line form, and the pre/post helpers return atomic_count_t:

-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-	uint64_t a, b;
+	atomic_count_t a;
+	atomic_count_t b;

 	do {
-		volatile uintptr_t x = (uint64_t) &val->count;
-
-		a = *((uint64_t *) x);
+		volatile uintptr_t ptr = (uintptr_t) &val->count;
+
+		a = *((atomic_count_t *) ptr);
 		b = a + i;
-		asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *) x)), "+r" (b) : "r" (a));
+
+		asm volatile (
+			"casx %0, %2, %1\n"
+			: "+m" (*((atomic_count_t *) ptr)),
+			  "+r" (b)
+			: "r" (a)
+		);
 	} while (a != b);
uspace/lib/libc/arch/sparc64/include/types.h
 typedef uint64_t uintptr_t;
+typedef uint64_t atomic_count_t;
+typedef int64_t atomic_signed_t;
uspace/lib/libc/generic/futex.c
Since atomic_count_t is unsigned, the "has the count gone negative" tests are made explicit with a cast to the new atomic_signed_t:

 int futex_down(futex_t *futex)
 {
-	if (atomic_predec(futex) < 0)
+	if ((atomic_signed_t) atomic_predec(futex) < 0)
 		return __SYSCALL1(SYS_FUTEX_SLEEP, (sysarg_t) &futex->count);

 int futex_up(futex_t *futex)
 {
-	if (atomic_postinc(futex) < 0)
+	if ((atomic_signed_t) atomic_postinc(futex) < 0)
 		return __SYSCALL1(SYS_FUTEX_WAKEUP, (sysarg_t) &futex->count);
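The casts are load-bearing: an unsigned value compared against zero with < is never less, so without the atomic_signed_t view the futex would never sleep or wake. A two-line demonstration of the pitfall:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* What an unsigned atomic_predec() yields after decrementing 0. */
	uint32_t u = (uint32_t) -1;

	printf("%d\n", u < 0);            /* 0: unsigned is never negative */
	printf("%d\n", (int32_t) u < 0);  /* 1: the signed view is -1 */
	return 0;
}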
uspace/lib/libc/include/atomicdflt.h
The default implementation gains the <stdint.h> include needed by the new types, stores the counter as atomic_count_t, and retypes atomic_set(), atomic_get() and the fallback cas():

+#include <stdint.h>
 #include <bool.h>

 typedef struct atomic {
-	volatile long count;
+	volatile atomic_count_t count;
 } atomic_t;

-static inline void atomic_set(atomic_t *val, long i)
+static inline void atomic_set(atomic_t *val, atomic_count_t i)

-static inline long atomic_get(atomic_t *val)
+static inline atomic_count_t atomic_get(atomic_t *val)

 #ifndef CAS
-static inline bool cas(atomic_t *val, long ov, long nv)
+static inline bool cas(atomic_t *val, atomic_count_t ov, atomic_count_t nv)
 {
 	return __sync_bool_compare_and_swap(&val->count, ov, nv);
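Anything the architecture does not override can now be built on the fallback cas(); for instance, a lock-free pre-increment in the style of the sparc64 port (an illustrative sketch, not code from this changeset):

/* Lock-free pre-increment on top of the default cas(). */
static inline atomic_count_t atomic_preinc_cas(atomic_t *val)
{
	atomic_count_t ov;

	do {
		ov = atomic_get(val);
	} while (!cas(val, ov, ov + 1));

	return ov + 1;
}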