Changeset c2efbb4 in mainline for kernel/arch/amd64/include
- Timestamp:
- 2010-02-20T20:54:53Z (16 years ago)
- Branches:
- lfn, master, serial, ticket/834-toolchain-update, topic/fix-logger-deadlock, topic/msim-upgrade, topic/simplify-dev-export
- Children:
- 721d4e85, 95c4776
- Parents:
- f516bc2 (diff), b03a666 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent. - Location:
- kernel/arch/amd64/include
- Files:
-
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
kernel/arch/amd64/include/atomic.h
rf516bc2 rc2efbb4 40 40 #include <preemption.h> 41 41 42 static inline void atomic_inc(atomic_t *val) { 42 static inline void atomic_inc(atomic_t *val) 43 { 43 44 #ifdef CONFIG_SMP 44 45 asm volatile ( … … 54 55 } 55 56 56 static inline void atomic_dec(atomic_t *val) { 57 static inline void atomic_dec(atomic_t *val) 58 { 57 59 #ifdef CONFIG_SMP 58 60 asm volatile ( … … 68 70 } 69 71 70 static inline long atomic_postinc(atomic_t *val)72 static inline atomic_count_t atomic_postinc(atomic_t *val) 71 73 { 72 longr = 1;74 atomic_count_t r = 1; 73 75 74 76 asm volatile ( 75 77 "lock xaddq %[r], %[count]\n" 76 : [count] "+m" (val->count), [r] "+r" (r) 78 : [count] "+m" (val->count), 79 [r] "+r" (r) 77 80 ); 78 81 … … 80 83 } 81 84 82 static inline long atomic_postdec(atomic_t *val)85 static inline atomic_count_t atomic_postdec(atomic_t *val) 83 86 { 84 longr = -1;87 atomic_count_t r = -1; 85 88 86 89 asm volatile ( 87 90 "lock xaddq %[r], %[count]\n" 88 : [count] "+m" (val->count), [r] "+r" (r) 91 : [count] "+m" (val->count), 92 [r] "+r" (r) 89 93 ); 90 94 … … 95 99 #define atomic_predec(val) (atomic_postdec(val) - 1) 96 100 97 static inline uint64_t test_and_set(atomic_t *val) { 98 uint64_t v; 101 static inline atomic_count_t test_and_set(atomic_t *val) 102 { 103 atomic_count_t v; 99 104 100 105 asm volatile ( 101 106 "movq $1, %[v]\n" 102 107 "xchgq %[v], %[count]\n" 103 : [v] "=r" (v), [count] "+m" (val->count) 108 : [v] "=r" (v), 109 [count] "+m" (val->count) 104 110 ); 105 111 … … 107 113 } 108 114 109 110 115 /** amd64 specific fast spinlock */ 111 116 static inline void atomic_lock_arch(atomic_t *val) 112 117 { 113 uint64_t tmp;118 atomic_count_t tmp; 114 119 115 120 preemption_disable(); … … 125 130 "testq %[tmp], %[tmp]\n" 126 131 "jnz 0b\n" 127 : [count] "+m" (val->count), [tmp] "=&r" (tmp) 132 : [count] "+m" (val->count), 133 [tmp] "=&r" (tmp) 128 134 ); 135 129 136 /* 130 137 * Prevent critical section code from bleeding out this way up. -
kernel/arch/amd64/include/interrupt.h
rf516bc2 rc2efbb4 54 54 #define IRQ_PIC_SPUR 7 55 55 #define IRQ_MOUSE 12 56 #define IRQ_DP8390 9 56 57 57 58 /* this one must have four least significant bits set to ones */ -
kernel/arch/amd64/include/memstr.h
rf516bc2 rc2efbb4 27 27 */ 28 28 29 /** @addtogroup amd64 29 /** @addtogroup amd64 30 30 * @{ 31 31 */ … … 38 38 #define memcpy(dst, src, cnt) __builtin_memcpy((dst), (src), (cnt)) 39 39 40 extern void memsetw(void *dst, size_t cnt, uint16_t x); 41 extern void memsetb(void *dst, size_t cnt, uint8_t x); 42 43 extern int memcmp(const void *a, const void *b, size_t cnt); 40 extern void memsetw(void *, size_t, uint16_t); 41 extern void memsetb(void *, size_t, uint8_t); 44 42 45 43 #endif -
kernel/arch/amd64/include/types.h
rf516bc2 rc2efbb4 55 55 typedef uint64_t unative_t; 56 56 typedef int64_t native_t; 57 typedef uint64_t atomic_count_t; 57 58 58 59 typedef struct {
Note:
See TracChangeset
for help on using the changeset viewer.
