Changeset e7b7be3f in mainline for kernel/arch/amd64
- Timestamp: 2007-01-22T13:10:08Z (19 years ago)
- Branches: lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
- Children: 0f3fc9b
- Parents: 62c63fc
- Location: kernel/arch/amd64
- Files: 6 edited
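The edits below are mechanical: every occurrence of GCC's double-underscore inline-assembly spelling (`__asm__`, `__volatile__`) is replaced by the plain `asm volatile` keywords. Both spellings denote the same GCC extended inline assembly construct and compile to the same code; the underscored `__asm__` is only needed when compiling in strict ISO C mode, where the plain `asm` keyword is unavailable. A minimal stand-alone sketch of the equivalence (the helper functions are hypothetical, for illustration only, and not part of the kernel sources):

    #include <stdint.h>

    /* Pre-changeset spelling. */
    static inline uint64_t read_rsp_old(void)
    {
        uint64_t v;
        __asm__ __volatile__ ("movq %%rsp, %0" : "=r" (v));
        return v;
    }

    /* Post-changeset spelling: same constraints, same generated code. */
    static inline uint64_t read_rsp_new(void)
    {
        uint64_t v;
        asm volatile ("movq %%rsp, %0" : "=r" (v));
        return v;
    }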
kernel/arch/amd64/include/asm.h
--- r62c63fc
+++ re7b7be3f
@@ -53,5 +53,5 @@
     uintptr_t v;
 
-    __asm__ volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
+    asm volatile ("andq %%rsp, %0\n" : "=r" (v) : "0" (~((uint64_t)STACK_SIZE-1)));
 
     return v;
kernel/arch/amd64/include/atomic.h
--- r62c63fc
+++ re7b7be3f
@@ -43,7 +43,7 @@
 static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
-    __asm__ volatile ("lock incq %0\n" : "=m" (val->count));
+    asm volatile ("lock incq %0\n" : "=m" (val->count));
 #else
-    __asm__ volatile ("incq %0\n" : "=m" (val->count));
+    asm volatile ("incq %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
@@ -51,7 +51,7 @@
 static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
-    __asm__ volatile ("lock decq %0\n" : "=m" (val->count));
+    asm volatile ("lock decq %0\n" : "=m" (val->count));
 #else
-    __asm__ volatile ("decq %0\n" : "=m" (val->count));
+    asm volatile ("decq %0\n" : "=m" (val->count));
 #endif /* CONFIG_SMP */
 }
@@ -61,5 +61,5 @@
     long r = 1;
 
-    __asm__ volatile (
+    asm volatile (
         "lock xaddq %1, %0\n"
         : "=m" (val->count), "+r" (r)
@@ -73,5 +73,5 @@
     long r = -1;
 
-    __asm__ volatile (
+    asm volatile (
         "lock xaddq %1, %0\n"
         : "=m" (val->count), "+r" (r)
@@ -87,5 +87,5 @@
     uint64_t v;
 
-    __asm__ volatile (
+    asm volatile (
         "movq $1, %0\n"
         "xchgq %0, %1\n"
@@ -103,5 +103,5 @@
 
     preemption_disable();
-    __asm__ volatile (
+    asm volatile (
         "0:;"
 #ifdef CONFIG_HT
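A side note on the atomic.h hunks above: on CONFIG_SMP builds the `lock` prefix makes `incq`, `decq` and `xaddq` read-modify-write operations that are atomic across CPUs, while the uniprocessor build drops the prefix because a single instruction cannot be interleaved by another processor or by an interrupt on the same CPU. A minimal stand-alone sketch of the post-changeset pattern, assuming a simplified atomic_t (it also uses the "+m" constraint, which describes a read-modify-write memory operand more precisely than the "=m" seen in the diff):

    #include <stdint.h>

    /* Illustrative stand-in for the kernel's atomic_t. */
    typedef struct {
        volatile int64_t count;
    } atomic_t;

    static inline void atomic_inc(atomic_t *val)
    {
    #ifdef CONFIG_SMP
        /* `lock` makes the increment atomic with respect to other CPUs. */
        asm volatile ("lock incq %0" : "+m" (val->count));
    #else
        /* Uniprocessor: the single incq cannot be interrupted mid-instruction. */
        asm volatile ("incq %0" : "+m" (val->count));
    #endif
    }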
kernel/arch/amd64/include/memstr.h
--- r62c63fc
+++ re7b7be3f
@@ -52,5 +52,5 @@
     unative_t d0, d1, d2;
 
-    __asm__ __volatile__(
+    asm volatile(
         "rep movsq\n\t"
         "movq %4, %%rcx\n\t"
@@ -83,5 +83,5 @@
     unative_t ret;
 
-    __asm__(
+    asm (
         "repe cmpsb\n\t"
         "je 1f\n\t"
@@ -109,5 +109,5 @@
     unative_t d0, d1;
 
-    __asm__ __volatile__(
+    asm volatile (
         "rep stosw\n\t"
         : "=&D" (d0), "=&c" (d1), "=a" (x)
@@ -131,5 +131,5 @@
     unative_t d0, d1;
 
-    __asm__ __volatile__(
+    asm volatile (
         "rep stosb\n\t"
         : "=&D" (d0), "=&c" (d1), "=a" (x)
kernel/arch/amd64/src/cpu/cpu.c
--- r62c63fc
+++ re7b7be3f
@@ -77,5 +77,5 @@
 void cpu_setup_fpu(void)
 {
-    __asm__ volatile (
+    asm volatile (
         "movq %%cr0, %%rax;"
         "btsq $1, %%rax;" /* cr0.mp */
@@ -100,5 +100,5 @@
 void fpu_disable(void)
 {
-    __asm__ volatile (
+    asm volatile (
         "mov %%cr0,%%rax;"
         "bts $3,%%rax;"
@@ -112,5 +112,5 @@
 void fpu_enable(void)
 {
-    __asm__ volatile (
+    asm volatile (
         "mov %%cr0,%%rax;"
         "btr $3,%%rax;"
kernel/arch/amd64/src/fpu_context.c
--- r62c63fc
+++ re7b7be3f
@@ -41,5 +41,5 @@
 void fpu_context_save(fpu_context_t *fctx)
 {
-    __asm__ volatile (
+    asm volatile (
         "fxsave %0"
         : "=m"(*fctx)
@@ -50,5 +50,5 @@
 void fpu_context_restore(fpu_context_t *fctx)
 {
-    __asm__ volatile (
+    asm volatile (
         "fxrstor %0"
         : "=m"(*fctx)
@@ -59,5 +59,5 @@
 {
     /* TODO: Zero all SSE, MMX etc. registers */
-    __asm__ volatile (
+    asm volatile (
         "fninit;"
     );
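One practical detail behind the fpu_context.c hunks: `fxsave`/`fxrstor` operate on a 512-byte memory area that must be 16-byte aligned, so a stand-alone equivalent of fpu_context_t has to carry that alignment explicitly. The definition below is an assumption for illustration, not the kernel's own type:

    #include <stdint.h>

    /* Hypothetical 512-byte FXSAVE area with the required 16-byte alignment. */
    typedef struct {
        uint8_t fxsave_area[512];
    } __attribute__((aligned(16))) fpu_context_t;

    static inline void fpu_context_save(fpu_context_t *fctx)
    {
        /* fxsave writes the area, hence an output constraint. */
        asm volatile ("fxsave %0" : "=m" (*fctx));
    }

    static inline void fpu_context_restore(fpu_context_t *fctx)
    {
        /* fxrstor only reads the area, hence an input constraint here. */
        asm volatile ("fxrstor %0" : : "m" (*fctx));
    }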
kernel/arch/amd64/src/userspace.c
--- r62c63fc
+++ re7b7be3f
@@ -55,5 +55,5 @@
     ipl &= ~(0xcd4);
 
-    __asm__ volatile (""
+    asm volatile (""
         "pushq %0\n"
         "pushq %1\n"