Changeset view (diff between two revisions)
Files changed: 1 edited

Legend:

Unmodified
Added
Removed
  • kernel/arch/amd64/include/atomic.h

    rba371e1 (old)   rc00589d (new)
     40  40    #include <preemption.h>
     41  41
     42      -  static inline void atomic_inc(atomic_t *val)
     43      -  {
         42 +  static inline void atomic_inc(atomic_t *val) {
     44  43    #ifdef CONFIG_SMP
     45  44            asm volatile (
      ⋮        (unchanged lines elided)
     55  54    }
     56  55
     57      -  static inline void atomic_dec(atomic_t *val)
     58      -  {
         56 +  static inline void atomic_dec(atomic_t *val) {
     59  57    #ifdef CONFIG_SMP
     60  58            asm volatile (
      ⋮        (unchanged lines elided)
     70  68    }
     71  69
     72      -  static inline atomic_count_t atomic_postinc(atomic_t *val)
         70 +  static inline long atomic_postinc(atomic_t *val)
     73  71    {
     74      -          atomic_count_t r = 1;
         72 +          long r = 1;
     75  73
     76  74            asm volatile (
     77  75                    "lock xaddq %[r], %[count]\n"
     78      -                  : [count] "+m" (val->count),
     79      -                    [r] "+r" (r)
         76 +                  : [count] "+m" (val->count), [r] "+r" (r)
     80  77            );
     81  78
      ⋮        (unchanged lines elided)
     83  80    }
     84  81
     85      -  static inline atomic_count_t atomic_postdec(atomic_t *val)
         82 +  static inline long atomic_postdec(atomic_t *val)
     86  83    {
     87      -          atomic_count_t r = -1;
         84 +          long r = -1;
     88  85
     89  86            asm volatile (
     90  87                    "lock xaddq %[r], %[count]\n"
     91      -                  : [count] "+m" (val->count),
     92      -                    [r] "+r" (r)
         88 +                  : [count] "+m" (val->count), [r] "+r" (r)
     93  89            );
     94  90
      ⋮        (unchanged lines elided)
     99  95    #define atomic_predec(val)  (atomic_postdec(val) - 1)
    100  96
    101      -  static inline atomic_count_t test_and_set(atomic_t *val)
    102      -  {
    103      -          atomic_count_t v = 1;
         97 +  static inline uint64_t test_and_set(atomic_t *val) {
         98 +          uint64_t v;
    104  99
    105 100            asm volatile (
        101 +                  "movq $1, %[v]\n"
    106 102                    "xchgq %[v], %[count]\n"
    107      -                  : [v] "+r" (v),
    108      -                    [count] "+m" (val->count)
        103 +                  : [v] "=r" (v), [count] "+m" (val->count)
    109 104            );
    110 105
      ⋮        (unchanged lines elided)
    112 107    }
    113 108
        109 +
    114 110    /** amd64 specific fast spinlock */
    115 111    static inline void atomic_lock_arch(atomic_t *val)
    116 112    {
    117      -          atomic_count_t tmp;
        113 +          uint64_t tmp;
    118 114
    119 115            preemption_disable();
      ⋮        (unchanged lines elided)
    129 125                    "testq %[tmp], %[tmp]\n"
    130 126                    "jnz 0b\n"
    131      -                  : [count] "+m" (val->count),
    132      -                    [tmp] "=&r" (tmp)
        127 +                  : [count] "+m" (val->count), [tmp] "=&r" (tmp)
    133 128            );
    134      -
    135 129            /*
    136 130             * Prevent critical section code from bleeding out this way up.
Note: See TracChangeset for help on using the changeset viewer.