Changeset 73a4bab in mainline


Ignore:
Timestamp:
2005-11-11T14:06:55Z (19 years ago)
Author:
Jakub Vana <jakub.vana@…>
Branches:
lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
Children:
8a0b3730
Parents:
0172eba
Message:

Atomic inc & dec functions synchronized on all ia32, ia64 and mips platforms. Now there are 3 versions which return no value, the new value, and the old value of the changed variable.

Files:
5 edited

Legend:

Unmodified
Added
Removed
  • Makefile.config

    r0172eba r73a4bab  
    3737
    3838CONFIG_USERSPACE = n
    39 CONFIG_TEST =
     39#CONFIG_TEST =
    4040#CONFIG_TEST = synch/rwlock1
    4141#CONFIG_TEST = synch/rwlock2
    4242#CONFIG_TEST = synch/rwlock3
    43 #CONFIG_TEST = synch/rwlock4
     43CONFIG_TEST = synch/rwlock4
    4444#CONFIG_TEST = synch/rwlock5
    4545#CONFIG_TEST = synch/semaphore1
  • arch/ia32/include/atomic.h

    r0172eba r73a4bab  
    3636static inline void atomic_inc(atomic_t *val) {
    3737#ifdef CONFIG_SMP
    38         __asm__ volatile ("lock incl %0\n" : "=m" (*val));
     38        __asm__ volatile ("lock incl %0\n" : "+m" (*val));
    3939#else
    40         __asm__ volatile ("incl %0\n" : "=m" (*val));
     40        __asm__ volatile ("incl %0\n" : "+m" (*val));
    4141#endif /* CONFIG_SMP */
    4242}
     
    4444static inline void atomic_dec(atomic_t *val) {
    4545#ifdef CONFIG_SMP
    46         __asm__ volatile ("lock decl %0\n" : "=m" (*val));
     46        __asm__ volatile ("lock decl %0\n" : "+m" (*val));
    4747#else
    48         __asm__ volatile ("decl %0\n" : "=m" (*val));
     48        __asm__ volatile ("decl %0\n" : "+m" (*val));
    4949#endif /* CONFIG_SMP */
    5050}
     51
     52static inline atomic_t atomic_inc_pre(atomic_t *val)
     53{
     54        atomic_t r;
     55        __asm__ volatile (
     56                "movl $1,%0;"
     57                "lock xaddl %0,%1;"
     58                : "=r"(r), "+m" (*val)
     59        );
     60        return r;
     61}
     62
     63
     64
     65static inline atomic_t atomic_dec_pre(atomic_t *val)
     66{
     67        atomic_t r;
     68        __asm__ volatile (
     69                "movl $-1,%0;"
     70                "lock xaddl %0,%1;"
     71                : "=r"(r), "+m" (*val)
     72        );
     73        return r;
     74}
     75
     76#define atomic_inc_post(val) (atomic_inc_pre(val)+1)
     77#define atomic_dec_post(val) (atomic_dec_pre(val)-1)
     78
     79
    5180
    5281static inline int test_and_set(volatile int *val) {
     
    5685                "movl $1, %0\n"
    5786                "xchgl %0, %1\n"
    58                 : "=r" (v),"=m" (*val)
     87                : "=r" (v),"+m" (*val)
    5988        );
    6089       
  • arch/ia64/include/atomic.h

    r0172eba r73a4bab  
    3838        atomic_t v;
    3939
    40 /*     
    41  *      __asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "=m" (val) : "i" (imm));
    42  */
     40       
     41        __asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "+m" (*val) : "i" (imm));
     42 
    4343        *val += imm;
    4444       
     
    4646}
    4747
    48 static inline atomic_t atomic_inc(atomic_t *val) { return atomic_add(val, 1); }
    49 static inline atomic_t atomic_dec(atomic_t *val) { return atomic_add(val, -1); }
     48static inline void atomic_inc(atomic_t *val) { atomic_add(val, 1); }
     49static inline void atomic_dec(atomic_t *val) { atomic_add(val, -1); }
     50
     51
     52static inline atomic_t atomic_inc_pre(atomic_t *val) { return atomic_add(val, 1); }
     53static inline atomic_t atomic_dec_pre(atomic_t *val) { return atomic_add(val, -1); }
     54
     55
     56static inline atomic_t atomic_inc_post(atomic_t *val) { return atomic_add(val, 1)+1; }
     57static inline atomic_t atomic_dec_post(atomic_t *val) { return atomic_add(val, -1)-1; }
     58
     59
     60
    5061
    5162#endif
  • arch/ia64/src/ivt.S

    r0172eba r73a4bab  
    11#
    22# Copyright (C) 2005 Jakub Vana
    3 # Copyright (C) 2005 Jakub Jermar
    43# All rights reserved.
    54#
     
    2928
    3029#include <arch/stack.h>
    31 #include <arch/register.h>
    3230
    3331#define STACK_ITEMS             12
     
    107105       
    108106        /* assume kernel backing store */
    109         /* mov ar.bspstore = r28 ;; */
     107        mov ar.bspstore = r28 ;;
    110108       
    111109        mov r29 = ar.bsp
     
    147145        ld8 r24 = [r31], +8 ;;          /* load ar.rsc */
    148146
    149         /* mov ar.bspstore = r28 ;; */  /* (step 4) */
    150         /* mov ar.rnat = r27 */         /* (step 5) */
     147        mov ar.bspstore = r28 ;;        /* (step 4) */
     148        mov ar.rnat = r27               /* (step 5) */
    151149
    152150        mov ar.pfs = r25                /* (step 6) */
     
    192190
    193191    /* 6. switch to bank 1 and reenable PSR.ic */
    194         ssm PSR_IC_MASK
     192        ssm 0x2000
    195193        bsw.1 ;;
    196194        srlz.d
     
    248246   
    249247    /* 9. skipped (will not enable interrupts) */
    250         /*
    251          * ssm PSR_I_MASK
    252          * ;;
    253          * srlz.d
    254          */
    255248
    256249    /* 10. call handler */
     
    262255       
    263256    /* 12. skipped (will not disable interrupts) */
    264         /*
    265          * rsm PSR_I_MASK
    266          * ;;
    267          * srlz.d
    268          */
    269257
    270258    /* 13. restore general and floating-point registers */
     
    320308       
    321309    /* 15. disable PSR.ic and switch to bank 0 */
    322         rsm PSR_IC_MASK
     310        rsm 0x2000
    323311        bsw.0 ;;
    324312        srlz.d
  • arch/mips32/include/atomic.h

    r0172eba r73a4bab  
    3535#define atomic_dec(x)   (a_sub(x,1))
    3636
     37#define atomic_inc_pre(x) (a_add(x,1)-1)
     38#define atomic_dec_pre(x) (a_sub(x,1)+1)
     39
     40#define atomic_inc_post(x) (a_add(x,1))
     41#define atomic_dec_post(x) (a_sub(x,1))
     42
     43
    3744typedef volatile __u32 atomic_t;
    3845
     
    4552 * of the variable to a special register and if another process writes to
    4653 * the same location, the SC (store-conditional) instruction fails.
     54 
     55 Returns (*val)+i
     56 
    4757 */
    4858static inline atomic_t a_add(atomic_t *val, int i)
     
    7383 *
    7484 * Implemented in the same manner as a_add, except we substract the value.
     85
     86 Returns (*val)-i
     87
    7588 */
    7689static inline atomic_t a_sub(atomic_t *val, int i)
Note: See TracChangeset for help on using the changeset viewer.