Changeset 228666c in mainline


Timestamp: 2010-02-20T18:41:53Z (14 years ago)
Author: Martin Decky <martin@…>
Branches: lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
Children: b03a666
Parents: bc9da2a
Message:

introduce atomic_count_t as the explicit type of the internal value in atomic_t (this is probably better than the chaotic mix of int/long)
atomic_count_t is defined as unsigned; for signed semantics, cast it to atomic_signed_t
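
The new type is a plain per-architecture typedef (uint32_t/int32_t on 32-bit ports, uint64_t/int64_t on 64-bit ones). A minimal standalone sketch of the unsigned-counter-plus-signed-cast idiom, modelled on the futex_down()/futex_up() change below; everything apart from the two typedefs and the atomic_t struct is illustrative only, not code from this changeset:

#include <stdint.h>
#include <stdio.h>

typedef uint64_t atomic_count_t;   /* unsigned internal value */
typedef int64_t atomic_signed_t;   /* cast target for signed tests */

typedef struct atomic {
        volatile atomic_count_t count;
} atomic_t;

int main(void)
{
        atomic_t futex = { .count = 0 };

        /* Unsigned arithmetic wraps around instead of going negative... */
        futex.count--;

        /* ...so a "did we drop below zero?" test has to cast to the signed
           view first, exactly as futex_down()/futex_up() do below. */
        if ((atomic_signed_t) futex.count < 0)
                printf("negative in signed view: %lld\n",
                    (long long) (atomic_signed_t) futex.count);

        return 0;
}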

Files: 40 edited

Legend:

Unmodified lines are prefixed with a space, added lines with +, removed lines with -.
  • kernel/arch/amd64/include/atomic.h

 #include <preemption.h>
 
-static inline void atomic_inc(atomic_t *val) {
+static inline void atomic_inc(atomic_t *val)
+{
 #ifdef CONFIG_SMP
         asm volatile (
…
 }
 
-static inline void atomic_dec(atomic_t *val) {
+static inline void atomic_dec(atomic_t *val)
+{
 #ifdef CONFIG_SMP
         asm volatile (
…
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-        long r = 1;
+        atomic_count_t r = 1;
 
         asm volatile (
                 "lock xaddq %[r], %[count]\n"
-                : [count] "+m" (val->count), [r] "+r" (r)
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
         );
 
…
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
-        long r = -1;
+        atomic_count_t r = -1;
 
         asm volatile (
                 "lock xaddq %[r], %[count]\n"
-                : [count] "+m" (val->count), [r] "+r" (r)
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
         );
 
…
 #define atomic_predec(val)  (atomic_postdec(val) - 1)
 
-static inline uint64_t test_and_set(atomic_t *val) {
-        uint64_t v;
+static inline atomic_count_t test_and_set(atomic_t *val)
+{
+        atomic_count_t v;
 
         asm volatile (
                 "movq $1, %[v]\n"
                 "xchgq %[v], %[count]\n"
-                : [v] "=r" (v), [count] "+m" (val->count)
+                : [v] "=r" (v),
+                  [count] "+m" (val->count)
         );
 
…
 }
 
-
 /** amd64 specific fast spinlock */
 static inline void atomic_lock_arch(atomic_t *val)
 {
-        uint64_t tmp;
+        atomic_count_t tmp;
 
         preemption_disable();
…
                 "testq %[tmp], %[tmp]\n"
                 "jnz 0b\n"
-                : [count] "+m" (val->count), [tmp] "=&r" (tmp)
+                : [count] "+m" (val->count),
+                  [tmp] "=&r" (tmp)
         );
+
         /*
          * Prevent critical section code from bleeding out this way up.
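
Both the kernel and libc x86/amd64 headers in this changeset build the post/pre primitives on xadd, which returns the previous value of the memory operand. A compiler-builtin sketch of the same relationship, using __sync_fetch_and_add instead of inline assembly (the demo_* helpers are hypothetical, not part of the changeset):

#include <stdint.h>

typedef uint64_t atomic_count_t;

typedef struct atomic {
        volatile atomic_count_t count;
} atomic_t;

/* Fetch-and-add returns the value *before* the addition, just like
 * "lock xaddq": that is exactly the post-increment semantics. */
static inline atomic_count_t demo_postinc(atomic_t *val)
{
        return __sync_fetch_and_add(&val->count, 1);
}

/* The pre-increment value is simply the post-increment value plus one,
 * mirroring the atomic_preinc()/atomic_predec() macros above. */
static inline atomic_count_t demo_preinc(atomic_t *val)
{
        return demo_postinc(val) + 1;
}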
  • kernel/arch/amd64/include/types.h

 typedef uint64_t unative_t;
 typedef int64_t native_t;
+typedef uint64_t atomic_count_t;
 
 typedef struct {
  • kernel/arch/arm32/include/atomic.h

  *
  */
-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-        long ret;
-
         /*
          * This implementation is for UP pre-ARMv6 systems where we do not have
…
         ipl_t ipl = interrupts_disable();
         val->count += i;
-        ret = val->count;
+        atomic_count_t ret = val->count;
         interrupts_restore(ipl);
 
…
  *
  * @param val Variable to be incremented.
+ *
  */
 static inline void atomic_inc(atomic_t *val)
…
  *
  * @param val Variable to be decremented.
+ *
  */
 static inline void atomic_dec(atomic_t *val) {
…
  * @param val Variable to be incremented.
  * @return    Value after incrementation.
+ *
  */
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
         return atomic_add(val, 1);
…
  * @param val Variable to be decremented.
  * @return    Value after decrementation.
+ *
  */
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
         return atomic_add(val, -1);
…
  * @param val Variable to be incremented.
  * @return    Value before incrementation.
+ *
  */
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
         return atomic_add(val, 1) - 1;
…
  * @param val Variable to be decremented.
  * @return    Value before decrementation.
+ *
  */
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
         return atomic_add(val, -1) + 1;
  • kernel/arch/arm32/include/types.h

  */
 
-/** @addtogroup arm32
+/** @addtogroup arm32
  * @{
  */
…
 
 #ifndef DOXYGEN
-#       define ATTRIBUTE_PACKED __attribute__ ((packed))
+        #define ATTRIBUTE_PACKED __attribute__((packed))
 #else
-#       define ATTRIBUTE_PACKED
+        #define ATTRIBUTE_PACKED
 #endif
 
…
 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
 
 typedef struct {
  • kernel/arch/ia32/include/atomic.h

 #include <preemption.h>
 
-static inline void atomic_inc(atomic_t *val) {
+static inline void atomic_inc(atomic_t *val)
+{
 #ifdef CONFIG_SMP
         asm volatile (
…
 }
 
-static inline void atomic_dec(atomic_t *val) {
+static inline void atomic_dec(atomic_t *val)
+{
 #ifdef CONFIG_SMP
         asm volatile (
…
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-        long r = 1;
+        atomic_count_t r = 1;
 
         asm volatile (
                 "lock xaddl %[r], %[count]\n"
-                : [count] "+m" (val->count), [r] "+r" (r)
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
         );
 
…
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
-        long r = -1;
+        atomic_count_t r = -1;
 
         asm volatile (
                 "lock xaddl %[r], %[count]\n"
-                : [count] "+m" (val->count), [r] "+r"(r)
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
         );
 
…
 #define atomic_predec(val)  (atomic_postdec(val) - 1)
 
-static inline uint32_t test_and_set(atomic_t *val) {
-        uint32_t v;
+static inline atomic_count_t test_and_set(atomic_t *val)
+{
+        atomic_count_t v;
 
         asm volatile (
                 "movl $1, %[v]\n"
                 "xchgl %[v], %[count]\n"
-                : [v] "=r" (v), [count] "+m" (val->count)
+                : [v] "=r" (v),
+                  [count] "+m" (val->count)
         );
 
…
 static inline void atomic_lock_arch(atomic_t *val)
 {
-        uint32_t tmp;
+        atomic_count_t tmp;
 
         preemption_disable();
…
                 "testl %[tmp], %[tmp]\n"
                 "jnz 0b\n"
-                : [count] "+m" (val->count), [tmp] "=&r" (tmp)
+                : [count] "+m" (val->count),
+                  [tmp] "=&r" (tmp)
         );
+
         /*
          * Prevent critical section code from bleeding out this way up.
  • kernel/arch/ia32/include/types.h

 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
 
 typedef struct {
  • kernel/arch/ia64/include/atomic.h

 #define KERN_ia64_ATOMIC_H_
 
-static inline uint64_t test_and_set(atomic_t *val)
+static inline atomic_count_t test_and_set(atomic_t *val)
 {
-        uint64_t v;
-
+        atomic_count_t v;
+
         asm volatile (
                 "movl %[v] = 0x1;;\n"
…
 {
         do {
-                while (val->count)
-                        ;
+                while (val->count);
         } while (test_and_set(val));
 }
…
 static inline void atomic_inc(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 static inline void atomic_dec(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
  • kernel/arch/ia64/include/types.h

  */
 
-/** @addtogroup ia64
+/** @addtogroup ia64
  * @{
  */
…
 typedef uint64_t unative_t;
 typedef int64_t native_t;
+typedef uint64_t atomic_count_t;
 
 typedef struct {
  • kernel/arch/mips32/include/atomic.h

  */
 
-/** @addtogroup mips32
+/** @addtogroup mips32
  * @{
  */
…
  *
  * @return Value after addition.
+ *
  */
-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-        long tmp, v;
+        atomic_count_t tmp;
+        atomic_count_t v;
 
         asm volatile (
…
                 "       beq %0, %4, 1b\n"   /* if the atomic operation failed, try again */
                 "       nop\n"
-                : "=&r" (tmp), "+m" (val->count), "=&r" (v)
-                : "r" (i), "i" (0)
+                : "=&r" (tmp),
+                  "+m" (val->count),
+                  "=&r" (v)
+                : "r" (i),
+                  "i" (0)
         );
 
…
 }
 
-static inline uint32_t test_and_set(atomic_t *val) {
-        uint32_t tmp, v;
+static inline atomic_count_t test_and_set(atomic_t *val)
+{
+        atomic_count_t tmp;
+        atomic_count_t v;
 
         asm volatile (
…
                 "       beqz %0, 1b\n"
                 "2:\n"
-                : "=&r" (tmp), "+m" (val->count), "=&r" (v)
+                : "=&r" (tmp),
+                  "+m" (val->count),
+                  "=&r" (v)
                 : "i" (1)
         );
…
 }
 
-static inline void atomic_lock_arch(atomic_t *val) {
+static inline void atomic_lock_arch(atomic_t *val)
+{
         do {
-                while (val->count)
-                        ;
+                while (val->count);
         } while (test_and_set(val));
 }
  • kernel/arch/mips32/include/types.h

  */
 
-/** @addtogroup mips32
+/** @addtogroup mips32
  * @{
  */
…
 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
 
 typedef struct {
  • kernel/arch/ppc32/include/atomic.h

  */
 
-/** @addtogroup ppc32
+/** @addtogroup ppc32
  * @{
  */
…
 static inline void atomic_inc(atomic_t *val)
 {
-        long tmp;
-
+        atomic_count_t tmp;
+
         asm volatile (
                 "1:\n"
…
                 "stwcx. %0, 0, %2\n"
                 "bne- 1b"
-                : "=&r" (tmp), "=m" (val->count)
-                : "r" (&val->count), "m" (val->count)
+                : "=&r" (tmp),
+                  "=m" (val->count)
+                : "r" (&val->count),
+                  "m" (val->count)
                 : "cc"
         );
…
 static inline void atomic_dec(atomic_t *val)
 {
-        long tmp;
-
+        atomic_count_t tmp;
+
         asm volatile (
                 "1:\n"
                 "lwarx %0, 0, %2\n"
                 "addic %0, %0, -1\n"
-                "stwcx. %0, 0, %2\n"
+                "stwcx. %0, 0, %2\n"
                 "bne- 1b"
-                : "=&r" (tmp), "=m" (val->count)
-                : "r" (&val->count), "m" (val->count)
+                : "=&r" (tmp),
+                  "=m" (val->count)
+                : "r" (&val->count),
+                  "m" (val->count)
                 : "cc"
         );
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
         atomic_inc(val);
…
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
         atomic_dec(val);
…
 }
 
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
         atomic_inc(val);
…
 }
 
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
         atomic_dec(val);
  • kernel/arch/ppc32/include/types.h

  */
 
-/** @addtogroup ppc32
+/** @addtogroup ppc32
  * @{
  */
…
 typedef uint32_t unative_t;
 typedef int32_t native_t;
+typedef uint32_t atomic_count_t;
 
 typedef struct {
  • kernel/arch/sparc64/include/atomic.h

  */
 
-/** @addtogroup sparc64
+/** @addtogroup sparc64
  * @{
  */
…
  *
  * @param val Atomic variable.
- * @param i Signed value to be added.
+ * @param i   Signed value to be added.
  *
  * @return Value of the atomic variable as it existed before addition.
+ *
  */
-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-        uint64_t a, b;
-
+        atomic_count_t a;
+        atomic_count_t b;
+
         do {
-                volatile uintptr_t x = (uint64_t) &val->count;
-
-                a = *((uint64_t *) x);
+                volatile uintptr_t ptr = (uintptr_t) &val->count;
+
+                a = *((atomic_count_t *) ptr);
                 b = a + i;
-                asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *)x)),
-                    "+r" (b) : "r" (a));
+
+                asm volatile (
+                        "casx %0, %2, %1\n"
+                        : "+m" (*((atomic_count_t *) ptr)),
+                          "+r" (b)
+                        : "r" (a)
+                );
         } while (a != b);
-
+
         return a;
 }
 
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
         return atomic_add(val, 1) + 1;
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
         return atomic_add(val, 1);
 }
 
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
         return atomic_add(val, -1) - 1;
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
         return atomic_add(val, -1);
…
 }
 
-static inline long test_and_set(atomic_t *val)
+static inline atomic_count_t test_and_set(atomic_t *val)
 {
-        uint64_t v = 1;
-        volatile uintptr_t x = (uint64_t) &val->count;
-
-        asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *) x)),
-            "+r" (v) : "r" (0));
-
+        atomic_count_t v = 1;
+        volatile uintptr_t ptr = (uintptr_t) &val->count;
+
+        asm volatile (
+                "casx %0, %2, %1\n"
+                : "+m" (*((atomic_count_t *) ptr)),
+                  "+r" (v)
+                : "r" (0)
+        );
+
         return v;
 }
…
 static inline void atomic_lock_arch(atomic_t *val)
 {
-        uint64_t tmp1 = 1;
-        uint64_t tmp2 = 0;
-
-        volatile uintptr_t x = (uint64_t) &val->count;
-
+        atomic_count_t tmp1 = 1;
+        atomic_count_t tmp2 = 0;
+
+        volatile uintptr_t ptr = (uintptr_t) &val->count;
+
         preemption_disable();
-
+
         asm volatile (
-        "0:\n"
-                "casx %0, %3, %1\n"
-                "brz %1, 2f\n"
-                "nop\n"
-        "1:\n"
-                "ldx %0, %2\n"
-                "brz %2, 0b\n"
-                "nop\n"
-                "ba %%xcc, 1b\n"
-                "nop\n"
-        "2:\n"
-                : "+m" (*((uint64_t *) x)), "+r" (tmp1), "+r" (tmp2) : "r" (0)
+                "0:\n"
+                        "casx %0, %3, %1\n"
+                        "brz %1, 2f\n"
+                        "nop\n"
+                "1:\n"
+                        "ldx %0, %2\n"
+                        "brz %2, 0b\n"
+                        "nop\n"
+                        "ba %%xcc, 1b\n"
+                        "nop\n"
+                "2:\n"
+                : "+m" (*((atomic_count_t *) ptr)),
+                  "+r" (tmp1),
+                  "+r" (tmp2)
+                : "r" (0)
         );
 
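
The sparc64 atomic_add() above is a compare-and-swap retry loop around the casx instruction: read the counter, compute the new value, and retry until the CAS observes the value that was read. A portable sketch of the same loop using the GCC __sync builtin instead of casx (illustrative only, not code from the port):

#include <stdint.h>

typedef uint64_t atomic_count_t;

typedef struct atomic {
        volatile atomic_count_t count;
} atomic_t;

static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
{
        atomic_count_t a;
        atomic_count_t b;

        do {
                a = val->count;   /* snapshot the current value */
                b = a + i;        /* compute the desired new value */
                /* publish b only if the counter still holds a;
                   the builtin returns the value it found there */
        } while (__sync_val_compare_and_swap(&val->count, a, b) != a);

        return a;                 /* value before the addition */
}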
  • kernel/arch/sparc64/include/types.h

  */
 
-/** @addtogroup sparc64
+/** @addtogroup sparc64
  * @{
  */
…
 typedef uint64_t unative_t;
 typedef int64_t native_t;
+typedef uint64_t atomic_count_t;
 
 typedef struct {
  • kernel/generic/include/atomic.h

  */
 
-/** @addtogroup generic
+/** @addtogroup generic
  * @{
  */
…
 #define KERN_ATOMIC_H_
 
+#include <arch/types.h>
+
 typedef struct atomic {
-        volatile long count;
+        volatile atomic_count_t count;
 } atomic_t;
 
 #include <arch/atomic.h>
 
-static inline void atomic_set(atomic_t *val, long i)
+static inline void atomic_set(atomic_t *val, atomic_count_t i)
 {
         val->count = i;
 }
 
-static inline long atomic_get(atomic_t *val)
+static inline atomic_count_t atomic_get(atomic_t *val)
 {
         return val->count;
  • kernel/generic/src/proc/scheduler.c

 {
         thread_t *t;
-        int count, average, j, k = 0;
+        int count;
+        atomic_count_t average;
         unsigned int i;
+        int j;
+        int k = 0;
         ipl_t ipl;
 
  • kernel/test/fpu/fpu1_ia64.c

 char *test_fpu1(void)
 {
-        unsigned int i, total = 0;
+        unsigned int i;
+        atomic_count_t total = 0;
 
         waitq_initialize(&can_start);
…
         waitq_wakeup(&can_start, WAKEUP_ALL);
 
-        while (atomic_get(&threads_ok) != (long) total) {
+        while (atomic_get(&threads_ok) != total) {
                 TPRINTF("Threads left: %d\n", total - atomic_get(&threads_ok));
                 thread_sleep(1);
  • kernel/test/fpu/fpu1_x86.c

 char *test_fpu1(void)
 {
-        unsigned int i, total = 0;
+        unsigned int i;
+        atomic_count_t total = 0;
 
         waitq_initialize(&can_start);
…
         waitq_wakeup(&can_start, WAKEUP_ALL);
 
-        while (atomic_get(&threads_ok) != (long) total) {
+        while (atomic_get(&threads_ok) != total) {
                 TPRINTF("Threads left: %d\n", total - atomic_get(&threads_ok));
                 thread_sleep(1);
  • kernel/test/fpu/mips2.c

 char *test_mips2(void)
 {
-        unsigned int i, total = 0;
+        unsigned int i;
+        atomic_count_t total = 0;
 
         waitq_initialize(&can_start);
…
 
         TPRINTF("ok\n");
-
+
         thread_sleep(1);
         waitq_wakeup(&can_start, WAKEUP_ALL);
 
-        while (atomic_get(&threads_ok) != (long) total) {
+        while (atomic_get(&threads_ok) != total) {
                 TPRINTF("Threads left: %d\n", total - atomic_get(&threads_ok));
                 thread_sleep(1);
  • kernel/test/fpu/sse1.c

 char *test_sse1(void)
 {
-        unsigned int i, total = 0;
+        unsigned int i;
+        atomic_count_t total = 0;
 
         waitq_initialize(&can_start);
…
         waitq_wakeup(&can_start, WAKEUP_ALL);
 
-        while (atomic_get(&threads_ok) != (long) total) {
+        while (atomic_get(&threads_ok) != total) {
                 TPRINTF("Threads left: %d\n", total - atomic_get(&threads_ok));
                 thread_sleep(1);
  • kernel/test/synch/rwlock5.c

 {
         int i, j, k;
-        long readers, writers;
+        atomic_count_t readers;
+        atomic_count_t writers;
 
         waitq_initialize(&can_start);
  • kernel/test/synch/semaphore1.c

 {
         int i, j, k;
-        int consumers, producers;
+        atomic_count_t consumers;
+        atomic_count_t producers;
 
         waitq_initialize(&can_start);
  • kernel/test/thread/thread1.c

 char *test_thread1(void)
 {
-        unsigned int i, total = 0;
+        unsigned int i;
+        atomic_count_t total = 0;
 
         atomic_set(&finish, 1);
         atomic_set(&threads_finished, 0);
 
-        for (i = 0; i < THREADS; i++) { 
+        for (i = 0; i < THREADS; i++) {
                 thread_t *t;
                 if (!(t = thread_create(threadtest, NULL, TASK, 0, "threadtest", false))) {
…
 
         atomic_set(&finish, 0);
-        while (atomic_get(&threads_finished) < ((long) total)) {
+        while (atomic_get(&threads_finished) < total) {
                 TPRINTF("Threads left: %d\n", total - atomic_get(&threads_finished));
                 thread_sleep(1);
  • uspace/app/tester/thread/thread1.c

 {
         unsigned int i;
-        int total = 0;
+        atomic_count_t total = 0;
 
         atomic_set(&finish, 1);
  • uspace/lib/libc/arch/amd64/include/atomic.h

 #include <atomicdflt.h>
 
-static inline void atomic_inc(atomic_t *val) {
-        asm volatile ("lock incq %0\n" : "+m" (val->count));
+static inline void atomic_inc(atomic_t *val)
+{
+        asm volatile (
+                "lock incq %[count]\n"
+                : [count] "+m" (val->count)
+        );
 }
 
-static inline void atomic_dec(atomic_t *val) {
-        asm volatile ("lock decq %0\n" : "+m" (val->count));
+static inline void atomic_dec(atomic_t *val)
+{
+        asm volatile (
+                "lock decq %[count]\n"
+                : [count] "+m" (val->count)
+        );
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-        long r;
-
-        asm volatile (
-                "movq $1, %0\n"
-                "lock xaddq %0, %1\n"
-                : "=r" (r), "+m" (val->count)
-        );
-
-        return r;
-}
-
-static inline long atomic_postdec(atomic_t *val)
-{
-        long r;
+        atomic_count_t r = 1;
 
         asm volatile (
-                "movq $-1, %0\n"
-                "lock xaddq %0, %1\n"
-                : "=r" (r), "+m" (val->count)
+                "lock xaddq %[r], %[count]\n"
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
         );
 
…
 }
 
-#define atomic_preinc(val) (atomic_postinc(val) + 1)
-#define atomic_predec(val) (atomic_postdec(val) - 1)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
+{
+        atomic_count_t r = -1;
+
+        asm volatile (
+                "lock xaddq %[r], %[count]\n"
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
+        );
+
+        return r;
+}
+
+#define atomic_preinc(val)  (atomic_postinc(val) + 1)
+#define atomic_predec(val)  (atomic_postdec(val) - 1)
 
 #endif
  • uspace/lib/libc/arch/amd64/include/types.h

 
 typedef uint64_t uintptr_t;
+typedef uint64_t atomic_count_t;
+typedef int64_t atomic_signed_t;
 
 #endif
  • uspace/lib/libc/arch/arm32/include/atomic.h

  */
 
-/** @addtogroup libcarm32
+/** @addtogroup libcarm32
  * @{
  */
…
 
 #define LIBC_ARCH_ATOMIC_H_
-#define CAS 
+#define CAS
 
 #include <atomicdflt.h>
…
 extern uintptr_t *ras_page;
 
-static inline bool cas(atomic_t *val, long ov, long nv)
-{
-        long ret = 0;
-
+static inline bool cas(atomic_t *val, atomic_count_t ov, atomic_count_t nv)
+{
+        atomic_count_t ret = 0;
+
         /*
          * The following instructions between labels 1 and 2 constitute a
…
                 : "memory"
         );
-
+
         ras_page[0] = 0;
-        asm volatile ("" ::: "memory");
+        asm volatile (
+                "" ::: "memory"
+        );
         ras_page[1] = 0xffffffff;
-
+
         return (bool) ret;
 }
…
  *
  * @return Value after addition.
- */
-static inline long atomic_add(atomic_t *val, int i)
-{
-        long ret = 0;
-
+ *
+ */
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
+{
+        atomic_count_t ret = 0;
+
         /*
          * The following instructions between labels 1 and 2 constitute a
…
                 : [imm] "r" (i)
         );
-
+
         ras_page[0] = 0;
-        asm volatile ("" ::: "memory");
+        asm volatile (
+                "" ::: "memory"
+        );
         ras_page[1] = 0xffffffff;
-
+
         return ret;
 }
…
  *
  * @param val Variable to be incremented.
+ *
  */
 static inline void atomic_inc(atomic_t *val)
…
  *
  * @param val Variable to be decremented.
+ *
  */
 static inline void atomic_dec(atomic_t *val)
…
  * @param val Variable to be incremented.
  * @return    Value after incrementation.
- */
-static inline long atomic_preinc(atomic_t *val)
+ *
+ */
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
         return atomic_add(val, 1);
…
  * @param val Variable to be decremented.
  * @return    Value after decrementation.
- */
-static inline long atomic_predec(atomic_t *val)
+ *
+ */
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
         return atomic_add(val, -1);
…
  * @param val Variable to be incremented.
  * @return    Value before incrementation.
- */
-static inline long atomic_postinc(atomic_t *val)
+ *
+ */
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
         return atomic_add(val, 1) - 1;
…
  * @param val Variable to be decremented.
  * @return    Value before decrementation.
- */
-static inline long atomic_postdec(atomic_t *val)
+ *
+ */
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
         return atomic_add(val, -1) + 1;
  • uspace/lib/libc/arch/arm32/include/types.h

 
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
 
 #endif
  • uspace/lib/libc/arch/ia32/include/atomic.h

 #include <atomicdflt.h>
 
-static inline void atomic_inc(atomic_t *val) {
-        asm volatile ("lock incl %0\n" : "+m" (val->count));
+static inline void atomic_inc(atomic_t *val)
+{
+        asm volatile (
+                "lock incl %[count]\n"
+                : [count] "+m" (val->count)
+        );
 }
 
-static inline void atomic_dec(atomic_t *val) {
-        asm volatile ("lock decl %0\n" : "+m" (val->count));
+static inline void atomic_dec(atomic_t *val)
+{
+        asm volatile (
+                "lock decl %[count]\n"
+                : [count] "+m" (val->count)
+        );
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-        long r;
-
-        asm volatile (
-                "movl $1, %0\n"
-                "lock xaddl %0, %1\n"
-                : "=r" (r), "+m" (val->count)
-        );
-
-        return r;
-}
-
-static inline long atomic_postdec(atomic_t *val)
-{
-        long r;
+        atomic_count_t r = 1;
 
         asm volatile (
-                "movl $-1, %0\n"
-                "lock xaddl %0, %1\n"
-                : "=r" (r), "+m" (val->count)
+                "lock xaddl %[r], %[count]\n"
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
         );
 
…
 }
 
-#define atomic_preinc(val) (atomic_postinc(val) + 1)
-#define atomic_predec(val) (atomic_postdec(val) - 1)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
+{
+        atomic_count_t r = -1;
+
+        asm volatile (
+                "lock xaddl %[r], %[count]\n"
+                : [count] "+m" (val->count),
+                  [r] "+r" (r)
+        );
+
+        return r;
+}
+
+#define atomic_preinc(val)  (atomic_postinc(val) + 1)
+#define atomic_predec(val)  (atomic_postdec(val) - 1)
 
 #endif
  • uspace/lib/libc/arch/ia32/include/types.h

 
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
 
 #endif
  • uspace/lib/libc/arch/ia64/include/atomic.h

 static inline void atomic_inc(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 static inline void atomic_dec(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
…
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
-        long v;
+        atomic_count_t v;
 
         asm volatile (
  • uspace/lib/libc/arch/ia64/include/types.h

 
 typedef uint64_t uintptr_t;
+typedef uint64_t atomic_count_t;
+typedef int64_t atomic_signed_t;
 
 typedef struct {
  • uspace/lib/libc/arch/mips32/include/atomic.h

  */
 
-/** @addtogroup libcmips32
+/** @addtogroup libcmips32
  * @{
  */
 /** @file
- * @ingroup libcmips32eb
+ * @ingroup libcmips32eb
  */
 
…
 #include <atomicdflt.h>
 
-#define atomic_inc(x)   ((void) atomic_add(x, 1))
-#define atomic_dec(x)   ((void) atomic_add(x, -1))
+#define atomic_inc(x)  ((void) atomic_add(x, 1))
+#define atomic_dec(x)  ((void) atomic_add(x, -1))
 
-#define atomic_postinc(x) (atomic_add(x, 1) - 1)
-#define atomic_postdec(x) (atomic_add(x, -1) + 1)
+#define atomic_postinc(x)  (atomic_add(x, 1) - 1)
+#define atomic_postdec(x)  (atomic_add(x, -1) + 1)
 
-#define atomic_preinc(x) atomic_add(x, 1)
-#define atomic_predec(x) atomic_add(x, -1)
+#define atomic_preinc(x)  atomic_add(x, 1)
+#define atomic_predec(x)  atomic_add(x, -1)
 
 /* Atomic addition of immediate value.
  *
  * @param val Memory location to which will be the immediate value added.
- * @param i Signed immediate that will be added to *val.
+ * @param i   Signed immediate that will be added to *val.
  *
  * @return Value after addition.
+ *
  */
-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-        long tmp, v;
-
+        atomic_count_t tmp;
+        atomic_count_t v;
+
         asm volatile (
                 "1:\n"
…
                 /*      nop     */              /* nop is inserted automatically by compiler */
                 "       nop\n"
-                : "=&r" (tmp), "+m" (val->count), "=&r" (v)
-                : "r" (i), "i" (0)
-                );
-
+                : "=&r" (tmp),
+                  "+m" (val->count),
+                  "=&r" (v)
+                : "r" (i),
+                  "i" (0)
+        );
+
         return v;
 }
  • uspace/lib/libc/arch/mips32/include/types.h

  */
 
-/** @addtogroup libcmips32
+/** @addtogroup libcmips32
  * @{
  */
…
 
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
 
 #endif
  • uspace/lib/libc/arch/ppc32/include/atomic.h

  */
 
-/** @addtogroup libcppc32
+/** @addtogroup libcppc32
  * @{
  */
…
 static inline void atomic_inc(atomic_t *val)
 {
-        long tmp;
-
+        atomic_count_t tmp;
+
         asm volatile (
                 "1:\n"
…
                 "stwcx. %0, 0, %2\n"
                 "bne- 1b"
-                : "=&r" (tmp), "=m" (val->count)
-                : "r" (&val->count), "m" (val->count)
-                : "cc");
+                : "=&r" (tmp),
+                  "=m" (val->count)
+                : "r" (&val->count),
+                  "m" (val->count)
+                : "cc"
+        );
 }
 
 static inline void atomic_dec(atomic_t *val)
 {
-        long tmp;
-
+        atomic_count_t tmp;
+
         asm volatile (
                 "1:\n"
                 "lwarx %0, 0, %2\n"
                 "addic %0, %0, -1\n"
-                "stwcx. %0, 0, %2\n"
+                "stwcx. %0, 0, %2\n"
                 "bne- 1b"
-                : "=&r" (tmp), "=m" (val->count)
-                : "r" (&val->count), "m" (val->count)
-                : "cc");
+                : "=&r" (tmp),
+                  "=m" (val->count)
+                : "r" (&val->count),
+                  "m" (val->count)
+                : "cc"
+        );
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
         atomic_inc(val);
…
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
         atomic_dec(val);
…
 }
 
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
         atomic_inc(val);
…
 }
 
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
         atomic_dec(val);
  • uspace/lib/libc/arch/ppc32/include/types.h

  */
 
-/** @addtogroup libcppc32
+/** @addtogroup libcppc32
  * @{
  */
…
 
 typedef uint32_t uintptr_t;
+typedef uint32_t atomic_count_t;
+typedef int32_t atomic_signed_t;
 
 #endif
  • uspace/lib/libc/arch/sparc64/include/atomic.h

  *
  * @param val Atomic variable.
- * @param i Signed value to be added.
+ * @param i   Signed value to be added.
  *
  * @return Value of the atomic variable as it existed before addition.
+ *
  */
-static inline long atomic_add(atomic_t *val, int i)
+static inline atomic_count_t atomic_add(atomic_t *val, atomic_count_t i)
 {
-        uint64_t a, b;
-
+        atomic_count_t a;
+        atomic_count_t b;
+
         do {
-                volatile uintptr_t x = (uint64_t) &val->count;
-
-                a = *((uint64_t *) x);
+                volatile uintptr_t ptr = (uintptr_t) &val->count;
+
+                a = *((atomic_count_t *) ptr);
                 b = a + i;
-                asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *)x)), "+r" (b) : "r" (a));
+
+                asm volatile (
+                        "casx %0, %2, %1\n"
+                        : "+m" (*((atomic_count_t *) ptr)),
+                          "+r" (b)
+                        : "r" (a)
+                );
         } while (a != b);
-
+
         return a;
 }
 
-static inline long atomic_preinc(atomic_t *val)
+static inline atomic_count_t atomic_preinc(atomic_t *val)
 {
         return atomic_add(val, 1) + 1;
 }
 
-static inline long atomic_postinc(atomic_t *val)
+static inline atomic_count_t atomic_postinc(atomic_t *val)
 {
         return atomic_add(val, 1);
 }
 
-static inline long atomic_predec(atomic_t *val)
+static inline atomic_count_t atomic_predec(atomic_t *val)
 {
         return atomic_add(val, -1) - 1;
 }
 
-static inline long atomic_postdec(atomic_t *val)
+static inline atomic_count_t atomic_postdec(atomic_t *val)
 {
         return atomic_add(val, -1);
  • uspace/lib/libc/arch/sparc64/include/types.h

 
 typedef uint64_t uintptr_t;
+typedef uint64_t atomic_count_t;
+typedef int64_t atomic_signed_t;
 
 #endif
  • uspace/lib/libc/generic/futex.c

 int futex_down(futex_t *futex)
 {
-        if (atomic_predec(futex) < 0)
+        if ((atomic_signed_t) atomic_predec(futex) < 0)
                 return __SYSCALL1(SYS_FUTEX_SLEEP, (sysarg_t) &futex->count);
 
…
 int futex_up(futex_t *futex)
 {
-        if (atomic_postinc(futex) < 0)
+        if ((atomic_signed_t) atomic_postinc(futex) < 0)
                 return __SYSCALL1(SYS_FUTEX_WAKEUP, (sysarg_t) &futex->count);
 
  • uspace/lib/libc/include/atomicdflt.h

 
 #ifndef LIBC_ARCH_ATOMIC_H_
-#error This file cannot be included directly, include atomic.h instead.
+        #error This file cannot be included directly, include atomic.h instead.
 #endif
 
+#include <stdint.h>
 #include <bool.h>
 
 typedef struct atomic {
-        volatile long count;
+        volatile atomic_count_t count;
 } atomic_t;
 
-static inline void atomic_set(atomic_t *val, long i)
+static inline void atomic_set(atomic_t *val, atomic_count_t i)
 {
-        val->count = i;
+        val->count = i;
 }
 
-static inline long atomic_get(atomic_t *val)
+static inline atomic_count_t atomic_get(atomic_t *val)
 {
-        return val->count;
+        return val->count;
 }
 
 #ifndef CAS
-static inline bool cas(atomic_t *val, long ov, long nv)
+static inline bool cas(atomic_t *val, atomic_count_t ov, atomic_count_t nv)
 {
         return __sync_bool_compare_and_swap(&val->count, ov, nv);