Changeset 2708f6a in mainline


Timestamp:
2012-11-07T10:59:34Z
Author:
Adam Hraska <adam.hraska+hos@…>
Branches:
lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
Children:
cc106e4
Parents:
c8fccf5
Message:

Removed ia32 and amd64 specific atomic compare-and-swap operations (use compiler builtins instead).

Location:
kernel
Files:
3 edited
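
The commit message points to compiler builtins as the replacement for the removed ia32/amd64 inline assembly. Below is a minimal sketch of what a builtin-based pointer CAS with the same contract as the removed atomic_cas_ptr() could look like, assuming the GCC/Clang __atomic builtin family; the name example_cas_ptr is hypothetical, and the builtin family actually adopted by the kernel is not shown in this changeset:

    #include <stdbool.h>

    /* Illustrative sketch only: same contract as the removed atomic_cas_ptr():
     * atomically replace *pptr with new_val if it currently equals exp_val,
     * and return the value *pptr held before the call. */
    static inline void *example_cas_ptr(void **pptr, void *exp_val, void *new_val)
    {
            /* On failure the builtin writes the currently stored value back
             * into exp_val; on success exp_val already equals the old value,
             * so exp_val is the previous contents of *pptr either way. */
            __atomic_compare_exchange_n(pptr, &exp_val, new_val,
                false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
            return exp_val;
    }

On ia32/amd64 such a call lowers to the same lock cmpxchg the removed macro emitted, while the generic form also covers the kernel's other architectures.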

  • kernel/arch/amd64/include/atomic.h

    rc8fccf5 r2708f6a

    -#define _atomic_cas_impl(pptr, exp_val, new_val, old_val, prefix) \
    -        asm volatile ( \
    -                prefix " cmpxchgq %[newval], %[ptr]\n" \
    -                : /* Output operands. */ \
    -                /* Old/current value is returned in eax. */ \
    -                [oldval] "=a" (old_val), \
    -                /* (*ptr) will be read and written to, hence "+" */ \
    -                [ptr] "+m" (*pptr) \
    -                : /* Input operands. */ \
    -                /* Expected value must be in eax. */ \
    -                [expval] "a" (exp_val), \
    -                /* The new value may be in any register. */ \
    -                [newval] "r" (new_val) \
    -                : "memory" \
    -        )
    -
    -/** Atomically compares and swaps the pointer at pptr. */
    -NO_TRACE static inline void * atomic_cas_ptr(void **pptr,
    -        void *exp_val, void *new_val)
    -{
    -        void *old_val;
    -        _atomic_cas_impl(pptr, exp_val, new_val, old_val, "lock\n");
    -        return old_val;
    -}
    -
    -/** Compare-and-swap of a pointer that is atomic wrt to local cpu's interrupts.
    - *
    - * This function is NOT smp safe and is not atomic with respect to other cpus.
    - */
    -NO_TRACE static inline void * atomic_cas_ptr_local(void **pptr,
    -        void *exp_val, void *new_val)
    -{
    -        void *old_val;
    -        _atomic_cas_impl(pptr, exp_val, new_val, old_val, "");
    -        return old_val;
    -}
    -
    -
    -#define _atomic_swap_impl(pptr, new_val) \
    -({ \
    -        typeof(*(pptr)) new_in_old_out = new_val; \
    -        asm volatile ( \
    -                "xchgq %[val], %[p_ptr]\n" \
    -                : [val] "+r" (new_in_old_out), \
    -                  [p_ptr] "+m" (*pptr) \
    -        ); \
    -        \
    -        new_in_old_out; \
    -})
    -
    -/*
    - * Issuing a xchg instruction always implies lock prefix semantics.
    - * Therefore, it is cheaper to use a cmpxchg without a lock prefix
    - * in a loop.
    - */
    -#define _atomic_swap_local_impl(pptr, new_val) \
    -({ \
    -        typeof(*(pptr)) exp_val; \
    -        typeof(*(pptr)) old_val; \
    -        \
    -        do { \
    -                exp_val = *pptr; \
    -                _atomic_cas_impl(pptr, exp_val, new_val, old_val, ""); \
    -        } while (old_val != exp_val); \
    -        \
    -        old_val; \
    -})
    -
    -
    -/** Atomicaly sets *ptr to val and returns the previous value. */
    -NO_TRACE static inline void * atomic_set_return_ptr(void **pptr, void *val)
    -{
    -        return _atomic_swap_impl(pptr, val);
    -}
    -
    -/** Sets *ptr to new_val and returns the previous value. NOT smp safe.
    - *
    - * This function is only atomic wrt to local interrupts and it is
    - * NOT atomic wrt to other cpus.
    - */
    -NO_TRACE static inline void * atomic_set_return_ptr_local(
    -        void **pptr, void *new_val)
    -{
    -        return _atomic_swap_local_impl(pptr, new_val);
    -}
    -
    -/** Atomicaly sets *ptr to val and returns the previous value. */
    -NO_TRACE static inline native_t atomic_set_return_native_t(
    -        native_t *p, native_t val)
    -{
    -        return _atomic_swap_impl(p, val);
    -}
    -
    -/** Sets *ptr to new_val and returns the previous value. NOT smp safe.
    - *
    - * This function is only atomic wrt to local interrupts and it is
    - * NOT atomic wrt to other cpus.
    - */
    -NO_TRACE static inline native_t atomic_set_return_native_t_local(
    -        native_t *p, native_t new_val)
    -{
    -        return _atomic_swap_local_impl(p, new_val);
    -}
    -
    -
    -#undef _atomic_cas_ptr_impl
    -#undef _atomic_swap_impl
    -#undef _atomic_swap_local_impl
    -
     #endif
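
The removed xchgq-based _atomic_swap_impl (behind atomic_set_return_ptr() and atomic_set_return_native_t()) has an equally direct builtin counterpart. Again only a sketch under the same assumptions; example_set_return_ptr is a hypothetical name, not the actual replacement:

    /* Illustrative sketch only: atomically store new_val into *pptr and return
     * the previous contents, mirroring the removed xchgq-based wrapper. */
    static inline void *example_set_return_ptr(void **pptr, void *new_val)
    {
            return __atomic_exchange_n(pptr, new_val, __ATOMIC_SEQ_CST);
    }

On amd64 this builtin compiles to the same xchgq instruction, which is implicitly locked.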
  • kernel/arch/ia32/include/atomic.h

    rc8fccf5 r2708f6a

     }
    -
    -#define _atomic_cas_impl(pptr, exp_val, new_val, old_val, prefix) \
    -        asm volatile ( \
    -                prefix " cmpxchgl %[newval], %[ptr]\n" \
    -                : /* Output operands. */ \
    -                /* Old/current value is returned in eax. */ \
    -                [oldval] "=a" (old_val), \
    -                /* (*ptr) will be read and written to, hence "+" */ \
    -                [ptr] "+m" (*pptr) \
    -                : /* Input operands. */ \
    -                /* Expected value must be in eax. */ \
    -                [expval] "a" (exp_val), \
    -                /* The new value may be in any register. */ \
    -                [newval] "r" (new_val) \
    -                : "memory" \
    -        )
    -
    -/** Atomically compares and swaps the pointer at pptr. */
    -NO_TRACE static inline void * atomic_cas_ptr(void **pptr,
    -        void *exp_val, void *new_val)
    -{
    -        void *old_val;
    -        _atomic_cas_impl(pptr, exp_val, new_val, old_val, "lock\n");
    -        return old_val;
    -}
    -
    -/** Compare-and-swap of a pointer that is atomic wrt to local cpu's interrupts.
    - *
    - * This function is NOT smp safe and is not atomic with respect to other cpus.
    - */
    -NO_TRACE static inline void * atomic_cas_ptr_local(void **pptr,
    -        void *exp_val, void *new_val)
    -{
    -        void *old_val;
    -        _atomic_cas_impl(pptr, exp_val, new_val, old_val, "");
    -        return old_val;
    -}
    -
    -
    -#define _atomic_swap_impl(pptr, new_val) \
    -({ \
    -        typeof(*(pptr)) new_in_old_out = new_val; \
    -        asm volatile ( \
    -                "xchgl %[val], %[p_ptr]\n" \
    -                : [val] "+r" (new_in_old_out), \
    -                  [p_ptr] "+m" (*pptr) \
    -        ); \
    -        \
    -        new_in_old_out; \
    -})
    -
    -/*
    - * Issuing a xchg instruction always implies lock prefix semantics.
    - * Therefore, it is cheaper to use a cmpxchg without a lock prefix
    - * in a loop.
    - */
    -#define _atomic_swap_local_impl(pptr, new_val) \
    -({ \
    -        typeof(*(pptr)) exp_val; \
    -        typeof(*(pptr)) old_val; \
    -        \
    -        do { \
    -                exp_val = *pptr; \
    -                _atomic_cas_impl(pptr, exp_val, new_val, old_val, ""); \
    -        } while (old_val != exp_val); \
    -        \
    -        old_val; \
    -})
    -
    -
    -/** Atomicaly sets *ptr to val and returns the previous value. */
    -NO_TRACE static inline void * atomic_set_return_ptr(void **pptr, void *val)
    -{
    -        return _atomic_swap_impl(pptr, val);
    -}
    -
    -/** Sets *ptr to new_val and returns the previous value. NOT smp safe.
    - *
    - * This function is only atomic wrt to local interrupts and it is
    - * NOT atomic wrt to other cpus.
    - */
    -NO_TRACE static inline void * atomic_set_return_ptr_local(
    -        void **pptr, void *new_val)
    -{
    -        return _atomic_swap_local_impl(pptr, new_val);
    -}
    -
    -/** Atomicaly sets *ptr to val and returns the previous value. */
    -NO_TRACE static inline native_t atomic_set_return_native_t(
    -        native_t *p, native_t val)
    -{
    -        return _atomic_swap_impl(p, val);
    -}
    -
    -/** Sets *ptr to new_val and returns the previous value. NOT smp safe.
    - *
    - * This function is only atomic wrt to local interrupts and it is
    - * NOT atomic wrt to other cpus.
    - */
    -NO_TRACE static inline native_t atomic_set_return_native_t_local(
    -        native_t *p, native_t new_val)
    -{
    -        return _atomic_swap_local_impl(p, new_val);
    -}
    -
    -
    -#undef _atomic_cas_ptr_impl
    -#undef _atomic_swap_impl
    -#undef _atomic_swap_local_impl
    -
     #endif
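
The comment deleted from both headers explains why the *_local swap loops over a cmpxchg without a lock prefix rather than using xchg: xchg always implies lock semantics. A sketch of that loop's control flow in plain C follows; the portable builtins cannot request an unlocked cmpxchg, so this mirrors only the structure of the removed _atomic_swap_local_impl, not its exact instruction selection, and example_swap_local is again a hypothetical name:

    #include <stdbool.h>

    /* Illustrative sketch only: retry the compare-and-swap until it succeeds,
     * i.e. until nothing (such as an interrupt handler on the same cpu) changed
     * *pptr between the read and the CAS; the old value is then returned. */
    static inline void *example_swap_local(void **pptr, void *new_val)
    {
            void *exp_val;

            do {
                    exp_val = *pptr;
            } while (!__atomic_compare_exchange_n(pptr, &exp_val, new_val,
                false, __ATOMIC_RELAXED, __ATOMIC_RELAXED));

            return exp_val;
    }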
  • kernel/test/atomic/atomic1.c

    rc8fccf5 r2708f6a

                     return "Failed atomic_get() after atomic_predec()";
    -
    -        void *ptr = 0;
    -        void *a_ptr = &a;
    -        if (atomic_cas_ptr(&ptr, 0, a_ptr) != 0)
    -                return "Failed atomic_cas_ptr(): bad return value";
    -        if (ptr != a_ptr)
    -                return "Failed atomic_cas_ptr(): bad pointer value";
    -        if (atomic_cas_ptr(&ptr, 0, 0) != a_ptr)
    -                return "Failed atomic_cas_ptr(): indicated change";
    -        if (ptr != a_ptr)
    -                return "Failed atomic_cas_ptr(): changed the ptr";
    -
    -        ptr = 0;
    -        if (atomic_set_return_ptr(&ptr, a_ptr) != 0)
    -                return "Failed atomic_set_return_ptr()";
    -        if (atomic_set_return_ptr_local(&ptr, 0) != a_ptr || ptr != 0)
    -                return "Failed atomic_set_return_ptr_local()";
    -
             return NULL;
     }