Changeset 2708f6a in mainline for kernel/arch/amd64/include/atomic.h


Timestamp:
2012-11-07T10:59:34Z
Author:
Adam Hraska <adam.hraska+hos@…>
Branches:
lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
Children:
cc106e4
Parents:
c8fccf5
Message:

Removed ia32 and amd64 specific atomic compare-and-swap operations (use compiler builtins instead).
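
For context on the message above: the removed hand-written cmpxchg maps directly onto the compiler builtins it refers to. A minimal sketch, assuming GCC's __sync builtins (available to toolchains of this era); the wrapper name mirrors the removed atomic_cas_ptr and is illustrative, not necessarily the exact mainline replacement:

    /* Full-barrier compare-and-swap: returns the value *pptr held before
     * the operation, matching the removed "lock cmpxchgq" wrapper. */
    static inline void *atomic_cas_ptr(void **pptr, void *exp_val, void *new_val)
    {
            return __sync_val_compare_and_swap(pptr, exp_val, new_val);
    }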

File:
1 edited

  • kernel/arch/amd64/include/atomic.h

    rc8fccf5 r2708f6a  
    141 141
    142 142
    143 #define _atomic_cas_impl(pptr, exp_val, new_val, old_val, prefix) \
    144         asm volatile ( \
    145                 prefix " cmpxchgq %[newval], %[ptr]\n" \
    146                 : /* Output operands. */ \
    147                 /* Old/current value is returned in rax. */ \
    148                 [oldval] "=a" (old_val), \
    149                 /* (*ptr) will be read and written to, hence "+" */ \
    150                 [ptr] "+m" (*pptr) \
    151                 : /* Input operands. */ \
    152                 /* Expected value must be in rax. */ \
    153                 [expval] "a" (exp_val), \
    154                 /* The new value may be in any register. */ \
    155                 [newval] "r" (new_val) \
    156                 : "memory" \
    157         )
    158        
    159 /** Atomically compares and swaps the pointer at pptr. */
    160 NO_TRACE static inline void * atomic_cas_ptr(void **pptr,
    161         void *exp_val, void *new_val)
    162 {
    163         void *old_val;
    164         _atomic_cas_impl(pptr, exp_val, new_val, old_val, "lock\n");
    165         return old_val;
    166 }
    167 
    168 /** Compare-and-swap of a pointer that is atomic with respect to the local cpu's interrupts.
    169  *
    170  * This function is NOT smp safe and is not atomic with respect to other cpus.
    171  */
    172 NO_TRACE static inline void * atomic_cas_ptr_local(void **pptr,
    173         void *exp_val, void *new_val)
    174 {
    175         void *old_val;
    176         _atomic_cas_impl(pptr, exp_val, new_val, old_val, "");
    177         return old_val;
    178 }
    179 
    180 
    181 #define _atomic_swap_impl(pptr, new_val) \
    182 ({ \
    183         typeof(*(pptr)) new_in_old_out = new_val; \
    184         asm volatile ( \
    185                 "xchgq %[val], %[p_ptr]\n" \
    186                 : [val] "+r" (new_in_old_out), \
    187                   [p_ptr] "+m" (*pptr) \
    188         ); \
    189         \
    190         new_in_old_out; \
    191 })
    192 
    193 /*
    194  * Issuing an xchg instruction always implies lock prefix semantics.
    195  * Therefore, it is cheaper to use a cmpxchg without a lock prefix
    196  * in a loop.
    197  */
    198 #define _atomic_swap_local_impl(pptr, new_val) \
    199 ({ \
    200         typeof(*(pptr)) exp_val; \
    201         typeof(*(pptr)) old_val; \
    202         \
    203         do { \
    204                 exp_val = *pptr; \
    205                 _atomic_cas_impl(pptr, exp_val, new_val, old_val, ""); \
    206         } while (old_val != exp_val); \
    207         \
    208         old_val; \
    209 })
    210 
    211 
    212 /** Atomically sets *ptr to val and returns the previous value. */
    213 NO_TRACE static inline void * atomic_set_return_ptr(void **pptr, void *val)
    214 {
    215         return _atomic_swap_impl(pptr, val);
    216 }
    217 
    218 /** Sets *ptr to new_val and returns the previous value. NOT smp safe.
    219  *
    220  * This function is only atomic with respect to local interrupts and is
    221  * NOT atomic with respect to other cpus.
    222  */
    223 NO_TRACE static inline void * atomic_set_return_ptr_local(
    224         void **pptr, void *new_val)
    225 {
    226         return _atomic_swap_local_impl(pptr, new_val);
    227 }
    228 
    229 /** Atomically sets *ptr to val and returns the previous value. */
    230 NO_TRACE static inline native_t atomic_set_return_native_t(
    231         native_t *p, native_t val)
    232 {
    233         return _atomic_swap_impl(p, val);
    234 }
    235 
    236 /** Sets *ptr to new_val and returns the previous value. NOT smp safe.
    237  *
    238  * This function is only atomic with respect to local interrupts and is
    239  * NOT atomic with respect to other cpus.
    240  */
    241 NO_TRACE static inline native_t atomic_set_return_native_t_local(
    242         native_t *p, native_t new_val)
    243 {
    244         return _atomic_swap_local_impl(p, new_val);
    245 }
    246 
    247 
    248 #undef _atomic_cas_impl
    249 #undef _atomic_swap_impl
    250 #undef _atomic_swap_local_impl
    251 
    252 143 #endif
    253 144
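
The removed xchgq-based swap has a builtin counterpart as well. A minimal sketch, assuming the __atomic builtins of GCC 4.7 and later; as above, the wrapper name mirrors the removed function and is illustrative only:

    /* Atomic exchange: stores val into *pptr and returns the previous value.
     * xchg carries implicit lock semantics, so this is smp safe. */
    static inline void *atomic_set_return_ptr(void **pptr, void *val)
    {
            return __atomic_exchange_n(pptr, val, __ATOMIC_SEQ_CST);
    }

Note that the builtins always emit the locked instruction forms, so the lock-prefix-free _local variants (atomic only with respect to the local cpu's interrupts) have no direct builtin equivalent.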