Changeset b17518e in mainline for kernel/arch/amd64/include/atomic.h


Timestamp:
2012-08-05T01:18:21Z (13 years ago)
Author:
Adam Hraska <adam.hraska+hos@…>
Branches:
lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
Children:
bc216a0
Parents:
f1c7755
Message:

Renamed atomic_swap_* to atomic_set_return_* and added a local CPU native_t variant.

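At call sites the rename is mechanical: code that used atomic_swap_ptr() now calls atomic_set_return_ptr() with the same arguments and semantics. A minimal sketch of a hypothetical caller, assuming the header is reachable as <arch/atomic.h>; the publish_buffer() wrapper and its names are illustrative, not part of the changeset:

#include <arch/atomic.h>        /* assumed include path for this header */

/* Hypothetical helper: install a new buffer and hand back the old one. */
static void *publish_buffer(void **shared_buf, void *new_buf)
{
        /* Formerly: atomic_swap_ptr(shared_buf, new_buf); */
        return atomic_set_return_ptr(shared_buf, new_buf);
}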
File:
1 edited

  • kernel/arch/amd64/include/atomic.h

--- kernel/arch/amd64/include/atomic.h (revision f1c7755)
+++ kernel/arch/amd64/include/atomic.h (revision b17518e)
@@ -141,5 +141,5 @@
 
 
-#define _atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, prefix) \
+#define _atomic_cas_impl(pptr, exp_val, new_val, old_val, prefix) \
         asm volatile ( \
                 prefix " cmpxchgq %[newval], %[ptr]\n" \
@@ -162,5 +162,5 @@
 {
         void *old_val;
-        _atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, "lock\n");
+        _atomic_cas_impl(pptr, exp_val, new_val, old_val, "lock\n");
         return old_val;
 }
@@ -174,20 +174,44 @@
 {
         void *old_val;
-        _atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, "");
+        _atomic_cas_impl(pptr, exp_val, new_val, old_val, "");
         return old_val;
 }
 
-/** Atomicaly sets *ptr to new_val and returns the previous value. */
-NO_TRACE static inline void * atomic_swap_ptr(void **pptr, void *new_val)
-{
-        void *new_in_old_out = new_val;
-
-        asm volatile (
-                "xchgq %[val], %[pptr]\n"
-                : [val] "+r" (new_in_old_out),
-                  [pptr] "+m" (*pptr)
-        );
-
-        return new_in_old_out;
+
+#define _atomic_swap_impl(pptr, new_val) \
+({ \
+        typeof(*(pptr)) new_in_old_out = new_val; \
+        asm volatile ( \
+                "xchgq %[val], %[p_ptr]\n" \
+                : [val] "+r" (new_in_old_out), \
+                  [p_ptr] "+m" (*pptr) \
+        ); \
+        \
+        new_in_old_out; \
+})
+
+/*
+ * Issuing a xchg instruction always implies lock prefix semantics.
+ * Therefore, it is cheaper to use a cmpxchg without a lock prefix
+ * in a loop.
+ */
+#define _atomic_swap_local_impl(pptr, new_val) \
+({ \
+        typeof(*(pptr)) exp_val; \
+        typeof(*(pptr)) old_val; \
+        \
+        do { \
+                exp_val = *pptr; \
+                _atomic_cas_impl(pptr, exp_val, new_val, old_val, ""); \
+        } while (old_val != exp_val); \
+        \
+        old_val; \
+})
+
+
+/** Atomicaly sets *ptr to val and returns the previous value. */
+NO_TRACE static inline void * atomic_set_return_ptr(void **pptr, void *val)
+{
+        return _atomic_swap_impl(pptr, val);
 }
 
@@ -197,25 +221,32 @@
  * NOT atomic wrt to other cpus.
  */
-NO_TRACE static inline void * atomic_swap_ptr_local(void **pptr, void *new_val)
-{
-        /*
-         * Issuing a xchg instruction always implies lock prefix semantics.
-         * Therefore, it is cheaper to use a cmpxchg without a lock prefix
-         * in a loop.
-         */
-        void *exp_val;
-        void *old_val;
-
-        do {
-                exp_val = *pptr;
-                old_val = atomic_cas_ptr_local(pptr, exp_val, new_val);
-        } while (old_val != exp_val);
-
-        return old_val;
+NO_TRACE static inline void * atomic_set_return_ptr_local(
+        void **pptr, void *new_val)
+{
+        return _atomic_swap_local_impl(pptr, new_val);
+}
+
+/** Atomicaly sets *ptr to val and returns the previous value. */
+NO_TRACE static inline native_t atomic_set_return_native_t(
+        native_t *p, native_t val)
+{
+        return _atomic_swap_impl(p, val);
+}
+
+/** Sets *ptr to new_val and returns the previous value. NOT smp safe.
+ *
+ * This function is only atomic wrt to local interrupts and it is
+ * NOT atomic wrt to other cpus.
+ */
+NO_TRACE static inline native_t atomic_set_return_native_t_local(
+        native_t *p, native_t new_val)
+{
+        return _atomic_swap_local_impl(p, new_val);
 }
 
 
 #undef _atomic_cas_ptr_impl
-
+#undef _atomic_swap_impl
+#undef _atomic_swap_local_impl
 
 #endif
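The comment carried over into _atomic_swap_local_impl explains the design: a xchg on memory always behaves as if it had a lock prefix, so for data that only the local CPU touches it is cheaper to loop on an unlocked cmpxchg. A minimal sketch of how the two new native_t variants might be used, assuming the header is reachable as <arch/atomic.h>; the variable names and the per-CPU framing are illustrative, not part of the changeset:

#include <arch/atomic.h>        /* assumed include path for this header */

static native_t global_flags;   /* shared across CPUs */
static native_t local_events;   /* stand-in for a per-CPU counter */

/* Cross-CPU swap: backed by xchgq, which is implicitly locked. */
static native_t reset_global_flags(void)
{
        return atomic_set_return_native_t(&global_flags, 0);
}

/*
 * CPU-local swap: an unlocked cmpxchgq loop, cheaper than xchgq because
 * it avoids the implicit bus lock. Only safe when no other CPU writes
 * local_events; it stays atomic with respect to local interrupts.
 */
static native_t reset_local_events(void)
{
        return atomic_set_return_native_t_local(&local_events, 0);
}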