Fork us on GitHub Follow us on Facebook Follow us on Twitter

Changeset f1c7755 in mainline


Ignore:
Timestamp:
2012-08-05T00:12:33Z (9 years ago)
Author:
Adam Hraska <adam.hraska+hos@…>
Branches:
lfn, master
Children:
b17518e
Parents:
6eaed07
Message:

Added atomic_swap_ptr() for ia32, amd64.

Location:
kernel
Files:
3 edited

Legend:

Unmodified
Added
Removed
  • kernel/arch/amd64/include/atomic.h

    r6eaed07 rf1c7755  
    178178}
    179179
     180/** Atomically sets *pptr to new_val and returns the previous value. */
     181NO_TRACE static inline void * atomic_swap_ptr(void **pptr, void *new_val)
     182{
     183        void *new_in_old_out = new_val;
     184       
        /*
         * xchg with a memory operand always implies lock prefix semantics
         * on x86, so the swap is atomic across CPUs without an explicit
         * lock prefix.
         * NOTE(review): the asm has no "memory" clobber, so it is not a
         * full compiler barrier — confirm callers do not rely on ordering
         * of unrelated memory accesses around the swap.
         */
     185        asm volatile (
     186                "xchgq %[val], %[pptr]\n"
     187                : [val] "+r" (new_in_old_out),
     188                  [pptr] "+m" (*pptr)
     189        );
     190       
     191        return new_in_old_out;
     192}
     193
     194/** Sets *pptr to new_val and returns the previous value. NOT SMP safe.
     195 *
     196 * This function is only atomic with respect to local interrupts and it is
     197 * NOT atomic with respect to other cpus.
     198 */
     199NO_TRACE static inline void * atomic_swap_ptr_local(void **pptr, void *new_val)
     200{
     201        /*
     202         * Issuing a xchg instruction always implies lock prefix semantics.
     203         * Therefore, it is cheaper to use a cmpxchg without a lock prefix
     204         * in a loop.
     205         */
     206        void *exp_val;
     207        void *old_val;
     208       
        /* Retry until the CAS succeeds, i.e. until no local interrupt
         * changed *pptr between the read and the cmpxchg. */
     209        do {
     210                exp_val = *pptr;
     211                old_val = atomic_cas_ptr_local(pptr, exp_val, new_val);
     212        } while (old_val != exp_val);
     213       
     214        return old_val;
     215}
     216
     217
    180218#undef _atomic_cas_ptr_impl
    181219
  • kernel/arch/ia32/include/atomic.h

    r6eaed07 rf1c7755  
    181181}
    182182
     183
     184/** Atomically sets *pptr to new_val and returns the previous value. */
     185NO_TRACE static inline void * atomic_swap_ptr(void **pptr, void *new_val)
     186{
     187        void *new_in_old_out = new_val;
     188       
        /*
         * xchg with a memory operand always implies lock prefix semantics
         * on x86, so the swap is atomic across CPUs without an explicit
         * lock prefix.
         * NOTE(review): the asm has no "memory" clobber, so it is not a
         * full compiler barrier — confirm callers do not rely on ordering
         * of unrelated memory accesses around the swap.
         */
     189        asm volatile (
     190                "xchgl %[val], %[pptr]\n"
     191                : [val] "+r" (new_in_old_out),
     192                  [pptr] "+m" (*pptr)
     193        );
     194       
     195        return new_in_old_out;
     196}
     197
     198/** Sets *pptr to new_val and returns the previous value. NOT SMP safe.
     199 *
     200 * This function is only atomic with respect to local interrupts and it is
     201 * NOT atomic with respect to other cpus.
     202 */
     203NO_TRACE static inline void * atomic_swap_ptr_local(void **pptr, void *new_val)
     204{
     205        /*
     206         * Issuing a xchg instruction always implies lock prefix semantics.
     207         * Therefore, it is cheaper to use a cmpxchg without a lock prefix
     208         * in a loop.
     209         */
     210        void *exp_val;
     211        void *old_val;
     212       
        /* Retry until the CAS succeeds, i.e. until no local interrupt
         * changed *pptr between the read and the cmpxchg. */
     213        do {
     214                exp_val = *pptr;
     215                old_val = atomic_cas_ptr_local(pptr, exp_val, new_val);
     216        } while (old_val != exp_val);
     217       
     218        return old_val;
     219}
     220
    183221#undef _atomic_cas_ptr_impl
    184222
  • kernel/test/atomic/atomic1.c

    r6eaed07 rf1c7755  
    7171                return "Failed atomic_cas_ptr(): changed the ptr";
    7272       
     73        ptr = 0;
     74        if (atomic_swap_ptr(&ptr, a_ptr) != 0)
     75                return "Failed atomic_swap_ptr()";
     76        if (atomic_swap_ptr_local(&ptr, 0) != a_ptr || ptr != 0)
     77                return "Failed atomic_swap_ptr_local()";
     78       
    7379        return NULL;
    7480}
Note: See TracChangeset for help on using the changeset viewer.