Changeset 23684b7 in mainline for arch


Timestamp: 2006-03-22T17:21:15Z
Author: Jakub Jermar <jakub@…>
Branches: lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
Children: d71007e
Parents: 45fb65c
Message:

Define atomic_t only once in atomic.h
Change the encapsulated counter type to long so that it supports negative values as well.

Location: arch
Files: 9 edited
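
The consolidation means each arch/*/include/atomic.h below drops its private typedef and picks the shared one up through <typedefs.h>. As a minimal sketch (the exact header layout is an assumption, not shown in this diff), the single shared definition plausibly mirrors the per-arch atomic_set()/atomic_get() bodies removed below, with the counter widened to a signed long:

    /* Sketch only: where exactly this definition lives is an
       assumption, not part of this changeset. */
    typedef struct {
            volatile long count;    /* signed, so the counter can go negative */
    } atomic_t;

    static inline void atomic_set(atomic_t *val, long i)
    {
            val->count = i;
    }

    static inline long atomic_get(atomic_t *val)
    {
            return val->count;
    }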

Legend:

  ' ' Unmodified
  '+' Added
  '-' Removed
  '…' Elided context between hunks
  • arch/amd64/include/atomic.h

    r45fb65c → r23684b7

 #include <arch/barrier.h>
 #include <preemption.h>
-
-typedef struct { volatile __u64 count; } atomic_t;
-
-static inline void atomic_set(atomic_t *val, __u64 i)
-{
-        val->count = i;
-}
-
-static inline __u64 atomic_get(atomic_t *val)
-{
-        return val->count;
-}
+#include <typedefs.h>

 static inline void atomic_inc(atomic_t *val) {
…
 }

-static inline count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
-        count_t r;
+        long r;

         __asm__ volatile (
…
 }

-static inline count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
-        count_t r;
+        long r;

         __asm__ volatile (
…


-/** AMD64 specific fast spinlock */
+/** amd64 specific fast spinlock */
 static inline void atomic_lock_arch(atomic_t *val)
 {
…
                "mov %0, %1;"
                "testq %1, %1;"
-                "jnz 0b;"       /* Leightweight looping on locked spinlock */
+                "jnz 0b;"       /* Lightweight looping on locked spinlock */

                "incq %1;"      /* now use the atomic operation */
  • arch/amd64/src/proc/scheduler.c

    r45fb65c → r23684b7

 #include <arch/asm.h>
 #include <arch/debugger.h>
+#include <print.h>

-#include <print.h>
 void before_thread_runs_arch(void)
 {
  • arch/ia32/include/atomic.h

    r45fb65c → r23684b7

 #include <arch/barrier.h>
 #include <preemption.h>
-
-typedef struct { volatile __u32 count; } atomic_t;
-
-static inline void atomic_set(atomic_t *val, __u32 i)
-{
-        val->count = i;
-}
-
-static inline __u32 atomic_get(atomic_t *val)
-{
-        return val->count;
-}
+#include <typedefs.h>

 static inline void atomic_inc(atomic_t *val) {
…
 }

-static inline count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
-        count_t r;
+        long r;

         __asm__ volatile (
…
 }

-static inline count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
-        count_t r;
+        long r;

         __asm__ volatile (
…
 }

-/** Ia32 specific fast spinlock */
+/** ia32 specific fast spinlock */
 static inline void atomic_lock_arch(atomic_t *val)
 {
…
                "mov %0, %1;"
                "testl %1, %1;"
-                "jnz 0b;"       /* Leightweight looping on locked spinlock */
+                "jnz 0b;"       /* Lightweight looping on locked spinlock */

                "incl %1;"      /* now use the atomic operation */
  • arch/ia32/src/userspace.c

    r45fb65c → r23684b7

                 "pushl %4\n"
                 "movl %5, %%eax\n"
-                "iret"
+                "iret\n"
                 :
                 : "i" (selector(UDATA_DES) | PL_USER), "r" (kernel_uarg->uspace_stack+THREAD_STACK_SIZE),
  • arch/ia64/include/atomic.h

    r45fb65c → r23684b7

 #include <arch/types.h>
+#include <typedefs.h>

-typedef struct { volatile __u64 count; } atomic_t;

 /** Atomic addition.
…
  * @return Value before addition.
  */
-static inline count_t atomic_add(atomic_t *val, int imm)
+static inline long atomic_add(atomic_t *val, int imm)
 {
-        count_t v;
+        long v;

         __asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "+m" (val->count) : "i" (imm));
…
 }

-static inline void atomic_set(atomic_t *val, __u64 i)
-{
-        val->count = i;
-}
-
-static inline __u32 atomic_get(atomic_t *val)
-{
-        return val->count;
-}
-
 static inline void atomic_inc(atomic_t *val) { atomic_add(val, 1); }
 static inline void atomic_dec(atomic_t *val) { atomic_add(val, -1); }

-static inline count_t atomic_preinc(atomic_t *val) { return atomic_add(val, 1) + 1; }
-static inline count_t atomic_predec(atomic_t *val) { return atomic_add(val, -1) - 1; }
+static inline long atomic_preinc(atomic_t *val) { return atomic_add(val, 1) + 1; }
+static inline long atomic_predec(atomic_t *val) { return atomic_add(val, -1) - 1; }

-static inline count_t atomic_postinc(atomic_t *val) { return atomic_add(val, 1); }
-static inline count_t atomic_postdec(atomic_t *val) { return atomic_add(val, -1); }
+static inline long atomic_postinc(atomic_t *val) { return atomic_add(val, 1); }
+static inline long atomic_postdec(atomic_t *val) { return atomic_add(val, -1); }

 #endif
  • arch/ia64/src/mm/tlb.c

    r45fb65c → r23684b7

                 page_table_unlock(AS, true);
                 if (!as_page_fault(va)) {
-                        panic("%s: va=%P, rid=%d\n", __FUNCTION__, istate->cr_ifa, rr.map.rid);
+                        panic("%s: va=%P, rid=%d, iip=%P\n", __FUNCTION__, istate->cr_ifa, rr.map.rid, istate->cr_iip);
                 }
         }
  • arch/mips32/include/atomic.h

    r45fb65c → r23684b7

 #include <arch/types.h>
+#include <typedefs.h>

 #define atomic_inc(x)   ((void) atomic_add(x, 1))
…
 #define atomic_predec(x) atomic_add(x, -1)

-typedef struct { volatile __u32 count; } atomic_t;
-
 /* Atomic addition of immediate value.
  *
…
  * @return Value after addition.
  */
-static inline count_t atomic_add(atomic_t *val, int i)
+static inline long atomic_add(atomic_t *val, int i)
 {
-        count_t tmp, v;
+        long tmp, v;

         __asm__ volatile (
…
 }

-/* Reads/writes are atomic on mips for 4-bytes */
-
-static inline void atomic_set(atomic_t *val, __u32 i)
-{
-        val->count = i;
-}
-
-static inline __u32 atomic_get(atomic_t *val)
-{
-        return val->count;
-}
-
 #endif
  • arch/ppc32/include/atomic.h

    r45fb65c → r23684b7

 #include <arch/types.h>
-
-typedef struct { volatile __u32 count; } atomic_t;
+#include <typedefs.h>

 static inline void atomic_inc(atomic_t *val)
…
 }

-static inline __u32 atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
         atomic_inc(val);
…
 }

-static inline __u32 atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
         atomic_dec(val);
…
 }

-static inline __u32 atomic_preinc(atomic_t *val)
+static inline long atomic_preinc(atomic_t *val)
 {
         atomic_inc(val);
…
 }

-static inline __u32 atomic_predec(atomic_t *val)
+static inline long atomic_predec(atomic_t *val)
 {
         atomic_dec(val);
…
 }

-static inline void atomic_set(atomic_t *val, __u32 i)
-{
-        val->count = i;
-}
-
-static inline __u32 atomic_get(atomic_t *val)
-{
-        return val->count;
-}
-
 #endif
  • arch/sparc64/include/atomic.h

    r45fb65c → r23684b7

 #include <arch/types.h>
-
-typedef struct { volatile __u64 count; } atomic_t;
+#include <typedefs.h>

 /** Atomic add operation.
…
  * @return Value of the atomic variable as it existed before addition.
  */
-static inline count_t atomic_add(atomic_t *val, int i)
+static inline long atomic_add(atomic_t *val, int i)
 {
         __u64 a, b;
…
 }

-static inline count_t atomic_preinc(atomic_t *val)
+static inline long atomic_preinc(atomic_t *val)
 {
         return atomic_add(val, 1) + 1;
 }

-static inline count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
         return atomic_add(val, 1);
 }

-static inline count_t atomic_predec(atomic_t *val)
+static inline long atomic_predec(atomic_t *val)
 {
         return atomic_add(val, -1) - 1;
 }

-static inline count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
         return atomic_add(val, -1);
…
 }

-static inline void atomic_set(atomic_t *val, __u64 i)
-{
-        val->count = i;
-}
-
-static inline __u64 atomic_get(atomic_t *val)
-{
-        return val->count;
-}
-
 #endif
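
To illustrate why the move from the unsigned count_t/__u32/__u64 encodings to long matters, here is a hypothetical semaphore-style caller (none of it appears in this changeset): the count legitimately drops below zero when more threads contend than resources exist, which an unsigned counter cannot express.

    /* Hypothetical usage sketch, not code from this changeset. */
    static atomic_t sem;

    static void sem_init(void)
    {
            atomic_set(&sem, 1);            /* one resource available */
    }

    static void sem_down(void)
    {
            /* atomic_postdec() returns the value before the decrement;
             * a pre-value <= 0 means the caller must block. With an
             * unsigned counter the value would wrap around instead of
             * going negative, so this test could never be written. */
            if (atomic_postdec(&sem) <= 0) {
                    /* ... block the current thread ... */
            }
    }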