Changeset e7b7be3f in mainline for kernel/arch/sparc64/include
- Timestamp: 2007-01-22T13:10:08Z (19 years ago)
- Branches: lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
- Children: 0f3fc9b
- Parents: 62c63fc
- Location: kernel/arch/sparc64/include
- Files: 3 edited
kernel/arch/sparc64/include/asm.h
(r62c63fc → re7b7be3f)

54  -  __asm__ volatile ("rdpr %%pstate, %0\n" : "=r" (v));
54  +  asm volatile ("rdpr %%pstate, %0\n" : "=r" (v));

65  -  __asm__ volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));
65  +  asm volatile ("wrpr %0, %1, %%pstate\n" : : "r" (v), "i" (0));

76  -  __asm__ volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));
76  +  asm volatile ("rd %%tick_cmpr, %0\n" : "=r" (v));

87  -  __asm__ volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));
87  +  asm volatile ("wr %0, %1, %%tick_cmpr\n" : : "r" (v), "i" (0));

98  -  __asm__ volatile ("rdpr %%tick, %0\n" : "=r" (v));
98  +  asm volatile ("rdpr %%tick, %0\n" : "=r" (v));

109 -  __asm__ volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));
109 +  asm volatile ("wrpr %0, %1, %%tick\n" : : "r" (v), "i" (0));

120 -  __asm__ volatile ("rd %%fprs, %0\n" : "=r" (v));
120 +  asm volatile ("rd %%fprs, %0\n" : "=r" (v));

131 -  __asm__ volatile ("wr %0, %1, %%fprs\n" : : "r" (v), "i" (0));
131 +  asm volatile ("wr %0, %1, %%fprs\n" : : "r" (v), "i" (0));

142 -  __asm__ volatile ("rd %%softint, %0\n" : "=r" (v));
142 +  asm volatile ("rd %%softint, %0\n" : "=r" (v));

153 -  __asm__ volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));
153 +  asm volatile ("wr %0, %1, %%softint\n" : : "r" (v), "i" (0));

164 -  __asm__ volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));
164 +  asm volatile ("wr %0, %1, %%clear_softint\n" : : "r" (v), "i" (0));

175 -  __asm__ volatile ("wr %0, %1, %%set_softint\n" : : "r" (v), "i" (0));
175 +  asm volatile ("wr %0, %1, %%set_softint\n" : : "r" (v), "i" (0));

250 -  __asm__ volatile ("add %%sp, %1, %0\n" : "=r" (unbiased_sp) : "i" (STACK_BIAS));
250 +  asm volatile ("add %%sp, %1, %0\n" : "=r" (unbiased_sp) : "i" (STACK_BIAS));

263 -  __asm__ volatile ("rdpr %%ver, %0\n" : "=r" (v));
263 +  asm volatile ("rdpr %%ver, %0\n" : "=r" (v));

276 -  __asm__ volatile ("rdpr %%tpc, %0\n" : "=r" (v));
276 +  asm volatile ("rdpr %%tpc, %0\n" : "=r" (v));

289 -  __asm__ volatile ("rdpr %%tl, %0\n" : "=r" (v));
289 +  asm volatile ("rdpr %%tl, %0\n" : "=r" (v));

302 -  __asm__ volatile ("rdpr %%tba, %0\n" : "=r" (v));
302 +  asm volatile ("rdpr %%tba, %0\n" : "=r" (v));

313 -  __asm__ volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));
313 +  asm volatile ("wrpr %0, %1, %%tba\n" : : "r" (v), "i" (0));

327 -  __asm__ volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" ((unsigned) asi));
327 +  asm volatile ("ldxa [%1] %2, %0\n" : "=r" (v) : "r" (va), "i" ((unsigned) asi));

340 -  __asm__ volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" ((unsigned) asi) : "memory");
340 +  asm volatile ("stxa %0, [%1] %2\n" : : "r" (v), "r" (va), "i" ((unsigned) asi) : "memory");

346 -  __asm__ volatile ("flushw\n");
346 +  asm volatile ("flushw\n");

352 -  __asm__ volatile ("wrpr %g0, 1, %tl\n");
352 +  asm volatile ("wrpr %g0, 1, %tl\n");

358 -  __asm__ volatile ("wrpr %g0, %g0, %tl\n");
358 +  asm volatile ("wrpr %g0, %g0, %tl\n");
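The change itself is purely syntactic: `asm` and `__asm__` are two spellings of the same GNU C inline-assembly keyword (`__asm__` also survives strict ISO modes such as -std=c99, while plain `asm` requires GNU extensions). As a minimal sketch of the accessor pattern used throughout asm.h (the function name is illustrative, not from the file):

    #include <stdint.h>

    /* Illustrative sketch, not code from the changeset: a SPARC v9 register
     * accessor written the same way as the ones in asm.h after this change.
     * rdpr copies the privileged %tick register into the output operand;
     * volatile keeps the compiler from caching or reordering the read. */
    static inline uint64_t tick_read_sketch(void)
    {
        uint64_t v;

        asm volatile ("rdpr %%tick, %0\n" : "=r" (v));

        return v;
    }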
kernel/arch/sparc64/include/atomic.h
(r62c63fc → re7b7be3f)

60  -  __asm__ volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *)x)), "+r" (b) : "r" (a));
60  +  asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *)x)), "+r" (b) : "r" (a));

101 -  __asm__ volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *) x)), "+r" (v) : "r" (0));
101 +  asm volatile ("casx %0, %2, %1\n" : "+m" (*((uint64_t *) x)), "+r" (v) : "r" (0));

113 -  __asm__ volatile (
113 +  asm volatile (
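The casx lines above sit inside compare-and-swap retry loops. A rough sketch of that pattern (hypothetical function name and simplified operand constraints, not the file's exact code):

    #include <stdint.h>

    /* Rough sketch of the casx retry pattern used in atomic.h. casx compares
     * the doubleword at [addr] with `expected`; on a match it stores `desired`
     * there, and in either case it returns the previous memory value in
     * `desired`. The loop therefore succeeds exactly when no other CPU
     * changed the word between the load and the casx. */
    static inline uint64_t atomic_add_sketch(volatile uint64_t *addr, uint64_t i)
    {
        uint64_t expected, desired;

        do {
            expected = *addr;
            desired = expected + i;
            asm volatile ("casx [%1], %2, %0\n"
                : "+r" (desired)
                : "r" (addr), "r" (expected)
                : "memory");
        } while (expected != desired);

        return expected;    /* value before the add, as in atomic.h */
    }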
kernel/arch/sparc64/include/barrier.h
(r62c63fc → re7b7be3f)

42  -  __asm__ volatile ( \
42  +  asm volatile ( \

47  -  __asm__ volatile ( \
47  +  asm volatile ( \

54  -  __asm__ volatile ("membar #LoadLoad | #StoreStore\n" ::: "memory")
54  +  asm volatile ("membar #LoadLoad | #StoreStore\n" ::: "memory")

56  -  __asm__ volatile ("membar #LoadLoad\n" ::: "memory")
56  +  asm volatile ("membar #LoadLoad\n" ::: "memory")

58  -  __asm__ volatile ("membar #StoreStore\n" ::: "memory")
58  +  asm volatile ("membar #StoreStore\n" ::: "memory")

73  -  __asm__ volatile ("flush %o7\n");
73  +  asm volatile ("flush %o7\n");

79  -  __asm__ volatile ("membar #Sync\n");
79  +  asm volatile ("membar #Sync\n");
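The membar macros above provide the ordering the rest of the kernel relies on under SPARC v9's relaxed memory model. The following usage sketch is hypothetical: the two macro bodies are copied from the post-change barrier.h, but the publish/consume pair around them is illustrative only.

    #include <stdint.h>

    /* Barrier macros as defined in barrier.h after this change. */
    #define write_barrier() \
        asm volatile ("membar #StoreStore\n" ::: "memory")
    #define read_barrier() \
        asm volatile ("membar #LoadLoad\n" ::: "memory")

    /* Hypothetical producer/consumer pair: #StoreStore keeps the payload
     * store visible before the flag store, and #LoadLoad keeps the flag
     * load ordered before the payload load on the reader's side. */
    static volatile uint64_t payload;
    static volatile uint64_t ready;

    static void publish(uint64_t value)
    {
        payload = value;
        write_barrier();    /* order payload before the ready flag */
        ready = 1;
    }

    static uint64_t consume(void)
    {
        while (!ready)
            ;               /* spin until the producer publishes */
        read_barrier();     /* order the flag load before the payload load */
        return payload;
    }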