#ifndef __ia64_ASM_H__
#define __ia64_ASM_H__

#include <config.h>
#include <arch/types.h>
#include <arch/register.h>

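/** Return base address of current stack.
 *
 * The stack is assumed to be STACK_SIZE bytes long and
 * aligned on a STACK_SIZE boundary (STACK_SIZE being a
 * power of two), so the base is obtained by masking the
 * stack pointer (r12).
 */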
static inline __address get_stack_base(void)
{
	__u64 v;

	__asm__ volatile ("and %0 = %1, r12" : "=r" (v) : "r" (~(STACK_SIZE-1)));

	return v;
}

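/** Read PSR (Processor Status Register).
 *
 * @return Current PSR value.
 */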
static inline __u64 psr_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = psr\n" : "=r" (v));

	return v;
}

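/** Read IVA (Interruption Vector Address).
 *
 * @return Base address of the Interruption Vector Table.
 */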
static inline __u64 iva_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = cr.iva\n" : "=r" (v));

	return v;
}

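/** Write IVA (Interruption Vector Address) register.
 *
 * @param v New base address of the Interruption Vector Table.
 */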
static inline void iva_write(__u64 v)
{
	__asm__ volatile ("mov cr.iva = %0\n" : : "r" (v));
}

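/** Read IVR (External Interrupt Vector Register).
 *
 * @return Vector of the highest-priority pending external interrupt.
 */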
static inline __u64 ivr_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = cr.ivr\n" : "=r" (v));

	return v;
}

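/** Write ITC (Interval Time Counter) register.
 *
 * @param v New counter value.
 */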
static inline void itc_write(__u64 v)
{
	__asm__ volatile ("mov ar.itc = %0\n" : : "r" (v));
}

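/** Read ITC (Interval Time Counter) register.
 *
 * @return Current counter value.
 */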
static inline __u64 itc_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = ar.itc\n" : "=r" (v));

	return v;
}

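/** Write ITM (Interval Timer Match) register.
 *
 * A timer interrupt is raised when ar.itc reaches this value.
 *
 * @param v New match value.
 */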
static inline void itm_write(__u64 v)
{
	__asm__ volatile ("mov cr.itm = %0\n" : : "r" (v));
}

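/** Read ITM (Interval Timer Match) register.
 *
 * @return Current match value.
 */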
static inline __u64 itm_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = cr.itm\n" : "=r" (v));

	return v;
}

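/** Read ITV (Interval Timer Vector) register.
 *
 * @return Current vector and mask bit of the timer interrupt.
 */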
static inline __u64 itv_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = cr.itv\n" : "=r" (v));

	return v;
}

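/** Write ITV (Interval Timer Vector) register.
 *
 * @param v New vector and mask bit of the timer interrupt.
 */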
static inline void itv_write(__u64 v)
{
	__asm__ volatile ("mov cr.itv = %0\n" : : "r" (v));
}

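/** Write EOI (End Of Interrupt) register.
 *
 * Signals the end of servicing the current external interrupt.
 *
 * @param v Value to be written (the data itself is ignored
 *          by the processor, only the write matters).
 */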
static inline void eoi_write(__u64 v)
{
	__asm__ volatile ("mov cr.eoi = %0\n" : : "r" (v));
}

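/** Read TPR (Task Priority Register).
 *
 * @return Current TPR value, which masks external
 *         interrupts below a configured priority.
 */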
static inline __u64 tpr_read(void)
{
	__u64 v;

	__asm__ volatile ("mov %0 = cr.tpr\n" : "=r" (v));

	return v;
}

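/** Write TPR (Task Priority Register).
 *
 * @param v New TPR value.
 */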
static inline void tpr_write(__u64 v)
{
	__asm__ volatile ("mov cr.tpr = %0\n" : : "r" (v));
}

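/** Disable interrupts.
 *
 * Clear psr.i and return the original PSR value,
 * suitable for a later interrupts_restore().
 *
 * @return Old interrupt priority level (PSR value).
 */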
static inline ipl_t interrupts_disable(void)
{
	__u64 v;

	__asm__ volatile (
		"mov %0 = psr\n"
		"rsm %1\n"
		: "=r" (v)
		: "i" (PSR_I_MASK)
	);

	return (ipl_t) v;
}

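/** Enable interrupts.
 *
 * Set psr.i, serialize with srlz.d and return the
 * original PSR value.
 *
 * @return Old interrupt priority level (PSR value).
 */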
static inline ipl_t interrupts_enable(void)
{
	__u64 v;

	__asm__ volatile (
		"mov %0 = psr\n"
		"ssm %1\n"
		";;\n"
		"srlz.d\n"
		: "=r" (v)
		: "i" (PSR_I_MASK)
	);

	return (ipl_t) v;
}

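/** Restore interrupt priority level.
 *
 * @param ipl Saved interrupt state as returned by
 *            interrupts_disable() or interrupts_enable().
 */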
static inline void interrupts_restore(ipl_t ipl)
{
	if (ipl & PSR_I_MASK)
		(void) interrupts_enable();
	else
		(void) interrupts_disable();
}

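/** Read interrupt priority level.
 *
 * @return Current PSR value, without modifying it.
 */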
static inline ipl_t interrupts_read(void)
{
	return (ipl_t) psr_read();
}

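/** Disable protection key checking by clearing psr.pk. */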
static inline void pk_disable(void)
{
	__asm__ volatile ("rsm %0\n" : : "i" (PSR_PK_MASK));
}

extern void cpu_halt(void);
extern void cpu_sleep(void);
extern void asm_delay_loop(__u32 t);

extern void switch_to_userspace(__address entry, __address sp, __address bsp, __address uspace_uarg, __u64 ipsr, __u64 rsc);

#endif