ia64/xen-unstable: tools/ioemu/ia64_intrinsic.h @ 10806:4c2a281cd1e7

[qemu] sync patches and add file missing from cset 10738:42aa63188a88

Signed-off-by: Christian Limpach <Christian.Limpach@xensource.com>
Author: chris@kneesaa.uk.xensource.com
Date:   Wed Jul 26 14:38:00 2006 +0100

#ifndef IA64_INTRINSIC_H
#define IA64_INTRINSIC_H

/*
 * Compiler-dependent Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 *
 */
extern long ia64_cmpxchg_called_with_bad_pointer (void);
extern void ia64_bad_param_for_getreg (void);

#define ia64_cmpxchg(sem, ptr, o, n, s) ({ \
        uint64_t _o, _r; \
        switch (s) { \
        case 1: _o = (uint8_t)(long)(o); break; \
        case 2: _o = (uint16_t)(long)(o); break; \
        case 4: _o = (uint32_t)(long)(o); break; \
        case 8: _o = (uint64_t)(long)(o); break; \
        default: break; \
        } \
        switch (s) { \
        case 1: \
                _r = ia64_cmpxchg1_##sem((uint8_t *)ptr, n, _o); break; \
        case 2: \
                _r = ia64_cmpxchg2_##sem((uint16_t *)ptr, n, _o); break; \
        case 4: \
                _r = ia64_cmpxchg4_##sem((uint32_t *)ptr, n, _o); break; \
        case 8: \
                _r = ia64_cmpxchg8_##sem((uint64_t *)ptr, n, _o); break; \
        default: \
                _r = ia64_cmpxchg_called_with_bad_pointer(); break; \
        } \
        (__typeof__(o)) _r; \
})

#define cmpxchg_acq(ptr, o, n) ia64_cmpxchg(acq, ptr, o, n, sizeof(*ptr))
#define cmpxchg_rel(ptr, o, n) ia64_cmpxchg(rel, ptr, o, n, sizeof(*ptr))
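/*
 * Usage sketch (not part of the original header): cmpxchg_acq() returns the
 * value that was in *ptr before the exchange, so comparing that against the
 * expected old value tells the caller whether the swap took effect.  The
 * try_lock() helper below is hypothetical and assumes the compiler-specific
 * ia64_cmpxchg*_acq definitions later in this header are in scope.
 */
static inline int try_lock(volatile uint32_t *lock)
{
        /* Atomically replace 0 with 1 under acquire semantics; seeing 0
         * back means the lock was free and is now ours. */
        return cmpxchg_acq(lock, 0, 1) == 0;
}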
/*
 * Register Names for getreg() and setreg().
 *
 * The "magic" numbers happen to match the values used by the Intel compiler's
 * getreg()/setreg() intrinsics.
 */

/* Special Registers */

#define _IA64_REG_IP            1016    /* getreg only */
#define _IA64_REG_PSR           1019
#define _IA64_REG_PSR_L         1019

/* General Integer Registers */

#define _IA64_REG_GP            1025    /* R1 */
#define _IA64_REG_R8            1032    /* R8 */
#define _IA64_REG_R9            1033    /* R9 */
#define _IA64_REG_SP            1036    /* R12 */
#define _IA64_REG_TP            1037    /* R13 */

/* Application Registers */

#define _IA64_REG_AR_KR0        3072
#define _IA64_REG_AR_KR1        3073
#define _IA64_REG_AR_KR2        3074
#define _IA64_REG_AR_KR3        3075
#define _IA64_REG_AR_KR4        3076
#define _IA64_REG_AR_KR5        3077
#define _IA64_REG_AR_KR6        3078
#define _IA64_REG_AR_KR7        3079
#define _IA64_REG_AR_RSC        3088
#define _IA64_REG_AR_BSP        3089
#define _IA64_REG_AR_BSPSTORE   3090
#define _IA64_REG_AR_RNAT       3091
#define _IA64_REG_AR_FCR        3093
#define _IA64_REG_AR_EFLAG      3096
#define _IA64_REG_AR_CSD        3097
#define _IA64_REG_AR_SSD        3098
#define _IA64_REG_AR_CFLAG      3099
#define _IA64_REG_AR_FSR        3100
#define _IA64_REG_AR_FIR        3101
#define _IA64_REG_AR_FDR        3102
#define _IA64_REG_AR_CCV        3104
#define _IA64_REG_AR_UNAT       3108
#define _IA64_REG_AR_FPSR       3112
#define _IA64_REG_AR_ITC        3116
#define _IA64_REG_AR_PFS        3136
#define _IA64_REG_AR_LC         3137
#define _IA64_REG_AR_EC         3138

/* Control Registers */

#define _IA64_REG_CR_DCR        4096
#define _IA64_REG_CR_ITM        4097
#define _IA64_REG_CR_IVA        4098
#define _IA64_REG_CR_PTA        4104
#define _IA64_REG_CR_IPSR       4112
#define _IA64_REG_CR_ISR        4113
#define _IA64_REG_CR_IIP        4115
#define _IA64_REG_CR_IFA        4116
#define _IA64_REG_CR_ITIR       4117
#define _IA64_REG_CR_IIPA       4118
#define _IA64_REG_CR_IFS        4119
#define _IA64_REG_CR_IIM        4120
#define _IA64_REG_CR_IHA        4121
#define _IA64_REG_CR_LID        4160
#define _IA64_REG_CR_IVR        4161    /* getreg only */
#define _IA64_REG_CR_TPR        4162
#define _IA64_REG_CR_EOI        4163
#define _IA64_REG_CR_IRR0       4164    /* getreg only */
#define _IA64_REG_CR_IRR1       4165    /* getreg only */
#define _IA64_REG_CR_IRR2       4166    /* getreg only */
#define _IA64_REG_CR_IRR3       4167    /* getreg only */
#define _IA64_REG_CR_ITV        4168
#define _IA64_REG_CR_PMV        4169
#define _IA64_REG_CR_CMCV       4170
#define _IA64_REG_CR_LRR0       4176
#define _IA64_REG_CR_LRR1       4177

/* Indirect Registers for getindreg() and setindreg() */

#define _IA64_REG_INDR_CPUID    9000    /* getindreg only */
#define _IA64_REG_INDR_DBR      9001
#define _IA64_REG_INDR_IBR      9002
#define _IA64_REG_INDR_PKR      9003
#define _IA64_REG_INDR_PMC      9004
#define _IA64_REG_INDR_PMD      9005
#define _IA64_REG_INDR_RR       9006
#ifdef __INTEL_COMPILER
void __fc(uint64_t *addr);
void __synci(void);
void __isrlz(void);
void __dsrlz(void);
uint64_t __getReg(const int whichReg);
uint64_t _InterlockedCompareExchange8_rel(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange8_acq(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_rel(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_acq(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_rel(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_acq(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_rel(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_acq(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);

#define ia64_cmpxchg1_rel       _InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq       _InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel       _InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq       _InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel       _InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq       _InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel       _InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq       _InterlockedCompareExchange64_acq

#define ia64_srlz_d             __dsrlz
#define ia64_srlz_i             __isrlz
#define __ia64_fc               __fc
#define ia64_sync_i             __synci
#define __ia64_getreg           __getReg
#else /* __INTEL_COMPILER */

#define ia64_cmpxchg1_acq(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg1_rel(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg2_acq(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg2_rel(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg4_acq(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg4_rel(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg8_acq(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_cmpxchg8_rel(ptr, new, old) \
({ \
        uint64_t ia64_intri_res; \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
        asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv" : \
                      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
        ia64_intri_res; \
})

#define ia64_srlz_i()   asm volatile (";; srlz.i ;;" ::: "memory")
#define ia64_srlz_d()   asm volatile (";; srlz.d" ::: "memory");
#define __ia64_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory")
#define ia64_sync_i()   asm volatile (";; sync.i" ::: "memory")
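/*
 * Sketch (not in the original header): the classic reason to combine
 * __ia64_fc(), ia64_sync_i() and ia64_srlz_i() is to make freshly written
 * instructions visible to the local instruction stream.  flush_icache_sketch()
 * and its 32-byte stride are illustrative assumptions, not part of this file.
 */
static inline void flush_icache_sketch(unsigned long start, unsigned long end)
{
        unsigned long addr;

        /* fc flushes one cache line per iteration; 32 bytes is a
         * conservative stride for the architected minimum line size. */
        for (addr = start & ~31UL; addr < end; addr += 32)
                __ia64_fc((uint64_t *)addr);
        ia64_sync_i();  /* wait for the flushes to complete */
        ia64_srlz_i();  /* serialize the instruction stream */
}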
register unsigned long ia64_r13 asm ("r13") __attribute_used__;

#define __ia64_getreg(regnum) \
({ \
        uint64_t ia64_intri_res; \
        \
        switch (regnum) { \
        case _IA64_REG_GP: \
                asm volatile ("mov %0=gp" : "=r"(ia64_intri_res)); \
                break; \
        case _IA64_REG_IP: \
                asm volatile ("mov %0=ip" : "=r"(ia64_intri_res)); \
                break; \
        case _IA64_REG_PSR: \
                asm volatile ("mov %0=psr" : "=r"(ia64_intri_res)); \
                break; \
        case _IA64_REG_TP:      /* for current() */ \
                ia64_intri_res = ia64_r13; \
                break; \
        case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC: \
                asm volatile ("mov %0=ar%1" : "=r"(ia64_intri_res) \
                              : "i"(regnum - _IA64_REG_AR_KR0)); \
                break; \
        case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1: \
                asm volatile ("mov %0=cr%1" : "=r"(ia64_intri_res) \
                              : "i"(regnum - _IA64_REG_CR_DCR)); \
                break; \
        case _IA64_REG_SP: \
                asm volatile ("mov %0=sp" : "=r"(ia64_intri_res)); \
                break; \
        default: \
                ia64_bad_param_for_getreg(); \
                break; \
        } \
        ia64_intri_res; \
})

#endif /* __INTEL_COMPILER */
#endif /* IA64_INTRINSIC_H */
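/*
 * Usage sketch (not part of the original header): __ia64_getreg() requires a
 * compile-time constant register selector so the "i" constraints in the AR/CR
 * cases can be satisfied.  The read_itc() helper is hypothetical.
 */
static inline uint64_t read_itc(void)
{
        /* ar.itc, the interval time counter, selected by _IA64_REG_AR_ITC. */
        return __ia64_getreg(_IA64_REG_AR_ITC);
}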