
xen/arch/ia64/vmx/optvfault.S @ 12431:4816a891b3d6

[IA64] Fix SMP Windows boot failure

Sometimes SMP Windows fails to boot; the root cause is that a guest
timer interrupt is lost.

This patch fixes the following issues:
1. Windows uses a different way to sync the itc.
2. Previously, when a guest timer fired while the guest ITV was masked,
Xen discarded the guest timer interrupt. That is not correct for
Windows, which may still expect that timer interrupt.
3. Windows may set the timer in a different way in some situations:
it first sets itm (which may be smaller than the current itc), and
then sets itc (which is smaller than itm). Xen now supports this way
of setting the timer; a C sketch of the guest-ITC clamp this relies on
follows the changeset metadata below.

Signed-off-by: Anthony Xu <anthony.xu@intel.com>
author awilliam@xenbuild.aw
date Fri Nov 10 11:19:57 2006 -0700
parents 5ebc7ee315cc
children 74de984434c9
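For reference, point 3 above hinges on how the mov-from-AR fast path in
this file computes the guest ITC: the hypervisor keeps a per-vCPU offset
and the last ITC value handed to the guest, and never lets the
guest-visible ITC move backwards. Below is a minimal C sketch of that
clamp; the field names mirror the VCPU_VTM_OFFSET_OFS and
VCPU_VTM_LAST_ITC_OFS offsets used in vmx_asm_mov_from_ar, and the
helper name itself is hypothetical.

#include <stdint.h>

/* Hypothetical helper mirroring vmx_asm_mov_from_ar: the guest ITC is
 * the physical ITC plus a per-vCPU offset, clamped so it never reads
 * below the last value returned to the guest. */
uint64_t guest_read_itc(uint64_t phys_itc, uint64_t vtm_offset,
                        uint64_t last_itc)
{
    uint64_t itc = phys_itc + vtm_offset;   /* add r19=r19,r18 */
    if (last_itc > itc)                     /* cmp.gtu p6,p0=r16,r19 */
        itc = last_itc;                     /* (p6) mov r19=r16 */
    return itc;
}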
/*
 * arch/ia64/vmx/optvfault.S
 * optimize virtualization fault handler
 *
 * Copyright (C) 2006 Intel Co
 * Xuefei Xu (Anthony Xu) <anthony.xu@intel.com>
 */

#include <linux/config.h>
#include <asm/asmmacro.h>
#include <asm/kregs.h>
#include <asm/offsets.h>
#include <asm/percpu.h>
#include <asm/processor.h>
#include <asm/vmx_vpd.h>
#include <asm/vmx_pal_vsa.h>
#include <asm/asm-offsets.h>

#define ACCE_MOV_FROM_AR
#define ACCE_MOV_FROM_RR
#define ACCE_MOV_TO_RR
#define ACCE_RSM
#define ACCE_SSM
#define ACCE_MOV_TO_PSR

//mov r1=ar3
GLOBAL_ENTRY(vmx_asm_mov_from_ar)
#ifndef ACCE_MOV_FROM_AR
    br.many vmx_virtualization_fault_back
#endif
    add r18=VCPU_VTM_OFFSET_OFS,r21
    add r16=VCPU_VTM_LAST_ITC_OFS,r21
    extr.u r17=r25,6,7          // r17 = target register number (r1)
    ;;
    ld8 r18=[r18]               // r18 = vtm offset
    mov r19=ar.itc              // r19 = physical itc
    mov r24=b0
    ;;
    ld8 r16=[r16]               // r16 = last itc seen by the guest
    add r19=r19,r18             // guest itc = physical itc + offset
    movl r20=asm_mov_to_reg
    ;;
    adds r30=vmx_resume_to_guest-asm_mov_to_reg,r20
    shladd r17=r17,4,r20        // bundle table entry: base + reg*16
    cmp.gtu p6,p0=r16,r19
    ;;
    (p6) mov r19=r16            // never let the guest itc go backwards
    mov b0=r17
    br.sptk.few b0
    ;;
END(vmx_asm_mov_from_ar)
// mov r1=rr[r3]
GLOBAL_ENTRY(vmx_asm_mov_from_rr)
#ifndef ACCE_MOV_FROM_RR
    br.many vmx_virtualization_fault_back
#endif
    extr.u r16=r25,20,7         // r16 = address register number (r3)
    extr.u r17=r25,6,7          // r17 = target register number (r1)
    movl r20=asm_mov_from_reg
    ;;
    adds r30=vmx_asm_mov_from_rr_back_1-asm_mov_from_reg,r20
    shladd r16=r16,4,r20
    mov r24=b0
    ;;
    add r27=VCPU_VRR0_OFS,r21
    mov b0=r16
    br.many b0
    ;;
vmx_asm_mov_from_rr_back_1:
    adds r30=vmx_resume_to_guest-asm_mov_from_reg,r20
    adds r22=asm_mov_to_reg-asm_mov_from_reg,r20
    shr.u r26=r19,61            // region number of the address in r3
    ;;
    shladd r17=r17,4,r22
    shladd r27=r26,3,r27
    ;;
    ld8 r19=[r27]               // fetch the virtual rr value
    mov b0=r17
    br.many b0
END(vmx_asm_mov_from_rr)
// mov rr[r3]=r2
GLOBAL_ENTRY(vmx_asm_mov_to_rr)
#ifndef ACCE_MOV_TO_RR
    br.many vmx_virtualization_fault_back
#endif
    extr.u r16=r25,20,7         // r16 = address register number (r3)
    extr.u r17=r25,13,7         // r17 = source register number (r2)
    movl r20=asm_mov_from_reg
    ;;
    adds r30=vmx_asm_mov_to_rr_back_1-asm_mov_from_reg,r20
    shladd r16=r16,4,r20
    mov r22=b0
    ;;
    add r27=VCPU_VRR0_OFS,r21
    mov b0=r16
    br.many b0
    ;;
vmx_asm_mov_to_rr_back_1:
    adds r30=vmx_asm_mov_to_rr_back_2-asm_mov_from_reg,r20
    shr.u r23=r19,61            // region number
    shladd r17=r17,4,r20
    ;;
    //if rr7, go back
    cmp.eq p6,p0=7,r23
    mov b0=r22
    (p6) br.cond.dpnt.many vmx_virtualization_fault_back
    ;;
    mov r28=r19
    mov b0=r17
    br.many b0
vmx_asm_mov_to_rr_back_2:
    adds r30=vmx_resume_to_guest-asm_mov_from_reg,r20
    shladd r27=r23,3,r27
    ;; // +starting_rid
    st8 [r27]=r19               // record the virtual rr value
    mov b0=r30
    ;;
    adds r16=IA64_VCPU_STARTING_RID_OFFSET,r21
    ;;
    ld4 r16=[r16]
    ;;
    shl r16=r16,8
    ;;
    add r19=r19,r16             // fold starting_rid into the rid field
    ;; //mangling rid 1 and 3
    extr.u r16=r19,8,8
    extr.u r17=r19,24,8
    extr.u r18=r19,2,6
    ;;
    dep r19=r16,r19,24,8        // swap rid bytes 1 and 3
    ;;
    dep r19=r17,r19,8,8
    ;; //set ve 1
    dep r19=-1,r19,0,1
    cmp.lt p6,p0=14,r18
    ;;
    (p6) mov r18=14             // clamp the preferred page size to 14
    ;;
    (p6) dep r19=r18,r19,2,6
    ;;
    cmp.eq p6,p0=0,r23
    ;;
    cmp.eq.or p6,p0=4,r23       // rr0 and rr4 also go to the saved copies
    ;;
    adds r16=IA64_VCPU_MODE_FLAGS_OFFSET,r21
    (p6) adds r17=IA64_VCPU_META_SAVED_RR0_OFFSET,r21
    ;;
    ld4 r16=[r16]
    cmp.eq p7,p0=r0,r0
    (p6) shladd r17=r23,1,r17
    ;;
    (p6) st8 [r17]=r19
    (p6) tbit.nz p6,p7=r16,0    // in physical mode, defer the hardware write
    ;;
    (p7) mov rr[r28]=r19
    mov r24=r22
    br.many b0
END(vmx_asm_mov_to_rr)
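The tail of vmx_asm_mov_to_rr turns the guest's region register value
into the machine value: it folds the domain's starting_rid into the RID
field, swaps RID bytes 1 and 3, forces the VE bit on, and clamps the
preferred page-size field to 14. The following C sketch is a reading of
that extr.u/dep sequence, not an exported Xen function.

#include <stdint.h>

/* Sketch of the rr-value mangling done after vmx_asm_mov_to_rr_back_2. */
uint64_t mangle_rr(uint64_t vrr, uint32_t starting_rid)
{
    uint64_t rr = vrr + ((uint64_t)starting_rid << 8); /* add r19=r19,r16 */
    uint64_t b1 = (rr >> 8) & 0xff;                    /* extr.u r16=r19,8,8 */
    uint64_t b3 = (rr >> 24) & 0xff;                   /* extr.u r17=r19,24,8 */
    uint64_t ps = (rr >> 2) & 0x3f;                    /* extr.u r18=r19,2,6 */

    rr = (rr & ~(0xffULL << 24)) | (b1 << 24);         /* dep r19=r16,r19,24,8 */
    rr = (rr & ~(0xffULL << 8))  | (b3 << 8);          /* dep r19=r17,r19,8,8 */
    rr |= 1;                                           /* ve = 1 */
    if (ps > 14)                                       /* clamp page size */
        rr = (rr & ~(0x3fULL << 2)) | (14ULL << 2);
    return rr;
}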
//rsm
GLOBAL_ENTRY(vmx_asm_rsm)
#ifndef ACCE_RSM
    br.many vmx_virtualization_fault_back
#endif
    add r16=IA64_VPD_BASE_OFFSET,r21
    extr.u r26=r25,6,21
    extr.u r27=r25,31,2
    ;;
    ld8 r16=[r16]
    extr.u r28=r25,36,1
    dep r26=r27,r26,21,2
    ;;
    add r17=VPD_VPSR_START_OFFSET,r16
    add r22=IA64_VCPU_MODE_FLAGS_OFFSET,r21
    //r26 is imm24
    dep r26=r28,r26,23,1
    ;;
    ld8 r18=[r17]
    movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI
    ld4 r23=[r22]
    sub r27=-1,r26              // r27 = ~imm24
    mov r24=b0
    ;;
    mov r20=cr.ipsr
    or r28=r27,r28
    and r19=r18,r27             // clear the rsm bits in vpsr
    ;;
    st8 [r17]=r19
    and r20=r20,r28             // clear them in ipsr too, but keep IC/I/DT/SI
    ;;
    mov cr.ipsr=r20
    tbit.nz p6,p0=r23,0
    ;;
    tbit.z.or p6,p0=r26,IA64_PSR_DT_BIT
    (p6) br.dptk vmx_resume_to_guest    // psr.dt untouched, or already physical
    ;;
    add r26=IA64_VCPU_META_RR0_OFFSET,r21
    add r27=IA64_VCPU_META_RR0_OFFSET+8,r21
    dep r23=-1,r23,0,1          // mark the vcpu as in physical mode
    ;;
    ld8 r26=[r26]
    ld8 r27=[r27]
    st4 [r22]=r23
    dep.z r28=4,61,3            // 4<<61: base address of region 4
    ;;
    mov rr[r0]=r26              // install the physical-mode rr0/rr4
    mov rr[r28]=r27
    br.many vmx_resume_to_guest
END(vmx_asm_rsm)
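Both vmx_asm_rsm above and vmx_asm_ssm below reassemble the 24-bit
immediate of the rsm/ssm instruction from three scattered fields of the
41-bit instruction slot held in r25. A C equivalent, assuming `slot`
holds those 41 bits:

#include <stdint.h>

/* Rebuild imm24 from an rsm/ssm instruction slot, as the extr.u/dep
 * sequence in vmx_asm_rsm does. */
uint64_t decode_imm24(uint64_t slot)
{
    uint64_t imm = (slot >> 6) & 0x1fffff;   /* bits  6..26 -> imm[0..20]  */
    imm |= ((slot >> 31) & 0x3) << 21;       /* bits 31..32 -> imm[21..22] */
    imm |= ((slot >> 36) & 0x1) << 23;       /* bit  36     -> imm[23]     */
    return imm;
}

For rsm the handler then clears exactly those bits in vpsr and (mostly)
in cr.ipsr; `sub r27=-1,r26` is how the assembly computes ~imm24.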
//ssm
GLOBAL_ENTRY(vmx_asm_ssm)
#ifndef ACCE_SSM
    br.many vmx_virtualization_fault_back
#endif
    add r16=IA64_VPD_BASE_OFFSET,r21
    extr.u r26=r25,6,21
    extr.u r27=r25,31,2
    ;;
    ld8 r16=[r16]
    extr.u r28=r25,36,1
    dep r26=r27,r26,21,2
    ;; //r26 is imm24
    add r27=VPD_VPSR_START_OFFSET,r16
    dep r26=r28,r26,23,1
    ;; //r19 vpsr
    ld8 r29=[r27]               // r29 = old vpsr
    mov r24=b0
    ;;
    add r22=IA64_VCPU_MODE_FLAGS_OFFSET,r21
    mov r20=cr.ipsr
    or r19=r29,r26              // set the ssm bits in vpsr
    ;;
    ld4 r23=[r22]
    st8 [r27]=r19
    or r20=r20,r26              // and in ipsr
    ;;
    mov cr.ipsr=r20
    movl r28=IA64_PSR_DT+IA64_PSR_RT+IA64_PSR_IT
    ;;
    and r19=r28,r19
    tbit.z p6,p0=r23,0
    ;;
    cmp.ne.or p6,p0=r28,r19     // skip unless dt/rt/it are now all set
    (p6) br.dptk vmx_asm_ssm_1
    ;;
    add r26=IA64_VCPU_META_SAVED_RR0_OFFSET,r21
    add r27=IA64_VCPU_META_SAVED_RR0_OFFSET+8,r21
    dep r23=0,r23,0,1           // back to virtual mode
    ;;
    ld8 r26=[r26]
    ld8 r27=[r27]
    st4 [r22]=r23
    dep.z r28=4,61,3
    ;;
    mov rr[r0]=r26              // restore the guest's rr0/rr4
    mov rr[r28]=r27
    ;;
    srlz.i
    ;;
vmx_asm_ssm_1:
    tbit.nz p6,p0=r29,IA64_PSR_I_BIT
    ;;
    tbit.z.or p6,p0=r19,IA64_PSR_I_BIT
    (p6) br.dptk vmx_resume_to_guest
    ;;
    add r29=VPD_VTPR_START_OFFSET,r16
    add r30=VPD_VHPI_START_OFFSET,r16
    ;;
    ld8 r29=[r29]
    ld8 r30=[r30]
    ;;
    extr.u r17=r29,4,4          // tpr.mic
    extr.u r18=r29,16,1         // tpr.mmi
    ;;
    dep r17=r18,r17,4,1
    ;;
    cmp.gt p6,p0=r30,r17        // pending vhpi beats the tpr mask?
    (p6) br.dpnt.few vmx_asm_dispatch_vexirq
    br.many vmx_resume_to_guest
END(vmx_asm_ssm)
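When ssm unmasks psr.i, the handler compares the highest pending
virtual interrupt (VHPI) against the virtual TPR before injecting: the
mask level is the TPR's mic field with mmi stacked on top as a fifth
bit. A sketch of that comparison (a reading of the extr.u/dep/cmp.gt
sequence above; the names are descriptive, not Xen's):

#include <stdint.h>

/* Should a pending virtual external interrupt be delivered?
 * vtpr: virtual task priority register; vhpi: highest pending priority. */
int vexirq_pending(uint64_t vtpr, uint64_t vhpi)
{
    uint64_t mic = (vtpr >> 4) & 0xf;    /* mask interrupt class */
    uint64_t mmi = (vtpr >> 16) & 0x1;   /* mask maskable interrupts */
    return vhpi > ((mmi << 4) | mic);    /* cmp.gt p6,p0=r30,r17 */
}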
//mov psr.l=r2
GLOBAL_ENTRY(vmx_asm_mov_to_psr)
#ifndef ACCE_MOV_TO_PSR
    br.many vmx_virtualization_fault_back
#endif
    add r16=IA64_VPD_BASE_OFFSET,r21
    extr.u r26=r25,13,7 //r2
    ;;
    ld8 r16=[r16]
    movl r20=asm_mov_from_reg
    ;;
    adds r30=vmx_asm_mov_to_psr_back-asm_mov_from_reg,r20
    shladd r26=r26,4,r20
    mov r24=b0
    ;;
    add r27=VPD_VPSR_START_OFFSET,r16
    mov b0=r26
    br.many b0
    ;;
vmx_asm_mov_to_psr_back:
    ld8 r17=[r27]               // r17 = old vpsr
    add r22=IA64_VCPU_MODE_FLAGS_OFFSET,r21
    dep r19=0,r19,32,32         // keep only the low 32 bits (psr.l) of r2
    ;;
    ld4 r23=[r22]
    dep r18=0,r17,0,32          // high half of the old vpsr
    ;;
    add r30=r18,r19             // r30 = new vpsr
    movl r28=IA64_PSR_DT+IA64_PSR_RT+IA64_PSR_IT
    ;;
    st8 [r27]=r30
    and r27=r28,r30
    and r29=r28,r17
    ;;
    cmp.eq p5,p0=r29,r27        // translation bits unchanged?
    cmp.eq p6,p7=r28,r27        // p6: all set (virtual), p7: physical
    (p5) br.many vmx_asm_mov_to_psr_1
    ;;
    //virtual to physical
    (p7) add r26=IA64_VCPU_META_RR0_OFFSET,r21
    (p7) add r27=IA64_VCPU_META_RR0_OFFSET+8,r21
    (p7) dep r23=-1,r23,0,1
    ;;
    //physical to virtual
    (p6) add r26=IA64_VCPU_META_SAVED_RR0_OFFSET,r21
    (p6) add r27=IA64_VCPU_META_SAVED_RR0_OFFSET+8,r21
    (p6) dep r23=0,r23,0,1
    ;;
    ld8 r26=[r26]
    ld8 r27=[r27]
    st4 [r22]=r23
    dep.z r28=4,61,3
    ;;
    mov rr[r0]=r26
    mov rr[r28]=r27
    ;;
    srlz.i
    ;;
vmx_asm_mov_to_psr_1:
    mov r20=cr.ipsr
    movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI+IA64_PSR_RT
    ;;
    or r19=r19,r28
    dep r20=0,r20,0,32
    ;;
    add r20=r19,r20
    mov b0=r24
    ;;
    mov cr.ipsr=r20
    cmp.ne p6,p0=r0,r0
    ;;
    tbit.nz.or p6,p0=r17,IA64_PSR_I_BIT
    tbit.z.or p6,p0=r30,IA64_PSR_I_BIT
    (p6) br.dpnt.few vmx_resume_to_guest
    ;;
    add r29=VPD_VTPR_START_OFFSET,r16
    add r30=VPD_VHPI_START_OFFSET,r16
    ;;
    ld8 r29=[r29]
    ld8 r30=[r30]
    ;;
    extr.u r17=r29,4,4          // tpr.mic
    extr.u r18=r29,16,1         // tpr.mmi
    ;;
    dep r17=r18,r17,4,1
    ;;
    cmp.gt p6,p0=r30,r17        // pending vhpi beats the tpr mask?
    (p6) br.dpnt.few vmx_asm_dispatch_vexirq
    br.many vmx_resume_to_guest
END(vmx_asm_mov_to_psr)
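The heart of vmx_asm_mov_to_psr (and of the rsm/ssm paths) is tracking
the guest's translation mode: with psr.dt, psr.rt, and psr.it all set,
the guest runs in virtual mode, and any change between that state and a
partially-physical one forces a swap of the rr0/rr4 pair between the
META_RR0 and META_SAVED_RR0 copies. A sketch of that decision (enum and
function names are illustrative; bit positions per the IA-64 PSR
layout):

#include <stdint.h>

#define PSR_DT (1ULL << 17)
#define PSR_RT (1ULL << 27)
#define PSR_IT (1ULL << 36)
#define PSR_TRANS (PSR_DT | PSR_RT | PSR_IT)

enum guest_mode { MODE_UNCHANGED, MODE_VIRTUAL, MODE_PHYSICAL };

/* Mirrors the cmp.eq p5/p6/p7 logic at vmx_asm_mov_to_psr_back. */
enum guest_mode mode_after_psr_write(uint64_t old_vpsr, uint64_t new_vpsr)
{
    uint64_t old_t = old_vpsr & PSR_TRANS;
    uint64_t new_t = new_vpsr & PSR_TRANS;

    if (old_t == new_t)
        return MODE_UNCHANGED;                 /* (p5) skip the rr swap   */
    return new_t == PSR_TRANS ? MODE_VIRTUAL   /* (p6) load META_SAVED_RR0 */
                              : MODE_PHYSICAL; /* (p7) load META_RR0       */
}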
ENTRY(vmx_asm_dispatch_vexirq)
    //increment iip
    mov r16=cr.ipsr
    ;;
    extr.u r17=r16,IA64_PSR_RI_BIT,2    // r17 = slot within the bundle
    tbit.nz p6,p7=r16,IA64_PSR_RI_BIT+1 // p6: already in the last slot
    ;;
    (p6) mov r18=cr.iip
    (p6) mov r17=r0
    (p7) add r17=1,r17
    ;;
    (p6) add r18=0x10,r18               // advance to the next bundle
    dep r16=r17,r16,IA64_PSR_RI_BIT,2
    ;;
    (p6) mov cr.iip=r18
    mov cr.ipsr=r16
    br.many vmx_dispatch_vexirq
END(vmx_asm_dispatch_vexirq)
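vmx_asm_dispatch_vexirq (and vmx_resume_to_guest below) step the guest
past the faulting instruction by bumping ipsr.ri, the slot number
within the current 16-byte bundle, rolling over into iip when the last
slot is reached. Equivalent C; the tbit on RI_BIT+1 works because a
slot number of 2 is the only legal ri value with that bit set:

#include <stdint.h>

/* Advance guest iip/ipsr.ri past the current instruction: ri selects
 * one of three slots in the 16-byte bundle at iip. */
void advance_guest_ip(uint64_t *iip, unsigned *ri)
{
    if (*ri == 2) {        /* tbit.nz on IA64_PSR_RI_BIT+1 */
        *iip += 0x10;      /* next bundle */
        *ri = 0;
    } else {
        *ri += 1;
    }
}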
// Each expansion below is exactly one 16-byte bundle, so the handlers
// above can branch straight to table_base + reg*16.

#define MOV_TO_REG0 \
{; \
    nop.b 0x0; \
    nop.b 0x0; \
    nop.b 0x0; \
    ;; \
};

#define MOV_TO_REG(n) \
{; \
    mov r##n##=r19; \
    mov b0=r30; \
    br.sptk.many b0; \
    ;; \
};

#define MOV_FROM_REG(n) \
{; \
    mov r19=r##n##; \
    mov b0=r30; \
    br.sptk.many b0; \
    ;; \
};
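Because every expansion is one fixed-size bundle, "move register n" is
dispatched without any table of pointers: the handlers compute
base + n*16 (shladd r17=r17,4,r20) and branch there, and the selected
bundle immediately branches back through b0/r30. A conceptual C
rendering of what one table entry does (illustrative only; the real
tables follow below):

#include <stdint.h>

typedef uint64_t reg_file[128];

/* Entry n of asm_mov_from_reg: mov r19=r<n>; mov b0=r30; br b0. */
static uint64_t mov_from_reg(const reg_file gr, unsigned n)
{
    return gr[n];
}

/* Entry 0 of asm_mov_to_reg is all nops: r0 is hardwired to zero. */
static void mov_to_reg(reg_file gr, unsigned n, uint64_t val)
{
    if (n != 0)
        gr[n] = val;
}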
// r16-r31 are banked: switch to register bank 1 (bsw.1) to reach the
// guest's copy, then back to bank 0 before returning.

#define MOV_TO_BANK0_REG(n) \
ENTRY_MIN_ALIGN(asm_mov_to_bank0_reg##n##); \
{; \
    mov r26=r2; \
    mov r2=r19; \
    bsw.1; \
    ;; \
}; \
{; \
    mov r##n##=r2; \
    nop.b 0x0; \
    bsw.0; \
    ;; \
}; \
{; \
    mov r2=r26; \
    mov b0=r30; \
    br.sptk.many b0; \
    ;; \
}; \
END(asm_mov_to_bank0_reg##n##)

#define MOV_FROM_BANK0_REG(n) \
ENTRY_MIN_ALIGN(asm_mov_from_bank0_reg##n##); \
{; \
    mov r26=r2; \
    nop.b 0x0; \
    bsw.1; \
    ;; \
}; \
{; \
    mov r2=r##n##; \
    nop.b 0x0; \
    bsw.0; \
    ;; \
}; \
{; \
    mov r19=r2; \
    mov r2=r26; \
    mov b0=r30; \
}; \
{; \
    nop.b 0x0; \
    nop.b 0x0; \
    br.sptk.many b0; \
    ;; \
}; \
END(asm_mov_from_bank0_reg##n##)

#define JMP_TO_MOV_TO_BANK0_REG(n) \
{; \
    nop.b 0x0; \
    nop.b 0x0; \
    br.sptk.many asm_mov_to_bank0_reg##n##; \
    ;; \
}

#define JMP_TO_MOV_FROM_BANK0_REG(n) \
{; \
    nop.b 0x0; \
    nop.b 0x0; \
    br.sptk.many asm_mov_from_bank0_reg##n##; \
    ;; \
}
MOV_FROM_BANK0_REG(16)
MOV_FROM_BANK0_REG(17)
MOV_FROM_BANK0_REG(18)
MOV_FROM_BANK0_REG(19)
MOV_FROM_BANK0_REG(20)
MOV_FROM_BANK0_REG(21)
MOV_FROM_BANK0_REG(22)
MOV_FROM_BANK0_REG(23)
MOV_FROM_BANK0_REG(24)
MOV_FROM_BANK0_REG(25)
MOV_FROM_BANK0_REG(26)
MOV_FROM_BANK0_REG(27)
MOV_FROM_BANK0_REG(28)
MOV_FROM_BANK0_REG(29)
MOV_FROM_BANK0_REG(30)
MOV_FROM_BANK0_REG(31)

// mov from reg table
ENTRY(asm_mov_from_reg)
MOV_FROM_REG(0)
MOV_FROM_REG(1)
MOV_FROM_REG(2)
MOV_FROM_REG(3)
MOV_FROM_REG(4)
MOV_FROM_REG(5)
MOV_FROM_REG(6)
MOV_FROM_REG(7)
MOV_FROM_REG(8)
MOV_FROM_REG(9)
MOV_FROM_REG(10)
MOV_FROM_REG(11)
MOV_FROM_REG(12)
MOV_FROM_REG(13)
MOV_FROM_REG(14)
MOV_FROM_REG(15)
JMP_TO_MOV_FROM_BANK0_REG(16)
JMP_TO_MOV_FROM_BANK0_REG(17)
JMP_TO_MOV_FROM_BANK0_REG(18)
JMP_TO_MOV_FROM_BANK0_REG(19)
JMP_TO_MOV_FROM_BANK0_REG(20)
JMP_TO_MOV_FROM_BANK0_REG(21)
JMP_TO_MOV_FROM_BANK0_REG(22)
JMP_TO_MOV_FROM_BANK0_REG(23)
JMP_TO_MOV_FROM_BANK0_REG(24)
JMP_TO_MOV_FROM_BANK0_REG(25)
JMP_TO_MOV_FROM_BANK0_REG(26)
JMP_TO_MOV_FROM_BANK0_REG(27)
JMP_TO_MOV_FROM_BANK0_REG(28)
JMP_TO_MOV_FROM_BANK0_REG(29)
JMP_TO_MOV_FROM_BANK0_REG(30)
JMP_TO_MOV_FROM_BANK0_REG(31)
MOV_FROM_REG(32)
MOV_FROM_REG(33)
MOV_FROM_REG(34)
MOV_FROM_REG(35)
MOV_FROM_REG(36)
MOV_FROM_REG(37)
MOV_FROM_REG(38)
MOV_FROM_REG(39)
MOV_FROM_REG(40)
MOV_FROM_REG(41)
MOV_FROM_REG(42)
MOV_FROM_REG(43)
MOV_FROM_REG(44)
MOV_FROM_REG(45)
MOV_FROM_REG(46)
MOV_FROM_REG(47)
MOV_FROM_REG(48)
MOV_FROM_REG(49)
MOV_FROM_REG(50)
MOV_FROM_REG(51)
MOV_FROM_REG(52)
MOV_FROM_REG(53)
MOV_FROM_REG(54)
MOV_FROM_REG(55)
MOV_FROM_REG(56)
MOV_FROM_REG(57)
MOV_FROM_REG(58)
MOV_FROM_REG(59)
MOV_FROM_REG(60)
MOV_FROM_REG(61)
MOV_FROM_REG(62)
MOV_FROM_REG(63)
MOV_FROM_REG(64)
MOV_FROM_REG(65)
MOV_FROM_REG(66)
MOV_FROM_REG(67)
MOV_FROM_REG(68)
MOV_FROM_REG(69)
MOV_FROM_REG(70)
MOV_FROM_REG(71)
MOV_FROM_REG(72)
MOV_FROM_REG(73)
MOV_FROM_REG(74)
MOV_FROM_REG(75)
MOV_FROM_REG(76)
MOV_FROM_REG(77)
MOV_FROM_REG(78)
MOV_FROM_REG(79)
MOV_FROM_REG(80)
MOV_FROM_REG(81)
MOV_FROM_REG(82)
MOV_FROM_REG(83)
MOV_FROM_REG(84)
MOV_FROM_REG(85)
MOV_FROM_REG(86)
MOV_FROM_REG(87)
MOV_FROM_REG(88)
MOV_FROM_REG(89)
MOV_FROM_REG(90)
MOV_FROM_REG(91)
MOV_FROM_REG(92)
MOV_FROM_REG(93)
MOV_FROM_REG(94)
MOV_FROM_REG(95)
MOV_FROM_REG(96)
MOV_FROM_REG(97)
MOV_FROM_REG(98)
MOV_FROM_REG(99)
MOV_FROM_REG(100)
MOV_FROM_REG(101)
MOV_FROM_REG(102)
MOV_FROM_REG(103)
MOV_FROM_REG(104)
MOV_FROM_REG(105)
MOV_FROM_REG(106)
MOV_FROM_REG(107)
MOV_FROM_REG(108)
MOV_FROM_REG(109)
MOV_FROM_REG(110)
MOV_FROM_REG(111)
MOV_FROM_REG(112)
MOV_FROM_REG(113)
MOV_FROM_REG(114)
MOV_FROM_REG(115)
MOV_FROM_REG(116)
MOV_FROM_REG(117)
MOV_FROM_REG(118)
MOV_FROM_REG(119)
MOV_FROM_REG(120)
MOV_FROM_REG(121)
MOV_FROM_REG(122)
MOV_FROM_REG(123)
MOV_FROM_REG(124)
MOV_FROM_REG(125)
MOV_FROM_REG(126)
MOV_FROM_REG(127)
END(asm_mov_from_reg)
/* must be in bank 0
 * parameter:
 * r31: pr
 * r24: b0
 */
ENTRY(vmx_resume_to_guest)
    mov r16=cr.ipsr
    movl r20=__vsa_base
    ;;
    ld8 r20=[r20]
    adds r19=IA64_VPD_BASE_OFFSET,r21
    ;;
    ld8 r25=[r19]
    extr.u r17=r16,IA64_PSR_RI_BIT,2
    tbit.nz p6,p7=r16,IA64_PSR_RI_BIT+1
    ;;
    (p6) mov r18=cr.iip
    (p6) mov r17=r0
    ;;
    (p6) add r18=0x10,r18
    (p7) add r17=1,r17
    ;;
    (p6) mov cr.iip=r18
    dep r16=r17,r16,IA64_PSR_RI_BIT,2
    ;;
    mov cr.ipsr=r16
    adds r19=VPD_VPSR_START_OFFSET,r25
    add r28=PAL_VPS_RESUME_NORMAL,r20
    add r29=PAL_VPS_RESUME_HANDLER,r20
    ;;
    ld8 r19=[r19]
    mov b0=r29
    cmp.ne p6,p7=r0,r0
    ;;
    tbit.z p6,p7=r19,IA64_PSR_IC_BIT    // p7 = vpsr.ic
    ;;
    (p6) ld8 r26=[r25]
    (p7) mov b0=r28
    mov pr=r31,-2
    br.sptk.many b0                     // call the PAL service
    ;;
END(vmx_resume_to_guest)
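The final branch target depends on the guest's vpsr.ic: with
interruption collection on, the PAL virtualization service resumes the
guest directly (PAL_VPS_RESUME_NORMAL); with it off, the handler
variant is used. Sketched in C, with the entry points expressed as
offsets from __vsa_base as in the code above (the function name is
illustrative):

#include <stdint.h>

#define IA64_PSR_IC_BIT 13  /* interruption collection */

/* Pick the PAL VPS resume entry point, as vmx_resume_to_guest does. */
void *vps_resume_entry(uint64_t vpsr, char *vsa_base,
                       uint64_t resume_normal_off,
                       uint64_t resume_handler_off)
{
    if (vpsr & (1ULL << IA64_PSR_IC_BIT))
        return vsa_base + resume_normal_off;   /* (p7) mov b0=r28 */
    return vsa_base + resume_handler_off;      /* default: b0=r29 */
}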
MOV_TO_BANK0_REG(16)
MOV_TO_BANK0_REG(17)
MOV_TO_BANK0_REG(18)
MOV_TO_BANK0_REG(19)
MOV_TO_BANK0_REG(20)
MOV_TO_BANK0_REG(21)
MOV_TO_BANK0_REG(22)
MOV_TO_BANK0_REG(23)
MOV_TO_BANK0_REG(24)
MOV_TO_BANK0_REG(25)
MOV_TO_BANK0_REG(26)
MOV_TO_BANK0_REG(27)
MOV_TO_BANK0_REG(28)
MOV_TO_BANK0_REG(29)
MOV_TO_BANK0_REG(30)
MOV_TO_BANK0_REG(31)

// mov to reg table
ENTRY(asm_mov_to_reg)
MOV_TO_REG0
MOV_TO_REG(1)
MOV_TO_REG(2)
MOV_TO_REG(3)
MOV_TO_REG(4)
MOV_TO_REG(5)
MOV_TO_REG(6)
MOV_TO_REG(7)
MOV_TO_REG(8)
MOV_TO_REG(9)
MOV_TO_REG(10)
MOV_TO_REG(11)
MOV_TO_REG(12)
MOV_TO_REG(13)
MOV_TO_REG(14)
MOV_TO_REG(15)
JMP_TO_MOV_TO_BANK0_REG(16)
JMP_TO_MOV_TO_BANK0_REG(17)
JMP_TO_MOV_TO_BANK0_REG(18)
JMP_TO_MOV_TO_BANK0_REG(19)
JMP_TO_MOV_TO_BANK0_REG(20)
JMP_TO_MOV_TO_BANK0_REG(21)
JMP_TO_MOV_TO_BANK0_REG(22)
JMP_TO_MOV_TO_BANK0_REG(23)
JMP_TO_MOV_TO_BANK0_REG(24)
JMP_TO_MOV_TO_BANK0_REG(25)
JMP_TO_MOV_TO_BANK0_REG(26)
JMP_TO_MOV_TO_BANK0_REG(27)
JMP_TO_MOV_TO_BANK0_REG(28)
JMP_TO_MOV_TO_BANK0_REG(29)
JMP_TO_MOV_TO_BANK0_REG(30)
JMP_TO_MOV_TO_BANK0_REG(31)
MOV_TO_REG(32)
MOV_TO_REG(33)
MOV_TO_REG(34)
MOV_TO_REG(35)
MOV_TO_REG(36)
MOV_TO_REG(37)
MOV_TO_REG(38)
MOV_TO_REG(39)
MOV_TO_REG(40)
MOV_TO_REG(41)
MOV_TO_REG(42)
MOV_TO_REG(43)
MOV_TO_REG(44)
MOV_TO_REG(45)
MOV_TO_REG(46)
MOV_TO_REG(47)
MOV_TO_REG(48)
MOV_TO_REG(49)
MOV_TO_REG(50)
MOV_TO_REG(51)
MOV_TO_REG(52)
MOV_TO_REG(53)
MOV_TO_REG(54)
MOV_TO_REG(55)
MOV_TO_REG(56)
MOV_TO_REG(57)
MOV_TO_REG(58)
MOV_TO_REG(59)
MOV_TO_REG(60)
MOV_TO_REG(61)
MOV_TO_REG(62)
MOV_TO_REG(63)
MOV_TO_REG(64)
MOV_TO_REG(65)
MOV_TO_REG(66)
MOV_TO_REG(67)
MOV_TO_REG(68)
MOV_TO_REG(69)
MOV_TO_REG(70)
MOV_TO_REG(71)
MOV_TO_REG(72)
MOV_TO_REG(73)
MOV_TO_REG(74)
MOV_TO_REG(75)
MOV_TO_REG(76)
MOV_TO_REG(77)
MOV_TO_REG(78)
MOV_TO_REG(79)
MOV_TO_REG(80)
MOV_TO_REG(81)
MOV_TO_REG(82)
MOV_TO_REG(83)
MOV_TO_REG(84)
MOV_TO_REG(85)
MOV_TO_REG(86)
MOV_TO_REG(87)
MOV_TO_REG(88)
MOV_TO_REG(89)
MOV_TO_REG(90)
MOV_TO_REG(91)
MOV_TO_REG(92)
MOV_TO_REG(93)
MOV_TO_REG(94)
MOV_TO_REG(95)
MOV_TO_REG(96)
MOV_TO_REG(97)
MOV_TO_REG(98)
MOV_TO_REG(99)
MOV_TO_REG(100)
MOV_TO_REG(101)
MOV_TO_REG(102)
MOV_TO_REG(103)
MOV_TO_REG(104)
MOV_TO_REG(105)
MOV_TO_REG(106)
MOV_TO_REG(107)
MOV_TO_REG(108)
MOV_TO_REG(109)
MOV_TO_REG(110)
MOV_TO_REG(111)
MOV_TO_REG(112)
MOV_TO_REG(113)
MOV_TO_REG(114)
MOV_TO_REG(115)
MOV_TO_REG(116)
MOV_TO_REG(117)
MOV_TO_REG(118)
MOV_TO_REG(119)
MOV_TO_REG(120)
MOV_TO_REG(121)
MOV_TO_REG(122)
MOV_TO_REG(123)
MOV_TO_REG(124)
MOV_TO_REG(125)
MOV_TO_REG(126)
MOV_TO_REG(127)
END(asm_mov_to_reg)