ia64/xen-unstable

annotate xen/include/asm-ia64/vmx_vcpu.h @ 6534:23979fb12c49

Merge.
author adsharma@los-vmm.sc.intel.com
date Wed Aug 17 12:33:56 2005 -0800 (2005-08-17)
parents 1ae656509f02 f294acb25858
children 84ee014ebd41
rev   line source
adsharma@4993 1 /* -*- Mode:C; c-basic-offset:4; tab-width:4; indent-tabs-mode:nil -*- */
adsharma@4993 2 /*
adsharma@4993 3 * vmx_vcpu.h:
adsharma@4993 4 * Copyright (c) 2005, Intel Corporation.
adsharma@4993 5 *
adsharma@4993 6 * This program is free software; you can redistribute it and/or modify it
adsharma@4993 7 * under the terms and conditions of the GNU General Public License,
adsharma@4993 8 * version 2, as published by the Free Software Foundation.
adsharma@4993 9 *
adsharma@4993 10 * This program is distributed in the hope it will be useful, but WITHOUT
adsharma@4993 11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
adsharma@4993 12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
adsharma@4993 13 * more details.
adsharma@4993 14 *
adsharma@4993 15 * You should have received a copy of the GNU General Public License along with
adsharma@4993 16 * this program; if not, write to the Free Software Foundation, Inc., 59 Temple
adsharma@4993 17 * Place - Suite 330, Boston, MA 02111-1307 USA.
adsharma@4993 18 *
adsharma@4993 19 * Xuefei Xu (Anthony Xu) (Anthony.xu@intel.com)
adsharma@4993 20 * Yaozu Dong (Eddie Dong) (Eddie.dong@intel.com)
adsharma@4993 21 */
adsharma@4993 22
adsharma@4993 23 #ifndef _XEN_IA64_VMX_VCPU_H
adsharma@4993 24 #define _XEN_IA64_VMX_VCPU_H
adsharma@4993 25
adsharma@4993 26
adsharma@4993 27 #include <xen/sched.h>
adsharma@4993 28 #include <asm/ia64_int.h>
adsharma@4993 29 #include <asm/vmx_vpd.h>
adsharma@4993 30 #include <asm/ptrace.h>
adsharma@4993 31 #include <asm/regs.h>
adsharma@4993 32 #include <asm/regionreg.h>
adsharma@4993 33 #include <asm/types.h>
adsharma@4993 34 #include <asm/vcpu.h>
adsharma@4993 35
adsharma@4993 36 #define VRN_SHIFT 61
adsharma@4993 37 #define VRN0 0x0UL
adsharma@4993 38 #define VRN1 0x1UL
adsharma@4993 39 #define VRN2 0x2UL
adsharma@4993 40 #define VRN3 0x3UL
adsharma@4993 41 #define VRN4 0x4UL
adsharma@4993 42 #define VRN5 0x5UL
adsharma@4993 43 #define VRN6 0x6UL
adsharma@4993 44 #define VRN7 0x7UL
adsharma@4993 45
adsharma@4993 46 // this def for vcpu_regs won't work if kernel stack is present
adsharma@4993 47 #define vcpu_regs(vcpu) (((struct pt_regs *) ((char *) (vcpu) + IA64_STK_OFFSET)) - 1)
adsharma@4993 48 #define VMX_VPD(x,y) ((x)->arch.arch_vmx.vpd->y)
adsharma@4993 49
adsharma@4993 50 #define VMX(x,y) ((x)->arch.arch_vmx.y)
adsharma@4993 51
adsharma@4993 52 #define VPD_CR(x,y) (((cr_t*)VMX_VPD(x,vcr))->y)
adsharma@4993 53
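/*
 * Expansion sketch (illustrative only, derived from the macros above): a
 * field of the virtual processor descriptor, or of its virtual control
 * register block, is reached as e.g.
 *
 *     VMX_VPD(v, vpsr)  ==>  (v)->arch.arch_vmx.vpd->vpsr
 *     VPD_CR(v, ifa)    ==>  ((cr_t *)(v)->arch.arch_vmx.vpd->vcr)->ifa
 *
 * and vcpu_regs(v) yields the pt_regs frame saved just below the top of
 * the per-vcpu stack area, i.e. IA64_STK_OFFSET bytes above the vcpu
 * structure minus sizeof(struct pt_regs).
 */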
adsharma@4993 54 #define VMM_RR_SHIFT 20
adsharma@4993 55 #define VMM_RR_MASK ((1UL<<VMM_RR_SHIFT)-1)
djm@5797 56 //#define VRID_2_MRID(vcpu,rid) ((rid) & VMM_RR_MASK) | \
adsharma@5086 57 ((vcpu->domain->domain_id) << VMM_RR_SHIFT)
adsharma@4993 58 extern u64 indirect_reg_igfld_MASK ( int type, int index, u64 value);
adsharma@4993 59 extern u64 cr_igfld_mask (int index, u64 value);
adsharma@4993 60 extern int check_indirect_reg_rsv_fields ( int type, int index, u64 value );
adsharma@4993 61 extern u64 set_isr_ei_ni (VCPU *vcpu);
adsharma@4993 62 extern u64 set_isr_for_na_inst(VCPU *vcpu, int op);
adsharma@4993 63
adsharma@4993 64
adsharma@4993 65 /* The following are the API definitions used when CONFIG_VTI is enabled */
adsharma@4993 66 extern void vmx_vcpu_set_psr(VCPU *vcpu, unsigned long value);
adsharma@4993 67 extern UINT64 vmx_vcpu_sync_mpsr(UINT64 mipsr, UINT64 value);
adsharma@4993 68 extern void vmx_vcpu_set_psr_sync_mpsr(VCPU * vcpu, UINT64 value);
adsharma@4993 69 extern IA64FAULT vmx_vcpu_cover(VCPU *vcpu);
adsharma@4993 70 extern thash_cb_t *vmx_vcpu_get_vtlb(VCPU *vcpu);
adsharma@4993 71 extern thash_cb_t *vmx_vcpu_get_vhpt(VCPU *vcpu);
djm@5797 72 extern ia64_rr vmx_vcpu_rr(VCPU *vcpu,UINT64 vadr);
adsharma@4993 73 extern IA64FAULT vmx_vcpu_set_rr(VCPU *vcpu, UINT64 reg, UINT64 val);
adsharma@4993 74 extern IA64FAULT vmx_vcpu_get_rr(VCPU *vcpu, UINT64 reg, UINT64 *pval);
adsharma@4993 75 extern IA64FAULT vmx_vcpu_get_pkr(VCPU *vcpu, UINT64 reg, UINT64 *pval);
adsharma@4993 76 IA64FAULT vmx_vcpu_set_pkr(VCPU *vcpu, UINT64 reg, UINT64 val);
adsharma@4993 77 extern IA64FAULT vmx_vcpu_itc_i(VCPU *vcpu, UINT64 pte, UINT64 itir, UINT64 ifa);
adsharma@4993 78 extern IA64FAULT vmx_vcpu_itc_d(VCPU *vcpu, UINT64 pte, UINT64 itir, UINT64 ifa);
adsharma@4993 79 extern IA64FAULT vmx_vcpu_itr_i(VCPU *vcpu, UINT64 pte, UINT64 itir, UINT64 ifa, UINT64 idx);
adsharma@4993 80 extern IA64FAULT vmx_vcpu_itr_d(VCPU *vcpu, UINT64 pte, UINT64 itir, UINT64 ifa, UINT64 idx);
adsharma@4993 81 extern IA64FAULT vmx_vcpu_ptr_d(VCPU *vcpu,UINT64 vadr,UINT64 ps);
adsharma@4993 82 extern IA64FAULT vmx_vcpu_ptr_i(VCPU *vcpu,UINT64 vadr,UINT64 ps);
adsharma@4993 83 extern IA64FAULT vmx_vcpu_ptc_l(VCPU *vcpu, UINT64 vadr, UINT64 ps);
adsharma@4993 84 extern IA64FAULT vmx_vcpu_ptc_e(VCPU *vcpu, UINT64 vadr);
adsharma@4993 85 extern IA64FAULT vmx_vcpu_ptc_g(VCPU *vcpu, UINT64 vadr, UINT64 ps);
adsharma@4993 86 extern IA64FAULT vmx_vcpu_ptc_ga(VCPU *vcpu,UINT64 vadr,UINT64 ps);
adsharma@4993 87 extern IA64FAULT vmx_vcpu_thash(VCPU *vcpu, UINT64 vadr, UINT64 *pval);
adsharma@4993 88 extern u64 vmx_vcpu_get_itir_on_fault(VCPU *vcpu, u64 ifa);
adsharma@4993 89 extern IA64FAULT vmx_vcpu_ttag(VCPU *vcpu, UINT64 vadr, UINT64 *pval);
adsharma@4993 90 extern IA64FAULT vmx_vcpu_tpa(VCPU *vcpu, UINT64 vadr, UINT64 *padr);
adsharma@4993 91 extern IA64FAULT vmx_vcpu_tak(VCPU *vcpu, UINT64 vadr, UINT64 *key);
adsharma@4993 92 extern IA64FAULT vmx_vcpu_rfi(VCPU *vcpu);
adsharma@4993 93 extern UINT64 vmx_vcpu_get_psr(VCPU *vcpu);
adsharma@4993 94 extern IA64FAULT vmx_vcpu_get_bgr(VCPU *vcpu, unsigned int reg, UINT64 *val);
adsharma@4993 95 extern IA64FAULT vmx_vcpu_set_bgr(VCPU *vcpu, unsigned int reg, u64 val,int nat);
adsharma@4993 96 extern IA64FAULT vmx_vcpu_get_gr(VCPU *vcpu, unsigned reg, UINT64 * val);
adsharma@4993 97 extern IA64FAULT vmx_vcpu_set_gr(VCPU *vcpu, unsigned reg, u64 value, int nat);
adsharma@4993 98 extern IA64FAULT vmx_vcpu_reset_psr_sm(VCPU *vcpu, UINT64 imm24);
adsharma@4993 99 extern IA64FAULT vmx_vcpu_set_psr_sm(VCPU *vcpu, UINT64 imm24);
adsharma@4993 100 extern IA64FAULT vmx_vcpu_set_psr_l(VCPU *vcpu, UINT64 val);
adsharma@4993 101 extern void vtm_init(VCPU *vcpu);
adsharma@4993 102 extern uint64_t vtm_get_itc(VCPU *vcpu);
adsharma@4993 103 extern void vtm_set_itc(VCPU *vcpu, uint64_t new_itc);
adsharma@4993 104 extern void vtm_set_itv(VCPU *vcpu);
adsharma@4993 105 extern void vtm_interruption_update(VCPU *vcpu, vtime_t* vtm);
adsharma@4993 106 extern void vtm_domain_out(VCPU *vcpu);
adsharma@4993 107 extern void vtm_domain_in(VCPU *vcpu);
fred@5950 108 #ifdef V_IOSAPIC_READY
fred@5950 109 extern void vlapic_update_ext_irq(VCPU *vcpu);
fred@5950 110 extern void vlapic_update_shared_info(VCPU *vcpu);
fred@5950 111 #endif
adsharma@4993 112 extern void vlsapic_reset(VCPU *vcpu);
adsharma@4993 113 extern int vmx_check_pending_irq(VCPU *vcpu);
adsharma@4993 114 extern void guest_write_eoi(VCPU *vcpu);
adsharma@4993 115 extern uint64_t guest_read_vivr(VCPU *vcpu);
adsharma@4993 116 extern void vmx_inject_vhpi(VCPU *vcpu, u8 vec);
adsharma@4993 117 extern void vmx_vcpu_pend_interrupt(VCPU *vcpu, UINT64 vector);
adsharma@4993 118 extern struct virutal_platform_def *vmx_vcpu_get_plat(VCPU *vcpu);
djm@5797 119 extern void memread_p(VCPU *vcpu, u64 *src, u64 *dest, size_t s);
djm@5797 120 extern void memread_v(VCPU *vcpu, thash_data_t *vtlb, u64 *src, u64 *dest, size_t s);
djm@5797 121 extern void memwrite_v(VCPU *vcpu, thash_data_t *vtlb, u64 *src, u64 *dest, size_t s);
djm@5797 122 extern void memwrite_p(VCPU *vcpu, u64 *src, u64 *dest, size_t s);
adsharma@4993 123
adsharma@4993 124
adsharma@4993 125 /**************************************************************************
adsharma@4993 126 VCPU control register access routines
adsharma@4993 127 **************************************************************************/
adsharma@4993 128
adsharma@4993 129 static inline
adsharma@4993 130 IA64FAULT vmx_vcpu_get_dcr(VCPU *vcpu, UINT64 *pval)
adsharma@4993 131 {
adsharma@4993 132 *pval = VPD_CR(vcpu,dcr);
adsharma@4993 133 return (IA64_NO_FAULT);
adsharma@4993 134 }
adsharma@4993 135
adsharma@4993 136 static inline
adsharma@4993 137 IA64FAULT vmx_vcpu_get_itm(VCPU *vcpu, UINT64 *pval)
adsharma@4993 138 {
adsharma@4993 139 *pval = VPD_CR(vcpu,itm);
adsharma@4993 140 return (IA64_NO_FAULT);
adsharma@4993 141 }
adsharma@4993 142
adsharma@4993 143 static inline
adsharma@4993 144 IA64FAULT vmx_vcpu_get_iva(VCPU *vcpu, UINT64 *pval)
adsharma@4993 145 {
adsharma@4993 146 *pval = VPD_CR(vcpu,iva);
adsharma@4993 147 return (IA64_NO_FAULT);
adsharma@4993 148 }
adsharma@4993 149 static inline
adsharma@4993 150 IA64FAULT vmx_vcpu_get_pta(VCPU *vcpu, UINT64 *pval)
adsharma@4993 151 {
adsharma@4993 152 *pval = VPD_CR(vcpu,pta);
adsharma@4993 153 return (IA64_NO_FAULT);
adsharma@4993 154 }
adsharma@4993 155 static inline
adsharma@4993 156 IA64FAULT vmx_vcpu_get_ipsr(VCPU *vcpu, UINT64 *pval)
adsharma@4993 157 {
adsharma@4993 158 *pval = VPD_CR(vcpu,ipsr);
adsharma@4993 159 return (IA64_NO_FAULT);
adsharma@4993 160 }
adsharma@4993 161
adsharma@4993 162 static inline
adsharma@4993 163 IA64FAULT vmx_vcpu_get_isr(VCPU *vcpu, UINT64 *pval)
adsharma@4993 164 {
adsharma@4993 165 *pval = VPD_CR(vcpu,isr);
adsharma@4993 166 return (IA64_NO_FAULT);
adsharma@4993 167 }
adsharma@4993 168 static inline
adsharma@4993 169 IA64FAULT vmx_vcpu_get_iip(VCPU *vcpu, UINT64 *pval)
adsharma@4993 170 {
adsharma@4993 171 *pval = VPD_CR(vcpu,iip);
adsharma@4993 172 return (IA64_NO_FAULT);
adsharma@4993 173 }
adsharma@4993 174 static inline
adsharma@4993 175 IA64FAULT vmx_vcpu_get_ifa(VCPU *vcpu, UINT64 *pval)
adsharma@4993 176 {
adsharma@4993 177 *pval = VPD_CR(vcpu,ifa);
adsharma@4993 178 return (IA64_NO_FAULT);
adsharma@4993 179 }
adsharma@4993 180
adsharma@4993 181 static inline
adsharma@4993 182 IA64FAULT vmx_vcpu_get_itir(VCPU *vcpu, UINT64 *pval)
adsharma@4993 183 {
adsharma@4993 184 *pval = VPD_CR(vcpu,itir);
adsharma@4993 185 return (IA64_NO_FAULT);
adsharma@4993 186 }
adsharma@4993 187 static inline
adsharma@4993 188 IA64FAULT vmx_vcpu_get_iipa(VCPU *vcpu, UINT64 *pval)
adsharma@4993 189 {
adsharma@4993 190 *pval = VPD_CR(vcpu,iipa);
adsharma@4993 191 return (IA64_NO_FAULT);
adsharma@4993 192 }
adsharma@4993 193 static inline
adsharma@4993 194 IA64FAULT vmx_vcpu_get_ifs(VCPU *vcpu, UINT64 *pval)
adsharma@4993 195 {
adsharma@4993 196 *pval = VPD_CR(vcpu,ifs);
adsharma@4993 197 return (IA64_NO_FAULT);
adsharma@4993 198 }
adsharma@4993 199 static inline
adsharma@4993 200 IA64FAULT vmx_vcpu_get_iim(VCPU *vcpu, UINT64 *pval)
adsharma@4993 201 {
adsharma@4993 202 *pval = VPD_CR(vcpu,iim);
adsharma@4993 203 return (IA64_NO_FAULT);
adsharma@4993 204 }
adsharma@4993 205 static inline
adsharma@4993 206 IA64FAULT vmx_vcpu_get_iha(VCPU *vcpu, UINT64 *pval)
adsharma@4993 207 {
adsharma@4993 208 *pval = VPD_CR(vcpu,iha);
adsharma@4993 209 return (IA64_NO_FAULT);
adsharma@4993 210 }
adsharma@4993 211 static inline
adsharma@4993 212 IA64FAULT vmx_vcpu_get_lid(VCPU *vcpu, UINT64 *pval)
adsharma@4993 213 {
adsharma@4993 214 *pval = VPD_CR(vcpu,lid);
adsharma@4993 215 return (IA64_NO_FAULT);
adsharma@4993 216 }
adsharma@4993 217 static inline
adsharma@4993 218 IA64FAULT vmx_vcpu_get_ivr(VCPU *vcpu, UINT64 *pval)
adsharma@4993 219 {
adsharma@4993 220 *pval = guest_read_vivr(vcpu);
adsharma@4993 221 return (IA64_NO_FAULT);
adsharma@4993 222 }
adsharma@4993 223 static inline
adsharma@4993 224 IA64FAULT vmx_vcpu_get_tpr(VCPU *vcpu, UINT64 *pval)
adsharma@4993 225 {
adsharma@4993 226 *pval = VPD_CR(vcpu,tpr);
adsharma@4993 227 return (IA64_NO_FAULT);
adsharma@4993 228 }
adsharma@4993 229 static inline
adsharma@4993 230 IA64FAULT vmx_vcpu_get_eoi(VCPU *vcpu, UINT64 *pval)
adsharma@4993 231 {
adsharma@4993 232 *pval = 0L; // reads of eoi always return 0
adsharma@4993 233 return (IA64_NO_FAULT);
adsharma@4993 234 }
adsharma@4993 235 static inline
adsharma@4993 236 IA64FAULT vmx_vcpu_get_irr0(VCPU *vcpu, UINT64 *pval)
adsharma@4993 237 {
adsharma@4993 238 *pval = VPD_CR(vcpu,irr[0]);
adsharma@4993 239 return (IA64_NO_FAULT);
adsharma@4993 240 }
adsharma@4993 241 static inline
adsharma@4993 242 IA64FAULT vmx_vcpu_get_irr1(VCPU *vcpu, UINT64 *pval)
adsharma@4993 243 {
adsharma@4993 244 *pval = VPD_CR(vcpu,irr[1]);
adsharma@4993 245 return (IA64_NO_FAULT);
adsharma@4993 246 }
adsharma@4993 247 static inline
adsharma@4993 248 IA64FAULT vmx_vcpu_get_irr2(VCPU *vcpu, UINT64 *pval)
adsharma@4993 249 {
adsharma@4993 250 *pval = VPD_CR(vcpu,irr[2]);
adsharma@4993 251 return (IA64_NO_FAULT);
adsharma@4993 252 }
adsharma@4993 253 static inline
adsharma@4993 254 IA64FAULT vmx_vcpu_get_irr3(VCPU *vcpu, UINT64 *pval)
adsharma@4993 255 {
adsharma@4993 256 *pval = VPD_CR(vcpu,irr[3]);
adsharma@4993 257 return (IA64_NO_FAULT);
adsharma@4993 258 }
adsharma@4993 259 static inline
adsharma@4993 260 IA64FAULT vmx_vcpu_get_itv(VCPU *vcpu, UINT64 *pval)
adsharma@4993 261 {
adsharma@4993 262 *pval = VPD_CR(vcpu,itv);
adsharma@4993 263 return (IA64_NO_FAULT);
adsharma@4993 264 }
adsharma@4993 265 static inline
adsharma@4993 266 IA64FAULT vmx_vcpu_get_pmv(VCPU *vcpu, UINT64 *pval)
adsharma@4993 267 {
adsharma@4993 268 *pval = VPD_CR(vcpu,pmv);
adsharma@4993 269 return (IA64_NO_FAULT);
adsharma@4993 270 }
adsharma@4993 271 static inline
adsharma@4993 272 IA64FAULT vmx_vcpu_get_cmcv(VCPU *vcpu, UINT64 *pval)
adsharma@4993 273 {
adsharma@4993 274 *pval = VPD_CR(vcpu,cmcv);
adsharma@4993 275 return (IA64_NO_FAULT);
adsharma@4993 276 }
adsharma@4993 277 static inline
adsharma@4993 278 IA64FAULT vmx_vcpu_get_lrr0(VCPU *vcpu, UINT64 *pval)
adsharma@4993 279 {
adsharma@4993 280 *pval = VPD_CR(vcpu,lrr0);
adsharma@4993 281 return (IA64_NO_FAULT);
adsharma@4993 282 }
adsharma@4993 283 static inline
adsharma@4993 284 IA64FAULT vmx_vcpu_get_lrr1(VCPU *vcpu, UINT64 *pval)
adsharma@4993 285 {
adsharma@4993 285 *pval = VPD_CR(vcpu,lrr1);
adsharma@4993 286 return (IA64_NO_FAULT);
adsharma@4993 287 }
adsharma@4993 288 static inline
adsharma@4993 289 IA64FAULT
adsharma@4993 290 vmx_vcpu_set_dcr(VCPU *vcpu, u64 val)
adsharma@4993 291 {
adsharma@4993 292 u64 mdcr, mask;
adsharma@4993 293 VPD_CR(vcpu,dcr)=val;
adsharma@4993 294 /* All vDCR bits propagate to mDCR, except for the be and pp bits */
adsharma@4993 295 mdcr = ia64_get_dcr();
adsharma@4993 296 mask = IA64_DCR_BE | IA64_DCR_PP;
adsharma@4993 297 mdcr = ( mdcr & mask ) | ( val & (~mask) );
adsharma@4993 298 ia64_set_dcr( mdcr);
adsharma@4993 299
adsharma@4993 300 return IA64_NO_FAULT;
adsharma@4993 301 }
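/*
 * Note on vmx_vcpu_set_dcr() above (illustrative sketch): with
 * mask = IA64_DCR_BE | IA64_DCR_PP, the merge
 *
 *     mdcr = (mdcr & mask) | (val & ~mask);
 *
 * keeps the host's current be and pp settings in the machine DCR while
 * adopting every other bit of the guest-written value; so a guest setting,
 * say, dcr.lc takes effect on the machine DCR, but a guest toggling dcr.pp
 * does not disturb the host's performance-monitor control.
 */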
adsharma@4993 302
adsharma@4993 303 static inline
adsharma@4993 304 IA64FAULT
adsharma@4993 305 vmx_vcpu_set_itm(VCPU *vcpu, u64 val)
adsharma@4993 306 {
adsharma@4993 307 vtime_t *vtm;
adsharma@4993 308
adsharma@4993 309 vtm=&(vcpu->arch.arch_vmx.vtm);
adsharma@4993 310 VPD_CR(vcpu,itm)=val;
adsharma@4993 311 vtm_interruption_update(vcpu, vtm);
adsharma@4993 312 return IA64_NO_FAULT;
adsharma@4993 313 }
adsharma@4993 314 static inline
adsharma@4993 315 IA64FAULT
adsharma@4993 316 vmx_vcpu_set_iva(VCPU *vcpu, u64 val)
adsharma@4993 317 {
adsharma@4993 318 VPD_CR(vcpu,iva)=val;
adsharma@4993 319 return IA64_NO_FAULT;
adsharma@4993 320 }
adsharma@4993 321
adsharma@4993 322 static inline
adsharma@4993 323 IA64FAULT
adsharma@4993 324 vmx_vcpu_set_pta(VCPU *vcpu, u64 val)
adsharma@4993 325 {
adsharma@4993 326 VPD_CR(vcpu,pta)=val;
adsharma@4993 327 return IA64_NO_FAULT;
adsharma@4993 328 }
adsharma@4993 329
adsharma@4993 330 static inline
adsharma@4993 331 IA64FAULT
adsharma@4993 332 vmx_vcpu_set_ipsr(VCPU *vcpu, u64 val)
adsharma@4993 333 {
adsharma@4993 334 VPD_CR(vcpu,ipsr)=val;
adsharma@4993 335 return IA64_NO_FAULT;
adsharma@4993 336 }
adsharma@4993 337
adsharma@4993 338 static inline
adsharma@4993 339 IA64FAULT
adsharma@4993 340 vmx_vcpu_set_isr(VCPU *vcpu, u64 val)
adsharma@4993 341 {
adsharma@4993 342 VPD_CR(vcpu,isr)=val;
adsharma@4993 343 return IA64_NO_FAULT;
adsharma@4993 344 }
adsharma@4993 345
adsharma@4993 346 static inline
adsharma@4993 347 IA64FAULT
adsharma@4993 348 vmx_vcpu_set_iip(VCPU *vcpu, u64 val)
adsharma@4993 349 {
adsharma@4993 350 VPD_CR(vcpu,iip)=val;
adsharma@4993 351 return IA64_NO_FAULT;
adsharma@4993 352 }
adsharma@4993 353
adsharma@4993 354 static inline
adsharma@4993 355 IA64FAULT
adsharma@4993 356 vmx_vcpu_set_ifa(VCPU *vcpu, u64 val)
adsharma@4993 357 {
adsharma@4993 358 VPD_CR(vcpu,ifa)=val;
adsharma@4993 359 return IA64_NO_FAULT;
adsharma@4993 360 }
adsharma@4993 361
adsharma@4993 362 static inline
adsharma@4993 363 IA64FAULT
adsharma@4993 364 vmx_vcpu_set_itir(VCPU *vcpu, u64 val)
adsharma@4993 365 {
adsharma@4993 366 VPD_CR(vcpu,itir)=val;
adsharma@4993 367 return IA64_NO_FAULT;
adsharma@4993 368 }
adsharma@4993 369
adsharma@4993 370 static inline
adsharma@4993 371 IA64FAULT
adsharma@4993 372 vmx_vcpu_set_iipa(VCPU *vcpu, u64 val)
adsharma@4993 373 {
adsharma@4993 374 VPD_CR(vcpu,iipa)=val;
adsharma@4993 375 return IA64_NO_FAULT;
adsharma@4993 376 }
adsharma@4993 377
adsharma@4993 378 static inline
adsharma@4993 379 IA64FAULT
adsharma@4993 380 vmx_vcpu_set_ifs(VCPU *vcpu, u64 val)
adsharma@4993 381 {
adsharma@4993 382 VPD_CR(vcpu,ifs)=val;
adsharma@4993 383 return IA64_NO_FAULT;
adsharma@4993 384 }
adsharma@4993 385 static inline
adsharma@4993 386 IA64FAULT
adsharma@4993 387 vmx_vcpu_set_iim(VCPU *vcpu, u64 val)
adsharma@4993 388 {
adsharma@4993 389 VPD_CR(vcpu,iim)=val;
adsharma@4993 390 return IA64_NO_FAULT;
adsharma@4993 391 }
adsharma@4993 392
adsharma@4993 393 static inline
adsharma@4993 394 IA64FAULT
adsharma@4993 395 vmx_vcpu_set_iha(VCPU *vcpu, u64 val)
adsharma@4993 396 {
adsharma@4993 397 VPD_CR(vcpu,iha)=val;
adsharma@4993 398 return IA64_NO_FAULT;
adsharma@4993 399 }
adsharma@4993 400
adsharma@4993 401 static inline
adsharma@4993 402 IA64FAULT
adsharma@4993 403 vmx_vcpu_set_lid(VCPU *vcpu, u64 val)
adsharma@4993 404 {
adsharma@4993 405 VPD_CR(vcpu,lid)=val;
fred@5950 406 #ifdef V_IOSAPIC_READY
fred@5950 407 vlapic_update_shared_info(vcpu);
fred@5950 408 #endif
adsharma@4993 409 return IA64_NO_FAULT;
adsharma@4993 410 }
djm@5797 411 extern IA64FAULT vmx_vcpu_set_tpr(VCPU *vcpu, u64 val);
djm@5797 412
adsharma@4993 413 static inline
adsharma@4993 414 IA64FAULT
adsharma@4993 415 vmx_vcpu_set_eoi(VCPU *vcpu, u64 val)
adsharma@4993 416 {
adsharma@4993 417 guest_write_eoi(vcpu);
adsharma@4993 418 return IA64_NO_FAULT;
adsharma@4993 419 }
adsharma@4993 420
adsharma@4993 421 static inline
adsharma@4993 422 IA64FAULT
adsharma@4993 423 vmx_vcpu_set_itv(VCPU *vcpu, u64 val)
adsharma@4993 424 {
adsharma@4993 425
adsharma@4993 426 VPD_CR(vcpu,itv)=val;
adsharma@4993 427 vtm_set_itv(vcpu);
adsharma@4993 428 return IA64_NO_FAULT;
adsharma@4993 429 }
adsharma@4993 430 static inline
adsharma@4993 431 IA64FAULT
adsharma@4993 432 vmx_vcpu_set_pmv(VCPU *vcpu, u64 val)
adsharma@4993 433 {
adsharma@4993 434 VPD_CR(vcpu,pmv)=val;
adsharma@4993 435 return IA64_NO_FAULT;
adsharma@4993 436 }
adsharma@4993 437 static inline
adsharma@4993 438 IA64FAULT
adsharma@4993 439 vmx_vcpu_set_cmcv(VCPU *vcpu, u64 val)
adsharma@4993 440 {
adsharma@4993 441 VPD_CR(vcpu,cmcv)=val;
adsharma@4993 442 return IA64_NO_FAULT;
adsharma@4993 443 }
adsharma@4993 444 static inline
adsharma@4993 445 IA64FAULT
adsharma@4993 446 vmx_vcpu_set_lrr0(VCPU *vcpu, u64 val)
adsharma@4993 447 {
adsharma@4993 448 VPD_CR(vcpu,lrr0)=val;
adsharma@4993 449 return IA64_NO_FAULT;
adsharma@4993 450 }
adsharma@4993 451 static inline
adsharma@4993 452 IA64FAULT
adsharma@4993 453 vmx_vcpu_set_lrr1(VCPU *vcpu, u64 val)
adsharma@4993 454 {
adsharma@4993 455 VPD_CR(vcpu,lrr1)=val;
adsharma@4993 456 return IA64_NO_FAULT;
adsharma@4993 457 }
adsharma@4993 458
adsharma@4993 459
adsharma@4993 460
adsharma@4993 461
adsharma@4993 462 /**************************************************************************
adsharma@4993 463 VCPU privileged application register access routines
adsharma@4993 464 **************************************************************************/
adsharma@4993 465 static inline
adsharma@4993 466 IA64FAULT vmx_vcpu_set_itc(VCPU *vcpu, UINT64 val)
adsharma@4993 467 {
adsharma@4993 468 vtm_set_itc(vcpu, val);
adsharma@4993 469 return IA64_NO_FAULT;
adsharma@4993 470 }
adsharma@4993 471 static inline
adsharma@4993 472 IA64FAULT vmx_vcpu_get_itc(VCPU *vcpu,UINT64 *val)
adsharma@4993 473 {
adsharma@4993 474 *val = vtm_get_itc(vcpu);
adsharma@4993 475 return IA64_NO_FAULT;
adsharma@4993 476 }
adsharma@4993 477 static inline
adsharma@4993 478 IA64FAULT vmx_vcpu_get_rr(VCPU *vcpu, UINT64 reg, UINT64 *pval)
adsharma@4993 479 {
adsharma@4993 480 *pval = VMX(vcpu,vrr[reg>>61]);
adsharma@4993 481 return (IA64_NO_FAULT);
adsharma@4993 482 }
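/*
 * Illustrative note: the index reg>>61 above is simply the virtual region
 * number of the address passed in (cf. VRN_SHIFT), so for instance a
 * virtual address of 0xe000000000000000 selects VMX(vcpu, vrr[VRN7]).
 */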
adsharma@4993 483 /**************************************************************************
adsharma@4993 484 VCPU debug breakpoint register access routines
adsharma@4993 485 **************************************************************************/
adsharma@4993 486
adsharma@4993 487 static inline
adsharma@4993 488 IA64FAULT vmx_vcpu_get_cpuid(VCPU *vcpu, UINT64 reg, UINT64 *pval)
adsharma@4993 489 {
adsharma@4993 490 // TODO: unimplemented CPUID registers should return a reserved register fault
adsharma@4993 491 // TODO: Should reflect Logical CPU state, not just physical
adsharma@4993 492 if(reg > 4){
adsharma@4993 493 panic("there are only five cpuid registers");
adsharma@4993 494 }
adsharma@4993 495 *pval=VMX_VPD(vcpu,vcpuid[reg]);
adsharma@4993 496 return (IA64_NO_FAULT);
adsharma@4993 497 }
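/*
 * Illustrative note: only CPUID registers 0-4 are modelled here; on IA-64,
 * cpuid[0] and cpuid[1] normally hold the vendor string and cpuid[3] the
 * version information, and the guest reads all of them through this
 * accessor from the per-vcpu vcpuid[] array rather than from the physical
 * CPU.
 */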
adsharma@4993 498
adsharma@4993 499
adsharma@4993 500 static inline
adsharma@4993 501 IA64FAULT vmx_vcpu_set_dbr(VCPU *vcpu, UINT64 reg, UINT64 val)
adsharma@4993 502 {
adsharma@4993 503 // TODO: unimplemented DBRs return a reserved register fault
adsharma@4993 504 // TODO: Should set Logical CPU state, not just physical
adsharma@4993 505 ia64_set_dbr(reg,val);
adsharma@4993 506 return (IA64_NO_FAULT);
adsharma@4993 507 }
adsharma@4993 508 static inline
adsharma@4993 509 IA64FAULT vmx_vcpu_set_ibr(VCPU *vcpu, UINT64 reg, UINT64 val)
adsharma@4993 510 {
adsharma@4993 511 // TODO: unimplemented IBRs return a reserved register fault
adsharma@4993 512 // TODO: Should set Logical CPU state, not just physical
adsharma@4993 513 ia64_set_ibr(reg,val);
adsharma@4993 514 return (IA64_NO_FAULT);
adsharma@4993 515 }
adsharma@4993 516 static inline
adsharma@4993 517 IA64FAULT vmx_vcpu_get_dbr(VCPU *vcpu, UINT64 reg, UINT64 *pval)
adsharma@4993 518 {
adsharma@4993 519 // TODO: unimplemented DBRs return a reserved register fault
adsharma@4993 520 UINT64 val = ia64_get_dbr(reg);
adsharma@4993 521 *pval = val;
adsharma@4993 522 return (IA64_NO_FAULT);
adsharma@4993 523 }
adsharma@4993 524 static inline
adsharma@4993 525 IA64FAULT vmx_vcpu_get_ibr(VCPU *vcpu, UINT64 reg, UINT64 *pval)
adsharma@4993 526 {
adsharma@4993 527 // TODO: unimplemented IBRs return a reserved register fault
adsharma@4993 528 UINT64 val = ia64_get_ibr(reg);
adsharma@4993 529 *pval = val;
adsharma@4993 530 return (IA64_NO_FAULT);
adsharma@4993 531 }
adsharma@4993 532
adsharma@4993 533 /**************************************************************************
adsharma@4993 534 VCPU performance monitor register access routines
adsharma@4993 535 **************************************************************************/
adsharma@4993 536 static inline
adsharma@4993 537 IA64FAULT vmx_vcpu_set_pmc(VCPU *vcpu, UINT64 reg, UINT64 val)
adsharma@4993 538 {
adsharma@4993 539 // TODO: Should set Logical CPU state, not just physical
adsharma@4993 540 // NOTE: Writes to unimplemented PMC registers are discarded
adsharma@4993 541 ia64_set_pmc(reg,val);
adsharma@4993 542 return (IA64_NO_FAULT);
adsharma@4993 543 }
adsharma@4993 544 static inline
adsharma@4993 545 IA64FAULT vmx_vcpu_set_pmd(VCPU *vcpu, UINT64 reg, UINT64 val)
adsharma@4993 546 {
adsharma@4993 547 // TODO: Should set Logical CPU state, not just physical
adsharma@4993 548 // NOTE: Writes to unimplemented PMD registers are discarded
adsharma@4993 549 ia64_set_pmd(reg,val);
adsharma@4993 550 return (IA64_NO_FAULT);
adsharma@4993 551 }
adsharma@4993 552 static inline
adsharma@4993 553 IA64FAULT vmx_vcpu_get_pmc(VCPU *vcpu, UINT64 reg, UINT64 *pval)
adsharma@4993 554 {
adsharma@4993 555 // NOTE: Reads from unimplemented PMC registers return zero
adsharma@4993 556 UINT64 val = (UINT64)ia64_get_pmc(reg);
adsharma@4993 557 *pval = val;
adsharma@4993 558 return (IA64_NO_FAULT);
adsharma@4993 559 }
adsharma@4993 560 static inline
adsharma@4993 561 IA64FAULT vmx_vcpu_get_pmd(VCPU *vcpu, UINT64 reg, UINT64 *pval)
adsharma@4993 562 {
adsharma@4993 563 // NOTE: Reads from unimplemented PMD registers return zero
adsharma@4993 564 UINT64 val = (UINT64)ia64_get_pmd(reg);
adsharma@4993 565 *pval = val;
adsharma@4993 566 return (IA64_NO_FAULT);
adsharma@4993 567 }
adsharma@4993 568
adsharma@4993 569 /**************************************************************************
adsharma@4993 570 VCPU banked general register access routines
adsharma@4993 571 **************************************************************************/
adsharma@4993 572 static inline
adsharma@4993 573 IA64FAULT vmx_vcpu_bsw0(VCPU *vcpu)
adsharma@4993 574 {
adsharma@4993 575
adsharma@4993 576 VMX_VPD(vcpu,vpsr) &= ~IA64_PSR_BN;
adsharma@4993 577 return (IA64_NO_FAULT);
adsharma@4993 578 }
adsharma@4993 579 static inline
adsharma@4993 580 IA64FAULT vmx_vcpu_bsw1(VCPU *vcpu)
adsharma@4993 581 {
adsharma@4993 582
adsharma@4993 583 VMX_VPD(vcpu,vpsr) |= IA64_PSR_BN;
adsharma@4993 584 return (IA64_NO_FAULT);
adsharma@4993 585 }
adsharma@4993 586
adsharma@4993 587 #define redistribute_rid(rid) (((rid) & ~0xffff) | (((rid) << 8) & 0xff00) | (((rid) >> 8) & 0xff))
adsharma@4993 588 static inline unsigned long
adsharma@4993 589 vmx_vrrtomrr(VCPU *vcpu,unsigned long val)
adsharma@4993 590 {
adsharma@4993 591 ia64_rr rr;
adsharma@4993 592 u64 rid;
adsharma@4993 593 rr.rrval=val;
adsharma@5086 594 rid=(((u64)vcpu->domain->domain_id)<<DOMAIN_RID_SHIFT) + rr.rid;
adsharma@4993 595 rr.rid = redistribute_rid(rid);
adsharma@4993 596 rr.ve=1;
adsharma@4993 597 return rr.rrval;
adsharma@4993 598 }
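/*
 * Worked example (illustrative): redistribute_rid() swaps the two low
 * bytes of the rid and leaves the upper bits alone, e.g.
 *
 *     redistribute_rid(0x00012345) == 0x00014523
 *
 * vmx_vrrtomrr() therefore builds the machine rid by folding the domain
 * id in above DOMAIN_RID_SHIFT, swapping the low bytes (presumably to
 * spread consecutive guest rids across the machine rid space), and
 * forcing rr.ve so the VHPT walker stays enabled for the mapped region.
 */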
adsharma@4993 599 #endif