static int vlapic_find_highest_vector(const void *bitmap)
{
const uint32_t *word = bitmap;
- unsigned int word_offset = X86_NR_VECTORS / 32;
+ unsigned int word_offset = X86_IDT_VECTORS / 32;
/* Work backwards through the bitmap (first 32-bit word in every four). */
while ( (word_offset != 0) && (word[(--word_offset)*4] == 0) )
REG(LVT0) | REG(LVT1) | REG(LVTERR) | REG(TMICT) |
REG(TMCCT) | REG(TDCR) |
#undef REG
-#define REGBLOCK(x) (((1UL << (X86_NR_VECTORS / 32)) - 1) << (APIC_ ## x >> 4))
+#define REGBLOCK(x) (((1UL << (X86_IDT_VECTORS / 32)) - 1) << (APIC_ ## x >> 4))
REGBLOCK(ISR) | REGBLOCK(TMR) | REGBLOCK(IRR)
#undef REGBLOCK
};
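
For context on the two vlapic hunks above: each 32-bit APIC register occupies a 16-byte slot, so the 256-bit IRR/ISR/TMR bitmaps are eight words at a stride of four uint32_t. That is where the word[(--word_offset)*4] indexing, the X86_IDT_VECTORS / 32 word count, and REGBLOCK()'s 8-bit-wide mask (one readable bit per 16-byte register) all come from. A minimal standalone sketch under those assumptions; find_highest_vector and IDT_VECTORS are illustrative names, not the Xen symbols:

#include <stdint.h>
#include <stdio.h>

#define IDT_VECTORS 256                      /* mirrors X86_IDT_VECTORS */

/* regs points at 8 32-bit words spaced one APIC register (16 bytes) apart. */
static int find_highest_vector(const uint32_t *regs)
{
    unsigned int word = IDT_VECTORS / 32;    /* 8 words to scan, backwards */

    while ( word != 0 && regs[(--word) * 4] == 0 )
        continue;

    if ( regs[word * 4] == 0 )
        return -1;                           /* nothing pending */

    /* 31 - clz == index of the highest set bit in this word. */
    return word * 32 + (31 - __builtin_clz(regs[word * 4]));
}

int main(void)
{
    uint32_t irr[(IDT_VECTORS / 32) * 4] = { 0 };   /* 16-byte register slots */

    irr[2 * 4] = 1u << 5;                    /* mark vector 0x45 pending */
    printf("highest pending vector: %#x\n", find_highest_vector(irr));
    return 0;
}
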
{
word = (const void *)&vlapic->regs->data[APIC_IRR];
printk(XENLOG_ERR "vIRR:");
- for ( i = X86_NR_VECTORS / 32; i-- ; )
+ for ( i = X86_IDT_VECTORS / 32; i-- ; )
printk(" %08x", word[i*4]);
printk("\n");
}
{
word = (const void *)&pi_desc->pir;
printk(XENLOG_ERR " PIR:");
- for ( i = X86_NR_VECTORS / 32; i-- ; )
+ for ( i = X86_IDT_VECTORS / 32; i-- ; )
printk(" %08x", word[i]);
printk("\n");
}
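
The two dump loops above use different strides on purpose: the vIRR is read out of APIC register space (one 32-bit word per 16-byte register, hence word[i*4]), while the PIR in the posted-interrupt descriptor is a packed 256-bit array (word[i]). A small standalone sketch of the packed case, with illustrative names:

#include <stdint.h>
#include <stdio.h>

#define IDT_VECTORS 256

/* Dump a packed 256-bit pending-interrupt bitmap, highest word first. */
static void dump_pir(const uint32_t pir[IDT_VECTORS / 32])
{
    printf(" PIR:");
    for ( unsigned int i = IDT_VECTORS / 32; i-- ; )
        printf(" %08x", pir[i]);
    printf("\n");
}

int main(void)
{
    uint32_t pir[IDT_VECTORS / 32] = { [1] = 0x80 };    /* vector 0x27 set */

    dump_pir(pir);
    return 0;
}
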
unsigned int i;
/* EOI-exit bitmap */
- bitmap_zero(v->arch.hvm.vmx.eoi_exit_bitmap, X86_NR_VECTORS);
+ bitmap_zero(v->arch.hvm.vmx.eoi_exit_bitmap, X86_IDT_VECTORS);
for ( i = 0; i < ARRAY_SIZE(v->arch.hvm.vmx.eoi_exit_bitmap); ++i )
__vmwrite(EOI_EXIT_BITMAP(i), 0);
* is acceptable because the subsequent interrupts will set up the eoi
* bitmap correctly.
*/
- for ( i = 0x10; i < X86_NR_VECTORS; ++i )
+ for ( i = 0x10; i < X86_IDT_VECTORS; ++i )
if ( vlapic_test_vector(i, &vlapic->regs->data[APIC_IRR]) ||
vlapic_test_vector(i, &vlapic->regs->data[APIC_ISR]) )
set_bit(i, v->arch.hvm.vmx.eoi_exit_bitmap);
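
On the EOI-exit hunk: vectors 0x00-0x0f are reserved and never delivered through the local APIC, so the rescan starts at 0x10 and re-sets an EOI-exit bit for every vector still pending or in service, keeping EOIs for those vectors trapping even though the bitmap was just cleared. A standalone sketch of that rebuild over packed bitmaps (the real vlapic_test_vector() walks APIC register space instead); the helper names are illustrative:

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#define IDT_VECTORS   256
#define BITS_PER_LONG (8 * sizeof(unsigned long))

static bool test_vec(const uint32_t *bm, unsigned int vec)
{
    return bm[vec / 32] & (1u << (vec % 32));
}

static void set_vec(unsigned long *bm, unsigned int vec)
{
    bm[vec / BITS_PER_LONG] |= 1UL << (vec % BITS_PER_LONG);
}

/* Rebuild the EOI-exit bitmap from what is pending (IRR) or in service (ISR). */
static void rebuild_eoi_exit(unsigned long *eoi_exit,
                             const uint32_t *irr, const uint32_t *isr)
{
    memset(eoi_exit, 0, IDT_VECTORS / 8);

    for ( unsigned int vec = 0x10; vec < IDT_VECTORS; ++vec )
        if ( test_vec(irr, vec) || test_vec(isr, vec) )
            set_vec(eoi_exit, vec);
}

int main(void)
{
    uint32_t irr[IDT_VECTORS / 32] = { [2] = 1u << 5 };  /* vector 0x45 */
    uint32_t isr[IDT_VECTORS / 32] = { 0 };
    unsigned long eoi_exit[IDT_VECTORS / BITS_PER_LONG];

    rebuild_eoi_exit(eoi_exit, irr, isr);
    /* Returns 0 (success) when the bit for vector 0x45 was set. */
    return !(eoi_exit[0x45 / BITS_PER_LONG] & (1UL << (0x45 % BITS_PER_LONG)));
}
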
{
struct vlapic *vlapic = vcpu_vlapic(v);
unsigned int group, i;
- DECLARE_BITMAP(pending_intr, X86_NR_VECTORS);
+ DECLARE_BITMAP(pending_intr, X86_IDT_VECTORS);
if ( !pi_test_and_clear_on(&v->arch.hvm.vmx.pi_desc) )
return;
for ( group = 0; group < ARRAY_SIZE(pending_intr); group++ )
pending_intr[group] = pi_get_pir(&v->arch.hvm.vmx.pi_desc, group);
- bitmap_for_each ( i, pending_intr, X86_NR_VECTORS )
+ bitmap_for_each ( i, pending_intr, X86_IDT_VECTORS )
vlapic_set_vector(i, &vlapic->regs->data[APIC_IRR]);
}
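
The PIR-to-IRR sync above snapshots each group of the posted-interrupt request bitmap into a local 256-bit bitmap and then merges every set vector into the vLAPIC IRR. A standalone sketch of the same drain, with __atomic_exchange_n standing in for pi_get_pir()'s atomic read-and-clear and the IRR modelled as a packed bitmap rather than APIC register space; names are illustrative:

#include <stdio.h>

#define IDT_VECTORS   256
#define BITS_PER_LONG (8 * sizeof(unsigned long))
#define PIR_GROUPS    (IDT_VECTORS / BITS_PER_LONG)

struct pir_model {
    unsigned long pir[PIR_GROUPS];          /* bits 0-255, one per vector */
};

static void sync_pir_to_irr(struct pir_model *pd, unsigned long *irr)
{
    for ( unsigned int group = 0; group < PIR_GROUPS; ++group )
    {
        /* Grab and clear this group atomically, like pi_get_pir(). */
        unsigned long pending = __atomic_exchange_n(&pd->pir[group], 0UL,
                                                    __ATOMIC_ACQ_REL);

        while ( pending )
        {
            unsigned int vec = group * BITS_PER_LONG + __builtin_ctzl(pending);

            irr[vec / BITS_PER_LONG] |= 1UL << (vec % BITS_PER_LONG);
            pending &= pending - 1;         /* clear the lowest set bit */
        }
    }
}

int main(void)
{
    unsigned int vec = 0x41;                /* sample pending vector */
    struct pir_model pd = { { 0 } };
    unsigned long irr[PIR_GROUPS] = { 0 };

    pd.pir[vec / BITS_PER_LONG] |= 1UL << (vec % BITS_PER_LONG);
    sync_pir_to_irr(&pd, irr);
    printf("vector %#x pending in IRR: %d\n", vec,
           !!(irr[vec / BITS_PER_LONG] & (1UL << (vec % BITS_PER_LONG))));
    return 0;
}
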
};
struct pi_desc {
- DECLARE_BITMAP(pir, X86_NR_VECTORS);
+ DECLARE_BITMAP(pir, X86_IDT_VECTORS);
union {
struct {
u16 on : 1, /* bit 256 - Outstanding Notification */
unsigned int host_msr_count;
unsigned long eoi_exitmap_changed;
- DECLARE_BITMAP(eoi_exit_bitmap, X86_NR_VECTORS);
+ DECLARE_BITMAP(eoi_exit_bitmap, X86_IDT_VECTORS);
struct pi_desc pi_desc;
unsigned long host_cr0;
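
For the two vmcs.h hunks: the posted-interrupt descriptor is a 64-byte structure whose first 256 bits are the PIR, one bit per IDT vector, with the control bits (ON at bit 256, notification vector and destination, etc.) packed after it, which is why the bitmap is declared with X86_IDT_VECTORS. A rough standalone model of that sizing; the field layout here is simplified and illustrative, not the architectural definition:

#include <assert.h>
#include <stdint.h>

#define X86_IDT_VECTORS 256

struct pi_desc_model {
    uint64_t pir[X86_IDT_VECTORS / 64];  /* bits 0-255: one per vector    */
    uint64_t control;                    /* bit 256 = ON, plus NV/NDST    */
    uint32_t rsvd[6];                    /* pad to the 64-byte descriptor */
};

static_assert(sizeof(struct pi_desc_model) == 64,
              "posted-interrupt descriptor is 64 bytes");
static_assert(X86_IDT_VECTORS / 8 == 32,
              "the PIR alone occupies half of the descriptor");
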
#define LEGACY_VECTOR(irq) ((irq) + FIRST_LEGACY_VECTOR)
typedef struct {
- DECLARE_BITMAP(_bits, X86_NR_VECTORS);
+ DECLARE_BITMAP(_bits, X86_IDT_VECTORS);
} vmask_t;
struct irq_desc;
#define IRQ_VECTOR_UNASSIGNED (-1)
-typedef int vector_irq_t[X86_NR_VECTORS];
+typedef int vector_irq_t[X86_IDT_VECTORS];
DECLARE_PER_CPU(vector_irq_t, vector_irq);
extern bool opt_noirqbalance;
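
vector_irq_t above is the per-CPU table mapping each of the 256 hardware vectors back to a Xen irq number; the irq.c hunks further down initialise every slot to INT_MIN (no irq bound), while IRQ_VECTOR_UNASSIGNED (-1) is the matching sentinel on the irq side. A standalone sketch of that shape, with illustrative names:

#include <limits.h>
#include <stdio.h>

#define IDT_VECTORS 256

typedef int vector_irq_table[IDT_VECTORS];   /* one irq number per vector */

static void clear_vector_irq(vector_irq_table t)
{
    for ( unsigned int vector = 0; vector < IDT_VECTORS; ++vector )
        t[vector] = INT_MIN;                 /* nothing bound to this vector */
}

int main(void)
{
    vector_irq_table cpu0_vector_irq;

    clear_vector_irq(cpu0_vector_irq);
    cpu0_vector_irq[0x30] = 9;               /* e.g. bind vector 0x30 to irq 9 */
    printf("vector 0x30 -> irq %d\n", cpu0_vector_irq[0x30]);
    return 0;
}
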
#define X86_INVPCID_ALL_INCL_GLOBAL 2
#define X86_INVPCID_ALL_NON_GLOBAL 3
-#define X86_NR_VECTORS 256
+#define X86_IDT_VECTORS 256
/* Exception Vectors */
#define X86_EXC_DE 0 /* Divide Error */
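
The renamed constant itself is the architectural size of the IDT, 256 vectors, with the first 32 reserved for exceptions (X86_EXC_DE being vector 0). Every bitmap and table touched by this patch is sized from it, which these standalone compile-time checks (not part of the patch) summarise:

#include <assert.h>
#include <stdint.h>

#define X86_IDT_VECTORS 256

static_assert(X86_IDT_VECTORS / 32 == 8,  "8 32-bit words per vector bitmap");
static_assert(X86_IDT_VECTORS / 64 == 4,  "4 64-bit words per vector bitmap");
static_assert(X86_IDT_VECTORS / 8 == 32,  "32 bytes per vector bitmap");
static_assert((uint8_t)(X86_IDT_VECTORS - 1) == 0xff,
              "vector numbers fit in a byte");
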
return;
bitmap_or(vector_map[src]->_bits, vector_map[src]->_bits,
- vector_map[dst]->_bits, X86_NR_VECTORS);
+ vector_map[dst]->_bits, X86_IDT_VECTORS);
for (pin = 0; pin < nr_ioapic_entries[dst]; ++pin) {
int irq = apic_pin_2_gsi_irq(dst, pin);
struct irq_desc __read_mostly *irq_desc = NULL;
-static DECLARE_BITMAP(used_vectors, X86_NR_VECTORS);
+static DECLARE_BITMAP(used_vectors, X86_IDT_VECTORS);
static DEFINE_SPINLOCK(vector_lock);
cpumask_t online_mask;
int cpu;
- BUG_ON((unsigned)vector >= X86_NR_VECTORS);
+ BUG_ON((unsigned)vector >= X86_IDT_VECTORS);
cpumask_and(&online_mask, cpu_mask, &cpu_online_map);
if (cpumask_empty(&online_mask))
struct irq_desc *desc;
int irq, vector;
- for ( vector = 0; vector < X86_NR_VECTORS; ++vector )
+ for ( vector = 0; vector < X86_IDT_VECTORS; ++vector )
this_cpu(vector_irq)[vector] = INT_MIN;
irq_desc = xzalloc_array(struct irq_desc, nr_irqs);
unsigned int irq, vector;
/* Clear vector_irq */
- for ( vector = 0; vector < X86_NR_VECTORS; ++vector )
+ for ( vector = 0; vector < X86_IDT_VECTORS; ++vector )
per_cpu(vector_irq, cpu)[vector] = INT_MIN;
/* Mark the inuse vectors */
for ( irq = 0; irq < nr_irqs; ++irq )
return next++;
}
-static void (*direct_apic_vector[X86_NR_VECTORS])(void);
+static void (*direct_apic_vector[X86_IDT_VECTORS])(void);
void set_direct_apic_vector(uint8_t vector, void (*handler)(void))
{
BUG_ON(direct_apic_vector[vector] != NULL);
process_pending_softirqs();
printk("Direct vector information:\n");
- for ( i = FIRST_DYNAMIC_VECTOR; i < X86_NR_VECTORS; ++i )
+ for ( i = FIRST_DYNAMIC_VECTOR; i < X86_IDT_VECTORS; ++i )
if ( direct_apic_vector[i] )
printk(" %#02x -> %ps()\n", i, direct_apic_vector[i]);
/* If no table is presented then clear the entire virtual IDT. */
if ( guest_handle_is_null(traps) )
{
- memset(dst, 0, X86_NR_VECTORS * sizeof(*dst));
+ memset(dst, 0, X86_IDT_VECTORS * sizeof(*dst));
return 0;
}
/* If no table is presented then clear the entire virtual IDT. */
if ( guest_handle_is_null(traps) )
{
- memset(dst, 0, X86_NR_VECTORS * sizeof(*dst));
+ memset(dst, 0, X86_IDT_VECTORS * sizeof(*dst));
return 0;
}
if ( rc )
return rc;
- BUILD_BUG_ON(X86_NR_VECTORS * sizeof(*v->arch.pv.trap_ctxt) >
+ BUILD_BUG_ON(X86_IDT_VECTORS * sizeof(*v->arch.pv.trap_ctxt) >
PAGE_SIZE);
- v->arch.pv.trap_ctxt = xzalloc_array(struct trap_info, X86_NR_VECTORS);
+ v->arch.pv.trap_ctxt = xzalloc_array(struct trap_info, X86_IDT_VECTORS);
if ( !v->arch.pv.trap_ctxt )
{
rc = -ENOMEM;
setup_force_cpu_cap(X86_FEATURE_XEN_LBR);
}
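
For the PV hunks: a guest's virtual IDT is an array of X86_IDT_VECTORS trap_info entries allocated per vCPU, and the BUILD_BUG_ON guards the assumption that the whole table fits in the single page backing that allocation. A standalone sketch of the arithmetic; trap_info_model only approximates the public struct trap_info layout, whose real definition lives in the public headers:

#include <assert.h>
#include <stdint.h>

#define X86_IDT_VECTORS 256
#define PAGE_SIZE       4096

struct trap_info_model {
    uint8_t  vector;     /* exception vector                 */
    uint8_t  flags;      /* privilege level and gating flags */
    uint16_t cs;         /* code selector                    */
    uint64_t address;    /* handler entry point              */
};                       /* 16 bytes with natural padding    */

static_assert(X86_IDT_VECTORS * sizeof(struct trap_info_model) <= PAGE_SIZE,
              "the virtual IDT must fit in a single page");
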
-extern void (*const autogen_entrypoints[X86_NR_VECTORS])(void);
+extern void (*const autogen_entrypoints[X86_IDT_VECTORS])(void);
void __init trap_init(void)
{
unsigned int vector;
pv_trap_init();
- for ( vector = 0; vector < X86_NR_VECTORS; ++vector )
+ for ( vector = 0; vector < X86_IDT_VECTORS; ++vector )
{
if ( autogen_entrypoints[vector] )
{
FUNC_LOCAL(autogen_stubs, 0) /* Automatically generated stubs. */
vec = 0
- .rept X86_NR_VECTORS
+ .rept X86_IDT_VECTORS
/* Common interrupts, heading towards do_IRQ(). */
#if defined(CONFIG_PV32)
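
Finally, the assembly hunk stamps out one entry stub per vector with .rept X86_IDT_VECTORS, and trap_init() above walks the resulting autogen_entrypoints[] table to install whichever stubs were generated. A rough standalone sketch of that walk; idt_entry_model and set_gate() are simplified stand-ins, not the real gate encoding:

#include <stdint.h>

#define X86_IDT_VECTORS 256

typedef struct { uint64_t lo, hi; } idt_entry_model;   /* 16-byte gate */

static idt_entry_model idt[X86_IDT_VECTORS];
static void (*entrypoints[X86_IDT_VECTORS])(void);     /* some slots NULL */

static void set_gate(idt_entry_model *gate, void (*addr)(void))
{
    uint64_t a = (uint64_t)(uintptr_t)addr;

    /* Record the handler address; selector/type bits omitted in this model. */
    gate->lo = a & 0xffffffffULL;
    gate->hi = a >> 32;
}

static void install_entrypoints(void)
{
    for ( unsigned int vector = 0; vector < X86_IDT_VECTORS; ++vector )
        if ( entrypoints[vector] )
            set_gate(&idt[vector], entrypoints[vector]);
}

int main(void)
{
    install_entrypoints();   /* with no stubs registered, this is a no-op */
    return 0;
}
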