#if CONFIG_PAGING_LEVELS == 4 /* 64bit PAE, 4 levels */
mov $X86_CR4_PAE, %eax
- mov $l4_identmap, %ebx
+ mov $pae_l4_identmap, %ebx
#elif CONFIG_PAGING_LEVELS == 3 /* 32bit PAE, 3 levels */
mov $X86_CR4_PAE, %eax
- mov $l3_paemap, %ebx
+ mov $pae32_l3_identmap, %ebx
#else
# error Bad paging mode
.data
.p2align PAGE_SHIFT
-/* Mapping of first 2M of memory in 4k pages. Uses 1x 4k page. */
-GLOBAL(l1_identmap)
+/* PAE mappings of first 2M of memory in 4k pages. Uses 1x 4k page. */
+GLOBAL(pae_l1_identmap)
.long 0, 0 /* Unmap page at 0 to catch errors with NULL pointers. */
- .rept L1_PT_ENTRIES - 1
- .long (((. - l1_identmap) / 8) << (PAGE_ORDER_4K + PAGE_SHIFT)) + \
+ .rept PAE_L1_PT_ENTRIES - 1
+ .long (((. - pae_l1_identmap) / 8) << PAE_L1_PT_SHIFT) + \
_PAGE_USER + _PAGE_RW + _PAGE_PRESENT
.long 0
.endr
-SIZE(l1_identmap)
+SIZE(pae_l1_identmap)
-/* Mappings up to 4G in 2M superpages. Uses 4x 4k pages. */
-GLOBAL(l2_identmap)
- .long l1_identmap + _PAGE_USER + _PAGE_RW + _PAGE_PRESENT
+/* PAE mappings up to 4G, mostly in 2M superpages. Uses 4x 4k pages. */
+GLOBAL(pae_l2_identmap)
+ .long pae_l1_identmap + _PAGE_USER + _PAGE_RW + _PAGE_PRESENT
.long 0
- .rept (4 * L2_PT_ENTRIES) - 1
- .long (((. - l2_identmap) / 8) << (PAGE_ORDER_2M + PAGE_SHIFT)) + \
+ .rept (4 * PAE_L2_PT_ENTRIES) - 1
+ .long (((. - pae_l2_identmap) / 8) << PAE_L2_PT_SHIFT) + \
_PAGE_PSE + _PAGE_USER + _PAGE_RW + _PAGE_PRESENT
.long 0
.endr
-SIZE(l2_identmap)
+SIZE(pae_l2_identmap)
-/* l3 pagetable. Maps 4x l2 tables. */
-GLOBAL(l3_identmap)
+/* PAE l3 pagetable. Maps 4x l2 tables. */
+GLOBAL(pae_l3_identmap)
.rept 4
- .long l2_identmap + (((. - l3_identmap) / 8 ) << PAGE_SHIFT) + \
+ .long pae_l2_identmap + (((. - pae_l3_identmap) / 8 ) << PAGE_SHIFT) + \
_PAGE_USER + _PAGE_RW + _PAGE_PRESENT
.long 0
.endr
- .fill L3_PT_ENTRIES - 4, 8, 0
-SIZE(l3_identmap)
+ .fill PAE_L3_PT_ENTRIES - 4, 8, 0
+SIZE(pae_l3_identmap)
-/* l4 pagetable. Maps 1x l3 table. */
-GLOBAL(l4_identmap)
- .long l3_identmap + _PAGE_USER + _PAGE_RW + _PAGE_PRESENT
+/* PAE l4 pagetable. Maps 1x l3 table. */
+GLOBAL(pae_l4_identmap)
+ .long pae_l3_identmap + _PAGE_USER + _PAGE_RW + _PAGE_PRESENT
.long 0
- .fill L4_PT_ENTRIES - 1, 8, 0
-SIZE(l4_identmap)
+ .fill PAE_L4_PT_ENTRIES - 1, 8, 0
+SIZE(pae_l4_identmap)
-/* l3 32bit PAE quad. Contains 4 entries. */
-GLOBAL(l3_paemap)
- .rept 4
- .long l2_identmap + (((. - l3_paemap) / 8 ) << PAGE_SHIFT) + _PAGE_PRESENT
+/* PAE l3 32bit quad. Contains 4 64bit entries. */
+GLOBAL(pae32_l3_identmap)
+ .rept PAE32_L3_ENTRIES
+ .long pae_l2_identmap + (((. - pae32_l3_identmap) / 8 ) << PAGE_SHIFT) + _PAGE_PRESENT
.long 0
.endr
-SIZE(l3_paemap)
+SIZE(pae32_l3_identmap)
/*
* Local variables:
*/
#include <xtf/numbers.h>
+#include <arch/x86/page.h>
/* Don't clobber the ld directive */
#undef i386
}
}
+#ifdef CONFIG_HVM
+
+ASSERT(IS_ALIGNED(pae_l1_identmap, PAGE_SIZE), "pae_l1_identmap misaligned");
+ASSERT(IS_ALIGNED(pae_l2_identmap, PAGE_SIZE), "pae_l2_identmap misaligned");
+ASSERT(IS_ALIGNED(pae_l3_identmap, PAGE_SIZE), "pae_l3_identmap misaligned");
+ASSERT(IS_ALIGNED(pae_l4_identmap, PAGE_SIZE), "pae_l4_identmap misaligned");
+
+ASSERT(IS_ALIGNED(pae32_l3_identmap, 32), "pae32_l3_identmap misaligned");
+
+#endif
/*
* Local variables:
* tab-width: 8
--- /dev/null
+/**
+ * @file include/arch/x86/page-pae.h
+ *
+ * Definitions and helpers for PAE pagetable handling.
+ */
+#ifndef XTF_X86_PAGE_PAE_H
+#define XTF_X86_PAGE_PAE_H
+
+/** PAE pagetable entries are 64 bits wide. */
+#define PAE_PTE_SZ 8
+
+/** @{ */
+/** All PAE pagetables contain 512 entries. */
+#define PAE_L1_PT_ENTRIES (PAGE_SIZE / PAE_PTE_SZ)
+#define PAE_L2_PT_ENTRIES (PAGE_SIZE / PAE_PTE_SZ)
+#define PAE_L3_PT_ENTRIES (PAGE_SIZE / PAE_PTE_SZ)
+#define PAE_L4_PT_ENTRIES (PAGE_SIZE / PAE_PTE_SZ)
+
+/* Other than PAE32_L3, which has 4 entries. */
+#define PAE32_L3_ENTRIES 4
+/** @} */
+
+/** @{ */
+/** Bit position of each pagetable level's index within a virtual address. */
+#define PAE_L1_PT_SHIFT 12
+#define PAE_L2_PT_SHIFT 21
+#define PAE_L3_PT_SHIFT 30
+#define PAE_L4_PT_SHIFT 39
+/** @} */
+
+#ifndef __ASSEMBLY__
+
+/** Integer representation of a PTE. */
+typedef uint64_t pae_intpte_t;
+
+/** Index into an L1 pagetable for @p va. */
+static inline unsigned int pae_l1_table_offset(unsigned long va)
+{ return (va >> PAE_L1_PT_SHIFT) & (PAE_L1_PT_ENTRIES - 1); }
+/** Index into an L2 pagetable for @p va. */
+static inline unsigned int pae_l2_table_offset(unsigned long va)
+{ return (va >> PAE_L2_PT_SHIFT) & (PAE_L2_PT_ENTRIES - 1); }
+/** Index into an L3 pagetable for @p va. */
+static inline unsigned int pae_l3_table_offset(unsigned long va)
+{ return (va >> PAE_L3_PT_SHIFT) & (PAE_L3_PT_ENTRIES - 1); }
+#ifdef __x86_64__
+/** Index into an L4 pagetable for @p va. */
+static inline unsigned int pae_l4_table_offset(unsigned long va)
+{ return (va >> PAE_L4_PT_SHIFT) & (PAE_L4_PT_ENTRIES - 1); }
+#endif /* __x86_64__ */
+
+#endif /* __ASSEMBLY__ */
+#endif /* XTF_X86_PAGE_PAE_H */
+
+/*
+ * Local variables:
+ * mode: C
+ * c-file-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
*/
#define PAGE_SHIFT 12
-
#define PAGE_SIZE (_AC(1, L) << PAGE_SHIFT)
-
#define PAGE_MASK (~(PAGE_SIZE - 1))
+#include "page-pae.h"
+
#define PAGE_ORDER_4K 0
#define PAGE_ORDER_2M 9
#define PAGE_ORDER_1G 18
#define _PAGE_USER 0x004
#define _PAGE_PSE 0x080
-#define L1_PT_SHIFT 12
-#define L2_PT_SHIFT 21
-#define L3_PT_SHIFT 30
-#define L4_PT_SHIFT 39
+#if CONFIG_PAGING_LEVELS >= 3 /* PAE Paging */
+
+#define L1_PT_SHIFT PAE_L1_PT_SHIFT
+#define L2_PT_SHIFT PAE_L2_PT_SHIFT
+#define L3_PT_SHIFT PAE_L3_PT_SHIFT
+
+#endif /* CONFIG_PAGING_LEVELS >= 3 */
+
+#if CONFIG_PAGING_LEVELS >= 4 /* 4-level paging */
+
+#define L4_PT_SHIFT PAE_L4_PT_SHIFT
+
+#endif /* CONFIG_PAGING_LEVELS >= 4 */
-#define L1_PT_ENTRIES 512
-#define L2_PT_ENTRIES 512
-#define L3_PT_ENTRIES 512
-#define L4_PT_ENTRIES 512
#ifndef __ASSEMBLY__
+#if CONFIG_PAGING_LEVELS >= 3 /* PAE Paging */
+
+typedef pae_intpte_t intpte_t;
+
static inline unsigned int l1_table_offset(unsigned long va)
-{ return (va >> L1_PT_SHIFT) & (L1_PT_ENTRIES - 1); }
+{ return pae_l1_table_offset(va); }
static inline unsigned int l2_table_offset(unsigned long va)
-{ return (va >> L2_PT_SHIFT) & (L2_PT_ENTRIES - 1); }
+{ return pae_l2_table_offset(va); }
static inline unsigned int l3_table_offset(unsigned long va)
-{ return (va >> L3_PT_SHIFT) & (L3_PT_ENTRIES - 1); }
-#ifdef __x86_64__
+{ return pae_l3_table_offset(va); }
+
+#endif /* CONFIG_PAGING_LEVELS >= 3 */
+
+#if CONFIG_PAGING_LEVELS >= 4 /* 4-level paging */
+
static inline unsigned int l4_table_offset(unsigned long va)
-{ return (va >> L4_PT_SHIFT) & (L4_PT_ENTRIES - 1); }
-#endif /* __x86_64__ */
+{ return pae_l4_table_offset(va); }
+
+#endif /* CONFIG_PAGING_LEVELS >= 4 */
+#if CONFIG_PAGING_LEVELS > 0
-static inline uint64_t pte_to_paddr(uint64_t pte)
+static inline uint64_t pte_to_paddr(intpte_t pte)
{ return pte & 0x000ffffffffff000ULL; }
+#endif /* CONFIG_PAGING_LEVELS > 0 */
+
#endif /* !__ASSEMBLY__ */
#endif /* XTF_X86_PAGE_H */