// Code for manipulating stack locations.
//
-// Copyright (C) 2009-2014 Kevin O'Connor <kevin@koconnor.net>
+// Copyright (C) 2009-2015 Kevin O'Connor <kevin@koconnor.net>
//
// This file may be distributed under the terms of the GNU LGPLv3 license.
u8 cmosindex;
u8 a20;
u16 ss, fs, gs;
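+    // cr0 saved by call32_prep() (the CD/NW caching bits are restored later)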
+ u32 cr0;
struct descloc_s gdt;
} Call16Data VARLOW;
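// Set if the SMM based 32bit call method is available (see call32_smm).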
int HaveSmmCall32 VARFSEG;
// Backup state in preparation for call32
-static void
+static int
call32_prep(u8 method)
{
- // Backup cmos index register and disable nmi
- u8 cmosindex = inb(PORT_CMOS_INDEX);
- outb(cmosindex | NMI_DISABLE_BIT, PORT_CMOS_INDEX);
- inb(PORT_CMOS_DATA);
- SET_LOW(Call16Data.cmosindex, cmosindex);
-
- // Backup ss
- SET_LOW(Call16Data.ss, GET_SEG(SS));
-
if (!CONFIG_CALL32_SMM || method != C16_SMM) {
+ // Backup cr0
+ u32 cr0 = cr0_read();
+ if (cr0 & CR0_PE)
+ // Called in 16bit protected mode?!
+ return -1;
+ SET_LOW(Call16Data.cr0, cr0);
+
// Backup fs/gs and gdt
SET_LOW(Call16Data.fs, GET_SEG(FS));
SET_LOW(Call16Data.gs, GET_SEG(GS));
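
        // Enable a20 and backup its previous state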
SET_LOW(Call16Data.a20, set_a20(1));
}
+ // Backup ss
+ SET_LOW(Call16Data.ss, GET_SEG(SS));
+
+ // Backup cmos index register and disable nmi
+ u8 cmosindex = inb(PORT_CMOS_INDEX);
+ outb(cmosindex | NMI_DISABLE_BIT, PORT_CMOS_INDEX);
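+    // Dummy read of the cmos data port after updating the index register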
+ inb(PORT_CMOS_DATA);
+ SET_LOW(Call16Data.cmosindex, cmosindex);
+
SET_LOW(Call16Data.method, method);
+ return 0;
}
// Restore state backed up during call32
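        // Restore gdt and fs/gs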
lgdt(&gdt);
SET_SEG(FS, GET_LOW(Call16Data.fs));
SET_SEG(GS, GET_LOW(Call16Data.gs));
+
+        // Restore cr0 caching state (CD/NW bits)
+ u32 cr0_caching = GET_LOW(Call16Data.cr0) & (CR0_CD|CR0_NW);
+ if (cr0_caching)
+ cr0_mask(CR0_CD|CR0_NW, cr0_caching);
}
// Restore cmos index register
ASSERT16();
if (CONFIG_CALL32_SMM && GET_GLOBAL(HaveSmmCall32))
return call32_smm(func, eax);
- u32 cr0 = getcr0();
- if (cr0 & CR0_PE)
- // Called in 16bit protected mode?!
- return errret;
-
    // Jump directly to 32bit mode - this clobbers the 16bit segment
// selector registers.
- call32_prep(C16_BIG);
+ int ret = call32_prep(C16_BIG);
+ if (ret)
+ return errret;
u32 bkup_ss, bkup_esp;
asm volatile(
// Backup ss/esp / set esp to flat stack location
: "0" (index));
}
-static inline u32 getcr0(void) {
+static inline u32 cr0_read(void) {
u32 cr0;
asm("movl %%cr0, %0" : "=r"(cr0));
return cr0;
}
-static inline void setcr0(u32 cr0) {
+static inline void cr0_write(u32 cr0) {
asm("movl %0, %%cr0" : : "r"(cr0));
}
-static inline u16 getcr0_vm86(void) {
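+// Update cr0 - clear the bits in 'off' and set the bits in 'on'.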
+static inline void cr0_mask(u32 off, u32 on) {
+ cr0_write((cr0_read() & ~off) | on);
+}
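+// Read the low 16 bits of cr0 via 'smsw' (usable from vm86 mode).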
+static inline u16 cr0_vm86_read(void) {
u16 cr0;
asm("smsww %0" : "=r"(cr0));
return cr0;