From bbfee5bf8d2de2342f7fd0c8a12b9fe4617fc5fa Mon Sep 17 00:00:00 2001
From: Andrew Cooper
Date: Thu, 5 May 2016 16:12:17 +0100
Subject: [PATCH] Provide all {read,write}_cr[02348]() stub functions

Both GCC and Clang correctly encode %cr8 accesses using the lock
instruction prefix in 32bit mode.

Signed-off-by: Andrew Cooper
---
 include/arch/x86/lib.h | 52 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)

diff --git a/include/arch/x86/lib.h b/include/arch/x86/lib.h
index b1b2bf1..a99cd15 100644
--- a/include/arch/x86/lib.h
+++ b/include/arch/x86/lib.h
@@ -195,6 +195,15 @@ static inline unsigned long read_dr7(void)
     return val;
 }
 
+static inline unsigned long read_cr0(void)
+{
+    unsigned long cr0;
+
+    asm volatile ("mov %%cr0, %0" : "=r" (cr0));
+
+    return cr0;
+}
+
 static inline unsigned long read_cr2(void)
 {
     unsigned long cr2;
@@ -213,6 +222,49 @@ static inline unsigned long read_cr3(void)
     return cr3;
 }
 
+static inline unsigned long read_cr4(void)
+{
+    unsigned long cr4;
+
+    asm volatile ("mov %%cr4, %0" : "=r" (cr4));
+
+    return cr4;
+}
+
+static inline unsigned long read_cr8(void)
+{
+    unsigned long cr8;
+
+    asm volatile ("mov %%cr8, %0" : "=r" (cr8));
+
+    return cr8;
+}
+
+static inline void write_cr0(unsigned long cr0)
+{
+    asm volatile ("mov %0, %%cr0" :: "r" (cr0));
+}
+
+static inline void write_cr2(unsigned long cr2)
+{
+    asm volatile ("mov %0, %%cr2" :: "r" (cr2));
+}
+
+static inline void write_cr3(unsigned long cr3)
+{
+    asm volatile ("mov %0, %%cr3" :: "r" (cr3));
+}
+
+static inline void write_cr4(unsigned long cr4)
+{
+    asm volatile ("mov %0, %%cr4" :: "r" (cr4));
+}
+
+static inline void write_cr8(unsigned long cr8)
+{
+    asm volatile ("mov %0, %%cr8" :: "r" (cr8));
+}
+
 #endif /* XTF_X86_LIB_H */
 
 /*
-- 
2.39.5