-rw-r--r--  xen/arch/x86/acpi/power.c             3
-rw-r--r--  xen/arch/x86/setup.c                  5
-rw-r--r--  xen/arch/x86/smpboot.c                5
-rw-r--r--  xen/arch/x86/spec_ctrl.c             10
-rw-r--r--  xen/include/asm-x86/current.h         2
-rw-r--r--  xen/include/asm-x86/spec_ctrl_asm.h   4
6 files changed, 24 insertions, 5 deletions
diff --git a/xen/arch/x86/acpi/power.c b/xen/arch/x86/acpi/power.c
index 0837a3ead4..bac9c16389 100644
--- a/xen/arch/x86/acpi/power.c
+++ b/xen/arch/x86/acpi/power.c
@@ -296,7 +296,10 @@ static int enter_state(u32 state)
ci->spec_ctrl_flags |= (default_spec_ctrl_flags & SCF_ist_wrmsr);
if ( boot_cpu_has(X86_FEATURE_IBRSB) )
+ {
wrmsrl(MSR_SPEC_CTRL, default_xen_spec_ctrl);
+ ci->last_spec_ctrl = default_xen_spec_ctrl;
+ }
if ( boot_cpu_has(X86_FEATURE_SRBDS_CTRL) )
wrmsrl(MSR_MCU_OPT_CTRL, default_xen_mcu_opt_ctrl);
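Why the resume path needs the extra assignment: across S3 the processor loses MSR state, so MSR_SPEC_CTRL comes back at its reset value and whatever last_spec_ctrl held before suspend is stale. The hunk above therefore refreshes the MSR and the cached copy together. A commented restatement of the same pattern (the S3 reset behaviour is stated here as an assumption, not taken from the patch):

    if ( boot_cpu_has(X86_FEATURE_IBRSB) )
    {
        /* MSR content was lost across suspend; reload Xen's choice... */
        wrmsrl(MSR_SPEC_CTRL, default_xen_spec_ctrl);
        /* ...and refresh the cached copy so the two cannot diverge. */
        ci->last_spec_ctrl = default_xen_spec_ctrl;
    }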
diff --git a/xen/arch/x86/setup.c b/xen/arch/x86/setup.c
index da47cdea14..369691dd13 100644
--- a/xen/arch/x86/setup.c
+++ b/xen/arch/x86/setup.c
@@ -1944,9 +1944,12 @@ void __init noreturn __start_xen(unsigned long mbi_p)
if ( bsp_delay_spec_ctrl )
{
- get_cpu_info()->spec_ctrl_flags &= ~SCF_use_shadow;
+ struct cpu_info *info = get_cpu_info();
+
+ info->spec_ctrl_flags &= ~SCF_use_shadow;
barrier();
wrmsrl(MSR_SPEC_CTRL, default_xen_spec_ctrl);
+ info->last_spec_ctrl = default_xen_spec_ctrl;
}
/* Jump to the 1:1 virtual mappings of cpu0_stack. */
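This is the tear-down half of the arrangement armed in init_speculation_mitigations() (spec_ctrl.c hunk below): once the BSP is ready to run with its final MSR_SPEC_CTRL value, the shadow stops being authoritative before the real value is loaded. A commented restatement; barrier() is only a compiler barrier, which is assumed sufficient here because the consumers of spec_ctrl_flags at this point are this CPU's own interrupt and exception entry paths:

    struct cpu_info *info = get_cpu_info();

    info->spec_ctrl_flags &= ~SCF_use_shadow;     /* stop restoring the shadow value   */
    barrier();                                    /* order the flag vs. the MSR write  */
    wrmsrl(MSR_SPEC_CTRL, default_xen_spec_ctrl); /* load Xen's real value             */
    info->last_spec_ctrl = default_xen_spec_ctrl; /* keep the cached copy in sync      */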
diff --git a/xen/arch/x86/smpboot.c b/xen/arch/x86/smpboot.c
index 329cfdb6c9..ee3e86cc78 100644
--- a/xen/arch/x86/smpboot.c
+++ b/xen/arch/x86/smpboot.c
@@ -322,6 +322,8 @@ static void set_cpu_sibling_map(unsigned int cpu)
void start_secondary(void *unused)
{
+ struct cpu_info *info = get_cpu_info();
+
/*
* Dont put anything before smp_callin(), SMP booting is so fragile that we
* want to limit the things done here to the most necessary things.
@@ -378,7 +380,10 @@ void start_secondary(void *unused)
* microcode.
*/
if ( boot_cpu_has(X86_FEATURE_IBRSB) )
+ {
wrmsrl(MSR_SPEC_CTRL, default_xen_spec_ctrl);
+ info->last_spec_ctrl = default_xen_spec_ctrl;
+ }
if ( boot_cpu_has(X86_FEATURE_SRBDS_CTRL) )
wrmsrl(MSR_MCU_OPT_CTRL, default_xen_mcu_opt_ctrl);
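Both this AP bring-up path and the S3 resume path in power.c now repeat the same two-line write-and-record pattern. A hypothetical helper (not part of this patch; the name is invented here) shows how the pair could be kept from drifting apart:

    /* Hypothetical helper, for illustration only. */
    static void wrmsr_spec_ctrl(struct cpu_info *ci, unsigned int val)
    {
        wrmsrl(MSR_SPEC_CTRL, val);
        ci->last_spec_ctrl = val;
    }

The patch open-codes the pair at each call site instead, presumably to keep the change minimal.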
diff --git a/xen/arch/x86/spec_ctrl.c b/xen/arch/x86/spec_ctrl.c
index 2072daf662..b2fd86ebe5 100644
--- a/xen/arch/x86/spec_ctrl.c
+++ b/xen/arch/x86/spec_ctrl.c
@@ -1270,6 +1270,9 @@ void __init init_speculation_mitigations(void)
*/
if ( has_spec_ctrl )
{
+ struct cpu_info *info = get_cpu_info();
+ unsigned int val;
+
bsp_delay_spec_ctrl = !cpu_has_hypervisor && default_xen_spec_ctrl;
/*
@@ -1278,15 +1281,16 @@ void __init init_speculation_mitigations(void)
*/
if ( bsp_delay_spec_ctrl )
{
- struct cpu_info *info = get_cpu_info();
-
info->shadow_spec_ctrl = 0;
barrier();
info->spec_ctrl_flags |= SCF_use_shadow;
barrier();
}
- wrmsrl(MSR_SPEC_CTRL, bsp_delay_spec_ctrl ? 0 : default_xen_spec_ctrl);
+ val = bsp_delay_spec_ctrl ? 0 : default_xen_spec_ctrl;
+
+ wrmsrl(MSR_SPEC_CTRL, val);
+ info->last_spec_ctrl = val;
}
if ( boot_cpu_has(X86_FEATURE_SRBDS_CTRL) )
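For reference, the arming sequence as it reads after this hunk, with comments. Ordering matters: shadow_spec_ctrl must hold a valid value before SCF_use_shadow tells the entry/exit fragments to restore it, and the barrier()s stop the compiler from reordering the stores. While the delay is in effect, the MSR is left at 0 and last_spec_ctrl records exactly that, until the __start_xen() tail (setup.c hunk above) installs the real value:

    if ( bsp_delay_spec_ctrl )
    {
        info->shadow_spec_ctrl = 0;              /* shadow value valid first... */
        barrier();
        info->spec_ctrl_flags |= SCF_use_shadow; /* ...then mark it in use      */
        barrier();
    }

    val = bsp_delay_spec_ctrl ? 0 : default_xen_spec_ctrl;

    wrmsrl(MSR_SPEC_CTRL, val);
    info->last_spec_ctrl = val;                  /* cache what was written      */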
diff --git a/xen/include/asm-x86/current.h b/xen/include/asm-x86/current.h
index a74ad4bc4c..8ea4aecc5e 100644
--- a/xen/include/asm-x86/current.h
+++ b/xen/include/asm-x86/current.h
@@ -56,6 +56,7 @@ struct cpu_info {
/* See asm-x86/spec_ctrl_asm.h for usage. */
unsigned int shadow_spec_ctrl;
uint8_t xen_spec_ctrl;
+ uint8_t last_spec_ctrl;
uint8_t spec_ctrl_flags;
/*
@@ -73,7 +74,6 @@ struct cpu_info {
*/
bool use_pv_cr3;
- unsigned long __pad;
/* get_stack_bottom() must be 16-byte aligned */
};
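last_spec_ctrl is a uint8_t, matching the existing xen_spec_ctrl field: the MSR_SPEC_CTRL bits Xen manages all sit in the low byte, so a byte-sized cache of the last written value is sufficient. For orientation, the relevant bit positions (per the Intel SDM; the spellings below are illustrative, not the patch's definitions):

    #define SPEC_CTRL_IBRS   (1u << 0)  /* Indirect Branch Restricted Speculation   */
    #define SPEC_CTRL_STIBP  (1u << 1)  /* Single Thread Indirect Branch Predictors */
    #define SPEC_CTRL_SSBD   (1u << 2)  /* Speculative Store Bypass Disable         */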
diff --git a/xen/include/asm-x86/spec_ctrl_asm.h b/xen/include/asm-x86/spec_ctrl_asm.h
index bf82528a12..9c0c7622c4 100644
--- a/xen/include/asm-x86/spec_ctrl_asm.h
+++ b/xen/include/asm-x86/spec_ctrl_asm.h
@@ -67,6 +67,10 @@
* steps 2 and 6 will restore the shadow value rather than leaving Xen's value
* loaded and corrupting the value used in guest context.
*
+ * Additionally, in some cases it is safe to skip writes to MSR_SPEC_CTRL when
+ * we don't require any of the side effects of an identical write. Maintain a
+ * per-cpu last_spec_ctrl value for this purpose.
+ *
* The following ASM fragments implement this algorithm. See their local
* comments for further details.
* - SPEC_CTRL_ENTRY_FROM_PV
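What the new paragraph prepares for: once every write to MSR_SPEC_CTRL also updates last_spec_ctrl, a path that knows it does not need any of the side effects of an identical write (the "in some cases" above) can compare against the cached value and elide the WRMSR entirely. A C-level sketch of that idea, purely illustrative; the real consumers are the asm fragments this comment goes on to list:

    /* Illustrative only: skip the WRMSR when nothing would change and no
     * side effect of an identical write is required. */
    static void set_spec_ctrl_lazy(struct cpu_info *ci, unsigned int val)
    {
        if ( val == ci->last_spec_ctrl )
            return;

        wrmsrl(MSR_SPEC_CTRL, val);
        ci->last_spec_ctrl = val;
    }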