[PATCH 4.17 088/101] arm64: Add ARCH_WORKAROUND_2 probing

From: Greg Kroah-Hartman
Date: Fri Jul 20 2018 - 08:50:37 EST


4.17-stable review patch. If anyone has any objections, please let me know.

------------------

From: Marc Zyngier <marc.zyngier@xxxxxxx>

commit a725e3dda1813ed306734823ac4c65ca04e38500 upstream.

As with Spectre variant-2, we rely on SMCCC 1.1 to provide the
discovery mechanism for detecting the SSBD mitigation.

A new capability is also allocated for that purpose, along with
a config option.
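
To make the discovery flow concrete, the following is a rough sketch of
the probe-and-enable sequence that has_ssbd_mitigation() in the diff
below implements, shown for the SMC conduit only and with the capability
bookkeeping elided; it is illustrative, not a separate implementation:

  struct arm_smccc_res res;

  /* Ask the firmware whether ARCH_WORKAROUND_2 is implemented. */
  arm_smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
                    ARM_SMCCC_ARCH_WORKAROUND_2, &res);

  /*
   * Negative: unsupported or already mitigated; positive: this CPU
   * is unaffected; zero: mitigation is required and must be enabled
   * explicitly.
   */
  if ((int)res.a0 == 0)
          arm_smccc_1_1_smc(ARM_SMCCC_ARCH_WORKAROUND_2, 1, NULL);

The HVC conduit case is identical, using arm_smccc_1_1_hvc(); which
conduit applies is determined by psci_ops.conduit.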

Reviewed-by: Julien Grall <julien.grall@xxxxxxx>
Reviewed-by: Mark Rutland <mark.rutland@xxxxxxx>
Acked-by: Will Deacon <will.deacon@xxxxxxx>
Reviewed-by: Suzuki K Poulose <suzuki.poulose@xxxxxxx>
Signed-off-by: Marc Zyngier <marc.zyngier@xxxxxxx>
Signed-off-by: Catalin Marinas <catalin.marinas@xxxxxxx>
Signed-off-by: Greg Kroah-Hartman <gregkh@xxxxxxxxxxxxxxxxxxx>
---
 arch/arm64/Kconfig               |  9 +++++
 arch/arm64/include/asm/cpucaps.h |  3 +
 arch/arm64/kernel/cpu_errata.c   | 69 +++++++++++++++++++++++++++++++++++++++
 3 files changed, 80 insertions(+), 1 deletion(-)

--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -938,6 +938,15 @@ config HARDEN_EL2_VECTORS
 
 	  If unsure, say Y.
 
+config ARM64_SSBD
+	bool "Speculative Store Bypass Disable" if EXPERT
+	default y
+	help
+	  This enables mitigation of the bypassing of previous stores
+	  by speculative loads.
+
+	  If unsure, say Y.
+
 menuconfig ARMV8_DEPRECATED
 	bool "Emulate deprecated/obsolete ARMv8 instructions"
 	depends on COMPAT
--- a/arch/arm64/include/asm/cpucaps.h
+++ b/arch/arm64/include/asm/cpucaps.h
@@ -48,7 +48,8 @@
 #define ARM64_HAS_CACHE_IDC		27
 #define ARM64_HAS_CACHE_DIC		28
 #define ARM64_HW_DBM			29
+#define ARM64_SSBD			30
 
-#define ARM64_NCAPS			30
+#define ARM64_NCAPS			31
 
 #endif /* __ASM_CPUCAPS_H */
--- a/arch/arm64/kernel/cpu_errata.c
+++ b/arch/arm64/kernel/cpu_errata.c
@@ -256,6 +256,67 @@ void __init arm64_update_smccc_conduit(s
 
 	*updptr = cpu_to_le32(insn);
 }
+
+static void arm64_set_ssbd_mitigation(bool state)
+{
+	switch (psci_ops.conduit) {
+	case PSCI_CONDUIT_HVC:
+		arm_smccc_1_1_hvc(ARM_SMCCC_ARCH_WORKAROUND_2, state, NULL);
+		break;
+
+	case PSCI_CONDUIT_SMC:
+		arm_smccc_1_1_smc(ARM_SMCCC_ARCH_WORKAROUND_2, state, NULL);
+		break;
+
+	default:
+		WARN_ON_ONCE(1);
+		break;
+	}
+}
+
+static bool has_ssbd_mitigation(const struct arm64_cpu_capabilities *entry,
+				int scope)
+{
+	struct arm_smccc_res res;
+	bool supported = true;
+
+	WARN_ON(scope != SCOPE_LOCAL_CPU || preemptible());
+
+	if (psci_ops.smccc_version == SMCCC_VERSION_1_0)
+		return false;
+
+	/*
+	 * The probe function return value is either negative
+	 * (unsupported or mitigated), positive (unaffected), or zero
+	 * (requires mitigation). We only need to do anything in the
+	 * last case.
+	 */
+	switch (psci_ops.conduit) {
+	case PSCI_CONDUIT_HVC:
+		arm_smccc_1_1_hvc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
+				  ARM_SMCCC_ARCH_WORKAROUND_2, &res);
+		if ((int)res.a0 != 0)
+			supported = false;
+		break;
+
+	case PSCI_CONDUIT_SMC:
+		arm_smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
+				  ARM_SMCCC_ARCH_WORKAROUND_2, &res);
+		if ((int)res.a0 != 0)
+			supported = false;
+		break;
+
+	default:
+		supported = false;
+	}
+
+	if (supported) {
+		__this_cpu_write(arm64_ssbd_callback_required, 1);
+		arm64_set_ssbd_mitigation(true);
+	}
+
+	return supported;
+}
 #endif /* CONFIG_ARM64_SSBD */
 
 #define CAP_MIDR_RANGE(model, v_min, r_min, v_max, r_max) \
@@ -514,6 +575,14 @@ const struct arm64_cpu_capabilities arm6
 		ERRATA_MIDR_RANGE_LIST(arm64_harden_el2_vectors),
 	},
 #endif
+#ifdef CONFIG_ARM64_SSBD
+	{
+		.desc = "Speculative Store Bypass Disable",
+		.capability = ARM64_SSBD,
+		.type = ARM64_CPUCAP_LOCAL_CPU_ERRATUM,
+		.matches = has_ssbd_mitigation,
+	},
+#endif
 	{
 	}
 };
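
Once the capability is registered, it can be tested like any other
local-CPU erratum cap. The sketch below is not part of this patch: it
assumes the standard arm64 cpufeature helper this_cpu_has_cap(), and
this_cpu_needs_ssbd() is a hypothetical wrapper used only for
illustration; the real consumers of ARM64_SSBD and of the per-CPU
arm64_ssbd_callback_required flag arrive in other patches of this
series.

  #include <asm/cpufeature.h>

  /* Hypothetical helper, for illustration only. */
  static bool this_cpu_needs_ssbd(void)
  {
          /*
           * ARM64_SSBD is detected per CPU (LOCAL_CPU_ERRATUM), so
           * the per-CPU query is the meaningful one here.
           */
          return this_cpu_has_cap(ARM64_SSBD);
  }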