Re: RFC, untested: handling of MSR immediates and MSRs on Xen
From: H. Peter Anvin
Date: Wed Oct 23 2024 - 19:05:44 EST
On 10/23/24 15:08, Uros Bizjak wrote:
Please use %c asm operand modifier instead of %P. Please see comment in
arch/x86/include/asm/alternative.h for the reason:
Here is an updated version which uses asm goto for the safe versions and
changes the type to a boolean.
I also fixed a couple of syntax problems.
-hpa
/*
 * Encode the Xen disposition of an MSR write as a function pointer:
 *   NULL (xen_msr_native) -> use the native instruction path,
 *   1    (xen_msr_ignore) -> silently drop the write under Xen PV,
 *   anything else         -> address of the asm_xen_ stub to call.
 */
typedef void (*xen_msr_action_t)(void);
#define xen_msr_native ((xen_msr_action_t)NULL)
#define xen_msr_ignore ((xen_msr_action_t)(1UL))
#ifdef CONFIG_XEN
/*** This should go into a Xen-specific header file ***/
/*
 * The actual calling convention for the asm_xen_ function is:
 * Input:
 * %rax - value (full 64-bit value, do not read from %edx)
 * %ecx - MSR number
 * Output:
 * %esi - optionally clobbered
 * all other registers preserved
 * On error, invoke the trap handler at the stack pointer using
 * indirect trapping.
 */
void asm_xen_wrmsrns_general(void);
void asm_xen_wrmsrns_perf(void);

/*
 * Classify an MSR write for Xen PV: return the asm stub to call,
 * xen_msr_ignore to drop the write, or xen_msr_native to use the
 * native instruction path.
 *
 * NOTE(review): renamed from xen_wrmsrns_action() so it matches the
 * !CONFIG_XEN stub below and all call sites (wrmsrns_variable() etc.),
 * which otherwise would not build with CONFIG_XEN enabled.
 */
static __must_inline xen_msr_action_t xen_msr_action(uint32_t msr)
{
	/* Non-constant MSR numbers must be dispatched at run time. */
	if (!__builtin_constant_p(msr))
		return asm_xen_wrmsrns_general;

	switch (msr) {
	case 0x1234:
	case 0x5678:
		return asm_xen_wrmsrns_perf;
	case 0x78:
	case 0x89:
		return xen_msr_ignore;
	default:
		return xen_msr_native;
	}
}
#else
/* Without Xen, every MSR write takes the native instruction path. */
static __must_inline xen_msr_action_t xen_msr_action(uint32_t msr)
{
	return xen_msr_native;
}
#endif
/* Exception-table entry: a fault in the MSR insn at 'from' resumes at 'to'. */
#define EX_WRMSR(from,to) \
_ASM_EXTABLE_TYPE(from, to, EX_TYPE_WRMSR, 0)
/* As above, but the _SAFE type reports the failure instead of warning. */
#define EX_WRMSR_SAFE(from,to) \
_ASM_EXTABLE_TYPE(from, to, EX_TYPE_WRMSR_SAFE, 0)
/*
* XXX: is ASM_CALL_CONSTRAINT compatible with asm goto? Perhaps "memory"
* takes care of the problem without it?
*/
/*
 * Non-serializing write of a non-compile-time-constant MSR.
 * Baseline is WRMSR (with a ds prefix so WRMSRNS can be patched over it),
 * alternatively patched to WRMSRNS, or to a Xen PV call stub.
 */
static __must_inline void wrmsrns_variable(uint32_t msr, uint64_t val)
{
	const xen_msr_action_t xen_what = xen_msr_action(msr);
	uint32_t hi = val >> 32;

	if (xen_what == xen_msr_native) {
		asm_inline volatile(
			"1:\n"
			ALTERNATIVE("ds wrmsr",
				    "wrmsrns",
				    X86_FEATURE_WRMSRNS)
			"2:\n"
			EX_WRMSR(1b,2b)
			/* Fixed: stray comma after the last input operand */
			: : "a" (val), "d" (hi), "c" (msr)
			: "memory");
	} else {
		asm_inline volatile(
			"1:\n"
			ALTERNATIVE_2("ds wrmsr",
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      "call %c[xen]",
				      X86_FEATURE_XENPV)
			"2:\n"
			EX_WRMSR(1b,2b)
			: "+d" (hi), ASM_CALL_CONSTRAINT
			: [val] "a" (val), "c" (msr), [xen] "i" (xen_what)
			/* Xen stub may clobber %esi per its calling convention */
			: "memory", "esi");
	}
}
/*
 * As wrmsrns_variable(), but a faulting write branches to badmsr via the
 * exception table instead of oopsing.  Returns true on failure.
 */
static __must_inline bool wrmsrns_variable_safe(uint32_t msr, uint64_t val)
{
	const xen_msr_action_t xen_what = xen_msr_action(msr);
	const uint32_t hi = val >> 32;

	if (xen_what == xen_msr_native) {
		asm_inline volatile goto(
			"1:\n"
			ALTERNATIVE("ds wrmsr",
				    "wrmsrns",
				    X86_FEATURE_WRMSRNS)
			EX_WRMSR_SAFE(1b,%l[badmsr])
			/* Fixed: stray comma after the last input operand */
			: : [val] "a" (val), "d" (hi), [msr] "c" (msr)
			: "memory"
			: badmsr);
	} else {
		/*
		 * Compute %edx inline to avoid problems with older
		 * gccs that did not allow output constraints in asm goto
		 * (since inputs and clobbers can't overlap)
		 */
		asm_inline volatile goto(
			/*
			 * Fixed: missing comma after the oldinstr string;
			 * without it the two alternatives concatenated into
			 * one malformed instruction sequence.
			 */
			ALTERNATIVE_2("1: mov %%rax,%%rdx; shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; shr $32,%%rdx; "
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      "call %c[xen]",
				      X86_FEATURE_XENPV)
			EX_WRMSR_SAFE(1b,%l[badmsr]) /* For Xen */
			EX_WRMSR_SAFE(2b,%l[badmsr]) /* For WRMSR(NS) */
			: ASM_CALL_CONSTRAINT
			: [val] "a" (val), [msr] "c" (msr), [xen] "i" (xen_what)
			/* Xen stub may clobber %esi per its calling convention */
			: "memory", "rdx", "esi"
			: badmsr);
	}
	return false;

badmsr:
	__cold;
	return true;
}
/* WRMSRNS with the MSR number as a 32-bit immediate (gas .insn syntax). */
#define WRMSRNS_IMM " .insn VEX.128.F3.M7.W0 0xf6 /0, %[val], %[msr] {:u32}\n"
/*
 * Non-serializing write of a compile-time-constant MSR.  Can additionally
 * be patched to the WRMSRNS-immediate form when supported.
 */
static __must_inline void wrmsrns_constant(uint32_t msr, uint64_t val)
{
	/* Fixed: xen_msr_action() takes only the MSR number */
	const xen_msr_action_t xen_what = xen_msr_action(msr);

	/*
	 * Compute %edx inline so the space can be reused by the
	 * immediate instruction forms (9 bytes long.)
	 */
	if (xen_what == xen_msr_native) {
		asm_inline volatile(
			ALTERNATIVE_2("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM,
				      X86_FEATURE_MSR_IMM)
			"3:\n"
			EX_WRMSR(1b,3b) /* For WRMSRNS immediate */
			EX_WRMSR(2b,3b) /* For WRMSR(NS) */
			: : [val] "a" (val), "c" (msr), [msr] "i" (msr)
			: "memory", "edx");
	} else if (xen_what == xen_msr_ignore) {
		asm_inline volatile(
			ALTERNATIVE_3("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM,
				      X86_FEATURE_MSR_IMM,
				      "",	/* drop the write on Xen PV */
				      X86_FEATURE_XENPV)
			"3:\n"
			EX_WRMSR(1b,3b) /* For WRMSRNS immediate */
			EX_WRMSR(2b,3b) /* For WRMSR(NS) */
			: : [val] "a" (val), "c" (msr), [msr] "i" (msr)
			: "memory", "edx");
	} else {
		asm_inline volatile(
			/*
			 * Fixed: this branch must actually emit the Xen PV
			 * call, so ALTERNATIVE_3 with "call %c[xen]" (the
			 * [xen] operand was passed but never used before).
			 */
			ALTERNATIVE_3("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM,
				      X86_FEATURE_MSR_IMM,
				      "call %c[xen]",
				      X86_FEATURE_XENPV)
			"3:\n"
			EX_WRMSR(1b,3b) /* For Xen and WRMSRNS immediate */
			EX_WRMSR(2b,3b) /* For WRMSRNS and WRMSR */
			: ASM_CALL_CONSTRAINT
			: [val] "a" (val), "c" (msr), [msr] "i" (msr),
			  [xen] "i" (xen_what)
			/* Xen stub may clobber %esi per its calling convention */
			: "memory", "edx", "esi");
	}
	/* Fixed: dropped bogus 'return err;' from this void function */
}
/*
 * As wrmsrns_constant(), but a faulting write branches to badmsr via the
 * exception table instead of oopsing.  Returns true on failure.
 */
static __must_inline bool wrmsrns_constant_safe(uint32_t msr, uint64_t val)
{
	/* Fixed: xen_msr_action() takes only the MSR number */
	const xen_msr_action_t xen_what = xen_msr_action(msr);

	if (xen_what == xen_msr_native) {
		asm_inline volatile goto(
			ALTERNATIVE_2("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM,
				      X86_FEATURE_MSR_IMM)
			EX_WRMSR_SAFE(1b,%l[badmsr]) /* For WRMSRNS immediate */
			EX_WRMSR_SAFE(2b,%l[badmsr]) /* For WRMSR(NS) */
			: : [val] "a" (val), "c" (msr), [msr] "i" (msr)
			: "memory", "edx"
			: badmsr);
	} else if (xen_what == xen_msr_ignore) {
		/*
		 * Fixed: removed a stray leading "1:\n" that duplicated the
		 * "1:" label defined inside the alternative, and removed the
		 * '"+r" (err)' output (err does not exist, and asm goto
		 * outputs are deliberately avoided here -- see above).
		 */
		asm_inline volatile goto(
			ALTERNATIVE_3("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns", X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM, X86_FEATURE_MSR_IMM,
				      "", X86_FEATURE_XENPV)
			EX_WRMSR_SAFE(1b,%l[badmsr]) /* For WRMSRNS immediate */
			EX_WRMSR_SAFE(2b,%l[badmsr]) /* For WRMSR(NS) */
			: : [val] "a" (val), "c" (msr), [msr] "i" (msr)
			: "memory", "edx"
			: badmsr);
	} else {
		asm_inline volatile goto(
			ALTERNATIVE_3("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns", X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM, X86_FEATURE_MSR_IMM,
				      "call %c[xen]", X86_FEATURE_XENPV)
			EX_WRMSR_SAFE(1b,%l[badmsr]) /* For Xen/WRMSRNS imm */
			EX_WRMSR_SAFE(2b,%l[badmsr]) /* For WRMSR(NS) */
			: ASM_CALL_CONSTRAINT
			: [val] "a" (val), "c" (msr), [msr] "i" (msr),
			  [xen] "i" (xen_what)
			/* Xen stub may clobber %esi per its calling convention */
			: "memory", "edx", "esi"
			: badmsr);
	}
	return false;

badmsr:
	__cold;
	return true;
}
/*
 * Non-serializing MSR write: dispatch on whether the MSR number is a
 * compile-time constant, which enables the immediate instruction forms.
 */
static inline void wrmsrns(uint32_t msr, uint64_t val)
{
	if (!__builtin_constant_p(msr))
		wrmsrns_variable(msr, val);
	else
		wrmsrns_constant(msr, val);
}
/*
 * Fault-tolerant non-serializing MSR write; returns true if the write
 * faulted.  Dispatches exactly like wrmsrns().
 */
static inline bool wrmsrns_safe(uint32_t msr, uint64_t val)
{
	return __builtin_constant_p(msr) ? wrmsrns_constant_safe(msr, val)
					 : wrmsrns_variable_safe(msr, val);
}