Re: RFC, untested: handling of MSR immediates and MSRs on Xen
From: H. Peter Anvin
Date: Wed Oct 23 2024 - 17:34:17 EST
And I of course forgot to include the actual code.
/*
 * Dispatch target for an MSR write when running under Xen PV.
 * Two sentinel values are distinguished from real stub addresses:
 *   xen_msr_native - execute the native WRMSR/WRMSRNS instruction
 *   xen_msr_ignore - drop the write entirely
 * Any other value is the address of an asm_xen_* stub to call
 * (see the calling-convention comment below).
 */
typedef void (*xen_msr_action_t)(void);
#define xen_msr_native ((xen_msr_action_t)NULL)
#define xen_msr_ignore ((xen_msr_action_t)(1UL))
#ifdef CONFIG_XEN
/*** This should go into a Xen-specific header file ***/
/*
* The actual calling convention for the asm_xen_ function is:
* Input:
* %rax - value (full 64-bit value, do not read from %edx)
* %ecx - MSR number
* Output:
* %edx - optionally clobbered
* all other registers preserved
* On error, invoke the trap handler at the stack pointer using
* indirect trapping.
*/
void asm_xen_wrmsrns_general(void);
void asm_xen_wrmsrns_perf(void);
static __must_inline xen_msr_action_t xen_wrmsrns_action(uint32_t msr)
{
if (!__builtin_constant_p(msr))
return asm_xen_wrmsrns_general;
switch (msr) {
case 0x1234:
case 0x5678:
return asm_xen_wrmsrns_perf;
case 0x78:
case 0x89:
return xen_msr_ignore;
default:
return xen_msr_native;
}
}
#else
/* !CONFIG_XEN stub: every MSR write uses the native instruction. */
static __must_inline xen_msr_action_t xen_msr_action(uint32_t msr)
{
	return xen_msr_native;
}
#endif
/*
 * Exception-table entries covering the MSR write instructions.
 * EX_WRMSR: a fault is silently fixed up (jump to "to").
 * EX_WRMSR_SAFE: a fault additionally deposits an error code in the
 * register backing the asm operand named [err] - all *_safe users
 * declare their error operand under that name, so reference %[err]
 * here rather than the undeclared %[reg] of the original draft.
 */
#define EX_WRMSR(from,to) \
	_ASM_EXTABLE_TYPE(from, to, EX_TYPE_WRMSR, 0)
#define EX_WRMSR_SAFE(from,to) \
	_ASM_EXTABLE_TYPE_REG(from, to, EX_TYPE_WRMSR_SAFE, %[err])
/*
 * Non-serializing MSR write with a run-time (non-constant) MSR number.
 * Native path: plain WRMSR (with a DS prefix reserved for WRMSRNS
 * semantics) or WRMSRNS when X86_FEATURE_WRMSRNS is set.
 * Xen PV path: call the dispatch stub; per the convention above it takes
 * the value in %rax and the MSR number in %ecx and may clobber %edx,
 * hence hi is an in/out ("+d") operand there.
 */
static __must_inline void wrmsrns_variable(uint32_t msr, uint64_t val)
{
	const xen_msr_action_t xen_what = xen_msr_action(msr);
	uint32_t hi = val >> 32;

	if (xen_what == xen_msr_native) {
		asm_inline volatile(
			"1:\n"
			ALTERNATIVE("ds wrmsr",
				    "wrmsrns",
				    X86_FEATURE_WRMSRNS)
			"2:\n"
			EX_WRMSR(1b,2b)
			: /* no outputs */
			: "a" (val), "d" (hi), "c" (msr)
			: "memory");
	} else {
		asm_inline volatile(
			"1:\n"
			ALTERNATIVE_2("ds wrmsr",
				      "wrmsrns",
				      X86_FEATURE_WRMSRNS,
				      "call %P[xen]",
				      X86_FEATURE_XENPV)
			"2:\n"
			EX_WRMSR(1b,2b)
			: "+d" (hi), ASM_CALL_CONSTRAINT
			: [val] "a" (val), "c" (msr), [xen] "i" (xen_what)
			: "memory");
	}
}
/*
 * Like wrmsrns_variable(), but faults are reported instead of fatal.
 * Returns 0 on success; on a fault the exception table writes an error
 * code into the [err] operand.  The Xen "call" alternative does not
 * execute the xor, so err must be an in/out ("+r") operand there to
 * keep the err = 0 initialization alive.
 */
static __must_inline int wrmsrns_variable_safe(uint32_t msr, uint64_t val)
{
	const xen_msr_action_t xen_what = xen_msr_action(msr);
	uint32_t hi = val >> 32;
	int err = 0;

	if (xen_what == xen_msr_native) {
		asm_inline volatile(
			"1:\n"
			ALTERNATIVE("ds wrmsr; xor %[err],%[err]",
				    "wrmsrns; xor %[err],%[err]",
				    X86_FEATURE_WRMSRNS)
			"2:\n"
			EX_WRMSR_SAFE(1b,2b)
			: [err] "=r" (err)
			: [val] "a" (val), "d" (hi), [msr] "c" (msr)
			: "memory");
	} else {
		asm_inline volatile(
			"1:\n"
			ALTERNATIVE_2("ds wrmsr; xor %[err],%[err]",
				      "wrmsrns; xor %[err],%[err]",
				      X86_FEATURE_WRMSRNS,
				      "call %P[xen]",
				      X86_FEATURE_XENPV)
			"2:\n"
			EX_WRMSR_SAFE(1b,2b)
			: [err] "+r" (err), "+d" (hi), ASM_CALL_CONSTRAINT
			: [val] "a" (val), [msr] "c" (msr), [xen] "i" (xen_what)
			: "memory");
	}
	return err;
}
/* WRMSRNS with the MSR number as a 32-bit immediate ([msr]); value in [val]. */
#define WRMSRNS_IMM " .insn VEX.128.F3.M7.W0 0xf6 /0, %[val], %[msr] {:u32}\n"
static __must_inline void wrmsrns_constant(uint32_t msr, uint64_t val)
{
const xen_msr_action_t xen_what = xen_msr_action(msr, safe);
if (xen_what == xen_msr_native) {
asm_inline volatile(
"1:\n"
ALTERNATIVE_2("mov %%rax,%%rdx; "
"shr $32,%%rdx; "
"1: ds wrmsr",
"mov %%rax,%%rdx; "
"shr $32,%%rdx; "
"wrmsrns", X86_FEATURE_WRMSRNS,
WRMSRNS_IMM, X86_FEATURE_MSR_IMM)
"3:\n"
EX_WRMSR(1b,3b) /* For WRMSRNS immediate */
EX_WRMSR(2b,3b) /* For WRMSRNS and WRMSR */
: : [val] "a" (val), "c" (msr), [msr] "i" (msr)
: "memory", "edx");
} else if (xen_what == xen_msr_ignore) {
asm_inline volatile(
"1:\n"
ALTERNATIVE_3("mov %%rax,%%rdx; "
"shr $32,%%rdx; "
"1: ds wrmsr",
"mov %%rax,%%rdx; "
"shr $32,%%rdx; "
"wrmsrns", X86_FEATURE_WRMSRNS,
WRMSRNS_IMM, X86_FEATURE_MSR_IMM,
"", X86_FEATURE_XENPV)
"3:\n"
EX_WRMSR(1b,3b) /* For WRMSRNS immediate */
EX_WRMSR(2b,3b) /* For WRMSRNS and WRMSR */
: : [val] "a" (val), "c" (msr), [msr] "i" (msr)
: "memory", "edx");
} else {
asm_inline volatile(
ALTERNATIVE_2("1: mov %%rax,%%rdx; "
"shr $32,%%rdx; "
"2: ds wrmsr",
"mov %%rax,%%rdx; "
"shr $32,%%rdx; "
"wrmsrns", X86_FEATURE_WRMSRNS,
WRMSRNS_IMM, X86_FEATURE_MSR_IMM);
"3:\n"
EX_WRMSR(1b,3b) /* For Xen and WRMSRNS immediate */
EX_WRMSR(2b,3b) /* For WRMSRNS and WRMSR */
: ASM_CALL_CONSTRAINT
: [val] "a" (val), "c" (msr), [msr] "i" (msr),
[xen] "i" (xen_what)
: "memory", "edx");
}
return err;
}
/*
 * Like wrmsrns_constant(), but faults are reported instead of fatal.
 * err starts at 0 and the exception table writes an error code into
 * the [err] operand on a fault; "+r" keeps the initialization alive
 * since none of the instruction variants writes err on success.
 * Label scheme as in wrmsrns_constant(): 1 = region start (immediate
 * form), 2 = wrmsr, 3 = fixup target.
 */
static __must_inline int wrmsrns_constant_safe(uint32_t msr, uint64_t val)
{
	const xen_msr_action_t xen_what = xen_msr_action(msr);
	int err = 0;

	if (xen_what == xen_msr_native) {
		asm_inline volatile(
			ALTERNATIVE_2("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns", X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM, X86_FEATURE_MSR_IMM)
			"3:\n"
			EX_WRMSR_SAFE(1b,3b) /* For WRMSRNS immediate */
			EX_WRMSR_SAFE(2b,3b) /* For WRMSRNS and WRMSR */
			: [err] "+r" (err)
			: [val] "a" (val), "c" (msr), [msr] "i" (msr)
			: "memory", "edx");
	} else if (xen_what == xen_msr_ignore) {
		/* Same as native, but patched out entirely on Xen PV. */
		asm_inline volatile(
			ALTERNATIVE_3("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns", X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM, X86_FEATURE_MSR_IMM,
				      "", X86_FEATURE_XENPV)
			"3:\n"
			EX_WRMSR_SAFE(1b,3b) /* For WRMSRNS immediate */
			EX_WRMSR_SAFE(2b,3b) /* For WRMSRNS and WRMSR */
			: [err] "+r" (err)
			: [val] "a" (val), "c" (msr), [msr] "i" (msr)
			: "memory", "edx");
	} else {
		/* Call the Xen dispatch stub on Xen PV. */
		asm_inline volatile(
			ALTERNATIVE_3("1: mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "2: ds wrmsr",
				      "mov %%rax,%%rdx; "
				      "shr $32,%%rdx; "
				      "wrmsrns", X86_FEATURE_WRMSRNS,
				      WRMSRNS_IMM, X86_FEATURE_MSR_IMM,
				      "call %P[xen]", X86_FEATURE_XENPV)
			"3:\n"
			EX_WRMSR_SAFE(1b,3b) /* For Xen and WRMSRNS immediate */
			EX_WRMSR_SAFE(2b,3b) /* For WRMSRNS and WRMSR */
			: [err] "+r" (err), ASM_CALL_CONSTRAINT
			: [val] "a" (val), "c" (msr), [msr] "i" (msr),
			  [xen] "i" (xen_what)
			: "memory", "edx");
	}
	return err;
}
/*
 * Non-serializing MSR write: route compile-time-constant MSR numbers to
 * the immediate-capable variant, everything else to the run-time one.
 */
static inline void wrmsrns(uint32_t msr, uint64_t val)
{
	if (!__builtin_constant_p(msr)) {
		wrmsrns_variable(msr, val);
		return;
	}
	wrmsrns_constant(msr, val);
}
/*
 * Fault-tolerant non-serializing MSR write; returns 0 on success or an
 * error code from the exception fixup.  Dispatches on whether the MSR
 * number is a compile-time constant.
 */
static inline int wrmsrns_safe(uint32_t msr, uint64_t val)
{
	return __builtin_constant_p(msr) ?
		wrmsrns_constant_safe(msr, val) :
		wrmsrns_variable_safe(msr, val);
}