[PATCH 06/10] x86/cpufeature: Get rid of the non-asm goto variant

From: Borislav Petkov
Date: Tue Jan 26 2016 - 16:13:35 EST


From: Borislav Petkov <bp@xxxxxxx>

I can simply quote hpa from the mail:

"Get rid of the non-asm goto variant and just fall back to dynamic if
asm goto is unavailable. It doesn't make any sense, really, if it is
supposed to be safe, and by now the asm goto-capable gcc is in more wide
use. (Originally the gcc 3.x fallback to pure dynamic didn't exist,
either.)"

Boy, am I lazy.

Clean up the whole CC_HAVE_ASM_GOTO ifdeffery too, while at it.

Signed-off-by: Borislav Petkov <bp@xxxxxxx>
Suggested-by: "H. Peter Anvin" <hpa@xxxxxxxxx>
---
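Not for the commit message, just to spell out what the fallback means: below is a
minimal user-space sketch of the pattern (x86-only, needs an asm goto-capable gcc;
dynamic_cpu_has(), feature_word and sketch_static_cpu_has() are made up for the
example, they are not the kernel code). The compiled fast path is a single jmp to
the dynamic test, which alternatives patching can later rewrite in the kernel;
without asm goto we simply do the dynamic test directly, i.e. static_cpu_has(bit)
becomes boot_cpu_has(bit).

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for the boot_cpu_data capability words -- invented for this sketch. */
static unsigned long feature_word = 1UL << 3;

/* Roughly what the dynamic boot_cpu_has() path boils down to. */
static bool dynamic_cpu_has(unsigned int bit)
{
	return feature_word & (1UL << bit);
}

/*
 * The asm goto pattern static_cpu_has() keeps: as compiled, the fast path is
 * a single jmp to the dynamic fallback; in the kernel, alternatives patching
 * rewrites that jmp at boot so the test becomes straight-line code.  Nothing
 * patches this user-space sketch, so it always takes the dynamic path.
 */
static inline bool sketch_static_cpu_has(unsigned int bit)
{
	asm goto("1: jmp %l[t_dynamic]\n"
		 : : : : t_dynamic);
	return true;
t_dynamic:
	return dynamic_cpu_has(bit);
}

int main(void)
{
	printf("bit 3: %d, bit 5: %d\n",
	       sketch_static_cpu_has(3), sketch_static_cpu_has(5));
	return 0;
}
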
arch/x86/include/asm/cpufeature.h | 49 ++++-----------------------------------
1 file changed, 5 insertions(+), 44 deletions(-)

diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index a261cf2e7907..d48bf024f335 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -129,17 +129,16 @@ extern const char * const x86_bug_flags[NBUGINTS*32];
* fast paths and boot_cpu_has() otherwise!
*/

-#if __GNUC__ >= 4 && defined(CONFIG_X86_FAST_FEATURE_TESTS)
+#if defined(CC_HAVE_ASM_GOTO) && defined(CONFIG_X86_FAST_FEATURE_TESTS)
extern bool __static_cpu_has(u16 bit);

/*
* Static testing of CPU features. Used the same as boot_cpu_has().
- * These are only valid after alternatives have run, but will statically
- * patch the target code for additional performance.
+ * These will statically patch the target code for additional
+ * performance.
*/
static __always_inline __pure bool _static_cpu_has(u16 bit)
{
-#ifdef CC_HAVE_ASM_GOTO
asm_volatile_goto("1: jmp %l[t_dynamic]\n"
"2:\n"
".skip -(((5f-4f) - (2b-1b)) > 0) * "
@@ -172,45 +171,6 @@ static __always_inline __pure bool _static_cpu_has(u16 bit)
return false;
t_dynamic:
return __static_cpu_has(bit);
-#else
- u8 flag;
- /* Open-coded due to __stringify() in ALTERNATIVE() */
- asm volatile("1: movb $2,%0\n"
- "2:\n"
- ".section .altinstructions,\"a\"\n"
- " .long 1b - .\n" /* src offset */
- " .long 3f - .\n" /* repl offset */
- " .word %P2\n" /* always replace */
- " .byte 2b - 1b\n" /* source len */
- " .byte 4f - 3f\n" /* replacement len */
- " .byte 0\n" /* pad len */
- ".previous\n"
- ".section .discard,\"aw\",@progbits\n"
- " .byte 0xff + (4f-3f) - (2b-1b)\n" /* size check */
- ".previous\n"
- ".section .altinstr_replacement,\"ax\"\n"
- "3: movb $0,%0\n"
- "4:\n"
- ".previous\n"
- ".section .altinstructions,\"a\"\n"
- " .long 1b - .\n" /* src offset */
- " .long 5f - .\n" /* repl offset */
- " .word %P1\n" /* feature bit */
- " .byte 4b - 3b\n" /* src len */
- " .byte 6f - 5f\n" /* repl len */
- " .byte 0\n" /* pad len */
- ".previous\n"
- ".section .discard,\"aw\",@progbits\n"
- " .byte 0xff + (6f-5f) - (4b-3b)\n" /* size check */
- ".previous\n"
- ".section .altinstr_replacement,\"ax\"\n"
- "5: movb $1,%0\n"
- "6:\n"
- ".previous\n"
- : "=qm" (flag)
- : "i" (bit), "i" (X86_FEATURE_ALWAYS));
- return (flag == 2 ? __static_cpu_has(bit) : flag);
-#endif /* CC_HAVE_ASM_GOTO */
}

#define static_cpu_has(bit) \
@@ -221,7 +181,8 @@ static __always_inline __pure bool _static_cpu_has(u16 bit)
)
#else
/*
- * gcc 3.x is too stupid to do the static test; fall back to dynamic.
+ * Fall back to dynamic for gcc versions which don't support asm goto. Should be
+ * a minority now anyway.
*/
#define static_cpu_has(bit) boot_cpu_has(bit)
#endif
--
2.3.5