[PATCH 04/24] x86/entry: Expose the address of .Lgs_change to traps.c

From: Lai Jiangshan
Date: Tue Aug 31 2021 - 13:50:57 EST


From: Lai Jiangshan <laijs@xxxxxxxxxxxxxxxxx>

The address of .Lgs_change will be used in traps.c in a later patch, when
some of the entry code is implemented in traps.c. Expose the address of
.Lgs_change to traps.c in preparation for that change.

The label .Lgs_change is still needed in the ASM code for the exception
table (extable) entry.

Signed-off-by: Lai Jiangshan <laijs@xxxxxxxxxxxxxxxxx>
---
arch/x86/entry/entry_64.S | 3 ++-
arch/x86/entry/traps.c | 1 +
2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/arch/x86/entry/entry_64.S b/arch/x86/entry/entry_64.S
index e38a4cf795d9..9164e85b36b8 100644
--- a/arch/x86/entry/entry_64.S
+++ b/arch/x86/entry/entry_64.S
@@ -729,6 +729,7 @@ _ASM_NOKPROBE(common_interrupt_return)
SYM_FUNC_START(asm_load_gs_index)
FRAME_BEGIN
swapgs
+SYM_INNER_LABEL(asm_load_gs_index_gs_change, SYM_L_GLOBAL)
.Lgs_change:
movl %edi, %gs
2: ALTERNATIVE "", "mfence", X86_BUG_SWAPGS_FENCE
@@ -1011,7 +1012,7 @@ SYM_CODE_START_LOCAL(error_entry)
movl %ecx, %eax /* zero extend */
cmpq %rax, RIP+8(%rsp)
je .Lbstep_iret
- cmpq $.Lgs_change, RIP+8(%rsp)
+ cmpq $asm_load_gs_index_gs_change, RIP+8(%rsp)
jne .Lerror_entry_done_lfence

/*
diff --git a/arch/x86/entry/traps.c b/arch/x86/entry/traps.c
index 7869343473b7..f71db7934f90 100644
--- a/arch/x86/entry/traps.c
+++ b/arch/x86/entry/traps.c
@@ -67,6 +67,7 @@
#include <asm/proto.h>

extern unsigned char native_irq_return_iret[];
+extern unsigned char asm_load_gs_index_gs_change[];

#else
#include <asm/processor-flags.h>
--
2.19.1.6.gb485710b