[RFC PATCH -next v2 2/4] arm64/ftrace: introduce ftrace dynamic trampoline entry points

From: Wang ShaoBo
Date: Wed Mar 16 2022 - 05:52:07 EST


From: Cheng Jian <cj.chengjian@xxxxxxxxxx>

Introduce two function entry points, ftrace_caller_tramp and
ftrace_regs_caller_tramp, for dynamic trampoline use. They live in a
read-only section and are copied in their entirety into the space
allocated for each trampoline.

Signed-off-by: Cheng Jian <cj.chengjian@xxxxxxxxxx>
Signed-off-by: Wang ShaoBo <bobo.shaobowang@xxxxxxxxxx>
---
arch/arm64/kernel/entry-ftrace.S | 80 +++++++++++++++++++++++++-------
1 file changed, 64 insertions(+), 16 deletions(-)
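
For reviewers, a rough sketch of how the template below is meant to be
consumed: the bytes between ftrace_regs_caller_tramp and
ftrace_regs_caller_tramp_end are copied into executable memory and the
exported inner labels are patched for a specific ftrace_ops. The helper
name ftrace_setup_tramp() is made up for illustration and is not part
of this series; only the template symbols come from this patch.

#include <linux/ftrace.h>
#include <linux/moduleloader.h>
#include <linux/string.h>
#include <asm/cacheflush.h>
#include <asm/insn.h>
#include <asm/patching.h>

/* Template symbols provided by entry-ftrace.S in this patch. */
extern void ftrace_regs_caller_tramp(void);
extern void ftrace_regs_caller_tramp_call(void);
extern void ftrace_regs_caller_tramp_end(void);

/* Illustrative only: build one trampoline for @ops (error paths trimmed). */
static unsigned long ftrace_setup_tramp(struct ftrace_ops *ops)
{
	unsigned long start = (unsigned long)ftrace_regs_caller_tramp;
	unsigned long size = (unsigned long)ftrace_regs_caller_tramp_end - start;
	unsigned long call = (unsigned long)ftrace_regs_caller_tramp_call - start;
	void *tramp;
	u32 insn;

	/* Copy the read-only template into executable memory. */
	tramp = module_alloc(size);
	if (!tramp)
		return 0;
	memcpy(tramp, (void *)start, size);

	/* Turn the 'call' NOP into a BL to this ops' callback. */
	insn = aarch64_insn_gen_branch_imm((unsigned long)tramp + call,
					   (unsigned long)ops->func,
					   AARCH64_INSN_BRANCH_LINK);
	aarch64_insn_patch_text_nosync(tramp + call, insn);

	/*
	 * The ops literal slot (*_tramp_ops) and the graph-call slot are
	 * patched in the same way; omitted here for brevity.
	 */
	flush_icache_range((unsigned long)tramp, (unsigned long)tramp + size);
	return (unsigned long)tramp;
}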

diff --git a/arch/arm64/kernel/entry-ftrace.S b/arch/arm64/kernel/entry-ftrace.S
index e535480a4069..88462d925446 100644
--- a/arch/arm64/kernel/entry-ftrace.S
+++ b/arch/arm64/kernel/entry-ftrace.S
@@ -76,6 +76,23 @@
add x29, sp, #S_STACKFRAME
.endm

+ .macro ftrace_regs_restore
+ /* Restore function arguments */
+ ldp x0, x1, [sp]
+ ldp x2, x3, [sp, #S_X2]
+ ldp x4, x5, [sp, #S_X4]
+ ldp x6, x7, [sp, #S_X6]
+ ldr x8, [sp, #S_X8]
+
+ /* Restore the callsite's FP, LR, PC */
+ ldr x29, [sp, #S_FP]
+ ldr x30, [sp, #S_LR]
+ ldr x9, [sp, #S_PC]
+
+ /* Restore the callsite's SP */
+ add sp, sp, #PT_REGS_SIZE + 16
+ .endm
+
SYM_CODE_START(ftrace_regs_caller)
bti c
ftrace_regs_entry 1
@@ -108,22 +125,8 @@ SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL) // ftrace_graph_caller();
* x19-x29 per the AAPCS, and we created frame records upon entry, so we need
* to restore x0-x8, x29, and x30.
*/
-ftrace_common_return:
- /* Restore function arguments */
- ldp x0, x1, [sp]
- ldp x2, x3, [sp, #S_X2]
- ldp x4, x5, [sp, #S_X4]
- ldp x6, x7, [sp, #S_X6]
- ldr x8, [sp, #S_X8]
-
- /* Restore the callsite's FP, LR, PC */
- ldr x29, [sp, #S_FP]
- ldr x30, [sp, #S_LR]
- ldr x9, [sp, #S_PC]
-
- /* Restore the callsite's SP */
- add sp, sp, #PT_REGS_SIZE + 16
-
+SYM_INNER_LABEL(ftrace_common_return, SYM_L_GLOBAL)
+ ftrace_regs_restore
ret x9
SYM_CODE_END(ftrace_common)

@@ -138,6 +141,51 @@ SYM_CODE_START(ftrace_graph_caller)
SYM_CODE_END(ftrace_graph_caller)
#endif

+.pushsection ".rodata", "a"
+// ftrace trampoline for ftrace_regs_caller
+SYM_CODE_START(ftrace_regs_caller_tramp)
+ bti c
+ ftrace_regs_entry 1 // save all regs
+
+ sub x0, x30, #AARCH64_INSN_SIZE // ip (callsite's BL insn)
+ mov x1, x9 // parent_ip (callsite's LR)
+SYM_INNER_LABEL(ftrace_regs_caller_tramp_ops, SYM_L_GLOBAL)
+ ldr x2, 0 // ops (placeholder, patched at trampoline setup)
+ mov x3, sp // regs
+SYM_INNER_LABEL(ftrace_regs_caller_tramp_call, SYM_L_GLOBAL)
+ nop // call site (patched at trampoline setup)
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+SYM_INNER_LABEL(ftrace_regs_caller_tramp_graph_call, SYM_L_GLOBAL)
+ nop // ftrace_graph_caller()
+#endif
+ ftrace_regs_restore
+SYM_INNER_LABEL(ftrace_regs_caller_tramp_end, SYM_L_GLOBAL)
+ ret x9
+SYM_CODE_END(ftrace_regs_caller_tramp)
+
+// ftrace trampoline for ftrace_caller
+SYM_CODE_START(ftrace_caller_tramp)
+ bti c
+ ftrace_regs_entry 0 // save necessary regs
+
+ sub x0, x30, #AARCH64_INSN_SIZE // ip (callsite's BL insn)
+ mov x1, x9 // parent_ip (callsite's LR)
+SYM_INNER_LABEL(ftrace_caller_tramp_ops, SYM_L_GLOBAL)
+ ldr x2, 0 // ops (placeholder, patched at trampoline setup)
+ mov x3, sp // regs
+SYM_INNER_LABEL(ftrace_caller_tramp_call, SYM_L_GLOBAL)
+ nop // call site (patched at trampoline setup)
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+SYM_INNER_LABEL(ftrace_caller_tramp_graph_call, SYM_L_GLOBAL)
+ nop // ftrace_graph_caller()
+#endif
+ ftrace_regs_restore
+SYM_INNER_LABEL(ftrace_caller_tramp_end, SYM_L_GLOBAL)
+ ret x9
+SYM_CODE_END(ftrace_caller_tramp)
+.popsection // .rodata
+
+
#else /* CONFIG_DYNAMIC_FTRACE_WITH_REGS */

/*
--
2.25.1