Re: [PATCH v2 03/24] objtool: Handle __sanitize_cov*() tail calls

From: Marco Elver
Date: Fri Jun 25 2021 - 07:05:50 EST


On Thu, Jun 24, 2021 at 11:41AM +0200, Peter Zijlstra wrote:
> Turns out the compilers also generate tail calls to __sanitize_cov*(),
> make sure to also patch those out in noinstr code.
>
> Fixes: 0f1441b44e82 ("objtool: Fix noinstr vs KCOV")
> Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>

Acked-by: Marco Elver <elver@xxxxxxxxxx>

Thanks! All the more reason not to even try doing the same for other
architectures, and all the more reason the compiler needs the
attribute...
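
For anyone skimming the diff below: the reason tail calls need special
handling is that a "jmp __sanitizer_cov_*" at the end of a noinstr
function cannot simply be NOPed out like a regular call (execution
would then fall through past the end of the function instead of
returning); it has to be rewritten to a RET padded out to the original
instruction length. Purely as a standalone illustration (not part of
the patch; padded_ret and BYTE_NOP are made-up names, and the real
arch_ret_insn() above pads with the kernel's multi-byte BYTES_NOPn
sequences rather than single 0x90 bytes):

#include <stdio.h>
#include <string.h>

#define BYTE_RET 0xC3
#define BYTE_NOP 0x90	/* single-byte NOP, just for the sketch */

/* Fill buf with one RET followed by len-1 NOP bytes. */
static int padded_ret(unsigned char *buf, int len)
{
	if (len < 1 || len > 5)
		return -1;

	buf[0] = BYTE_RET;
	memset(buf + 1, BYTE_NOP, len - 1);
	return 0;
}

int main(void)
{
	unsigned char insn[5];
	int len, i;

	for (len = 1; len <= 5; len++) {
		if (padded_ret(insn, len))
			continue;
		printf("len %d:", len);
		for (i = 0; i < len; i++)
			printf(" %02x", insn[i]);
		printf("\n");
	}
	return 0;
}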

> ---
> tools/objtool/arch/x86/decode.c | 20 ++++
> tools/objtool/check.c | 153 ++++++++++++++++++-----------------
> tools/objtool/include/objtool/arch.h | 1
> 3 files changed, 100 insertions(+), 74 deletions(-)
>
> --- a/tools/objtool/arch/x86/decode.c
> +++ b/tools/objtool/arch/x86/decode.c
> @@ -659,6 +659,26 @@ const char *arch_nop_insn(int len)
> return nops[len-1];
> }
>
> +#define BYTE_RET 0xC3
> +
> +const char *arch_ret_insn(int len)
> +{
> + static const char ret[5][5] = {
> + { BYTE_RET },
> + { BYTE_RET, BYTES_NOP1 },
> + { BYTE_RET, BYTES_NOP2 },
> + { BYTE_RET, BYTES_NOP3 },
> + { BYTE_RET, BYTES_NOP4 },
> + };
> +
> + if (len < 1 || len > 5) {
> + WARN("invalid RET size: %d\n", len);
> + return NULL;
> + }
> +
> + return ret[len-1];
> +}
> +
> /* asm/alternative.h ? */
>
> #define ALTINSTR_FLAG_INV (1 << 15)
> --- a/tools/objtool/check.c
> +++ b/tools/objtool/check.c
> @@ -828,6 +828,74 @@ static struct reloc *insn_reloc(struct o
> return insn->reloc;
> }
>
> +static void remove_insn_ops(struct instruction *insn)
> +{
> + struct stack_op *op, *tmp;
> +
> + list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
> + list_del(&op->list);
> + free(op);
> + }
> +}
> +
> +static void add_call_dest(struct objtool_file *file, struct instruction *insn,
> + struct symbol *dest, bool sibling)
> +{
> + struct reloc *reloc = insn_reloc(file, insn);
> +
> + insn->call_dest = dest;
> + if (!dest)
> + return;
> +
> + if (insn->call_dest->static_call_tramp) {
> + list_add_tail(&insn->call_node,
> + &file->static_call_list);
> + }
> +
> + if (insn->sec->noinstr &&
> + !strncmp(insn->call_dest->name, "__sanitizer_cov_", 16)) {
> + if (reloc) {
> + reloc->type = R_NONE;
> + elf_write_reloc(file->elf, reloc);
> + }
> +
> + elf_write_insn(file->elf, insn->sec,
> + insn->offset, insn->len,
> + sibling ? arch_ret_insn(insn->len)
> + : arch_nop_insn(insn->len));
> +
> + insn->type = sibling ? INSN_RETURN : INSN_NOP;
> + }
> +
> + if (mcount && !strcmp(insn->call_dest->name, "__fentry__")) {
> + if (sibling)
> + WARN_FUNC("Tail call to __fentry__ !?!?", insn->sec, insn->offset);
> +
> + if (reloc) {
> + reloc->type = R_NONE;
> + elf_write_reloc(file->elf, reloc);
> + }
> +
> + elf_write_insn(file->elf, insn->sec,
> + insn->offset, insn->len,
> + arch_nop_insn(insn->len));
> +
> + insn->type = INSN_NOP;
> +
> + list_add_tail(&insn->mcount_loc_node,
> + &file->mcount_loc_list);
> + }
> +
> + /*
> + * Whatever stack impact regular CALLs have, should be undone
> + * by the RETURN of the called function.
> + *
> + * Annotated intra-function calls retain the stack_ops but
> + * are converted to JUMP, see read_intra_function_calls().
> + */
> + remove_insn_ops(insn);
> +}
> +
> /*
> * Find the destination instructions for all jumps.
> */
> @@ -866,11 +934,7 @@ static int add_jump_destinations(struct
> continue;
> } else if (insn->func) {
> /* internal or external sibling call (with reloc) */
> - insn->call_dest = reloc->sym;
> - if (insn->call_dest->static_call_tramp) {
> - list_add_tail(&insn->call_node,
> - &file->static_call_list);
> - }
> + add_call_dest(file, insn, reloc->sym, true);
> continue;
> } else if (reloc->sym->sec->idx) {
> dest_sec = reloc->sym->sec;
> @@ -926,13 +990,8 @@ static int add_jump_destinations(struct
>
> } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
> insn->jump_dest->offset == insn->jump_dest->func->offset) {
> -
> /* internal sibling call (without reloc) */
> - insn->call_dest = insn->jump_dest->func;
> - if (insn->call_dest->static_call_tramp) {
> - list_add_tail(&insn->call_node,
> - &file->static_call_list);
> - }
> + add_call_dest(file, insn, insn->jump_dest->func, true);
> }
> }
> }
> @@ -940,16 +999,6 @@ static int add_jump_destinations(struct
> return 0;
> }
>
> -static void remove_insn_ops(struct instruction *insn)
> -{
> - struct stack_op *op, *tmp;
> -
> - list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
> - list_del(&op->list);
> - free(op);
> - }
> -}
> -
> static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
> {
> struct symbol *call_dest;
> @@ -968,6 +1017,7 @@ static int add_call_destinations(struct
> {
> struct instruction *insn;
> unsigned long dest_off;
> + struct symbol *dest;
> struct reloc *reloc;
>
> for_each_insn(file, insn) {
> @@ -977,7 +1027,9 @@ static int add_call_destinations(struct
> reloc = insn_reloc(file, insn);
> if (!reloc) {
> dest_off = arch_jump_destination(insn);
> - insn->call_dest = find_call_destination(insn->sec, dest_off);
> + dest = find_call_destination(insn->sec, dest_off);
> +
> + add_call_dest(file, insn, dest, false);
>
> if (insn->ignore)
> continue;
> @@ -995,9 +1047,8 @@ static int add_call_destinations(struct
>
> } else if (reloc->sym->type == STT_SECTION) {
> dest_off = arch_dest_reloc_offset(reloc->addend);
> - insn->call_dest = find_call_destination(reloc->sym->sec,
> - dest_off);
> - if (!insn->call_dest) {
> + dest = find_call_destination(reloc->sym->sec, dest_off);
> + if (!dest) {
> WARN_FUNC("can't find call dest symbol at %s+0x%lx",
> insn->sec, insn->offset,
> reloc->sym->sec->name,
> @@ -1005,6 +1056,8 @@ static int add_call_destinations(struct
> return -1;
> }
>
> + add_call_dest(file, insn, dest, false);
> +
> } else if (arch_is_retpoline(reloc->sym)) {
> /*
> * Retpoline calls are really dynamic calls in
> @@ -1020,55 +1073,7 @@ static int add_call_destinations(struct
> continue;
>
> } else
> - insn->call_dest = reloc->sym;
> -
> - if (insn->call_dest && insn->call_dest->static_call_tramp) {
> - list_add_tail(&insn->call_node,
> - &file->static_call_list);
> - }
> -
> - /*
> - * Many compilers cannot disable KCOV with a function attribute
> - * so they need a little help, NOP out any KCOV calls from noinstr
> - * text.
> - */
> - if (insn->sec->noinstr &&
> - !strncmp(insn->call_dest->name, "__sanitizer_cov_", 16)) {
> - if (reloc) {
> - reloc->type = R_NONE;
> - elf_write_reloc(file->elf, reloc);
> - }
> -
> - elf_write_insn(file->elf, insn->sec,
> - insn->offset, insn->len,
> - arch_nop_insn(insn->len));
> - insn->type = INSN_NOP;
> - }
> -
> - if (mcount && !strcmp(insn->call_dest->name, "__fentry__")) {
> - if (reloc) {
> - reloc->type = R_NONE;
> - elf_write_reloc(file->elf, reloc);
> - }
> -
> - elf_write_insn(file->elf, insn->sec,
> - insn->offset, insn->len,
> - arch_nop_insn(insn->len));
> -
> - insn->type = INSN_NOP;
> -
> - list_add_tail(&insn->mcount_loc_node,
> - &file->mcount_loc_list);
> - }
> -
> - /*
> - * Whatever stack impact regular CALLs have, should be undone
> - * by the RETURN of the called function.
> - *
> - * Annotated intra-function calls retain the stack_ops but
> - * are converted to JUMP, see read_intra_function_calls().
> - */
> - remove_insn_ops(insn);
> + add_call_dest(file, insn, reloc->sym, false);
> }
>
> return 0;
> --- a/tools/objtool/include/objtool/arch.h
> +++ b/tools/objtool/include/objtool/arch.h
> @@ -82,6 +82,7 @@ unsigned long arch_jump_destination(stru
> unsigned long arch_dest_reloc_offset(int addend);
>
> const char *arch_nop_insn(int len);
> +const char *arch_ret_insn(int len);
>
> int arch_decode_hint_reg(struct instruction *insn, u8 sp_reg);
>