LoongArch/ftrace: Add dynamic function graph tracer support

Once the function_graph tracer is enabled, a filtered function has the
following call sequence:

1) ftrace_caller      ==> on/off by ftrace_make_call/ftrace_make_nop
2) ftrace_graph_caller
3) ftrace_graph_call  ==> on/off by ftrace_en/disable_ftrace_graph_caller
4) prepare_ftrace_return

Considering the upcoming DYNAMIC_FTRACE_WITH_REGS feature, it is more
extensible to have a separate ftrace_graph_caller function instead of
calling prepare_ftrace_return directly from ftrace_caller.
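
For reference (not part of this patch): the sequence above is driven from
tracefs. A minimal user-space sketch, assuming tracefs is mounted at
/sys/kernel/tracing, with vfs_read used only as an example filter target and
write_str() as a hypothetical helper:

#include <fcntl.h>
#include <string.h>
#include <unistd.h>

static int write_str(const char *path, const char *val)
{
	int fd = open(path, O_WRONLY);

	if (fd < 0)
		return -1;
	if (write(fd, val, strlen(val)) < 0) {
		close(fd);
		return -1;
	}
	return close(fd);
}

int main(void)
{
	/* Only the filtered function gets the call sequence above. */
	write_str("/sys/kernel/tracing/set_ftrace_filter", "vfs_read");
	/* Selecting the tracer is what patches ftrace_call/ftrace_graph_call. */
	write_str("/sys/kernel/tracing/current_tracer", "function_graph");
	return 0;
}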

Co-developed-by: Jinyang He <hejinyang@loongson.cn>
Signed-off-by: Jinyang He <hejinyang@loongson.cn>
Signed-off-by: Qing Zhang <zhangqing@loongson.cn>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
Authored by Qing Zhang on 2022-12-10 22:40:15 +08:00; committed by Huacai Chen
parent 4733f09d88
commit 5fcfad3d41
3 changed files with 101 additions and 0 deletions

arch/loongarch/kernel/ftrace_dyn.c

@@ -108,3 +108,47 @@ int __init ftrace_dyn_arch_init(void)
{
	return 0;
}

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
void prepare_ftrace_return(unsigned long self_addr, unsigned long *parent)
{
	unsigned long old;
	unsigned long return_hooker = (unsigned long)&return_to_handler;

	if (unlikely(atomic_read(&current->tracing_graph_pause)))
		return;

	old = *parent;

	if (!function_graph_enter(old, self_addr, 0, NULL))
		*parent = return_hooker;
}

static int ftrace_modify_graph_caller(bool enable)
{
	u32 branch, nop;
	unsigned long pc, func;
	extern void ftrace_graph_call(void);

	pc = (unsigned long)&ftrace_graph_call;
	func = (unsigned long)&ftrace_graph_caller;

	nop = larch_insn_gen_nop();
	branch = larch_insn_gen_b(pc, func);

	if (enable)
		return ftrace_modify_code(pc, nop, branch, true);
	else
		return ftrace_modify_code(pc, branch, nop, true);
}

int ftrace_enable_ftrace_graph_caller(void)
{
	return ftrace_modify_graph_caller(true);
}

int ftrace_disable_ftrace_graph_caller(void)
{
	return ftrace_modify_graph_caller(false);
}
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
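
The nop <-> branch toggle above goes through ftrace_modify_code(), which is
defined elsewhere in ftrace_dyn.c. A minimal user-space sketch of the
validate-then-patch idea, assuming the fourth argument requests validation of
the old instruction word; modify_code() here is a stand-in, 0x03400000 is the
LoongArch nop encoding (andi $zero, $zero, 0), and the branch word is just an
example value:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the validate-then-patch idea; not the kernel implementation. */
static int modify_code(uint32_t *site, uint32_t old, uint32_t new, int validate)
{
	if (validate && *site != old)
		return -1;	/* the site no longer holds what we expected */
	*site = new;		/* the kernel would also flush the icache here */
	return 0;
}

int main(void)
{
	uint32_t nop = 0x03400000;	/* andi $zero, $zero, 0 == LoongArch nop */
	uint32_t branch = 0x50100000;	/* example "b" encoding, see the sketch below */
	uint32_t site = nop;

	/* Enable: nop -> branch, as in ftrace_enable_ftrace_graph_caller(). */
	if (modify_code(&site, nop, branch, 1) == 0)
		printf("site patched to 0x%08x\n", site);
	/* Disable: branch -> nop, as in ftrace_disable_ftrace_graph_caller(). */
	modify_code(&site, branch, nop, 1);
	return 0;
}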

arch/loongarch/kernel/inst.c

@@ -55,6 +55,30 @@ u32 larch_insn_gen_nop(void)
	return INSN_NOP;
}

u32 larch_insn_gen_b(unsigned long pc, unsigned long dest)
{
	long offset = dest - pc;
	unsigned int immediate_l, immediate_h;
	union loongarch_instruction insn;

	if ((offset & 3) || offset < -SZ_128M || offset >= SZ_128M) {
		pr_warn("The generated b instruction is out of range.\n");
		return INSN_BREAK;
	}

	offset >>= 2;
	immediate_l = offset & 0xffff;
	offset >>= 16;
	immediate_h = offset & 0x3ff;

	insn.reg0i26_format.opcode = b_op;
	insn.reg0i26_format.immediate_l = immediate_l;
	insn.reg0i26_format.immediate_h = immediate_h;

	return insn.word;
}

u32 larch_insn_gen_bl(unsigned long pc, unsigned long dest)
{
	long offset = dest - pc;
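
A worked example of the immediate split done by larch_insn_gen_b() above.
This is a stand-alone sketch; it assumes the b major opcode is 0x14 and that
reg0i26_format places immediate_l in bits 25:10 and immediate_h in bits 9:0,
matching the bitfield union used by the kernel code:

#include <stdint.h>
#include <stdio.h>

/* Mirror larch_insn_gen_b()'s immediate split for an in-range offset. */
static uint32_t gen_b(long offset)
{
	uint32_t imm = (uint32_t)(offset >> 2) & 0x3ffffff;	/* 26-bit word offset */
	uint32_t imm_l = imm & 0xffff;		/* offs[15:0]  -> bits 25:10 */
	uint32_t imm_h = (imm >> 16) & 0x3ff;	/* offs[25:16] -> bits 9:0   */

	return (0x14u << 26) | (imm_l << 10) | imm_h;	/* 0x14: assumed b opcode */
}

int main(void)
{
	/* A branch 0x1000 bytes forward encodes as 0x50100000. */
	printf("b +0x1000 -> 0x%08x\n", gen_b(0x1000));
	return 0;
}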

arch/loongarch/kernel/mcount_dyn.S

@@ -57,6 +57,11 @@ SYM_CODE_START(ftrace_common)
SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
	bl	ftrace_stub
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
	nop				/* b ftrace_graph_caller */
#endif
	/*
	 * As we didn't use S series regs in this assembly code and all calls
	 * are C functions which will save S series regs by themselves, there is
@@ -83,3 +88,31 @@ SYM_CODE_START(ftrace_caller)
	ftrace_regs_entry
	b	ftrace_common
SYM_CODE_END(ftrace_caller)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_CODE_START(ftrace_graph_caller)
	PTR_L	a0, sp, PT_ERA
	PTR_ADDI a0, a0, -8	/* arg0: self_addr */
	PTR_ADDI a1, sp, PT_R1	/* arg1: parent */
	bl	prepare_ftrace_return
	b	ftrace_common_return
SYM_CODE_END(ftrace_graph_caller)

SYM_CODE_START(return_to_handler)
	/* Save return value regs */
	PTR_ADDI sp, sp, -2 * SZREG
	PTR_S	a0, sp, 0
	PTR_S	a1, sp, SZREG

	move	a0, zero
	bl	ftrace_return_to_handler
	move	ra, a0

	/* Restore return value regs */
	PTR_L	a0, sp, 0
	PTR_L	a1, sp, SZREG
	PTR_ADDI sp, sp, 2 * SZREG

	jr	ra
SYM_CODE_END(return_to_handler)
#endif
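
To tie the assembly back to the C side: ftrace_graph_caller loads the saved
era and the address of the saved ra slot out of the pt_regs frame before
calling prepare_ftrace_return(). A rough stand-alone sketch; fake_pt_regs and
its layout are hypothetical stand-ins for the real pt_regs/PT_ERA/PT_R1
offsets, and the -8 is assumed to step back over the two 4-byte patched
instructions at the traced function's entry:

#include <stdio.h>

/* Hypothetical stand-in for the kernel's pt_regs layout. */
struct fake_pt_regs {
	unsigned long regs[32];		/* regs[1] is the saved ra (PT_R1) */
	unsigned long csr_era;		/* saved era (PT_ERA) */
};

/* Stub with the same shape as prepare_ftrace_return() added above. */
static void prepare_ftrace_return(unsigned long self_addr, unsigned long *parent)
{
	printf("self_addr=%#lx, parent slot holds %#lx\n", self_addr, *parent);
}

/* Rough C rendering of ftrace_graph_caller's argument setup. */
static void graph_caller(struct fake_pt_regs *regs)
{
	unsigned long self_addr = regs->csr_era - 8;	/* arg0: traced function entry */
	unsigned long *parent = &regs->regs[1];		/* arg1: address of saved ra */

	prepare_ftrace_return(self_addr, parent);
}

int main(void)
{
	struct fake_pt_regs regs = { .csr_era = 0x1008 };	/* era just past a patch site at 0x1000 */

	regs.regs[1] = 0x2000;	/* example: caller's return address */
	graph_caller(&regs);
	return 0;
}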