/*
 * drivers/misc/tegra-profiler/backtrace.c
 *
 * Copyright (c) 2014, NVIDIA CORPORATION. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
17 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
19 #include <linux/uaccess.h>
20 #include <linux/sched.h>
23 #include <linux/tegra_profiler.h>
26 #include "backtrace.h"
27 #include "eh_unwind.h"
29 #define QUADD_USER_SPACE_MIN_ADDR 0x8000
/* True when the sampled user context is executing Thumb (16-bit) code. */
static inline int
is_thumb_mode(struct pt_regs *regs)
{
#ifdef CONFIG_ARM64
	return compat_thumb_mode(regs);
#else
	return thumb_mode(regs);
#endif
}
/*
 * Return the user-space stack pointer for the sampled context.
 * On arm64 a compat (AArch32) task keeps its SP in compat_sp.
 */
unsigned long
quadd_user_stack_pointer(struct pt_regs *regs)
{
#ifdef CONFIG_ARM64
	if (compat_user_mode(regs))
		return regs->compat_sp;
#endif

	return user_stack_pointer(regs);
}
52 static inline unsigned long
53 get_user_frame_pointer(struct pt_regs *regs)
58 if (compat_user_mode(regs))
59 fp = is_thumb_mode(regs) ?
60 regs->compat_usr(7) : regs->compat_usr(11);
64 fp = is_thumb_mode(regs) ? regs->ARM_r7 : regs->ARM_fp;
70 quadd_user_link_register(struct pt_regs *regs)
73 return compat_user_mode(regs) ?
74 regs->compat_lr : regs->regs[30];
81 quadd_callchain_store(struct quadd_callchain *cc,
85 cc->unw_rc = QUADD_URC_PC_INCORRECT;
89 if (cc->nr >= QUADD_MAX_STACK_DEPTH) {
90 cc->unw_rc = QUADD_URC_LEVEL_TOO_DEEP;
95 cc->ip_64[cc->nr++] = ip;
97 cc->ip_32[cc->nr++] = ip;
102 static unsigned long __user *
103 user_backtrace(unsigned long __user *tail,
104 struct quadd_callchain *cc,
105 struct vm_area_struct *stack_vma)
108 unsigned long value, value_lr = 0, value_fp = 0;
109 unsigned long __user *fp_prev = NULL;
111 if (!is_vma_addr((unsigned long)tail, stack_vma, sizeof(*tail)))
114 if (__copy_from_user_inatomic(&value, tail, sizeof(unsigned long))) {
115 cc->unw_rc = QUADD_URC_EACCESS;
119 if (is_vma_addr(value, stack_vma, sizeof(value))) {
120 /* gcc thumb/clang frame */
123 if (!is_vma_addr((unsigned long)(tail + 1), stack_vma,
127 if (__copy_from_user_inatomic(&value_lr, tail + 1,
129 cc->unw_rc = QUADD_URC_EACCESS;
134 if (__copy_from_user_inatomic(&value_fp, tail - 1,
136 cc->unw_rc = QUADD_URC_EACCESS;
140 if (!is_vma_addr(value_fp, stack_vma, sizeof(value_fp)))
146 fp_prev = (unsigned long __user *)value_fp;
148 if (value_lr < QUADD_USER_SPACE_MIN_ADDR) {
149 cc->unw_rc = QUADD_URC_PC_INCORRECT;
153 nr_added = quadd_callchain_store(cc, value_lr);
164 get_user_callchain_fp(struct pt_regs *regs,
165 struct quadd_callchain *cc,
166 struct task_struct *task)
168 unsigned long fp, sp, pc, reg;
169 struct vm_area_struct *vma, *vma_pc;
170 unsigned long __user *tail = NULL;
171 struct mm_struct *mm = task->mm;
174 cc->unw_rc = QUADD_URC_FP_INCORRECT;
177 cc->unw_rc = QUADD_URC_FAILURE;
181 sp = quadd_user_stack_pointer(regs);
182 pc = instruction_pointer(regs);
183 fp = get_user_frame_pointer(regs);
185 if (fp == 0 || fp < sp || fp & 0x3)
188 vma = find_vma(mm, sp);
190 cc->unw_rc = QUADD_URC_SP_INCORRECT;
194 if (!is_vma_addr(fp, vma, sizeof(fp)))
197 if (probe_kernel_address(fp, reg)) {
198 pr_warn_once("frame error: sp/fp: %#lx/%#lx, pc/lr: %#lx/%#lx, vma: %#lx-%#lx\n",
199 sp, fp, pc, quadd_user_link_register(regs),
200 vma->vm_start, vma->vm_end);
201 cc->unw_rc = QUADD_URC_EACCESS;
205 if (is_thumb_mode(regs)) {
206 if (reg <= fp || !is_vma_addr(reg, vma, sizeof(reg)))
208 } else if (reg > fp && is_vma_addr(reg, vma, sizeof(reg))) {
213 if (is_vma_addr(fp + sizeof(unsigned long), vma, sizeof(fp))) {
214 if (__copy_from_user_inatomic(
216 (unsigned long __user *)fp + 1,
217 sizeof(unsigned long))) {
218 cc->unw_rc = QUADD_URC_EACCESS;
222 vma_pc = find_vma(mm, pc);
226 if (!read_lr || !is_vma_addr(value, vma_pc, sizeof(value))) {
227 /* gcc: fp --> short frame tail (fp) */
229 unsigned long lr = quadd_user_link_register(regs);
231 if (lr < QUADD_USER_SPACE_MIN_ADDR) {
232 cc->unw_rc = QUADD_URC_PC_INCORRECT;
236 nr_added = quadd_callchain_store(cc, lr);
240 tail = (unsigned long __user *)reg;
245 tail = (unsigned long __user *)fp;
247 while (tail && !((unsigned long)tail & 0x3))
248 tail = user_backtrace(tail, cc, vma);
254 __user_backtrace(struct quadd_callchain *cc, struct task_struct *task)
256 struct mm_struct *mm = task->mm;
257 struct vm_area_struct *vma;
258 unsigned long __user *tail;
260 cc->unw_rc = QUADD_URC_FP_INCORRECT;
263 cc->unw_rc = QUADD_URC_FAILURE;
267 vma = find_vma(mm, cc->curr_sp);
269 cc->unw_rc = QUADD_URC_SP_INCORRECT;
273 tail = (unsigned long __user *)cc->curr_fp;
275 while (tail && !((unsigned long)tail & 0x3))
276 tail = user_backtrace(tail, cc, vma);
283 user_backtrace_compat(u32 __user *tail,
284 struct quadd_callchain *cc,
285 struct vm_area_struct *stack_vma)
288 u32 value, value_lr = 0, value_fp = 0;
289 u32 __user *fp_prev = NULL;
291 if (!is_vma_addr((unsigned long)tail, stack_vma, sizeof(*tail)))
294 if (__copy_from_user_inatomic(&value, tail, sizeof(value))) {
295 cc->unw_rc = QUADD_URC_EACCESS;
299 if (is_vma_addr(value, stack_vma, sizeof(value))) {
300 /* gcc thumb/clang frame */
303 if (!is_vma_addr((unsigned long)(tail + 1), stack_vma,
307 if (__copy_from_user_inatomic(&value_lr, tail + 1,
309 cc->unw_rc = QUADD_URC_EACCESS;
314 if (__copy_from_user_inatomic(&value_fp, tail - 1,
316 cc->unw_rc = QUADD_URC_EACCESS;
320 if (!is_vma_addr(value_fp, stack_vma, sizeof(value_fp)))
326 fp_prev = (u32 __user *)(unsigned long)value_fp;
328 if (value_lr < QUADD_USER_SPACE_MIN_ADDR) {
329 cc->unw_rc = QUADD_URC_PC_INCORRECT;
333 nr_added = quadd_callchain_store(cc, value_lr);
344 get_user_callchain_fp_compat(struct pt_regs *regs,
345 struct quadd_callchain *cc,
346 struct task_struct *task)
349 struct vm_area_struct *vma, *vma_pc;
350 u32 __user *tail = NULL;
351 struct mm_struct *mm = task->mm;
354 cc->unw_rc = QUADD_URC_FP_INCORRECT;
357 cc->unw_rc = QUADD_URC_FAILURE;
361 sp = quadd_user_stack_pointer(regs);
362 pc = instruction_pointer(regs);
363 fp = get_user_frame_pointer(regs);
365 if (fp == 0 || fp < sp || fp & 0x3)
368 vma = find_vma(mm, sp);
370 cc->unw_rc = QUADD_URC_SP_INCORRECT;
374 if (!is_vma_addr(fp, vma, sizeof(fp)))
377 if (probe_kernel_address((unsigned long)fp, reg)) {
378 pr_warn_once("frame error: sp/fp: %#x/%#x, pc/lr: %#x/%#x, vma: %#lx-%#lx\n",
379 sp, fp, pc, (u32)quadd_user_link_register(regs),
380 vma->vm_start, vma->vm_end);
381 cc->unw_rc = QUADD_URC_EACCESS;
385 if (is_thumb_mode(regs)) {
386 if (reg <= fp || !is_vma_addr(reg, vma, sizeof(reg)))
388 } else if (reg > fp && is_vma_addr(reg, vma, sizeof(reg))) {
393 if (is_vma_addr(fp + sizeof(u32), vma, sizeof(fp))) {
394 if (__copy_from_user_inatomic(
396 (u32 __user *)(fp + sizeof(u32)),
398 cc->unw_rc = QUADD_URC_EACCESS;
402 vma_pc = find_vma(mm, pc);
406 if (!read_lr || !is_vma_addr(value, vma_pc, sizeof(value))) {
407 /* gcc: fp --> short frame tail (fp) */
409 u32 lr = quadd_user_link_register(regs);
411 if (lr < QUADD_USER_SPACE_MIN_ADDR) {
412 cc->unw_rc = QUADD_URC_PC_INCORRECT;
416 nr_added = quadd_callchain_store(cc, lr);
420 tail = (u32 __user *)(unsigned long)reg;
425 tail = (u32 __user *)(unsigned long)fp;
427 while (tail && !((unsigned long)tail & 0x3))
428 tail = user_backtrace_compat(tail, cc, vma);
434 __user_backtrace_compat(struct quadd_callchain *cc, struct task_struct *task)
436 struct mm_struct *mm = task->mm;
437 struct vm_area_struct *vma;
440 cc->unw_rc = QUADD_URC_FP_INCORRECT;
443 cc->unw_rc = QUADD_URC_FAILURE;
447 vma = find_vma(mm, cc->curr_sp);
449 cc->unw_rc = QUADD_URC_SP_INCORRECT;
453 tail = (u32 __user *)cc->curr_fp;
455 while (tail && !((unsigned long)tail & 0x3))
456 tail = user_backtrace_compat(tail, cc, vma);
461 #endif /* CONFIG_ARM64 */
464 __get_user_callchain_fp(struct pt_regs *regs,
465 struct quadd_callchain *cc,
466 struct task_struct *task)
469 int nr, nr_prev = cc->nr;
471 if (cc->unw_rc == QUADD_URC_LEVEL_TOO_DEEP)
475 if (compat_user_mode(regs))
476 nr = __user_backtrace_compat(cc, task);
478 nr = __user_backtrace(cc, task);
480 nr = __user_backtrace(cc, task);
483 cc->unw_method = QUADD_UNW_METHOD_MIXED;
488 cc->unw_method = QUADD_UNW_METHOD_FP;
491 if (compat_user_mode(regs))
492 return get_user_callchain_fp_compat(regs, cc, task);
494 return get_user_callchain_fp(regs, cc, task);
498 quadd_get_user_callchain(struct pt_regs *regs,
499 struct quadd_callchain *cc,
500 struct quadd_ctx *ctx,
501 struct task_struct *task)
503 int unw_fp, unw_eht, unw_mix, nr = 0;
505 struct quadd_parameters *param = &ctx->param;
508 cc->unw_method = QUADD_URC_FAILURE;
517 cc->cs_64 = compat_user_mode(regs) ? 0 : 1;
522 extra = param->reserved[QUADD_PARAM_IDX_EXTRA];
524 unw_fp = extra & QUADD_PARAM_EXTRA_BT_FP;
525 unw_eht = extra & QUADD_PARAM_EXTRA_BT_UNWIND_TABLES;
526 unw_mix = extra & QUADD_PARAM_EXTRA_BT_MIXED;
531 nr = quadd_get_user_callchain_ut(regs, cc, task);
535 nr = __get_user_callchain_fp(regs, cc, task);