From 8e7a4cef71790d9406a7bd85aea6cfb12bc07d3c Mon Sep 17 00:00:00 2001
From: Yalin Wang
Date: Mon, 3 Nov 2014 03:02:23 +0100
Subject: ARM: 8189/1: arm64: add bitrev.h file to support rbit instruction

This patch adds a bitrev.h file to support the rbit instruction, so that
bit-reverse operations can be done in hardware.

Signed-off-by: Yalin Wang
Acked-by: Will Deacon
Signed-off-by: Russell King
---
 arch/arm64/Kconfig              |  1 +
 arch/arm64/include/asm/bitrev.h | 19 +++++++++++++++++++
 2 files changed, 20 insertions(+)
 create mode 100644 arch/arm64/include/asm/bitrev.h

(limited to 'arch')

diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index b1f9a20a3677..0ed36d1f536d 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -39,6 +39,7 @@ config ARM64
 	select HARDIRQS_SW_RESEND
 	select HAVE_ALIGNED_STRUCT_PAGE if SLUB
 	select HAVE_ARCH_AUDITSYSCALL
+	select HAVE_ARCH_BITREVERSE
 	select HAVE_ARCH_JUMP_LABEL
 	select HAVE_ARCH_KGDB
 	select HAVE_ARCH_SECCOMP_FILTER
diff --git a/arch/arm64/include/asm/bitrev.h b/arch/arm64/include/asm/bitrev.h
new file mode 100644
index 000000000000..a5a0c3660137
--- /dev/null
+++ b/arch/arm64/include/asm/bitrev.h
@@ -0,0 +1,19 @@
+#ifndef __ASM_BITREV_H
+#define __ASM_BITREV_H
+static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x)
+{
+	__asm__ ("rbit %w0, %w1" : "=r" (x) : "r" (x));
+	return x;
+}
+
+static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x)
+{
+	return __arch_bitrev32((u32)x) >> 16;
+}
+
+static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x)
+{
+	return __arch_bitrev32((u32)x) >> 24;
+}
+
+#endif
--
cgit v1.2.3-59-g8ed1b


From fca08f326ae0423f03b097ff54de432fe77b95d0 Mon Sep 17 00:00:00 2001
From: Wang Nan
Date: Fri, 9 Jan 2015 10:19:49 +0800
Subject: ARM: probes: move all probe code to a dedicated directory

In a discussion on LKML (https://lkml.org/lkml/2014/11/28/158), Russell
King suggested moving all probe-related code to arch/arm/probes. This
patch does that work.

Because of the dependency on 'arch/arm/kernel/patch.h', this patch also
moves patch.h to 'arch/arm/include/asm/patch.h', and the related
'#include' directives are modified to '#include <asm/patch.h>'.
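As context for the header move, the following is a stand-alone C sketch (a
userspace mock, not the kernel implementation) of the call pattern exposed by
the relocated asm/patch.h shown in the diff below: two thin wrappers,
__patch_text() and __patch_text_early(), that differ only in the 'remap' flag
passed to __patch_text_real(). The mock body and the sample opcodes in main()
are illustrative only; in the kernel the real worker lives in
arch/arm/kernel/patch.c and handles details such as remapping read-only text
and cache maintenance.

/*
 * Illustrative userspace mock of the <asm/patch.h> call pattern.
 * Names mirror the header moved by this patch; the bodies do not.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void __patch_text_real(void *addr, unsigned int insn, bool remap)
{
	/* Mock: the kernel version remaps read-only text when 'remap' is set. */
	printf("patch %p <- %08x (remap=%d)\n", addr, insn, (int)remap);
	memcpy(addr, &insn, sizeof(insn));
}

/* Normal runtime patching: kernel text may be read-only, so request a remap. */
static void __patch_text(void *addr, unsigned int insn)
{
	__patch_text_real(addr, insn, true);
}

/* Early boot patching: text is still writable, so skip the remap. */
static void __patch_text_early(void *addr, unsigned int insn)
{
	__patch_text_real(addr, insn, false);
}

int main(void)
{
	uint32_t text[2] = { 0xe1a00000, 0xe1a00000 };	/* two "mov r0, r0" nops */

	__patch_text(&text[0], 0xe320f000);		/* ARMv7 NOP encoding */
	__patch_text_early(&text[1], 0xeafffffe);	/* "b ." (branch to self) */
	printf("%08x %08x\n", text[0], text[1]);
	return 0;
}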
Following is an overview of this patch: ./arch/arm/kernel/ ./arch/arm/probes/ |-- Makefile |-- Makefile |-- probes-arm.c ==> |-- decode-arm.c |-- probes-arm.h ==> |-- decode-arm.h |-- probes-thumb.c ==> |-- decode-thumb.c |-- probes-thumb.h ==> |-- decode-thumb.h |-- probes.c ==> |-- decode.c |-- probes.h ==> |-- decode.h | |-- kprobes | | |-- Makefile |-- kprobes-arm.c ==> | |-- actions-arm.c |-- kprobes-common.c ==> | |-- actions-common.c |-- kprobes-thumb.c ==> | |-- actions-thumb.c |-- kprobes.c ==> | |-- core.c |-- kprobes.h ==> | |-- core.h |-- kprobes-test-arm.c ==> | |-- test-arm.c |-- kprobes-test.c ==> | |-- test-core.c |-- kprobes-test.h ==> | |-- test-core.h |-- kprobes-test-thumb.c ==> | `-- test-thumb.c | `-- uprobes | |-- Makefile |-- uprobes-arm.c ==> |-- actions-arm.c |-- uprobes.c ==> |-- core.c |-- uprobes.h ==> `-- core.h | `-- patch.h ==> arch/arm/include/asm/patch.h Signed-off-by: Wang Nan Acked-by: Masami Hiramatsu Signed-off-by: Jon Medhurst --- arch/arm/Makefile | 1 + arch/arm/include/asm/patch.h | 17 + arch/arm/kernel/Makefile | 16 +- arch/arm/kernel/jump_label.c | 2 +- arch/arm/kernel/kgdb.c | 3 +- arch/arm/kernel/kprobes-arm.c | 343 ------ arch/arm/kernel/kprobes-common.c | 171 --- arch/arm/kernel/kprobes-test-arm.c | 1346 ----------------------- arch/arm/kernel/kprobes-test-thumb.c | 1188 --------------------- arch/arm/kernel/kprobes-test.c | 1713 ------------------------------ arch/arm/kernel/kprobes-test.h | 435 -------- arch/arm/kernel/kprobes-thumb.c | 666 ------------ arch/arm/kernel/kprobes.c | 628 ----------- arch/arm/kernel/kprobes.h | 52 - arch/arm/kernel/patch.c | 3 +- arch/arm/kernel/patch.h | 17 - arch/arm/kernel/probes-arm.c | 734 ------------- arch/arm/kernel/probes-arm.h | 73 -- arch/arm/kernel/probes-thumb.c | 882 --------------- arch/arm/kernel/probes-thumb.h | 97 -- arch/arm/kernel/probes.c | 456 -------- arch/arm/kernel/probes.h | 407 ------- arch/arm/kernel/uprobes-arm.c | 234 ---- arch/arm/kernel/uprobes.c | 230 ---- arch/arm/kernel/uprobes.h | 35 - arch/arm/probes/Makefile | 7 + arch/arm/probes/decode-arm.c | 735 +++++++++++++ arch/arm/probes/decode-arm.h | 75 ++ arch/arm/probes/decode-thumb.c | 882 +++++++++++++++ arch/arm/probes/decode-thumb.h | 99 ++ arch/arm/probes/decode.c | 456 ++++++++ arch/arm/probes/decode.h | 407 +++++++ arch/arm/probes/kprobes/Makefile | 11 + arch/arm/probes/kprobes/actions-arm.c | 343 ++++++ arch/arm/probes/kprobes/actions-common.c | 171 +++ arch/arm/probes/kprobes/actions-thumb.c | 666 ++++++++++++ arch/arm/probes/kprobes/core.c | 628 +++++++++++ arch/arm/probes/kprobes/core.h | 53 + arch/arm/probes/kprobes/test-arm.c | 1346 +++++++++++++++++++++++ arch/arm/probes/kprobes/test-core.c | 1713 ++++++++++++++++++++++++++++++ arch/arm/probes/kprobes/test-core.h | 435 ++++++++ arch/arm/probes/kprobes/test-thumb.c | 1188 +++++++++++++++++++++ arch/arm/probes/uprobes/Makefile | 1 + arch/arm/probes/uprobes/actions-arm.c | 234 ++++ arch/arm/probes/uprobes/core.c | 230 ++++ arch/arm/probes/uprobes/core.h | 35 + 46 files changed, 9738 insertions(+), 9726 deletions(-) create mode 100644 arch/arm/include/asm/patch.h delete mode 100644 arch/arm/kernel/kprobes-arm.c delete mode 100644 arch/arm/kernel/kprobes-common.c delete mode 100644 arch/arm/kernel/kprobes-test-arm.c delete mode 100644 arch/arm/kernel/kprobes-test-thumb.c delete mode 100644 arch/arm/kernel/kprobes-test.c delete mode 100644 arch/arm/kernel/kprobes-test.h delete mode 100644 arch/arm/kernel/kprobes-thumb.c delete mode 100644 arch/arm/kernel/kprobes.c delete 
mode 100644 arch/arm/kernel/kprobes.h delete mode 100644 arch/arm/kernel/patch.h delete mode 100644 arch/arm/kernel/probes-arm.c delete mode 100644 arch/arm/kernel/probes-arm.h delete mode 100644 arch/arm/kernel/probes-thumb.c delete mode 100644 arch/arm/kernel/probes-thumb.h delete mode 100644 arch/arm/kernel/probes.c delete mode 100644 arch/arm/kernel/probes.h delete mode 100644 arch/arm/kernel/uprobes-arm.c delete mode 100644 arch/arm/kernel/uprobes.c delete mode 100644 arch/arm/kernel/uprobes.h create mode 100644 arch/arm/probes/Makefile create mode 100644 arch/arm/probes/decode-arm.c create mode 100644 arch/arm/probes/decode-arm.h create mode 100644 arch/arm/probes/decode-thumb.c create mode 100644 arch/arm/probes/decode-thumb.h create mode 100644 arch/arm/probes/decode.c create mode 100644 arch/arm/probes/decode.h create mode 100644 arch/arm/probes/kprobes/Makefile create mode 100644 arch/arm/probes/kprobes/actions-arm.c create mode 100644 arch/arm/probes/kprobes/actions-common.c create mode 100644 arch/arm/probes/kprobes/actions-thumb.c create mode 100644 arch/arm/probes/kprobes/core.c create mode 100644 arch/arm/probes/kprobes/core.h create mode 100644 arch/arm/probes/kprobes/test-arm.c create mode 100644 arch/arm/probes/kprobes/test-core.c create mode 100644 arch/arm/probes/kprobes/test-core.h create mode 100644 arch/arm/probes/kprobes/test-thumb.c create mode 100644 arch/arm/probes/uprobes/Makefile create mode 100644 arch/arm/probes/uprobes/actions-arm.c create mode 100644 arch/arm/probes/uprobes/core.c create mode 100644 arch/arm/probes/uprobes/core.h (limited to 'arch') diff --git a/arch/arm/Makefile b/arch/arm/Makefile index c1785eec2cf7..7f99cd652203 100644 --- a/arch/arm/Makefile +++ b/arch/arm/Makefile @@ -266,6 +266,7 @@ core-$(CONFIG_KVM_ARM_HOST) += arch/arm/kvm/ # If we have a machine-specific directory, then include it in the build. 
core-y += arch/arm/kernel/ arch/arm/mm/ arch/arm/common/ +core-y += arch/arm/probes/ core-y += arch/arm/net/ core-y += arch/arm/crypto/ core-y += arch/arm/firmware/ diff --git a/arch/arm/include/asm/patch.h b/arch/arm/include/asm/patch.h new file mode 100644 index 000000000000..77e054c2f6cd --- /dev/null +++ b/arch/arm/include/asm/patch.h @@ -0,0 +1,17 @@ +#ifndef _ARM_KERNEL_PATCH_H +#define _ARM_KERNEL_PATCH_H + +void patch_text(void *addr, unsigned int insn); +void __patch_text_real(void *addr, unsigned int insn, bool remap); + +static inline void __patch_text(void *addr, unsigned int insn) +{ + __patch_text_real(addr, insn, true); +} + +static inline void __patch_text_early(void *addr, unsigned int insn) +{ + __patch_text_real(addr, insn, false); +} + +#endif diff --git a/arch/arm/kernel/Makefile b/arch/arm/kernel/Makefile index fb2b71ebe3f2..9c51a433e025 100644 --- a/arch/arm/kernel/Makefile +++ b/arch/arm/kernel/Makefile @@ -51,20 +51,8 @@ obj-$(CONFIG_DYNAMIC_FTRACE) += ftrace.o insn.o obj-$(CONFIG_FUNCTION_GRAPH_TRACER) += ftrace.o insn.o obj-$(CONFIG_JUMP_LABEL) += jump_label.o insn.o patch.o obj-$(CONFIG_KEXEC) += machine_kexec.o relocate_kernel.o -obj-$(CONFIG_UPROBES) += probes.o probes-arm.o uprobes.o uprobes-arm.o -obj-$(CONFIG_KPROBES) += probes.o kprobes.o kprobes-common.o patch.o -ifdef CONFIG_THUMB2_KERNEL -obj-$(CONFIG_KPROBES) += kprobes-thumb.o probes-thumb.o -else -obj-$(CONFIG_KPROBES) += kprobes-arm.o probes-arm.o -endif -obj-$(CONFIG_ARM_KPROBES_TEST) += test-kprobes.o -test-kprobes-objs := kprobes-test.o -ifdef CONFIG_THUMB2_KERNEL -test-kprobes-objs += kprobes-test-thumb.o -else -test-kprobes-objs += kprobes-test-arm.o -endif +# Main staffs in KPROBES are in arch/arm/probes/ . +obj-$(CONFIG_KPROBES) += patch.o obj-$(CONFIG_OABI_COMPAT) += sys_oabi-compat.o obj-$(CONFIG_ARM_THUMBEE) += thumbee.o obj-$(CONFIG_KGDB) += kgdb.o patch.o diff --git a/arch/arm/kernel/jump_label.c b/arch/arm/kernel/jump_label.c index afeeb9ea6f43..d8da075959bf 100644 --- a/arch/arm/kernel/jump_label.c +++ b/arch/arm/kernel/jump_label.c @@ -1,8 +1,8 @@ #include #include +#include #include "insn.h" -#include "patch.h" #ifdef HAVE_JUMP_LABEL diff --git a/arch/arm/kernel/kgdb.c b/arch/arm/kernel/kgdb.c index 07db2f8a1b45..a6ad93c9bce3 100644 --- a/arch/arm/kernel/kgdb.c +++ b/arch/arm/kernel/kgdb.c @@ -14,10 +14,9 @@ #include #include +#include #include -#include "patch.h" - struct dbg_reg_def_t dbg_reg_def[DBG_MAX_REG_NUM] = { { "r0", 4, offsetof(struct pt_regs, ARM_r0)}, diff --git a/arch/arm/kernel/kprobes-arm.c b/arch/arm/kernel/kprobes-arm.c deleted file mode 100644 index ac300c60d656..000000000000 --- a/arch/arm/kernel/kprobes-arm.c +++ /dev/null @@ -1,343 +0,0 @@ -/* - * arch/arm/kernel/kprobes-decode.c - * - * Copyright (C) 2006, 2007 Motorola Inc. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * General Public License for more details. - */ - -/* - * We do not have hardware single-stepping on ARM, This - * effort is further complicated by the ARM not having a - * "next PC" register. 
Instructions that change the PC - * can't be safely single-stepped in a MP environment, so - * we have a lot of work to do: - * - * In the prepare phase: - * *) If it is an instruction that does anything - * with the CPU mode, we reject it for a kprobe. - * (This is out of laziness rather than need. The - * instructions could be simulated.) - * - * *) Otherwise, decode the instruction rewriting its - * registers to take fixed, ordered registers and - * setting a handler for it to run the instruction. - * - * In the execution phase by an instruction's handler: - * - * *) If the PC is written to by the instruction, the - * instruction must be fully simulated in software. - * - * *) Otherwise, a modified form of the instruction is - * directly executed. Its handler calls the - * instruction in insn[0]. In insn[1] is a - * "mov pc, lr" to return. - * - * Before calling, load up the reordered registers - * from the original instruction's registers. If one - * of the original input registers is the PC, compute - * and adjust the appropriate input register. - * - * After call completes, copy the output registers to - * the original instruction's original registers. - * - * We don't use a real breakpoint instruction since that - * would have us in the kernel go from SVC mode to SVC - * mode losing the link register. Instead we use an - * undefined instruction. To simplify processing, the - * undefined instruction used for kprobes must be reserved - * exclusively for kprobes use. - * - * TODO: ifdef out some instruction decoding based on architecture. - */ - -#include -#include -#include - -#include "kprobes.h" -#include "probes-arm.h" - -#if __LINUX_ARM_ARCH__ >= 6 -#define BLX(reg) "blx "reg" \n\t" -#else -#define BLX(reg) "mov lr, pc \n\t" \ - "mov pc, "reg" \n\t" -#endif - -static void __kprobes -emulate_ldrdstrd(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 4; - int rt = (insn >> 12) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rtv asm("r0") = regs->uregs[rt]; - register unsigned long rt2v asm("r1") = regs->uregs[rt+1]; - register unsigned long rnv asm("r2") = (rn == 15) ? pc - : regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - - __asm__ __volatile__ ( - BLX("%[fn]") - : "=r" (rtv), "=r" (rt2v), "=r" (rnv) - : "0" (rtv), "1" (rt2v), "2" (rnv), "r" (rmv), - [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rt] = rtv; - regs->uregs[rt+1] = rt2v; - if (is_writeback(insn)) - regs->uregs[rn] = rnv; -} - -static void __kprobes -emulate_ldr(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 4; - int rt = (insn >> 12) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rtv asm("r0"); - register unsigned long rnv asm("r2") = (rn == 15) ? 
pc - : regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - - __asm__ __volatile__ ( - BLX("%[fn]") - : "=r" (rtv), "=r" (rnv) - : "1" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - if (rt == 15) - load_write_pc(rtv, regs); - else - regs->uregs[rt] = rtv; - - if (is_writeback(insn)) - regs->uregs[rn] = rnv; -} - -static void __kprobes -emulate_str(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long rtpc = regs->ARM_pc - 4 + str_pc_offset; - unsigned long rnpc = regs->ARM_pc + 4; - int rt = (insn >> 12) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rtv asm("r0") = (rt == 15) ? rtpc - : regs->uregs[rt]; - register unsigned long rnv asm("r2") = (rn == 15) ? rnpc - : regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - - __asm__ __volatile__ ( - BLX("%[fn]") - : "=r" (rnv) - : "r" (rtv), "0" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - if (is_writeback(insn)) - regs->uregs[rn] = rnv; -} - -static void __kprobes -emulate_rd12rn16rm0rs8_rwflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 4; - int rd = (insn >> 12) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - int rs = (insn >> 8) & 0xf; - - register unsigned long rdv asm("r0") = regs->uregs[rd]; - register unsigned long rnv asm("r2") = (rn == 15) ? pc - : regs->uregs[rn]; - register unsigned long rmv asm("r3") = (rm == 15) ? pc - : regs->uregs[rm]; - register unsigned long rsv asm("r1") = regs->uregs[rs]; - unsigned long cpsr = regs->ARM_cpsr; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[cpsr] \n\t" - BLX("%[fn]") - "mrs %[cpsr], cpsr \n\t" - : "=r" (rdv), [cpsr] "=r" (cpsr) - : "0" (rdv), "r" (rnv), "r" (rmv), "r" (rsv), - "1" (cpsr), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - if (rd == 15) - alu_write_pc(rdv, regs); - else - regs->uregs[rd] = rdv; - regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); -} - -static void __kprobes -emulate_rd12rn16rm0_rwflags_nopc(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rd = (insn >> 12) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rdv asm("r0") = regs->uregs[rd]; - register unsigned long rnv asm("r2") = regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - unsigned long cpsr = regs->ARM_cpsr; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[cpsr] \n\t" - BLX("%[fn]") - "mrs %[cpsr], cpsr \n\t" - : "=r" (rdv), [cpsr] "=r" (cpsr) - : "0" (rdv), "r" (rnv), "r" (rmv), - "1" (cpsr), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rd] = rdv; - regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); -} - -static void __kprobes -emulate_rd16rn12rm0rs8_rwflags_nopc(probes_opcode_t insn, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - int rd = (insn >> 16) & 0xf; - int rn = (insn >> 12) & 0xf; - int rm = insn & 0xf; - int rs = (insn >> 8) & 0xf; - - register unsigned long rdv asm("r2") = regs->uregs[rd]; - register unsigned long rnv asm("r0") = regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - register unsigned long rsv asm("r1") = regs->uregs[rs]; - unsigned long cpsr = regs->ARM_cpsr; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[cpsr] \n\t" - BLX("%[fn]") - "mrs %[cpsr], cpsr \n\t" - : "=r" (rdv), [cpsr] "=r" (cpsr) - : "0" (rdv), "r" 
(rnv), "r" (rmv), "r" (rsv), - "1" (cpsr), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rd] = rdv; - regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); -} - -static void __kprobes -emulate_rd12rm0_noflags_nopc(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rd = (insn >> 12) & 0xf; - int rm = insn & 0xf; - - register unsigned long rdv asm("r0") = regs->uregs[rd]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - - __asm__ __volatile__ ( - BLX("%[fn]") - : "=r" (rdv) - : "0" (rdv), "r" (rmv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rd] = rdv; -} - -static void __kprobes -emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(probes_opcode_t insn, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - int rdlo = (insn >> 12) & 0xf; - int rdhi = (insn >> 16) & 0xf; - int rn = insn & 0xf; - int rm = (insn >> 8) & 0xf; - - register unsigned long rdlov asm("r0") = regs->uregs[rdlo]; - register unsigned long rdhiv asm("r2") = regs->uregs[rdhi]; - register unsigned long rnv asm("r3") = regs->uregs[rn]; - register unsigned long rmv asm("r1") = regs->uregs[rm]; - unsigned long cpsr = regs->ARM_cpsr; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[cpsr] \n\t" - BLX("%[fn]") - "mrs %[cpsr], cpsr \n\t" - : "=r" (rdlov), "=r" (rdhiv), [cpsr] "=r" (cpsr) - : "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv), - "2" (cpsr), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rdlo] = rdlov; - regs->uregs[rdhi] = rdhiv; - regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); -} - -const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = { - [PROBES_EMULATE_NONE] = {.handler = probes_emulate_none}, - [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop}, - [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop}, - [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop}, - [PROBES_BRANCH_IMM] = {.handler = simulate_blx1}, - [PROBES_MRS] = {.handler = simulate_mrs}, - [PROBES_BRANCH_REG] = {.handler = simulate_blx2bx}, - [PROBES_CLZ] = {.handler = emulate_rd12rm0_noflags_nopc}, - [PROBES_SATURATING_ARITHMETIC] = { - .handler = emulate_rd12rn16rm0_rwflags_nopc}, - [PROBES_MUL1] = {.handler = emulate_rdlo12rdhi16rn0rm8_rwflags_nopc}, - [PROBES_MUL2] = {.handler = emulate_rd16rn12rm0rs8_rwflags_nopc}, - [PROBES_SWP] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, - [PROBES_LDRSTRD] = {.handler = emulate_ldrdstrd}, - [PROBES_LOAD_EXTRA] = {.handler = emulate_ldr}, - [PROBES_LOAD] = {.handler = emulate_ldr}, - [PROBES_STORE_EXTRA] = {.handler = emulate_str}, - [PROBES_STORE] = {.handler = emulate_str}, - [PROBES_MOV_IP_SP] = {.handler = simulate_mov_ipsp}, - [PROBES_DATA_PROCESSING_REG] = { - .handler = emulate_rd12rn16rm0rs8_rwflags}, - [PROBES_DATA_PROCESSING_IMM] = { - .handler = emulate_rd12rn16rm0rs8_rwflags}, - [PROBES_MOV_HALFWORD] = {.handler = emulate_rd12rm0_noflags_nopc}, - [PROBES_SEV] = {.handler = probes_emulate_none}, - [PROBES_WFE] = {.handler = probes_simulate_nop}, - [PROBES_SATURATE] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, - [PROBES_REV] = {.handler = emulate_rd12rm0_noflags_nopc}, - [PROBES_MMI] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, - [PROBES_PACK] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, - [PROBES_EXTEND] = {.handler = emulate_rd12rm0_noflags_nopc}, - [PROBES_EXTEND_ADD] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, - [PROBES_MUL_ADD_LONG] = { - .handler = emulate_rdlo12rdhi16rn0rm8_rwflags_nopc}, - 
[PROBES_MUL_ADD] = {.handler = emulate_rd16rn12rm0rs8_rwflags_nopc}, - [PROBES_BITFIELD] = {.handler = emulate_rd12rm0_noflags_nopc}, - [PROBES_BRANCH] = {.handler = simulate_bbl}, - [PROBES_LDMSTM] = {.decoder = kprobe_decode_ldmstm} -}; diff --git a/arch/arm/kernel/kprobes-common.c b/arch/arm/kernel/kprobes-common.c deleted file mode 100644 index 0bf5d64eba1d..000000000000 --- a/arch/arm/kernel/kprobes-common.c +++ /dev/null @@ -1,171 +0,0 @@ -/* - * arch/arm/kernel/kprobes-common.c - * - * Copyright (C) 2011 Jon Medhurst . - * - * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is - * Copyright (C) 2006, 2007 Motorola Inc. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#include -#include -#include - -#include "kprobes.h" - - -static void __kprobes simulate_ldm1stm1(probes_opcode_t insn, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - int rn = (insn >> 16) & 0xf; - int lbit = insn & (1 << 20); - int wbit = insn & (1 << 21); - int ubit = insn & (1 << 23); - int pbit = insn & (1 << 24); - long *addr = (long *)regs->uregs[rn]; - int reg_bit_vector; - int reg_count; - - reg_count = 0; - reg_bit_vector = insn & 0xffff; - while (reg_bit_vector) { - reg_bit_vector &= (reg_bit_vector - 1); - ++reg_count; - } - - if (!ubit) - addr -= reg_count; - addr += (!pbit == !ubit); - - reg_bit_vector = insn & 0xffff; - while (reg_bit_vector) { - int reg = __ffs(reg_bit_vector); - reg_bit_vector &= (reg_bit_vector - 1); - if (lbit) - regs->uregs[reg] = *addr++; - else - *addr++ = regs->uregs[reg]; - } - - if (wbit) { - if (!ubit) - addr -= reg_count; - addr -= (!pbit == !ubit); - regs->uregs[rn] = (long)addr; - } -} - -static void __kprobes simulate_stm1_pc(probes_opcode_t insn, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - unsigned long addr = regs->ARM_pc - 4; - - regs->ARM_pc = (long)addr + str_pc_offset; - simulate_ldm1stm1(insn, asi, regs); - regs->ARM_pc = (long)addr + 4; -} - -static void __kprobes simulate_ldm1_pc(probes_opcode_t insn, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - simulate_ldm1stm1(insn, asi, regs); - load_write_pc(regs->ARM_pc, regs); -} - -static void __kprobes -emulate_generic_r0_12_noflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - register void *rregs asm("r1") = regs; - register void *rfn asm("lr") = asi->insn_fn; - - __asm__ __volatile__ ( - "stmdb sp!, {%[regs], r11} \n\t" - "ldmia %[regs], {r0-r12} \n\t" -#if __LINUX_ARM_ARCH__ >= 6 - "blx %[fn] \n\t" -#else - "str %[fn], [sp, #-4]! 
\n\t" - "adr lr, 1f \n\t" - "ldr pc, [sp], #4 \n\t" - "1: \n\t" -#endif - "ldr lr, [sp], #4 \n\t" /* lr = regs */ - "stmia lr, {r0-r12} \n\t" - "ldr r11, [sp], #4 \n\t" - : [regs] "=r" (rregs), [fn] "=r" (rfn) - : "0" (rregs), "1" (rfn) - : "r0", "r2", "r3", "r4", "r5", "r6", "r7", - "r8", "r9", "r10", "r12", "memory", "cc" - ); -} - -static void __kprobes -emulate_generic_r2_14_noflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - emulate_generic_r0_12_noflags(insn, asi, - (struct pt_regs *)(regs->uregs+2)); -} - -static void __kprobes -emulate_ldm_r3_15(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - emulate_generic_r0_12_noflags(insn, asi, - (struct pt_regs *)(regs->uregs+3)); - load_write_pc(regs->ARM_pc, regs); -} - -enum probes_insn __kprobes -kprobe_decode_ldmstm(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *h) -{ - probes_insn_handler_t *handler = 0; - unsigned reglist = insn & 0xffff; - int is_ldm = insn & 0x100000; - int rn = (insn >> 16) & 0xf; - - if (rn <= 12 && (reglist & 0xe000) == 0) { - /* Instruction only uses registers in the range R0..R12 */ - handler = emulate_generic_r0_12_noflags; - - } else if (rn >= 2 && (reglist & 0x8003) == 0) { - /* Instruction only uses registers in the range R2..R14 */ - rn -= 2; - reglist >>= 2; - handler = emulate_generic_r2_14_noflags; - - } else if (rn >= 3 && (reglist & 0x0007) == 0) { - /* Instruction only uses registers in the range R3..R15 */ - if (is_ldm && (reglist & 0x8000)) { - rn -= 3; - reglist >>= 3; - handler = emulate_ldm_r3_15; - } - } - - if (handler) { - /* We can emulate the instruction in (possibly) modified form */ - asi->insn[0] = __opcode_to_mem_arm((insn & 0xfff00000) | - (rn << 16) | reglist); - asi->insn_handler = handler; - return INSN_GOOD; - } - - /* Fallback to slower simulation... */ - if (reglist & 0x8000) - handler = is_ldm ? simulate_ldm1_pc : simulate_stm1_pc; - else - handler = simulate_ldm1stm1; - asi->insn_handler = handler; - return INSN_GOOD_NO_SLOT; -} - diff --git a/arch/arm/kernel/kprobes-test-arm.c b/arch/arm/kernel/kprobes-test-arm.c deleted file mode 100644 index cb1424240ff6..000000000000 --- a/arch/arm/kernel/kprobes-test-arm.c +++ /dev/null @@ -1,1346 +0,0 @@ -/* - * arch/arm/kernel/kprobes-test-arm.c - * - * Copyright (C) 2011 Jon Medhurst . - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. 
- */ - -#include -#include -#include -#include - -#include "kprobes-test.h" - - -#define TEST_ISA "32" - -#define TEST_ARM_TO_THUMB_INTERWORK_R(code1, reg, val, code2) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_REG(reg, val) \ - TEST_ARG_REG(14, 99f) \ - TEST_ARG_END("") \ - "50: nop \n\t" \ - "1: "code1 #reg code2" \n\t" \ - " bx lr \n\t" \ - ".thumb \n\t" \ - "3: adr lr, 2f \n\t" \ - " bx lr \n\t" \ - ".arm \n\t" \ - "2: nop \n\t" \ - TESTCASE_END - -#define TEST_ARM_TO_THUMB_INTERWORK_P(code1, reg, val, code2) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_PTR(reg, val) \ - TEST_ARG_REG(14, 99f) \ - TEST_ARG_MEM(15, 3f+1) \ - TEST_ARG_END("") \ - "50: nop \n\t" \ - "1: "code1 #reg code2" \n\t" \ - " bx lr \n\t" \ - ".thumb \n\t" \ - "3: adr lr, 2f \n\t" \ - " bx lr \n\t" \ - ".arm \n\t" \ - "2: nop \n\t" \ - TESTCASE_END - - -void kprobe_arm_test_cases(void) -{ - kprobe_test_flags = 0; - - TEST_GROUP("Data-processing (register), (register-shifted register), (immediate)") - -#define _DATA_PROCESSING_DNM(op,s,val) \ - TEST_RR( op "eq" s " r0, r",1, VAL1,", r",2, val, "") \ - TEST_RR( op "ne" s " r1, r",1, VAL1,", r",2, val, ", lsl #3") \ - TEST_RR( op "cs" s " r2, r",3, VAL1,", r",2, val, ", lsr #4") \ - TEST_RR( op "cc" s " r3, r",3, VAL1,", r",2, val, ", asr #5") \ - TEST_RR( op "mi" s " r4, r",5, VAL1,", r",2, N(val),", asr #6") \ - TEST_RR( op "pl" s " r5, r",5, VAL1,", r",2, val, ", ror #7") \ - TEST_RR( op "vs" s " r6, r",7, VAL1,", r",2, val, ", rrx") \ - TEST_R( op "vc" s " r6, r",7, VAL1,", pc, lsl #3") \ - TEST_R( op "vc" s " r6, r",7, VAL1,", sp, lsr #4") \ - TEST_R( op "vc" s " r6, pc, r",7, VAL1,", asr #5") \ - TEST_R( op "vc" s " r6, sp, r",7, VAL1,", ror #6") \ - TEST_RRR( op "hi" s " r8, r",9, VAL1,", r",14,val, ", lsl r",0, 3,"")\ - TEST_RRR( op "ls" s " r9, r",9, VAL1,", r",14,val, ", lsr r",7, 4,"")\ - TEST_RRR( op "ge" s " r10, r",11,VAL1,", r",14,val, ", asr r",7, 5,"")\ - TEST_RRR( op "lt" s " r11, r",11,VAL1,", r",14,N(val),", asr r",7, 6,"")\ - TEST_RR( op "gt" s " r12, r13" ", r",14,val, ", ror r",14,7,"")\ - TEST_RR( op "le" s " r14, r",0, val, ", r13" ", lsl r",14,8,"")\ - TEST_R( op "eq" s " r0, r",11,VAL1,", #0xf5") \ - TEST_R( op "ne" s " r11, r",0, VAL1,", #0xf5000000") \ - TEST_R( op s " r7, r",8, VAL2,", #0x000af000") \ - TEST( op s " r4, pc" ", #0x00005a00") - -#define DATA_PROCESSING_DNM(op,val) \ - _DATA_PROCESSING_DNM(op,"",val) \ - _DATA_PROCESSING_DNM(op,"s",val) - -#define DATA_PROCESSING_NM(op,val) \ - TEST_RR( op "ne r",1, VAL1,", r",2, val, "") \ - TEST_RR( op "eq r",1, VAL1,", r",2, val, ", lsl #3") \ - TEST_RR( op "cc r",3, VAL1,", r",2, val, ", lsr #4") \ - TEST_RR( op "cs r",3, VAL1,", r",2, val, ", asr #5") \ - TEST_RR( op "pl r",5, VAL1,", r",2, N(val),", asr #6") \ - TEST_RR( op "mi r",5, VAL1,", r",2, val, ", ror #7") \ - TEST_RR( op "vc r",7, VAL1,", r",2, val, ", rrx") \ - TEST_R ( op "vs r",7, VAL1,", pc, lsl #3") \ - TEST_R ( op "vs r",7, VAL1,", sp, lsr #4") \ - TEST_R( op "vs pc, r",7, VAL1,", asr #5") \ - TEST_R( op "vs sp, r",7, VAL1,", ror #6") \ - TEST_RRR( op "ls r",9, VAL1,", r",14,val, ", lsl r",0, 3,"") \ - TEST_RRR( op "hi r",9, VAL1,", r",14,val, ", lsr r",7, 4,"") \ - TEST_RRR( op "lt r",11,VAL1,", r",14,val, ", asr r",7, 5,"") \ - TEST_RRR( op "ge r",11,VAL1,", r",14,N(val),", asr r",7, 6,"") \ - TEST_RR( op "le r13" ", r",14,val, ", ror r",14,7,"") \ - TEST_RR( op "gt r",0, val, ", r13" ", lsl r",14,8,"") \ - TEST_R( op "eq r",11,VAL1,", #0xf5") \ - TEST_R( op "ne r",0, VAL1,", #0xf5000000") \ - TEST_R( op " 
r",8, VAL2,", #0x000af000") - -#define _DATA_PROCESSING_DM(op,s,val) \ - TEST_R( op "eq" s " r0, r",1, val, "") \ - TEST_R( op "ne" s " r1, r",1, val, ", lsl #3") \ - TEST_R( op "cs" s " r2, r",3, val, ", lsr #4") \ - TEST_R( op "cc" s " r3, r",3, val, ", asr #5") \ - TEST_R( op "mi" s " r4, r",5, N(val),", asr #6") \ - TEST_R( op "pl" s " r5, r",5, val, ", ror #7") \ - TEST_R( op "vs" s " r6, r",10,val, ", rrx") \ - TEST( op "vs" s " r7, pc, lsl #3") \ - TEST( op "vs" s " r7, sp, lsr #4") \ - TEST_RR( op "vc" s " r8, r",7, val, ", lsl r",0, 3,"") \ - TEST_RR( op "hi" s " r9, r",9, val, ", lsr r",7, 4,"") \ - TEST_RR( op "ls" s " r10, r",9, val, ", asr r",7, 5,"") \ - TEST_RR( op "ge" s " r11, r",11,N(val),", asr r",7, 6,"") \ - TEST_RR( op "lt" s " r12, r",11,val, ", ror r",14,7,"") \ - TEST_R( op "gt" s " r14, r13" ", lsl r",14,8,"") \ - TEST( op "eq" s " r0, #0xf5") \ - TEST( op "ne" s " r11, #0xf5000000") \ - TEST( op s " r7, #0x000af000") \ - TEST( op s " r4, #0x00005a00") - -#define DATA_PROCESSING_DM(op,val) \ - _DATA_PROCESSING_DM(op,"",val) \ - _DATA_PROCESSING_DM(op,"s",val) - - DATA_PROCESSING_DNM("and",0xf00f00ff) - DATA_PROCESSING_DNM("eor",0xf00f00ff) - DATA_PROCESSING_DNM("sub",VAL2) - DATA_PROCESSING_DNM("rsb",VAL2) - DATA_PROCESSING_DNM("add",VAL2) - DATA_PROCESSING_DNM("adc",VAL2) - DATA_PROCESSING_DNM("sbc",VAL2) - DATA_PROCESSING_DNM("rsc",VAL2) - DATA_PROCESSING_NM("tst",0xf00f00ff) - DATA_PROCESSING_NM("teq",0xf00f00ff) - DATA_PROCESSING_NM("cmp",VAL2) - DATA_PROCESSING_NM("cmn",VAL2) - DATA_PROCESSING_DNM("orr",0xf00f00ff) - DATA_PROCESSING_DM("mov",VAL2) - DATA_PROCESSING_DNM("bic",0xf00f00ff) - DATA_PROCESSING_DM("mvn",VAL2) - - TEST("mov ip, sp") /* This has special case emulation code */ - - TEST_SUPPORTED("mov pc, #0x1000"); - TEST_SUPPORTED("mov sp, #0x1000"); - TEST_SUPPORTED("cmp pc, #0x1000"); - TEST_SUPPORTED("cmp sp, #0x1000"); - - /* Data-processing with PC and a shift count in a register */ - TEST_UNSUPPORTED(__inst_arm(0xe15c0f1e) " @ cmp r12, r14, asl pc") - TEST_UNSUPPORTED(__inst_arm(0xe1a0cf1e) " @ mov r12, r14, asl pc") - TEST_UNSUPPORTED(__inst_arm(0xe08caf1e) " @ add r10, r12, r14, asl pc") - TEST_UNSUPPORTED(__inst_arm(0xe151021f) " @ cmp r1, pc, lsl r2") - TEST_UNSUPPORTED(__inst_arm(0xe17f0211) " @ cmn pc, r1, lsl r2") - TEST_UNSUPPORTED(__inst_arm(0xe1a0121f) " @ mov r1, pc, lsl r2") - TEST_UNSUPPORTED(__inst_arm(0xe1a0f211) " @ mov pc, r1, lsl r2") - TEST_UNSUPPORTED(__inst_arm(0xe042131f) " @ sub r1, r2, pc, lsl r3") - TEST_UNSUPPORTED(__inst_arm(0xe1cf1312) " @ bic r1, pc, r2, lsl r3") - TEST_UNSUPPORTED(__inst_arm(0xe081f312) " @ add pc, r1, r2, lsl r3") - - /* Data-processing with PC as a target and status registers updated */ - TEST_UNSUPPORTED("movs pc, r1") - TEST_UNSUPPORTED("movs pc, r1, lsl r2") - TEST_UNSUPPORTED("movs pc, #0x10000") - TEST_UNSUPPORTED("adds pc, lr, r1") - TEST_UNSUPPORTED("adds pc, lr, r1, lsl r2") - TEST_UNSUPPORTED("adds pc, lr, #4") - - /* Data-processing with SP as target */ - TEST("add sp, sp, #16") - TEST("sub sp, sp, #8") - TEST("bic sp, sp, #0x20") - TEST("orr sp, sp, #0x20") - TEST_PR( "add sp, r",10,0,", r",11,4,"") - TEST_PRR("add sp, r",10,0,", r",11,4,", asl r",12,1,"") - TEST_P( "mov sp, r",10,0,"") - TEST_PR( "mov sp, r",10,0,", asl r",12,0,"") - - /* Data-processing with PC as target */ - TEST_BF( "add pc, pc, #2f-1b-8") - TEST_BF_R ("add pc, pc, r",14,2f-1f-8,"") - TEST_BF_R ("add pc, r",14,2f-1f-8,", pc") - TEST_BF_R ("mov pc, r",0,2f,"") - TEST_BF_R ("add pc, pc, r",14,(2f-1f-8)*2,", asr #1") - 
TEST_BB( "sub pc, pc, #1b-2b+8") -#if __LINUX_ARM_ARCH__ == 6 && !defined(CONFIG_CPU_V7) - TEST_BB( "sub pc, pc, #1b-2b+8-2") /* UNPREDICTABLE before and after ARMv6 */ -#endif - TEST_BB_R( "sub pc, pc, r",14, 1f-2f+8,"") - TEST_BB_R( "rsb pc, r",14,1f-2f+8,", pc") - TEST_R( "add pc, pc, r",10,-2,", asl #1") -#ifdef CONFIG_THUMB2_KERNEL - TEST_ARM_TO_THUMB_INTERWORK_R("add pc, pc, r",0,3f-1f-8+1,"") - TEST_ARM_TO_THUMB_INTERWORK_R("sub pc, r",0,3f+8+1,", #8") -#endif - TEST_GROUP("Miscellaneous instructions") - - TEST("mrs r0, cpsr") - TEST("mrspl r7, cpsr") - TEST("mrs r14, cpsr") - TEST_UNSUPPORTED(__inst_arm(0xe10ff000) " @ mrs r15, cpsr") - TEST_UNSUPPORTED("mrs r0, spsr") - TEST_UNSUPPORTED("mrs lr, spsr") - - TEST_UNSUPPORTED("msr cpsr, r0") - TEST_UNSUPPORTED("msr cpsr_f, lr") - TEST_UNSUPPORTED("msr spsr, r0") - - TEST_BF_R("bx r",0,2f,"") - TEST_BB_R("bx r",7,2f,"") - TEST_BF_R("bxeq r",14,2f,"") - -#if __LINUX_ARM_ARCH__ >= 5 - TEST_R("clz r0, r",0, 0x0,"") - TEST_R("clzeq r7, r",14,0x1,"") - TEST_R("clz lr, r",7, 0xffffffff,"") - TEST( "clz r4, sp") - TEST_UNSUPPORTED(__inst_arm(0x016fff10) " @ clz pc, r0") - TEST_UNSUPPORTED(__inst_arm(0x016f0f1f) " @ clz r0, pc") - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_UNSUPPORTED("bxj r0") -#endif - - TEST_BF_R("blx r",0,2f,"") - TEST_BB_R("blx r",7,2f,"") - TEST_BF_R("blxeq r",14,2f,"") - TEST_UNSUPPORTED(__inst_arm(0x0120003f) " @ blx pc") - - TEST_RR( "qadd r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "qaddvs lr, r",9, VAL2,", r",8, VAL1,"") - TEST_R( "qadd lr, r",9, VAL2,", r13") - TEST_RR( "qsub r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "qsubvs lr, r",9, VAL2,", r",8, VAL1,"") - TEST_R( "qsub lr, r",9, VAL2,", r13") - TEST_RR( "qdadd r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "qdaddvs lr, r",9, VAL2,", r",8, VAL1,"") - TEST_R( "qdadd lr, r",9, VAL2,", r13") - TEST_RR( "qdsub r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "qdsubvs lr, r",9, VAL2,", r",8, VAL1,"") - TEST_R( "qdsub lr, r",9, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe101f050) " @ qadd pc, r0, r1") - TEST_UNSUPPORTED(__inst_arm(0xe121f050) " @ qsub pc, r0, r1") - TEST_UNSUPPORTED(__inst_arm(0xe141f050) " @ qdadd pc, r0, r1") - TEST_UNSUPPORTED(__inst_arm(0xe161f050) " @ qdsub pc, r0, r1") - TEST_UNSUPPORTED(__inst_arm(0xe16f2050) " @ qdsub r2, r0, pc") - TEST_UNSUPPORTED(__inst_arm(0xe161205f) " @ qdsub r2, pc, r1") - - TEST_UNSUPPORTED("bkpt 0xffff") - TEST_UNSUPPORTED("bkpt 0x0000") - - TEST_UNSUPPORTED(__inst_arm(0xe1600070) " @ smc #0") - - TEST_GROUP("Halfword multiply and multiply-accumulate") - - TEST_RRR( "smlabb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlabbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smlabb lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe10f3281) " @ smlabb pc, r1, r2, r3") - TEST_RRR( "smlatb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlatbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smlatb lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe10f32a1) " @ smlatb pc, r1, r2, r3") - TEST_RRR( "smlabt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlabtge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smlabt lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe10f32c1) " @ smlabt pc, r1, r2, r3") - TEST_RRR( "smlatt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlattge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smlatt lr, r",1, VAL2,", r",2, VAL3,", r13") - 
TEST_UNSUPPORTED(__inst_arm(0xe10f32e1) " @ smlatt pc, r1, r2, r3") - - TEST_RRR( "smlawb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlawbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smlawb lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe12f3281) " @ smlawb pc, r1, r2, r3") - TEST_RRR( "smlawt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlawtge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smlawt lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe12f32c1) " @ smlawt pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe12032cf) " @ smlawt r0, pc, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe1203fc1) " @ smlawt r0, r1, pc, r3") - TEST_UNSUPPORTED(__inst_arm(0xe120f2c1) " @ smlawt r0, r1, r2, pc") - - TEST_RR( "smulwb r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulwbge r7, r",8, VAL3,", r",9, VAL1,"") - TEST_R( "smulwb lr, r",1, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe12f02a1) " @ smulwb pc, r1, r2") - TEST_RR( "smulwt r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulwtge r7, r",8, VAL3,", r",9, VAL1,"") - TEST_R( "smulwt lr, r",1, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe12f02e1) " @ smulwt pc, r1, r2") - - TEST_RRRR( "smlalbb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalbble r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "smlalbb r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe14f1382) " @ smlalbb pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe141f382) " @ smlalbb r1, pc, r2, r3") - TEST_RRRR( "smlaltb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlaltble r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "smlaltb r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe14f13a2) " @ smlaltb pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe141f3a2) " @ smlaltb r1, pc, r2, r3") - TEST_RRRR( "smlalbt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalbtle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "smlalbt r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe14f13c2) " @ smlalbt pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe141f3c2) " @ smlalbt r1, pc, r2, r3") - TEST_RRRR( "smlaltt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalttle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "smlaltt r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe14f13e2) " @ smlalbb pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe140f3e2) " @ smlalbb r0, pc, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe14013ef) " @ smlalbb r0, r1, pc, r3") - TEST_UNSUPPORTED(__inst_arm(0xe1401fe2) " @ smlalbb r0, r1, r2, pc") - - TEST_RR( "smulbb r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulbbge r7, r",8, VAL3,", r",9, VAL1,"") - TEST_R( "smulbb lr, r",1, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe16f0281) " @ smulbb pc, r1, r2") - TEST_RR( "smultb r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smultbge r7, r",8, VAL3,", r",9, VAL1,"") - TEST_R( "smultb lr, r",1, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe16f02a1) " @ smultb pc, r1, r2") - TEST_RR( "smulbt r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulbtge r7, r",8, VAL3,", r",9, VAL1,"") - TEST_R( "smulbt lr, r",1, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe16f02c1) " @ smultb pc, r1, r2") - TEST_RR( "smultt r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( 
"smulttge r7, r",8, VAL3,", r",9, VAL1,"") - TEST_R( "smultt lr, r",1, VAL2,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe16f02e1) " @ smultt pc, r1, r2") - TEST_UNSUPPORTED(__inst_arm(0xe16002ef) " @ smultt r0, pc, r2") - TEST_UNSUPPORTED(__inst_arm(0xe1600fe1) " @ smultt r0, r1, pc") -#endif - - TEST_GROUP("Multiply and multiply-accumulate") - - TEST_RR( "mul r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "mulls r7, r",8, VAL2,", r",9, VAL2,"") - TEST_R( "mul lr, r",4, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe00f0291) " @ mul pc, r1, r2") - TEST_UNSUPPORTED(__inst_arm(0xe000029f) " @ mul r0, pc, r2") - TEST_UNSUPPORTED(__inst_arm(0xe0000f91) " @ mul r0, r1, pc") - TEST_RR( "muls r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "mullss r7, r",8, VAL2,", r",9, VAL2,"") - TEST_R( "muls lr, r",4, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe01f0291) " @ muls pc, r1, r2") - - TEST_RRR( "mla r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "mlahi r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "mla lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe02f3291) " @ mla pc, r1, r2, r3") - TEST_RRR( "mlas r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "mlahis r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "mlas lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe03f3291) " @ mlas pc, r1, r2, r3") - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_RR( "umaal r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "umaalls r7, r8, r",9, VAL2,", r",10, VAL1,"") - TEST_R( "umaal lr, r12, r",11,VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe041f392) " @ umaal pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe04f0392) " @ umaal r0, pc, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0500090) " @ undef") - TEST_UNSUPPORTED(__inst_arm(0xe05fff9f) " @ undef") -#endif - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_RRR( "mls r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "mlshi r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "mls lr, r",1, VAL2,", r",2, VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe06f3291) " @ mls pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe060329f) " @ mls r0, pc, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0603f91) " @ mls r0, r1, pc, r3") - TEST_UNSUPPORTED(__inst_arm(0xe060f291) " @ mls r0, r1, r2, pc") -#endif - - TEST_UNSUPPORTED(__inst_arm(0xe0700090) " @ undef") - TEST_UNSUPPORTED(__inst_arm(0xe07fff9f) " @ undef") - - TEST_RR( "umull r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "umullls r7, r8, r",9, VAL2,", r",10, VAL1,"") - TEST_R( "umull lr, r12, r",11,VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe081f392) " @ umull pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe08f1392) " @ umull r1, pc, r2, r3") - TEST_RR( "umulls r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "umulllss r7, r8, r",9, VAL2,", r",10, VAL1,"") - TEST_R( "umulls lr, r12, r",11,VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe091f392) " @ umulls pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe09f1392) " @ umulls r1, pc, r2, r3") - - TEST_RRRR( "umlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "umlalle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "umlal r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe0af1392) " @ umlal pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0a1f392) " @ umlal r1, pc, r2, r3") - TEST_RRRR( "umlals r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "umlalles r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - 
TEST_RRR( "umlals r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe0bf1392) " @ umlals pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0b1f392) " @ umlals r1, pc, r2, r3") - - TEST_RR( "smull r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "smullls r7, r8, r",9, VAL2,", r",10, VAL1,"") - TEST_R( "smull lr, r12, r",11,VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe0c1f392) " @ smull pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0cf1392) " @ smull r1, pc, r2, r3") - TEST_RR( "smulls r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "smulllss r7, r8, r",9, VAL2,", r",10, VAL1,"") - TEST_R( "smulls lr, r12, r",11,VAL3,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe0d1f392) " @ smulls pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0df1392) " @ smulls r1, pc, r2, r3") - - TEST_RRRR( "smlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "smlal r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe0ef1392) " @ smlal pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0e1f392) " @ smlal r1, pc, r2, r3") - TEST_RRRR( "smlals r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalles r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRR( "smlals r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") - TEST_UNSUPPORTED(__inst_arm(0xe0ff1392) " @ smlals pc, r1, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0f0f392) " @ smlals r0, pc, r2, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0f0139f) " @ smlals r0, r1, pc, r3") - TEST_UNSUPPORTED(__inst_arm(0xe0f01f92) " @ smlals r0, r1, r2, pc") - - TEST_GROUP("Synchronization primitives") - -#if __LINUX_ARM_ARCH__ < 6 - TEST_RP("swp lr, r",7,VAL2,", [r",8,0,"]") - TEST_R( "swpvs r0, r",1,VAL1,", [sp]") - TEST_RP("swp sp, r",14,VAL2,", [r",12,13*4,"]") -#else - TEST_UNSUPPORTED(__inst_arm(0xe108e097) " @ swp lr, r7, [r8]") - TEST_UNSUPPORTED(__inst_arm(0x610d0091) " @ swpvs r0, r1, [sp]") - TEST_UNSUPPORTED(__inst_arm(0xe10cd09e) " @ swp sp, r14 [r12]") -#endif - TEST_UNSUPPORTED(__inst_arm(0xe102f091) " @ swp pc, r1, [r2]") - TEST_UNSUPPORTED(__inst_arm(0xe102009f) " @ swp r0, pc, [r2]") - TEST_UNSUPPORTED(__inst_arm(0xe10f0091) " @ swp r0, r1, [pc]") -#if __LINUX_ARM_ARCH__ < 6 - TEST_RP("swpb lr, r",7,VAL2,", [r",8,0,"]") - TEST_R( "swpvsb r0, r",1,VAL1,", [sp]") -#else - TEST_UNSUPPORTED(__inst_arm(0xe148e097) " @ swpb lr, r7, [r8]") - TEST_UNSUPPORTED(__inst_arm(0x614d0091) " @ swpvsb r0, r1, [sp]") -#endif - TEST_UNSUPPORTED(__inst_arm(0xe142f091) " @ swpb pc, r1, [r2]") - - TEST_UNSUPPORTED(__inst_arm(0xe1100090)) /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe1200090)) /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe1300090)) /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe1500090)) /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe1600090)) /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe1700090)) /* Unallocated space */ -#if __LINUX_ARM_ARCH__ >= 6 - TEST_UNSUPPORTED("ldrex r2, [sp]") -#endif -#if (__LINUX_ARM_ARCH__ >= 7) || defined(CONFIG_CPU_32v6K) - TEST_UNSUPPORTED("strexd r0, r2, r3, [sp]") - TEST_UNSUPPORTED("ldrexd r2, r3, [sp]") - TEST_UNSUPPORTED("strexb r0, r2, [sp]") - TEST_UNSUPPORTED("ldrexb r2, [sp]") - TEST_UNSUPPORTED("strexh r0, r2, [sp]") - TEST_UNSUPPORTED("ldrexh r2, [sp]") -#endif - TEST_GROUP("Extra load/store instructions") - - TEST_RPR( "strh r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") - TEST_RPR( "streqh r",14,VAL2,", 
[r",13,0, ", r",12, 48,"]") - TEST_RPR( "strh r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") - TEST_RPR( "strneh r",12,VAL2,", [r",11,48,", -r",10,24,"]!") - TEST_RPR( "strh r",2, VAL1,", [r",3, 24,"], r",4, 48,"") - TEST_RPR( "strh r",10,VAL2,", [r",9, 48,"], -r",11,24,"") - TEST_UNSUPPORTED(__inst_arm(0xe1afc0ba) " @ strh r12, [pc, r10]!") - TEST_UNSUPPORTED(__inst_arm(0xe089f0bb) " @ strh pc, [r9], r11") - TEST_UNSUPPORTED(__inst_arm(0xe089a0bf) " @ strh r10, [r9], pc") - - TEST_PR( "ldrh r0, [r",0, 48,", -r",2, 24,"]") - TEST_PR( "ldrcsh r14, [r",13,0, ", r",12, 48,"]") - TEST_PR( "ldrh r1, [r",2, 24,", r",3, 48,"]!") - TEST_PR( "ldrcch r12, [r",11,48,", -r",10,24,"]!") - TEST_PR( "ldrh r2, [r",3, 24,"], r",4, 48,"") - TEST_PR( "ldrh r10, [r",9, 48,"], -r",11,24,"") - TEST_UNSUPPORTED(__inst_arm(0xe1bfc0ba) " @ ldrh r12, [pc, r10]!") - TEST_UNSUPPORTED(__inst_arm(0xe099f0bb) " @ ldrh pc, [r9], r11") - TEST_UNSUPPORTED(__inst_arm(0xe099a0bf) " @ ldrh r10, [r9], pc") - - TEST_RP( "strh r",0, VAL1,", [r",1, 24,", #-2]") - TEST_RP( "strmih r",14,VAL2,", [r",13,0, ", #2]") - TEST_RP( "strh r",1, VAL1,", [r",2, 24,", #4]!") - TEST_RP( "strplh r",12,VAL2,", [r",11,24,", #-4]!") - TEST_RP( "strh r",2, VAL1,", [r",3, 24,"], #48") - TEST_RP( "strh r",10,VAL2,", [r",9, 64,"], #-48") - TEST_UNSUPPORTED(__inst_arm(0xe1efc3b0) " @ strh r12, [pc, #48]!") - TEST_UNSUPPORTED(__inst_arm(0xe0c9f3b0) " @ strh pc, [r9], #48") - - TEST_P( "ldrh r0, [r",0, 24,", #-2]") - TEST_P( "ldrvsh r14, [r",13,0, ", #2]") - TEST_P( "ldrh r1, [r",2, 24,", #4]!") - TEST_P( "ldrvch r12, [r",11,24,", #-4]!") - TEST_P( "ldrh r2, [r",3, 24,"], #48") - TEST_P( "ldrh r10, [r",9, 64,"], #-48") - TEST( "ldrh r0, [pc, #0]") - TEST_UNSUPPORTED(__inst_arm(0xe1ffc3b0) " @ ldrh r12, [pc, #48]!") - TEST_UNSUPPORTED(__inst_arm(0xe0d9f3b0) " @ ldrh pc, [r9], #48") - - TEST_PR( "ldrsb r0, [r",0, 48,", -r",2, 24,"]") - TEST_PR( "ldrhisb r14, [r",13,0,", r",12, 48,"]") - TEST_PR( "ldrsb r1, [r",2, 24,", r",3, 48,"]!") - TEST_PR( "ldrlssb r12, [r",11,48,", -r",10,24,"]!") - TEST_PR( "ldrsb r2, [r",3, 24,"], r",4, 48,"") - TEST_PR( "ldrsb r10, [r",9, 48,"], -r",11,24,"") - TEST_UNSUPPORTED(__inst_arm(0xe1bfc0da) " @ ldrsb r12, [pc, r10]!") - TEST_UNSUPPORTED(__inst_arm(0xe099f0db) " @ ldrsb pc, [r9], r11") - - TEST_P( "ldrsb r0, [r",0, 24,", #-1]") - TEST_P( "ldrgesb r14, [r",13,0, ", #1]") - TEST_P( "ldrsb r1, [r",2, 24,", #4]!") - TEST_P( "ldrltsb r12, [r",11,24,", #-4]!") - TEST_P( "ldrsb r2, [r",3, 24,"], #48") - TEST_P( "ldrsb r10, [r",9, 64,"], #-48") - TEST( "ldrsb r0, [pc, #0]") - TEST_UNSUPPORTED(__inst_arm(0xe1ffc3d0) " @ ldrsb r12, [pc, #48]!") - TEST_UNSUPPORTED(__inst_arm(0xe0d9f3d0) " @ ldrsb pc, [r9], #48") - - TEST_PR( "ldrsh r0, [r",0, 48,", -r",2, 24,"]") - TEST_PR( "ldrgtsh r14, [r",13,0, ", r",12, 48,"]") - TEST_PR( "ldrsh r1, [r",2, 24,", r",3, 48,"]!") - TEST_PR( "ldrlesh r12, [r",11,48,", -r",10,24,"]!") - TEST_PR( "ldrsh r2, [r",3, 24,"], r",4, 48,"") - TEST_PR( "ldrsh r10, [r",9, 48,"], -r",11,24,"") - TEST_UNSUPPORTED(__inst_arm(0xe1bfc0fa) " @ ldrsh r12, [pc, r10]!") - TEST_UNSUPPORTED(__inst_arm(0xe099f0fb) " @ ldrsh pc, [r9], r11") - - TEST_P( "ldrsh r0, [r",0, 24,", #-1]") - TEST_P( "ldreqsh r14, [r",13,0 ,", #1]") - TEST_P( "ldrsh r1, [r",2, 24,", #4]!") - TEST_P( "ldrnesh r12, [r",11,24,", #-4]!") - TEST_P( "ldrsh r2, [r",3, 24,"], #48") - TEST_P( "ldrsh r10, [r",9, 64,"], #-48") - TEST( "ldrsh r0, [pc, #0]") - TEST_UNSUPPORTED(__inst_arm(0xe1ffc3f0) " @ ldrsh r12, [pc, #48]!") - 
TEST_UNSUPPORTED(__inst_arm(0xe0d9f3f0) " @ ldrsh pc, [r9], #48") - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_UNSUPPORTED("strht r1, [r2], r3") - TEST_UNSUPPORTED("ldrht r1, [r2], r3") - TEST_UNSUPPORTED("strht r1, [r2], #48") - TEST_UNSUPPORTED("ldrht r1, [r2], #48") - TEST_UNSUPPORTED("ldrsbt r1, [r2], r3") - TEST_UNSUPPORTED("ldrsbt r1, [r2], #48") - TEST_UNSUPPORTED("ldrsht r1, [r2], r3") - TEST_UNSUPPORTED("ldrsht r1, [r2], #48") -#endif - -#if __LINUX_ARM_ARCH__ >= 5 - TEST_RPR( "strd r",0, VAL1,", [r",1, 48,", -r",2,24,"]") - TEST_RPR( "strccd r",8, VAL2,", [r",13,0, ", r",12,48,"]") - TEST_RPR( "strd r",4, VAL1,", [r",2, 24,", r",3, 48,"]!") - TEST_RPR( "strcsd r",12,VAL2,", [r",11,48,", -r",10,24,"]!") - TEST_RPR( "strd r",2, VAL1,", [r",5, 24,"], r",4,48,"") - TEST_RPR( "strd r",10,VAL2,", [r",9, 48,"], -r",7,24,"") - TEST_UNSUPPORTED(__inst_arm(0xe1afc0fa) " @ strd r12, [pc, r10]!") - - TEST_PR( "ldrd r0, [r",0, 48,", -r",2,24,"]") - TEST_PR( "ldrmid r8, [r",13,0, ", r",12,48,"]") - TEST_PR( "ldrd r4, [r",2, 24,", r",3, 48,"]!") - TEST_PR( "ldrpld r6, [r",11,48,", -r",10,24,"]!") - TEST_PR( "ldrd r2, [r",5, 24,"], r",4,48,"") - TEST_PR( "ldrd r10, [r",9,48,"], -r",7,24,"") - TEST_UNSUPPORTED(__inst_arm(0xe1afc0da) " @ ldrd r12, [pc, r10]!") - TEST_UNSUPPORTED(__inst_arm(0xe089f0db) " @ ldrd pc, [r9], r11") - TEST_UNSUPPORTED(__inst_arm(0xe089e0db) " @ ldrd lr, [r9], r11") - TEST_UNSUPPORTED(__inst_arm(0xe089c0df) " @ ldrd r12, [r9], pc") - - TEST_RP( "strd r",0, VAL1,", [r",1, 24,", #-8]") - TEST_RP( "strvsd r",8, VAL2,", [r",13,0, ", #8]") - TEST_RP( "strd r",4, VAL1,", [r",2, 24,", #16]!") - TEST_RP( "strvcd r",12,VAL2,", [r",11,24,", #-16]!") - TEST_RP( "strd r",2, VAL1,", [r",4, 24,"], #48") - TEST_RP( "strd r",10,VAL2,", [r",9, 64,"], #-48") - TEST_UNSUPPORTED(__inst_arm(0xe1efc3f0) " @ strd r12, [pc, #48]!") - - TEST_P( "ldrd r0, [r",0, 24,", #-8]") - TEST_P( "ldrhid r8, [r",13,0, ", #8]") - TEST_P( "ldrd r4, [r",2, 24,", #16]!") - TEST_P( "ldrlsd r6, [r",11,24,", #-16]!") - TEST_P( "ldrd r2, [r",5, 24,"], #48") - TEST_P( "ldrd r10, [r",9,6,"], #-48") - TEST_UNSUPPORTED(__inst_arm(0xe1efc3d0) " @ ldrd r12, [pc, #48]!") - TEST_UNSUPPORTED(__inst_arm(0xe0c9f3d0) " @ ldrd pc, [r9], #48") - TEST_UNSUPPORTED(__inst_arm(0xe0c9e3d0) " @ ldrd lr, [r9], #48") -#endif - - TEST_GROUP("Miscellaneous") - -#if __LINUX_ARM_ARCH__ >= 7 - TEST("movw r0, #0") - TEST("movw r0, #0xffff") - TEST("movw lr, #0xffff") - TEST_UNSUPPORTED(__inst_arm(0xe300f000) " @ movw pc, #0") - TEST_R("movt r",0, VAL1,", #0") - TEST_R("movt r",0, VAL2,", #0xffff") - TEST_R("movt r",14,VAL1,", #0xffff") - TEST_UNSUPPORTED(__inst_arm(0xe340f000) " @ movt pc, #0") -#endif - - TEST_UNSUPPORTED("msr cpsr, 0x13") - TEST_UNSUPPORTED("msr cpsr_f, 0xf0000000") - TEST_UNSUPPORTED("msr spsr, 0x13") - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_SUPPORTED("yield") - TEST("sev") - TEST("nop") - TEST("wfi") - TEST_SUPPORTED("wfe") - TEST_UNSUPPORTED("dbg #0") -#endif - - TEST_GROUP("Load/store word and unsigned byte") - -#define LOAD_STORE(byte) \ - TEST_RP( "str"byte" r",0, VAL1,", [r",1, 24,", #-2]") \ - TEST_RP( "str"byte" r",14,VAL2,", [r",13,0, ", #2]") \ - TEST_RP( "str"byte" r",1, VAL1,", [r",2, 24,", #4]!") \ - TEST_RP( "str"byte" r",12,VAL2,", [r",11,24,", #-4]!") \ - TEST_RP( "str"byte" r",2, VAL1,", [r",3, 24,"], #48") \ - TEST_RP( "str"byte" r",10,VAL2,", [r",9, 64,"], #-48") \ - TEST_RPR("str"byte" r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") \ - TEST_RPR("str"byte" r",14,VAL2,", [r",13,0, ", r",12, 48,"]") \ - TEST_RPR("str"byte" 
r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") \ - TEST_RPR("str"byte" r",12,VAL2,", [r",11,48,", -r",10,24,"]!") \ - TEST_RPR("str"byte" r",2, VAL1,", [r",3, 24,"], r",4, 48,"") \ - TEST_RPR("str"byte" r",10,VAL2,", [r",9, 48,"], -r",11,24,"") \ - TEST_RPR("str"byte" r",0, VAL1,", [r",1, 24,", r",2, 32,", asl #1]")\ - TEST_RPR("str"byte" r",14,VAL2,", [r",13,0, ", r",12, 32,", lsr #2]")\ - TEST_RPR("str"byte" r",1, VAL1,", [r",2, 24,", r",3, 32,", asr #3]!")\ - TEST_RPR("str"byte" r",12,VAL2,", [r",11,24,", r",10, 4,", ror #31]!")\ - TEST_P( "ldr"byte" r0, [r",0, 24,", #-2]") \ - TEST_P( "ldr"byte" r14, [r",13,0, ", #2]") \ - TEST_P( "ldr"byte" r1, [r",2, 24,", #4]!") \ - TEST_P( "ldr"byte" r12, [r",11,24,", #-4]!") \ - TEST_P( "ldr"byte" r2, [r",3, 24,"], #48") \ - TEST_P( "ldr"byte" r10, [r",9, 64,"], #-48") \ - TEST_PR( "ldr"byte" r0, [r",0, 48,", -r",2, 24,"]") \ - TEST_PR( "ldr"byte" r14, [r",13,0, ", r",12, 48,"]") \ - TEST_PR( "ldr"byte" r1, [r",2, 24,", r",3, 48,"]!") \ - TEST_PR( "ldr"byte" r12, [r",11,48,", -r",10,24,"]!") \ - TEST_PR( "ldr"byte" r2, [r",3, 24,"], r",4, 48,"") \ - TEST_PR( "ldr"byte" r10, [r",9, 48,"], -r",11,24,"") \ - TEST_PR( "ldr"byte" r0, [r",0, 24,", r",2, 32,", asl #1]") \ - TEST_PR( "ldr"byte" r14, [r",13,0, ", r",12, 32,", lsr #2]") \ - TEST_PR( "ldr"byte" r1, [r",2, 24,", r",3, 32,", asr #3]!") \ - TEST_PR( "ldr"byte" r12, [r",11,24,", r",10, 4,", ror #31]!") \ - TEST( "ldr"byte" r0, [pc, #0]") \ - TEST_R( "ldr"byte" r12, [pc, r",14,0,"]") - - LOAD_STORE("") - TEST_P( "str pc, [r",0,0,", #15*4]") - TEST_R( "str pc, [sp, r",2,15*4,"]") - TEST_BF( "ldr pc, [sp, #15*4]") - TEST_BF_R("ldr pc, [sp, r",2,15*4,"]") - - TEST_P( "str sp, [r",0,0,", #13*4]") - TEST_R( "str sp, [sp, r",2,13*4,"]") - TEST_BF( "ldr sp, [sp, #13*4]") - TEST_BF_R("ldr sp, [sp, r",2,13*4,"]") - -#ifdef CONFIG_THUMB2_KERNEL - TEST_ARM_TO_THUMB_INTERWORK_P("ldr pc, [r",0,0,", #15*4]") -#endif - TEST_UNSUPPORTED(__inst_arm(0xe5af6008) " @ str r6, [pc, #8]!") - TEST_UNSUPPORTED(__inst_arm(0xe7af6008) " @ str r6, [pc, r8]!") - TEST_UNSUPPORTED(__inst_arm(0xe5bf6008) " @ ldr r6, [pc, #8]!") - TEST_UNSUPPORTED(__inst_arm(0xe7bf6008) " @ ldr r6, [pc, r8]!") - TEST_UNSUPPORTED(__inst_arm(0xe788600f) " @ str r6, [r8, pc]") - TEST_UNSUPPORTED(__inst_arm(0xe798600f) " @ ldr r6, [r8, pc]") - - LOAD_STORE("b") - TEST_UNSUPPORTED(__inst_arm(0xe5f7f008) " @ ldrb pc, [r7, #8]!") - TEST_UNSUPPORTED(__inst_arm(0xe7f7f008) " @ ldrb pc, [r7, r8]!") - TEST_UNSUPPORTED(__inst_arm(0xe5ef6008) " @ strb r6, [pc, #8]!") - TEST_UNSUPPORTED(__inst_arm(0xe7ef6008) " @ strb r6, [pc, r3]!") - TEST_UNSUPPORTED(__inst_arm(0xe5ff6008) " @ ldrb r6, [pc, #8]!") - TEST_UNSUPPORTED(__inst_arm(0xe7ff6008) " @ ldrb r6, [pc, r3]!") - - TEST_UNSUPPORTED("ldrt r0, [r1], #4") - TEST_UNSUPPORTED("ldrt r1, [r2], r3") - TEST_UNSUPPORTED("strt r2, [r3], #4") - TEST_UNSUPPORTED("strt r3, [r4], r5") - TEST_UNSUPPORTED("ldrbt r4, [r5], #4") - TEST_UNSUPPORTED("ldrbt r5, [r6], r7") - TEST_UNSUPPORTED("strbt r6, [r7], #4") - TEST_UNSUPPORTED("strbt r7, [r8], r9") - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_GROUP("Parallel addition and subtraction, signed") - - TEST_UNSUPPORTED(__inst_arm(0xe6000010) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe60fffff) "") /* Unallocated space */ - - TEST_RR( "sadd16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sadd16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe61cff1a) " @ sadd16 pc, r12, r10") - TEST_RR( "sasx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sasx r14, r",12,HH2,", 
r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe61cff3a) " @ sasx pc, r12, r10") - TEST_RR( "ssax r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "ssax r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe61cff5a) " @ ssax pc, r12, r10") - TEST_RR( "ssub16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "ssub16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe61cff7a) " @ ssub16 pc, r12, r10") - TEST_RR( "sadd8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sadd8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe61cff9a) " @ sadd8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe61000b0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe61fffbf) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe61000d0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe61fffdf) "") /* Unallocated space */ - TEST_RR( "ssub8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "ssub8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe61cfffa) " @ ssub8 pc, r12, r10") - - TEST_RR( "qadd16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "qadd16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe62cff1a) " @ qadd16 pc, r12, r10") - TEST_RR( "qasx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "qasx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe62cff3a) " @ qasx pc, r12, r10") - TEST_RR( "qsax r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "qsax r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe62cff5a) " @ qsax pc, r12, r10") - TEST_RR( "qsub16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "qsub16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe62cff7a) " @ qsub16 pc, r12, r10") - TEST_RR( "qadd8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "qadd8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe62cff9a) " @ qadd8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe62000b0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe62fffbf) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe62000d0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe62fffdf) "") /* Unallocated space */ - TEST_RR( "qsub8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "qsub8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe62cfffa) " @ qsub8 pc, r12, r10") - - TEST_RR( "shadd16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "shadd16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe63cff1a) " @ shadd16 pc, r12, r10") - TEST_RR( "shasx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "shasx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe63cff3a) " @ shasx pc, r12, r10") - TEST_RR( "shsax r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "shsax r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe63cff5a) " @ shsax pc, r12, r10") - TEST_RR( "shsub16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "shsub16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe63cff7a) " @ shsub16 pc, r12, r10") - TEST_RR( "shadd8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "shadd8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe63cff9a) " @ shadd8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe63000b0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe63fffbf) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe63000d0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe63fffdf) "") /* Unallocated space */ - TEST_RR( "shsub8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "shsub8 r14, r",12,HH2,", r",10,HH1,"") - 
TEST_UNSUPPORTED(__inst_arm(0xe63cfffa) " @ shsub8 pc, r12, r10") - - TEST_GROUP("Parallel addition and subtraction, unsigned") - - TEST_UNSUPPORTED(__inst_arm(0xe6400010) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe64fffff) "") /* Unallocated space */ - - TEST_RR( "uadd16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uadd16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe65cff1a) " @ uadd16 pc, r12, r10") - TEST_RR( "uasx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uasx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe65cff3a) " @ uasx pc, r12, r10") - TEST_RR( "usax r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "usax r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe65cff5a) " @ usax pc, r12, r10") - TEST_RR( "usub16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "usub16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe65cff7a) " @ usub16 pc, r12, r10") - TEST_RR( "uadd8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uadd8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe65cff9a) " @ uadd8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe65000b0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe65fffbf) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe65000d0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe65fffdf) "") /* Unallocated space */ - TEST_RR( "usub8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "usub8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe65cfffa) " @ usub8 pc, r12, r10") - - TEST_RR( "uqadd16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uqadd16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe66cff1a) " @ uqadd16 pc, r12, r10") - TEST_RR( "uqasx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uqasx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe66cff3a) " @ uqasx pc, r12, r10") - TEST_RR( "uqsax r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uqsax r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe66cff5a) " @ uqsax pc, r12, r10") - TEST_RR( "uqsub16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uqsub16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe66cff7a) " @ uqsub16 pc, r12, r10") - TEST_RR( "uqadd8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uqadd8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe66cff9a) " @ uqadd8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe66000b0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe66fffbf) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe66000d0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe66fffdf) "") /* Unallocated space */ - TEST_RR( "uqsub8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uqsub8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe66cfffa) " @ uqsub8 pc, r12, r10") - - TEST_RR( "uhadd16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uhadd16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe67cff1a) " @ uhadd16 pc, r12, r10") - TEST_RR( "uhasx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uhasx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe67cff3a) " @ uhasx pc, r12, r10") - TEST_RR( "uhsax r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uhsax r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe67cff5a) " @ uhsax pc, r12, r10") - TEST_RR( "uhsub16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uhsub16 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe67cff7a) " @ uhsub16 pc, r12, r10") - TEST_RR( "uhadd8 r0, r",0, 
HH1,", r",1, HH2,"") - TEST_RR( "uhadd8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe67cff9a) " @ uhadd8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe67000b0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe67fffbf) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe67000d0) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe67fffdf) "") /* Unallocated space */ - TEST_RR( "uhsub8 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uhsub8 r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe67cfffa) " @ uhsub8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe67feffa) " @ uhsub8 r14, pc, r10") - TEST_UNSUPPORTED(__inst_arm(0xe67cefff) " @ uhsub8 r14, r12, pc") -#endif /* __LINUX_ARM_ARCH__ >= 7 */ - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_GROUP("Packing, unpacking, saturation, and reversal") - - TEST_RR( "pkhbt r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "pkhbt r14,r",12, HH1,", r",10,HH2,", lsl #2") - TEST_UNSUPPORTED(__inst_arm(0xe68cf11a) " @ pkhbt pc, r12, r10, lsl #2") - TEST_RR( "pkhtb r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "pkhtb r14,r",12, HH1,", r",10,HH2,", asr #2") - TEST_UNSUPPORTED(__inst_arm(0xe68cf15a) " @ pkhtb pc, r12, r10, asr #2") - TEST_UNSUPPORTED(__inst_arm(0xe68fe15a) " @ pkhtb r14, pc, r10, asr #2") - TEST_UNSUPPORTED(__inst_arm(0xe68ce15f) " @ pkhtb r14, r12, pc, asr #2") - TEST_UNSUPPORTED(__inst_arm(0xe6900010) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe69fffdf) "") /* Unallocated space */ - - TEST_R( "ssat r0, #24, r",0, VAL1,"") - TEST_R( "ssat r14, #24, r",12, VAL2,"") - TEST_R( "ssat r0, #24, r",0, VAL1,", lsl #8") - TEST_R( "ssat r14, #24, r",12, VAL2,", asr #8") - TEST_UNSUPPORTED(__inst_arm(0xe6b7f01c) " @ ssat pc, #24, r12") - - TEST_R( "usat r0, #24, r",0, VAL1,"") - TEST_R( "usat r14, #24, r",12, VAL2,"") - TEST_R( "usat r0, #24, r",0, VAL1,", lsl #8") - TEST_R( "usat r14, #24, r",12, VAL2,", asr #8") - TEST_UNSUPPORTED(__inst_arm(0xe6f7f01c) " @ usat pc, #24, r12") - - TEST_RR( "sxtab16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "sxtb16 r8, r",7, HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe68cf47a) " @ sxtab16 pc,r12, r10, ror #8") - - TEST_RR( "sel r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "sel r14, r",12,VAL1,", r",10, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe68cffba) " @ sel pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe68fefba) " @ sel r14, pc, r10") - TEST_UNSUPPORTED(__inst_arm(0xe68cefbf) " @ sel r14, r12, pc") - - TEST_R( "ssat16 r0, #12, r",0, HH1,"") - TEST_R( "ssat16 r14, #12, r",12, HH2,"") - TEST_UNSUPPORTED(__inst_arm(0xe6abff3c) " @ ssat16 pc, #12, r12") - - TEST_RR( "sxtab r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sxtab r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "sxtb r8, r",7, HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe6acf47a) " @ sxtab pc,r12, r10, ror #8") - - TEST_R( "rev r0, r",0, VAL1,"") - TEST_R( "rev r14, r",12, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe6bfff3c) " @ rev pc, r12") - - TEST_RR( "sxtah r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sxtah r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "sxth r8, r",7, HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe6bcf47a) " @ sxtah pc,r12, r10, ror #8") - - TEST_R( "rev16 r0, r",0, VAL1,"") - TEST_R( "rev16 r14, r",12, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe6bfffbc) " @ rev16 pc, r12") - - TEST_RR( "uxtab16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "uxtb16 r8, r",7, HH1,"") - 
TEST_UNSUPPORTED(__inst_arm(0xe6ccf47a) " @ uxtab16 pc,r12, r10, ror #8") - - TEST_R( "usat16 r0, #12, r",0, HH1,"") - TEST_R( "usat16 r14, #12, r",12, HH2,"") - TEST_UNSUPPORTED(__inst_arm(0xe6ecff3c) " @ usat16 pc, #12, r12") - TEST_UNSUPPORTED(__inst_arm(0xe6ecef3f) " @ usat16 r14, #12, pc") - - TEST_RR( "uxtab r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uxtab r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "uxtb r8, r",7, HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe6ecf47a) " @ uxtab pc,r12, r10, ror #8") - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_R( "rbit r0, r",0, VAL1,"") - TEST_R( "rbit r14, r",12, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe6ffff3c) " @ rbit pc, r12") -#endif - - TEST_RR( "uxtah r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uxtah r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "uxth r8, r",7, HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe6fff077) " @ uxth pc, r7") - TEST_UNSUPPORTED(__inst_arm(0xe6ff807f) " @ uxth r8, pc") - TEST_UNSUPPORTED(__inst_arm(0xe6fcf47a) " @ uxtah pc, r12, r10, ror #8") - TEST_UNSUPPORTED(__inst_arm(0xe6fce47f) " @ uxtah r14, r12, pc, ror #8") - - TEST_R( "revsh r0, r",0, VAL1,"") - TEST_R( "revsh r14, r",12, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe6ffff3c) " @ revsh pc, r12") - TEST_UNSUPPORTED(__inst_arm(0xe6ffef3f) " @ revsh r14, pc") - - TEST_UNSUPPORTED(__inst_arm(0xe6900070) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe69fff7f) "") /* Unallocated space */ - - TEST_UNSUPPORTED(__inst_arm(0xe6d00070) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_arm(0xe6dfff7f) "") /* Unallocated space */ -#endif /* __LINUX_ARM_ARCH__ >= 6 */ - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_GROUP("Signed multiplies") - - TEST_RRR( "smlad r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smlad r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe70f8a1c) " @ smlad pc, r12, r10, r8") - TEST_RRR( "smladx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smladx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe70f8a3c) " @ smladx pc, r12, r10, r8") - - TEST_RR( "smuad r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smuad r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe70ffa1c) " @ smuad pc, r12, r10") - TEST_RR( "smuadx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smuadx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe70ffa3c) " @ smuadx pc, r12, r10") - - TEST_RRR( "smlsd r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smlsd r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe70f8a5c) " @ smlsd pc, r12, r10, r8") - TEST_RRR( "smlsdx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smlsdx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe70f8a7c) " @ smlsdx pc, r12, r10, r8") - - TEST_RR( "smusd r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smusd r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe70ffa5c) " @ smusd pc, r12, r10") - TEST_RR( "smusdx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smusdx r14, r",12,HH2,", r",10,HH1,"") - TEST_UNSUPPORTED(__inst_arm(0xe70ffa7c) " @ smusdx pc, r12, r10") - - TEST_RRRR( "smlald r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) - TEST_RRRR( "smlald r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) - TEST_UNSUPPORTED(__inst_arm(0xe74af819) " @ smlald pc, r10, r9, r8") - TEST_UNSUPPORTED(__inst_arm(0xe74fb819) " @ smlald r11, pc, r9, r8") - TEST_UNSUPPORTED(__inst_arm(0xe74ab81f) " @ smlald r11, r10, pc, r8") - 
TEST_UNSUPPORTED(__inst_arm(0xe74abf19) " @ smlald r11, r10, r9, pc") - - TEST_RRRR( "smlaldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) - TEST_RRRR( "smlaldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) - TEST_UNSUPPORTED(__inst_arm(0xe74af839) " @ smlaldx pc, r10, r9, r8") - TEST_UNSUPPORTED(__inst_arm(0xe74fb839) " @ smlaldx r11, pc, r9, r8") - - TEST_RRR( "smmla r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmla r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe75f8a1c) " @ smmla pc, r12, r10, r8") - TEST_RRR( "smmlar r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmlar r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe75f8a3c) " @ smmlar pc, r12, r10, r8") - - TEST_RR( "smmul r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "smmul r14, r",12,VAL2,", r",10,VAL1,"") - TEST_UNSUPPORTED(__inst_arm(0xe75ffa1c) " @ smmul pc, r12, r10") - TEST_RR( "smmulr r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "smmulr r14, r",12,VAL2,", r",10,VAL1,"") - TEST_UNSUPPORTED(__inst_arm(0xe75ffa3c) " @ smmulr pc, r12, r10") - - TEST_RRR( "smmls r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmls r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe75f8adc) " @ smmls pc, r12, r10, r8") - TEST_RRR( "smmlsr r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmlsr r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_UNSUPPORTED(__inst_arm(0xe75f8afc) " @ smmlsr pc, r12, r10, r8") - TEST_UNSUPPORTED(__inst_arm(0xe75e8aff) " @ smmlsr r14, pc, r10, r8") - TEST_UNSUPPORTED(__inst_arm(0xe75e8ffc) " @ smmlsr r14, r12, pc, r8") - TEST_UNSUPPORTED(__inst_arm(0xe75efafc) " @ smmlsr r14, r12, r10, pc") - - TEST_RR( "usad8 r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "usad8 r14, r",12,VAL2,", r",10,VAL1,"") - TEST_UNSUPPORTED(__inst_arm(0xe75ffa1c) " @ usad8 pc, r12, r10") - TEST_UNSUPPORTED(__inst_arm(0xe75efa1f) " @ usad8 r14, pc, r10") - TEST_UNSUPPORTED(__inst_arm(0xe75eff1c) " @ usad8 r14, r12, pc") - - TEST_RRR( "usada8 r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL3,"") - TEST_RRR( "usada8 r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL3,"") - TEST_UNSUPPORTED(__inst_arm(0xe78f8a1c) " @ usada8 pc, r12, r10, r8") - TEST_UNSUPPORTED(__inst_arm(0xe78e8a1f) " @ usada8 r14, pc, r10, r8") - TEST_UNSUPPORTED(__inst_arm(0xe78e8f1c) " @ usada8 r14, r12, pc, r8") -#endif /* __LINUX_ARM_ARCH__ >= 6 */ - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_GROUP("Bit Field") - - TEST_R( "sbfx r0, r",0 , VAL1,", #0, #31") - TEST_R( "sbfxeq r14, r",12, VAL2,", #8, #16") - TEST_R( "sbfx r4, r",10, VAL1,", #16, #15") - TEST_UNSUPPORTED(__inst_arm(0xe7aff45c) " @ sbfx pc, r12, #8, #16") - - TEST_R( "ubfx r0, r",0 , VAL1,", #0, #31") - TEST_R( "ubfxcs r14, r",12, VAL2,", #8, #16") - TEST_R( "ubfx r4, r",10, VAL1,", #16, #15") - TEST_UNSUPPORTED(__inst_arm(0xe7eff45c) " @ ubfx pc, r12, #8, #16") - TEST_UNSUPPORTED(__inst_arm(0xe7efc45f) " @ ubfx r12, pc, #8, #16") - - TEST_R( "bfc r",0, VAL1,", #4, #20") - TEST_R( "bfcvs r",14,VAL2,", #4, #20") - TEST_R( "bfc r",7, VAL1,", #0, #31") - TEST_R( "bfc r",8, VAL2,", #0, #31") - TEST_UNSUPPORTED(__inst_arm(0xe7def01f) " @ bfc pc, #0, #31"); - - TEST_RR( "bfi r",0, VAL1,", r",0 , VAL2,", #0, #31") - TEST_RR( "bfipl r",12,VAL1,", r",14 , VAL2,", #4, #20") - TEST_UNSUPPORTED(__inst_arm(0xe7d7f21e) " @ bfi pc, r14, #4, #20") - - TEST_UNSUPPORTED(__inst_arm(0x07f000f0) "") /* Permanently UNDEFINED */ - TEST_UNSUPPORTED(__inst_arm(0x07ffffff) "") /* Permanently 
UNDEFINED */ -#endif /* __LINUX_ARM_ARCH__ >= 6 */ - - TEST_GROUP("Branch, branch with link, and block data transfer") - - TEST_P( "stmda r",0, 16*4,", {r0}") - TEST_P( "stmeqda r",4, 16*4,", {r0-r15}") - TEST_P( "stmneda r",8, 16*4,"!, {r8-r15}") - TEST_P( "stmda r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_P( "stmda r",13,0, "!, {pc}") - - TEST_P( "ldmda r",0, 16*4,", {r0}") - TEST_BF_P("ldmcsda r",4, 15*4,", {r0-r15}") - TEST_BF_P("ldmccda r",7, 15*4,"!, {r8-r15}") - TEST_P( "ldmda r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_BF_P("ldmda r",14,15*4,"!, {pc}") - - TEST_P( "stmia r",0, 16*4,", {r0}") - TEST_P( "stmmiia r",4, 16*4,", {r0-r15}") - TEST_P( "stmplia r",8, 16*4,"!, {r8-r15}") - TEST_P( "stmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_P( "stmia r",14,0, "!, {pc}") - - TEST_P( "ldmia r",0, 16*4,", {r0}") - TEST_BF_P("ldmvsia r",4, 0, ", {r0-r15}") - TEST_BF_P("ldmvcia r",7, 8*4, "!, {r8-r15}") - TEST_P( "ldmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_BF_P("ldmia r",14,15*4,"!, {pc}") - - TEST_P( "stmdb r",0, 16*4,", {r0}") - TEST_P( "stmhidb r",4, 16*4,", {r0-r15}") - TEST_P( "stmlsdb r",8, 16*4,"!, {r8-r15}") - TEST_P( "stmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_P( "stmdb r",13,4, "!, {pc}") - - TEST_P( "ldmdb r",0, 16*4,", {r0}") - TEST_BF_P("ldmgedb r",4, 16*4,", {r0-r15}") - TEST_BF_P("ldmltdb r",7, 16*4,"!, {r8-r15}") - TEST_P( "ldmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_BF_P("ldmdb r",14,16*4,"!, {pc}") - - TEST_P( "stmib r",0, 16*4,", {r0}") - TEST_P( "stmgtib r",4, 16*4,", {r0-r15}") - TEST_P( "stmleib r",8, 16*4,"!, {r8-r15}") - TEST_P( "stmib r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_P( "stmib r",13,-4, "!, {pc}") - - TEST_P( "ldmib r",0, 16*4,", {r0}") - TEST_BF_P("ldmeqib r",4, -4,", {r0-r15}") - TEST_BF_P("ldmneib r",7, 7*4,"!, {r8-r15}") - TEST_P( "ldmib r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_BF_P("ldmib r",14,14*4,"!, {pc}") - - TEST_P( "stmdb r",13,16*4,"!, {r3-r12,lr}") - TEST_P( "stmeqdb r",13,16*4,"!, {r3-r12}") - TEST_P( "stmnedb r",2, 16*4,", {r3-r12,lr}") - TEST_P( "stmdb r",13,16*4,"!, {r2-r12,lr}") - TEST_P( "stmdb r",0, 16*4,", {r0-r12}") - TEST_P( "stmdb r",0, 16*4,", {r0-r12,lr}") - - TEST_BF_P("ldmia r",13,5*4, "!, {r3-r12,pc}") - TEST_P( "ldmccia r",13,5*4, "!, {r3-r12}") - TEST_BF_P("ldmcsia r",2, 5*4, "!, {r3-r12,pc}") - TEST_BF_P("ldmia r",13,4*4, "!, {r2-r12,pc}") - TEST_P( "ldmia r",0, 16*4,", {r0-r12}") - TEST_P( "ldmia r",0, 16*4,", {r0-r12,lr}") - -#ifdef CONFIG_THUMB2_KERNEL - TEST_ARM_TO_THUMB_INTERWORK_P("ldmplia r",0,15*4,", {pc}") - TEST_ARM_TO_THUMB_INTERWORK_P("ldmmiia r",13,0,", {r0-r15}") -#endif - TEST_BF("b 2f") - TEST_BF("bl 2f") - TEST_BB("b 2b") - TEST_BB("bl 2b") - - TEST_BF("beq 2f") - TEST_BF("bleq 2f") - TEST_BB("bne 2b") - TEST_BB("blne 2b") - - TEST_BF("bgt 2f") - TEST_BF("blgt 2f") - TEST_BB("blt 2b") - TEST_BB("bllt 2b") - - TEST_GROUP("Supervisor Call, and coprocessor instructions") - - /* - * We can't really test these by executing them, so all - * we can do is check that probes are, or are not allowed. - * At the moment none are allowed... 
- */ -#define TEST_COPROCESSOR(code) TEST_UNSUPPORTED(code) - -#define COPROCESSOR_INSTRUCTIONS_ST_LD(two,cc) \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #4]") \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #-4]") \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #4]!") \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #-4]!") \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13], #4") \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13], #-4") \ - TEST_COPROCESSOR("stc"two" 0, cr0, [r13], {1}") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #4]") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #-4]") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #4]!") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #-4]!") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13], #4") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13], #-4") \ - TEST_COPROCESSOR("stc"two"l 0, cr0, [r13], {1}") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #4]") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #-4]") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #4]!") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #-4]!") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13], #4") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13], #-4") \ - TEST_COPROCESSOR("ldc"two" 0, cr0, [r13], {1}") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #4]") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #-4]") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #4]!") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #-4]!") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13], #4") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13], #-4") \ - TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13], {1}") \ - \ - TEST_COPROCESSOR( "stc"two" 0, cr0, [r15, #4]") \ - TEST_COPROCESSOR( "stc"two" 0, cr0, [r15, #-4]") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##daf0001) " @ stc"two" 0, cr0, [r15, #4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##d2f0001) " @ stc"two" 0, cr0, [r15, #-4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##caf0001) " @ stc"two" 0, cr0, [r15], #4") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c2f0001) " @ stc"two" 0, cr0, [r15], #-4") \ - TEST_COPROCESSOR( "stc"two" 0, cr0, [r15], {1}") \ - TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15, #4]") \ - TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15, #-4]") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##def0001) " @ stc"two"l 0, cr0, [r15, #4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##d6f0001) " @ stc"two"l 0, cr0, [r15, #-4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##cef0001) " @ stc"two"l 0, cr0, [r15], #4") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c6f0001) " @ stc"two"l 0, cr0, [r15], #-4") \ - TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15], {1}") \ - TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15, #4]") \ - TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15, #-4]") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##dbf0001) " @ ldc"two" 0, cr0, [r15, #4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##d3f0001) " @ ldc"two" 0, cr0, [r15, #-4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##cbf0001) " @ ldc"two" 0, cr0, [r15], #4") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c3f0001) " @ ldc"two" 0, cr0, [r15], #-4") \ - TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15], {1}") \ - TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15, #4]") \ - TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15, #-4]") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##dff0001) " @ ldc"two"l 0, cr0, [r15, #4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##d7f0001) " @ ldc"two"l 0, cr0, [r15, #-4]!") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##cff0001) " @ ldc"two"l 0, cr0, [r15], #4") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c7f0001) " @ ldc"two"l 0, cr0, [r15], #-4") \ - TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15], {1}") 
- -#define COPROCESSOR_INSTRUCTIONS_MC_MR(two,cc) \ - \ - TEST_COPROCESSOR( "mcrr"two" 0, 15, r0, r14, cr0") \ - TEST_COPROCESSOR( "mcrr"two" 15, 0, r14, r0, cr15") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c4f00f0) " @ mcrr"two" 0, 15, r0, r15, cr0") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c40ff0f) " @ mcrr"two" 15, 0, r15, r0, cr15") \ - TEST_COPROCESSOR( "mrrc"two" 0, 15, r0, r14, cr0") \ - TEST_COPROCESSOR( "mrrc"two" 15, 0, r14, r0, cr15") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c5f00f0) " @ mrrc"two" 0, 15, r0, r15, cr0") \ - TEST_UNSUPPORTED(__inst_arm(0x##cc##c50ff0f) " @ mrrc"two" 15, 0, r15, r0, cr15") \ - TEST_COPROCESSOR( "cdp"two" 15, 15, cr15, cr15, cr15, 7") \ - TEST_COPROCESSOR( "cdp"two" 0, 0, cr0, cr0, cr0, 0") \ - TEST_COPROCESSOR( "mcr"two" 15, 7, r15, cr15, cr15, 7") \ - TEST_COPROCESSOR( "mcr"two" 0, 0, r0, cr0, cr0, 0") \ - TEST_COPROCESSOR( "mrc"two" 15, 7, r15, cr15, cr15, 7") \ - TEST_COPROCESSOR( "mrc"two" 0, 0, r0, cr0, cr0, 0") - - COPROCESSOR_INSTRUCTIONS_ST_LD("",e) -#if __LINUX_ARM_ARCH__ >= 5 - COPROCESSOR_INSTRUCTIONS_MC_MR("",e) -#endif - TEST_UNSUPPORTED("svc 0") - TEST_UNSUPPORTED("svc 0xffffff") - - TEST_UNSUPPORTED("svc 0") - - TEST_GROUP("Unconditional instruction") - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_UNSUPPORTED("srsda sp, 0x13") - TEST_UNSUPPORTED("srsdb sp, 0x13") - TEST_UNSUPPORTED("srsia sp, 0x13") - TEST_UNSUPPORTED("srsib sp, 0x13") - TEST_UNSUPPORTED("srsda sp!, 0x13") - TEST_UNSUPPORTED("srsdb sp!, 0x13") - TEST_UNSUPPORTED("srsia sp!, 0x13") - TEST_UNSUPPORTED("srsib sp!, 0x13") - - TEST_UNSUPPORTED("rfeda sp") - TEST_UNSUPPORTED("rfedb sp") - TEST_UNSUPPORTED("rfeia sp") - TEST_UNSUPPORTED("rfeib sp") - TEST_UNSUPPORTED("rfeda sp!") - TEST_UNSUPPORTED("rfedb sp!") - TEST_UNSUPPORTED("rfeia sp!") - TEST_UNSUPPORTED("rfeib sp!") - TEST_UNSUPPORTED(__inst_arm(0xf81d0a00) " @ rfeda pc") - TEST_UNSUPPORTED(__inst_arm(0xf91d0a00) " @ rfedb pc") - TEST_UNSUPPORTED(__inst_arm(0xf89d0a00) " @ rfeia pc") - TEST_UNSUPPORTED(__inst_arm(0xf99d0a00) " @ rfeib pc") - TEST_UNSUPPORTED(__inst_arm(0xf83d0a00) " @ rfeda pc!") - TEST_UNSUPPORTED(__inst_arm(0xf93d0a00) " @ rfedb pc!") - TEST_UNSUPPORTED(__inst_arm(0xf8bd0a00) " @ rfeia pc!") - TEST_UNSUPPORTED(__inst_arm(0xf9bd0a00) " @ rfeib pc!") -#endif /* __LINUX_ARM_ARCH__ >= 6 */ - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_X( "blx __dummy_thumb_subroutine_even", - ".thumb \n\t" - ".space 4 \n\t" - ".type __dummy_thumb_subroutine_even, %%function \n\t" - "__dummy_thumb_subroutine_even: \n\t" - "mov r0, pc \n\t" - "bx lr \n\t" - ".arm \n\t" - ) - TEST( "blx __dummy_thumb_subroutine_even") - - TEST_X( "blx __dummy_thumb_subroutine_odd", - ".thumb \n\t" - ".space 2 \n\t" - ".type __dummy_thumb_subroutine_odd, %%function \n\t" - "__dummy_thumb_subroutine_odd: \n\t" - "mov r0, pc \n\t" - "bx lr \n\t" - ".arm \n\t" - ) - TEST( "blx __dummy_thumb_subroutine_odd") -#endif /* __LINUX_ARM_ARCH__ >= 6 */ - -#if __LINUX_ARM_ARCH__ >= 5 - COPROCESSOR_INSTRUCTIONS_ST_LD("2",f) -#endif -#if __LINUX_ARM_ARCH__ >= 6 - COPROCESSOR_INSTRUCTIONS_MC_MR("2",f) -#endif - - TEST_GROUP("Miscellaneous instructions, memory hints, and Advanced SIMD instructions") - -#if __LINUX_ARM_ARCH__ >= 6 - TEST_UNSUPPORTED("cps 0x13") - TEST_UNSUPPORTED("cpsie i") - TEST_UNSUPPORTED("cpsid i") - TEST_UNSUPPORTED("cpsie i,0x13") - TEST_UNSUPPORTED("cpsid i,0x13") - TEST_UNSUPPORTED("setend le") - TEST_UNSUPPORTED("setend be") -#endif - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_P("pli [r",0,0b,", #16]") - TEST( "pli [pc, #0]") - TEST_RR("pli [r",12,0b,", 
r",0, 16,"]") - TEST_RR("pli [r",0, 0b,", -r",12,16,", lsl #4]") -#endif - -#if __LINUX_ARM_ARCH__ >= 5 - TEST_P("pld [r",0,32,", #-16]") - TEST( "pld [pc, #0]") - TEST_PR("pld [r",7, 24, ", r",0, 16,"]") - TEST_PR("pld [r",8, 24, ", -r",12,16,", lsl #4]") -#endif - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_SUPPORTED( __inst_arm(0xf590f000) " @ pldw [r0, #0]") - TEST_SUPPORTED( __inst_arm(0xf797f000) " @ pldw [r7, r0]") - TEST_SUPPORTED( __inst_arm(0xf798f18c) " @ pldw [r8, r12, lsl #3]"); -#endif - -#if __LINUX_ARM_ARCH__ >= 7 - TEST_UNSUPPORTED("clrex") - TEST_UNSUPPORTED("dsb") - TEST_UNSUPPORTED("dmb") - TEST_UNSUPPORTED("isb") -#endif - - verbose("\n"); -} - diff --git a/arch/arm/kernel/kprobes-test-thumb.c b/arch/arm/kernel/kprobes-test-thumb.c deleted file mode 100644 index 844dd10d8593..000000000000 --- a/arch/arm/kernel/kprobes-test-thumb.c +++ /dev/null @@ -1,1188 +0,0 @@ -/* - * arch/arm/kernel/kprobes-test-thumb.c - * - * Copyright (C) 2011 Jon Medhurst . - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#include -#include -#include - -#include "kprobes-test.h" - - -#define TEST_ISA "16" - -#define DONT_TEST_IN_ITBLOCK(tests) \ - kprobe_test_flags |= TEST_FLAG_NO_ITBLOCK; \ - tests \ - kprobe_test_flags &= ~TEST_FLAG_NO_ITBLOCK; - -#define CONDITION_INSTRUCTIONS(cc_pos, tests) \ - kprobe_test_cc_position = cc_pos; \ - DONT_TEST_IN_ITBLOCK(tests) \ - kprobe_test_cc_position = 0; - -#define TEST_ITBLOCK(code) \ - kprobe_test_flags |= TEST_FLAG_FULL_ITBLOCK; \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - "50: nop \n\t" \ - "1: "code" \n\t" \ - " mov r1, #0x11 \n\t" \ - " mov r2, #0x22 \n\t" \ - " mov r3, #0x33 \n\t" \ - "2: nop \n\t" \ - TESTCASE_END \ - kprobe_test_flags &= ~TEST_FLAG_FULL_ITBLOCK; - -#define TEST_THUMB_TO_ARM_INTERWORK_P(code1, reg, val, code2) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_PTR(reg, val) \ - TEST_ARG_REG(14, 99f+1) \ - TEST_ARG_MEM(15, 3f) \ - TEST_ARG_END("") \ - " nop \n\t" /* To align 1f */ \ - "50: nop \n\t" \ - "1: "code1 #reg code2" \n\t" \ - " bx lr \n\t" \ - ".arm \n\t" \ - "3: adr lr, 2f+1 \n\t" \ - " bx lr \n\t" \ - ".thumb \n\t" \ - "2: nop \n\t" \ - TESTCASE_END - - -void kprobe_thumb16_test_cases(void) -{ - kprobe_test_flags = TEST_FLAG_NARROW_INSTR; - - TEST_GROUP("Shift (immediate), add, subtract, move, and compare") - - TEST_R( "lsls r7, r",0,VAL1,", #5") - TEST_R( "lsls r0, r",7,VAL2,", #11") - TEST_R( "lsrs r7, r",0,VAL1,", #5") - TEST_R( "lsrs r0, r",7,VAL2,", #11") - TEST_R( "asrs r7, r",0,VAL1,", #5") - TEST_R( "asrs r0, r",7,VAL2,", #11") - TEST_RR( "adds r2, r",0,VAL1,", r",7,VAL2,"") - TEST_RR( "adds r5, r",7,VAL2,", r",0,VAL2,"") - TEST_RR( "subs r2, r",0,VAL1,", r",7,VAL2,"") - TEST_RR( "subs r5, r",7,VAL2,", r",0,VAL2,"") - TEST_R( "adds r7, r",0,VAL1,", #5") - TEST_R( "adds r0, r",7,VAL2,", #2") - TEST_R( "subs r7, r",0,VAL1,", #5") - TEST_R( "subs r0, r",7,VAL2,", #2") - TEST( "movs.n r0, #0x5f") - TEST( "movs.n r7, #0xa0") - TEST_R( "cmp.n r",0,0x5e, ", #0x5f") - TEST_R( "cmp.n r",5,0x15f,", #0x5f") - TEST_R( "cmp.n r",7,0xa0, ", #0xa0") - TEST_R( "adds.n r",0,VAL1,", #0x5f") - TEST_R( "adds.n r",7,VAL2,", #0xa0") - TEST_R( "subs.n r",0,VAL1,", #0x5f") - TEST_R( "subs.n r",7,VAL2,", #0xa0") - - TEST_GROUP("16-bit Thumb data-processing instructions") - -#define DATA_PROCESSING16(op,val) \ - TEST_RR( op" r",0,VAL1,", r",7,val,"") \ - TEST_RR( op" r",7,VAL2,", 
r",0,val,"") - - DATA_PROCESSING16("ands",0xf00f00ff) - DATA_PROCESSING16("eors",0xf00f00ff) - DATA_PROCESSING16("lsls",11) - DATA_PROCESSING16("lsrs",11) - DATA_PROCESSING16("asrs",11) - DATA_PROCESSING16("adcs",VAL2) - DATA_PROCESSING16("sbcs",VAL2) - DATA_PROCESSING16("rors",11) - DATA_PROCESSING16("tst",0xf00f00ff) - TEST_R("rsbs r",0,VAL1,", #0") - TEST_R("rsbs r",7,VAL2,", #0") - DATA_PROCESSING16("cmp",0xf00f00ff) - DATA_PROCESSING16("cmn",0xf00f00ff) - DATA_PROCESSING16("orrs",0xf00f00ff) - DATA_PROCESSING16("muls",VAL2) - DATA_PROCESSING16("bics",0xf00f00ff) - DATA_PROCESSING16("mvns",VAL2) - - TEST_GROUP("Special data instructions and branch and exchange") - - TEST_RR( "add r",0, VAL1,", r",7,VAL2,"") - TEST_RR( "add r",3, VAL2,", r",8,VAL3,"") - TEST_RR( "add r",8, VAL3,", r",0,VAL1,"") - TEST_R( "add sp" ", r",8,-8, "") - TEST_R( "add r",14,VAL1,", pc") - TEST_BF_R("add pc" ", r",0,2f-1f-8,"") - TEST_UNSUPPORTED(__inst_thumb16(0x44ff) " @ add pc, pc") - - TEST_RR( "cmp r",3,VAL1,", r",8,VAL2,"") - TEST_RR( "cmp r",8,VAL2,", r",0,VAL1,"") - TEST_R( "cmp sp" ", r",8,-8, "") - - TEST_R( "mov r0, r",7,VAL2,"") - TEST_R( "mov r3, r",8,VAL3,"") - TEST_R( "mov r8, r",0,VAL1,"") - TEST_P( "mov sp, r",8,-8, "") - TEST( "mov lr, pc") - TEST_BF_R("mov pc, r",0,2f, "") - - TEST_BF_R("bx r",0, 2f+1,"") - TEST_BF_R("bx r",14,2f+1,"") - TESTCASE_START("bx pc") - TEST_ARG_REG(14, 99f+1) - TEST_ARG_END("") - " nop \n\t" /* To align the bx pc*/ - "50: nop \n\t" - "1: bx pc \n\t" - " bx lr \n\t" - ".arm \n\t" - " adr lr, 2f+1 \n\t" - " bx lr \n\t" - ".thumb \n\t" - "2: nop \n\t" - TESTCASE_END - - TEST_BF_R("blx r",0, 2f+1,"") - TEST_BB_R("blx r",14,2f+1,"") - TEST_UNSUPPORTED(__inst_thumb16(0x47f8) " @ blx pc") - - TEST_GROUP("Load from Literal Pool") - - TEST_X( "ldr r0, 3f", - ".align \n\t" - "3: .word "__stringify(VAL1)) - TEST_X( "ldr r7, 3f", - ".space 128 \n\t" - ".align \n\t" - "3: .word "__stringify(VAL2)) - - TEST_GROUP("16-bit Thumb Load/store instructions") - - TEST_RPR("str r",0, VAL1,", [r",1, 24,", r",2, 48,"]") - TEST_RPR("str r",7, VAL2,", [r",6, 24,", r",5, 48,"]") - TEST_RPR("strh r",0, VAL1,", [r",1, 24,", r",2, 48,"]") - TEST_RPR("strh r",7, VAL2,", [r",6, 24,", r",5, 48,"]") - TEST_RPR("strb r",0, VAL1,", [r",1, 24,", r",2, 48,"]") - TEST_RPR("strb r",7, VAL2,", [r",6, 24,", r",5, 48,"]") - TEST_PR( "ldrsb r0, [r",1, 24,", r",2, 48,"]") - TEST_PR( "ldrsb r7, [r",6, 24,", r",5, 50,"]") - TEST_PR( "ldr r0, [r",1, 24,", r",2, 48,"]") - TEST_PR( "ldr r7, [r",6, 24,", r",5, 48,"]") - TEST_PR( "ldrh r0, [r",1, 24,", r",2, 48,"]") - TEST_PR( "ldrh r7, [r",6, 24,", r",5, 50,"]") - TEST_PR( "ldrb r0, [r",1, 24,", r",2, 48,"]") - TEST_PR( "ldrb r7, [r",6, 24,", r",5, 50,"]") - TEST_PR( "ldrsh r0, [r",1, 24,", r",2, 48,"]") - TEST_PR( "ldrsh r7, [r",6, 24,", r",5, 50,"]") - - TEST_RP("str r",0, VAL1,", [r",1, 24,", #120]") - TEST_RP("str r",7, VAL2,", [r",6, 24,", #120]") - TEST_P( "ldr r0, [r",1, 24,", #120]") - TEST_P( "ldr r7, [r",6, 24,", #120]") - TEST_RP("strb r",0, VAL1,", [r",1, 24,", #30]") - TEST_RP("strb r",7, VAL2,", [r",6, 24,", #30]") - TEST_P( "ldrb r0, [r",1, 24,", #30]") - TEST_P( "ldrb r7, [r",6, 24,", #30]") - TEST_RP("strh r",0, VAL1,", [r",1, 24,", #60]") - TEST_RP("strh r",7, VAL2,", [r",6, 24,", #60]") - TEST_P( "ldrh r0, [r",1, 24,", #60]") - TEST_P( "ldrh r7, [r",6, 24,", #60]") - - TEST_R( "str r",0, VAL1,", [sp, #0]") - TEST_R( "str r",7, VAL2,", [sp, #160]") - TEST( "ldr r0, [sp, #0]") - TEST( "ldr r7, [sp, #160]") - - TEST_RP("str r",0, VAL1,", [r",0, 
24,"]") - TEST_P( "ldr r0, [r",0, 24,"]") - - TEST_GROUP("Generate PC-/SP-relative address") - - TEST("add r0, pc, #4") - TEST("add r7, pc, #1020") - TEST("add r0, sp, #4") - TEST("add r7, sp, #1020") - - TEST_GROUP("Miscellaneous 16-bit instructions") - - TEST_UNSUPPORTED( "cpsie i") - TEST_UNSUPPORTED( "cpsid i") - TEST_UNSUPPORTED( "setend le") - TEST_UNSUPPORTED( "setend be") - - TEST("add sp, #"__stringify(TEST_MEMORY_SIZE)) /* Assumes TEST_MEMORY_SIZE < 0x400 */ - TEST("sub sp, #0x7f*4") - -DONT_TEST_IN_ITBLOCK( - TEST_BF_R( "cbnz r",0,0, ", 2f") - TEST_BF_R( "cbz r",2,-1,", 2f") - TEST_BF_RX( "cbnz r",4,1, ", 2f", SPACE_0x20) - TEST_BF_RX( "cbz r",7,0, ", 2f", SPACE_0x40) -) - TEST_R("sxth r0, r",7, HH1,"") - TEST_R("sxth r7, r",0, HH2,"") - TEST_R("sxtb r0, r",7, HH1,"") - TEST_R("sxtb r7, r",0, HH2,"") - TEST_R("uxth r0, r",7, HH1,"") - TEST_R("uxth r7, r",0, HH2,"") - TEST_R("uxtb r0, r",7, HH1,"") - TEST_R("uxtb r7, r",0, HH2,"") - TEST_R("rev r0, r",7, VAL1,"") - TEST_R("rev r7, r",0, VAL2,"") - TEST_R("rev16 r0, r",7, VAL1,"") - TEST_R("rev16 r7, r",0, VAL2,"") - TEST_UNSUPPORTED(__inst_thumb16(0xba80) "") - TEST_UNSUPPORTED(__inst_thumb16(0xbabf) "") - TEST_R("revsh r0, r",7, VAL1,"") - TEST_R("revsh r7, r",0, VAL2,"") - -#define TEST_POPPC(code, offset) \ - TESTCASE_START(code) \ - TEST_ARG_PTR(13, offset) \ - TEST_ARG_END("") \ - TEST_BRANCH_F(code) \ - TESTCASE_END - - TEST("push {r0}") - TEST("push {r7}") - TEST("push {r14}") - TEST("push {r0-r7,r14}") - TEST("push {r0,r2,r4,r6,r14}") - TEST("push {r1,r3,r5,r7}") - TEST("pop {r0}") - TEST("pop {r7}") - TEST("pop {r0,r2,r4,r6}") - TEST_POPPC("pop {pc}",15*4) - TEST_POPPC("pop {r0-r7,pc}",7*4) - TEST_POPPC("pop {r1,r3,r5,r7,pc}",11*4) - TEST_THUMB_TO_ARM_INTERWORK_P("pop {pc} @ ",13,15*4,"") - TEST_THUMB_TO_ARM_INTERWORK_P("pop {r0-r7,pc} @ ",13,7*4,"") - - TEST_UNSUPPORTED("bkpt.n 0") - TEST_UNSUPPORTED("bkpt.n 255") - - TEST_SUPPORTED("yield") - TEST("sev") - TEST("nop") - TEST("wfi") - TEST_SUPPORTED("wfe") - TEST_UNSUPPORTED(__inst_thumb16(0xbf50) "") /* Unassigned hints */ - TEST_UNSUPPORTED(__inst_thumb16(0xbff0) "") /* Unassigned hints */ - -#define TEST_IT(code, code2) \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - "50: nop \n\t" \ - "1: "code" \n\t" \ - " "code2" \n\t" \ - "2: nop \n\t" \ - TESTCASE_END - -DONT_TEST_IN_ITBLOCK( - TEST_IT("it eq","moveq r0,#0") - TEST_IT("it vc","movvc r0,#0") - TEST_IT("it le","movle r0,#0") - TEST_IT("ite eq","moveq r0,#0\n\t movne r1,#1") - TEST_IT("itet vc","movvc r0,#0\n\t movvs r1,#1\n\t movvc r2,#2") - TEST_IT("itete le","movle r0,#0\n\t movgt r1,#1\n\t movle r2,#2\n\t movgt r3,#3") - TEST_IT("itttt le","movle r0,#0\n\t movle r1,#1\n\t movle r2,#2\n\t movle r3,#3") - TEST_IT("iteee le","movle r0,#0\n\t movgt r1,#1\n\t movgt r2,#2\n\t movgt r3,#3") -) - - TEST_GROUP("Load and store multiple") - - TEST_P("ldmia r",4, 16*4,"!, {r0,r7}") - TEST_P("ldmia r",7, 16*4,"!, {r0-r6}") - TEST_P("stmia r",4, 16*4,"!, {r0,r7}") - TEST_P("stmia r",0, 16*4,"!, {r0-r7}") - - TEST_GROUP("Conditional branch and Supervisor Call instructions") - -CONDITION_INSTRUCTIONS(8, - TEST_BF("beq 2f") - TEST_BB("bne 2b") - TEST_BF("bgt 2f") - TEST_BB("blt 2b") -) - TEST_UNSUPPORTED(__inst_thumb16(0xde00) "") - TEST_UNSUPPORTED(__inst_thumb16(0xdeff) "") - TEST_UNSUPPORTED("svc #0x00") - TEST_UNSUPPORTED("svc #0xff") - - TEST_GROUP("Unconditional branch") - - TEST_BF( "b 2f") - TEST_BB( "b 2b") - TEST_BF_X("b 2f", SPACE_0x400) - TEST_BB_X("b 2b", SPACE_0x400) - - TEST_GROUP("Testing instructions in IT 
blocks") - - TEST_ITBLOCK("subs.n r0, r0") - - verbose("\n"); -} - - -void kprobe_thumb32_test_cases(void) -{ - kprobe_test_flags = 0; - - TEST_GROUP("Load/store multiple") - - TEST_UNSUPPORTED("rfedb sp") - TEST_UNSUPPORTED("rfeia sp") - TEST_UNSUPPORTED("rfedb sp!") - TEST_UNSUPPORTED("rfeia sp!") - - TEST_P( "stmia r",0, 16*4,", {r0,r8}") - TEST_P( "stmia r",4, 16*4,", {r0-r12,r14}") - TEST_P( "stmia r",7, 16*4,"!, {r8-r12,r14}") - TEST_P( "stmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - - TEST_P( "ldmia r",0, 16*4,", {r0,r8}") - TEST_P( "ldmia r",4, 0, ", {r0-r12,r14}") - TEST_BF_P("ldmia r",5, 8*4, "!, {r6-r12,r15}") - TEST_P( "ldmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_BF_P("ldmia r",14,14*4,"!, {r4,pc}") - - TEST_P( "stmdb r",0, 16*4,", {r0,r8}") - TEST_P( "stmdb r",4, 16*4,", {r0-r12,r14}") - TEST_P( "stmdb r",5, 16*4,"!, {r8-r12,r14}") - TEST_P( "stmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - - TEST_P( "ldmdb r",0, 16*4,", {r0,r8}") - TEST_P( "ldmdb r",4, 16*4,", {r0-r12,r14}") - TEST_BF_P("ldmdb r",5, 16*4,"!, {r6-r12,r15}") - TEST_P( "ldmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") - TEST_BF_P("ldmdb r",14,16*4,"!, {r4,pc}") - - TEST_P( "stmdb r",13,16*4,"!, {r3-r12,lr}") - TEST_P( "stmdb r",13,16*4,"!, {r3-r12}") - TEST_P( "stmdb r",2, 16*4,", {r3-r12,lr}") - TEST_P( "stmdb r",13,16*4,"!, {r2-r12,lr}") - TEST_P( "stmdb r",0, 16*4,", {r0-r12}") - TEST_P( "stmdb r",0, 16*4,", {r0-r12,lr}") - - TEST_BF_P("ldmia r",13,5*4, "!, {r3-r12,pc}") - TEST_P( "ldmia r",13,5*4, "!, {r3-r12}") - TEST_BF_P("ldmia r",2, 5*4, "!, {r3-r12,pc}") - TEST_BF_P("ldmia r",13,4*4, "!, {r2-r12,pc}") - TEST_P( "ldmia r",0, 16*4,", {r0-r12}") - TEST_P( "ldmia r",0, 16*4,", {r0-r12,lr}") - - TEST_THUMB_TO_ARM_INTERWORK_P("ldmia r",0,14*4,", {r12,pc}") - TEST_THUMB_TO_ARM_INTERWORK_P("ldmia r",13,2*4,", {r0-r12,pc}") - - TEST_UNSUPPORTED(__inst_thumb32(0xe88f0101) " @ stmia pc, {r0,r8}") - TEST_UNSUPPORTED(__inst_thumb32(0xe92f5f00) " @ stmdb pc!, {r8-r12,r14}") - TEST_UNSUPPORTED(__inst_thumb32(0xe8bdc000) " @ ldmia r13!, {r14,pc}") - TEST_UNSUPPORTED(__inst_thumb32(0xe93ec000) " @ ldmdb r14!, {r14,pc}") - TEST_UNSUPPORTED(__inst_thumb32(0xe8a73f00) " @ stmia r7!, {r8-r12,sp}") - TEST_UNSUPPORTED(__inst_thumb32(0xe8a79f00) " @ stmia r7!, {r8-r12,pc}") - TEST_UNSUPPORTED(__inst_thumb32(0xe93e2010) " @ ldmdb r14!, {r4,sp}") - - TEST_GROUP("Load/store double or exclusive, table branch") - - TEST_P( "ldrd r0, r1, [r",1, 24,", #-16]") - TEST( "ldrd r12, r14, [sp, #16]") - TEST_P( "ldrd r1, r0, [r",7, 24,", #-16]!") - TEST( "ldrd r14, r12, [sp, #16]!") - TEST_P( "ldrd r1, r0, [r",7, 24,"], #16") - TEST( "ldrd r7, r8, [sp], #-16") - - TEST_X( "ldrd r12, r14, 3f", - ".align 3 \n\t" - "3: .word "__stringify(VAL1)" \n\t" - " .word "__stringify(VAL2)) - - TEST_UNSUPPORTED(__inst_thumb32(0xe9ffec04) " @ ldrd r14, r12, [pc, #16]!") - TEST_UNSUPPORTED(__inst_thumb32(0xe8ffec04) " @ ldrd r14, r12, [pc], #16") - TEST_UNSUPPORTED(__inst_thumb32(0xe9d4d800) " @ ldrd sp, r8, [r4]") - TEST_UNSUPPORTED(__inst_thumb32(0xe9d4f800) " @ ldrd pc, r8, [r4]") - TEST_UNSUPPORTED(__inst_thumb32(0xe9d47d00) " @ ldrd r7, sp, [r4]") - TEST_UNSUPPORTED(__inst_thumb32(0xe9d47f00) " @ ldrd r7, pc, [r4]") - - TEST_RRP("strd r",0, VAL1,", r",1, VAL2,", [r",1, 24,", #-16]") - TEST_RR( "strd r",12,VAL2,", r",14,VAL1,", [sp, #16]") - TEST_RRP("strd r",1, VAL1,", r",0, VAL2,", [r",7, 24,", #-16]!") - TEST_RR( "strd r",14,VAL2,", r",12,VAL1,", [sp, #16]!") - TEST_RRP("strd r",1, VAL1,", r",0, VAL2,", [r",7, 24,"], #16") - TEST_RR( 
"strd r",7, VAL2,", r",8, VAL1,", [sp], #-16") - TEST_UNSUPPORTED(__inst_thumb32(0xe9efec04) " @ strd r14, r12, [pc, #16]!") - TEST_UNSUPPORTED(__inst_thumb32(0xe8efec04) " @ strd r14, r12, [pc], #16") - - TEST_RX("tbb [pc, r",0, (9f-(1f+4)),"]", - "9: \n\t" - ".byte (2f-1b-4)>>1 \n\t" - ".byte (3f-1b-4)>>1 \n\t" - "3: mvn r0, r0 \n\t" - "2: nop \n\t") - - TEST_RX("tbb [pc, r",4, (9f-(1f+4)+1),"]", - "9: \n\t" - ".byte (2f-1b-4)>>1 \n\t" - ".byte (3f-1b-4)>>1 \n\t" - "3: mvn r0, r0 \n\t" - "2: nop \n\t") - - TEST_RRX("tbb [r",1,9f,", r",2,0,"]", - "9: \n\t" - ".byte (2f-1b-4)>>1 \n\t" - ".byte (3f-1b-4)>>1 \n\t" - "3: mvn r0, r0 \n\t" - "2: nop \n\t") - - TEST_RX("tbh [pc, r",7, (9f-(1f+4))>>1,"]", - "9: \n\t" - ".short (2f-1b-4)>>1 \n\t" - ".short (3f-1b-4)>>1 \n\t" - "3: mvn r0, r0 \n\t" - "2: nop \n\t") - - TEST_RX("tbh [pc, r",12, ((9f-(1f+4))>>1)+1,"]", - "9: \n\t" - ".short (2f-1b-4)>>1 \n\t" - ".short (3f-1b-4)>>1 \n\t" - "3: mvn r0, r0 \n\t" - "2: nop \n\t") - - TEST_RRX("tbh [r",1,9f, ", r",14,1,"]", - "9: \n\t" - ".short (2f-1b-4)>>1 \n\t" - ".short (3f-1b-4)>>1 \n\t" - "3: mvn r0, r0 \n\t" - "2: nop \n\t") - - TEST_UNSUPPORTED(__inst_thumb32(0xe8d1f01f) " @ tbh [r1, pc]") - TEST_UNSUPPORTED(__inst_thumb32(0xe8d1f01d) " @ tbh [r1, sp]") - TEST_UNSUPPORTED(__inst_thumb32(0xe8ddf012) " @ tbh [sp, r2]") - - TEST_UNSUPPORTED("strexb r0, r1, [r2]") - TEST_UNSUPPORTED("strexh r0, r1, [r2]") - TEST_UNSUPPORTED("strexd r0, r1, [r2]") - TEST_UNSUPPORTED("ldrexb r0, [r1]") - TEST_UNSUPPORTED("ldrexh r0, [r1]") - TEST_UNSUPPORTED("ldrexd r0, [r1]") - - TEST_GROUP("Data-processing (shifted register) and (modified immediate)") - -#define _DATA_PROCESSING32_DNM(op,s,val) \ - TEST_RR(op s".w r0, r",1, VAL1,", r",2, val, "") \ - TEST_RR(op s" r1, r",1, VAL1,", r",2, val, ", lsl #3") \ - TEST_RR(op s" r2, r",3, VAL1,", r",2, val, ", lsr #4") \ - TEST_RR(op s" r3, r",3, VAL1,", r",2, val, ", asr #5") \ - TEST_RR(op s" r4, r",5, VAL1,", r",2, N(val),", asr #6") \ - TEST_RR(op s" r5, r",5, VAL1,", r",2, val, ", ror #7") \ - TEST_RR(op s" r8, r",9, VAL1,", r",10,val, ", rrx") \ - TEST_R( op s" r0, r",11,VAL1,", #0x00010001") \ - TEST_R( op s" r11, r",0, VAL1,", #0xf5000000") \ - TEST_R( op s" r7, r",8, VAL2,", #0x000af000") - -#define DATA_PROCESSING32_DNM(op,val) \ - _DATA_PROCESSING32_DNM(op,"",val) \ - _DATA_PROCESSING32_DNM(op,"s",val) - -#define DATA_PROCESSING32_NM(op,val) \ - TEST_RR(op".w r",1, VAL1,", r",2, val, "") \ - TEST_RR(op" r",1, VAL1,", r",2, val, ", lsl #3") \ - TEST_RR(op" r",3, VAL1,", r",2, val, ", lsr #4") \ - TEST_RR(op" r",3, VAL1,", r",2, val, ", asr #5") \ - TEST_RR(op" r",5, VAL1,", r",2, N(val),", asr #6") \ - TEST_RR(op" r",5, VAL1,", r",2, val, ", ror #7") \ - TEST_RR(op" r",9, VAL1,", r",10,val, ", rrx") \ - TEST_R( op" r",11,VAL1,", #0x00010001") \ - TEST_R( op" r",0, VAL1,", #0xf5000000") \ - TEST_R( op" r",8, VAL2,", #0x000af000") - -#define _DATA_PROCESSING32_DM(op,s,val) \ - TEST_R( op s".w r0, r",14, val, "") \ - TEST_R( op s" r1, r",12, val, ", lsl #3") \ - TEST_R( op s" r2, r",11, val, ", lsr #4") \ - TEST_R( op s" r3, r",10, val, ", asr #5") \ - TEST_R( op s" r4, r",9, N(val),", asr #6") \ - TEST_R( op s" r5, r",8, val, ", ror #7") \ - TEST_R( op s" r8, r",7,val, ", rrx") \ - TEST( op s" r0, #0x00010001") \ - TEST( op s" r11, #0xf5000000") \ - TEST( op s" r7, #0x000af000") \ - TEST( op s" r4, #0x00005a00") - -#define DATA_PROCESSING32_DM(op,val) \ - _DATA_PROCESSING32_DM(op,"",val) \ - _DATA_PROCESSING32_DM(op,"s",val) - - 
DATA_PROCESSING32_DNM("and",0xf00f00ff) - DATA_PROCESSING32_NM("tst",0xf00f00ff) - DATA_PROCESSING32_DNM("bic",0xf00f00ff) - DATA_PROCESSING32_DNM("orr",0xf00f00ff) - DATA_PROCESSING32_DM("mov",VAL2) - DATA_PROCESSING32_DNM("orn",0xf00f00ff) - DATA_PROCESSING32_DM("mvn",VAL2) - DATA_PROCESSING32_DNM("eor",0xf00f00ff) - DATA_PROCESSING32_NM("teq",0xf00f00ff) - DATA_PROCESSING32_DNM("add",VAL2) - DATA_PROCESSING32_NM("cmn",VAL2) - DATA_PROCESSING32_DNM("adc",VAL2) - DATA_PROCESSING32_DNM("sbc",VAL2) - DATA_PROCESSING32_DNM("sub",VAL2) - DATA_PROCESSING32_NM("cmp",VAL2) - DATA_PROCESSING32_DNM("rsb",VAL2) - - TEST_RR("pkhbt r0, r",0, HH1,", r",1, HH2,"") - TEST_RR("pkhbt r14,r",12, HH1,", r",10,HH2,", lsl #2") - TEST_RR("pkhtb r0, r",0, HH1,", r",1, HH2,"") - TEST_RR("pkhtb r14,r",12, HH1,", r",10,HH2,", asr #2") - - TEST_UNSUPPORTED(__inst_thumb32(0xea170f0d) " @ tst.w r7, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xea170f0f) " @ tst.w r7, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xea1d0f07) " @ tst.w sp, r7") - TEST_UNSUPPORTED(__inst_thumb32(0xea1f0f07) " @ tst.w pc, r7") - TEST_UNSUPPORTED(__inst_thumb32(0xf01d1f08) " @ tst sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf01f1f08) " @ tst pc, #0x00080008") - - TEST_UNSUPPORTED(__inst_thumb32(0xea970f0d) " @ teq.w r7, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xea970f0f) " @ teq.w r7, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xea9d0f07) " @ teq.w sp, r7") - TEST_UNSUPPORTED(__inst_thumb32(0xea9f0f07) " @ teq.w pc, r7") - TEST_UNSUPPORTED(__inst_thumb32(0xf09d1f08) " @ tst sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf09f1f08) " @ tst pc, #0x00080008") - - TEST_UNSUPPORTED(__inst_thumb32(0xeb170f0d) " @ cmn.w r7, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xeb170f0f) " @ cmn.w r7, pc") - TEST_P("cmn.w sp, r",7,0,"") - TEST_UNSUPPORTED(__inst_thumb32(0xeb1f0f07) " @ cmn.w pc, r7") - TEST( "cmn sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf11f1f08) " @ cmn pc, #0x00080008") - - TEST_UNSUPPORTED(__inst_thumb32(0xebb70f0d) " @ cmp.w r7, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xebb70f0f) " @ cmp.w r7, pc") - TEST_P("cmp.w sp, r",7,0,"") - TEST_UNSUPPORTED(__inst_thumb32(0xebbf0f07) " @ cmp.w pc, r7") - TEST( "cmp sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf1bf1f08) " @ cmp pc, #0x00080008") - - TEST_UNSUPPORTED(__inst_thumb32(0xea5f070d) " @ movs.w r7, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xea5f070f) " @ movs.w r7, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xea5f0d07) " @ movs.w sp, r7") - TEST_UNSUPPORTED(__inst_thumb32(0xea4f0f07) " @ mov.w pc, r7") - TEST_UNSUPPORTED(__inst_thumb32(0xf04f1d08) " @ mov sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf04f1f08) " @ mov pc, #0x00080008") - - TEST_R("add.w r0, sp, r",1, 4,"") - TEST_R("adds r0, sp, r",1, 4,", asl #3") - TEST_R("add r0, sp, r",1, 4,", asl #4") - TEST_R("add r0, sp, r",1, 16,", ror #1") - TEST_R("add.w sp, sp, r",1, 4,"") - TEST_R("add sp, sp, r",1, 4,", asl #3") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d1d01) " @ add sp, sp, r1, asl #4") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d71) " @ add sp, sp, r1, ror #1") - TEST( "add.w r0, sp, #24") - TEST( "add.w sp, sp, #24") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0f01) " @ add pc, sp, r1") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d000f) " @ add r0, sp, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d000d) " @ add r0, sp, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d0f) " @ add sp, sp, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d0d) " @ add sp, sp, sp") - - TEST_R("sub.w r0, sp, r",1, 4,"") - TEST_R("subs r0, sp, 
r",1, 4,", asl #3") - TEST_R("sub r0, sp, r",1, 4,", asl #4") - TEST_R("sub r0, sp, r",1, 16,", ror #1") - TEST_R("sub.w sp, sp, r",1, 4,"") - TEST_R("sub sp, sp, r",1, 4,", asl #3") - TEST_UNSUPPORTED(__inst_thumb32(0xebad1d01) " @ sub sp, sp, r1, asl #4") - TEST_UNSUPPORTED(__inst_thumb32(0xebad0d71) " @ sub sp, sp, r1, ror #1") - TEST_UNSUPPORTED(__inst_thumb32(0xebad0f01) " @ sub pc, sp, r1") - TEST( "sub.w r0, sp, #24") - TEST( "sub.w sp, sp, #24") - - TEST_UNSUPPORTED(__inst_thumb32(0xea02010f) " @ and r1, r2, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xea0f0103) " @ and r1, pc, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xea020f03) " @ and pc, r2, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xea02010d) " @ and r1, r2, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xea0d0103) " @ and r1, sp, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xea020d03) " @ and sp, r2, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xf00d1108) " @ and r1, sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf00f1108) " @ and r1, pc, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf0021d08) " @ and sp, r8, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf0021f08) " @ and pc, r8, #0x00080008") - - TEST_UNSUPPORTED(__inst_thumb32(0xeb02010f) " @ add r1, r2, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xeb0f0103) " @ add r1, pc, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xeb020f03) " @ add pc, r2, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xeb02010d) " @ add r1, r2, sp") - TEST_SUPPORTED( __inst_thumb32(0xeb0d0103) " @ add r1, sp, r3") - TEST_UNSUPPORTED(__inst_thumb32(0xeb020d03) " @ add sp, r2, r3") - TEST_SUPPORTED( __inst_thumb32(0xf10d1108) " @ add r1, sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf10d1f08) " @ add pc, sp, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf10f1108) " @ add r1, pc, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf1021d08) " @ add sp, r8, #0x00080008") - TEST_UNSUPPORTED(__inst_thumb32(0xf1021f08) " @ add pc, r8, #0x00080008") - - TEST_UNSUPPORTED(__inst_thumb32(0xeaa00000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xeaf00000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xeb200000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xeb800000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xebe00000) "") - - TEST_UNSUPPORTED(__inst_thumb32(0xf0a00000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf0c00000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf0f00000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf1200000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf1800000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf1e00000) "") - - TEST_GROUP("Coprocessor instructions") - - TEST_UNSUPPORTED(__inst_thumb32(0xec000000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xeff00000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xfc000000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xfff00000) "") - - TEST_GROUP("Data-processing (plain binary immediate)") - - TEST_R("addw r0, r",1, VAL1,", #0x123") - TEST( "addw r14, sp, #0xf5a") - TEST( "addw sp, sp, #0x20") - TEST( "addw r7, pc, #0x888") - TEST_UNSUPPORTED(__inst_thumb32(0xf20f1f20) " @ addw pc, pc, #0x120") - TEST_UNSUPPORTED(__inst_thumb32(0xf20d1f20) " @ addw pc, sp, #0x120") - TEST_UNSUPPORTED(__inst_thumb32(0xf20f1d20) " @ addw sp, pc, #0x120") - TEST_UNSUPPORTED(__inst_thumb32(0xf2001d20) " @ addw sp, r0, #0x120") - - TEST_R("subw r0, r",1, VAL1,", #0x123") - TEST( "subw r14, sp, #0xf5a") - TEST( "subw sp, sp, #0x20") - TEST( "subw r7, pc, #0x888") - TEST_UNSUPPORTED(__inst_thumb32(0xf2af1f20) " @ subw pc, pc, #0x120") - TEST_UNSUPPORTED(__inst_thumb32(0xf2ad1f20) " @ subw pc, sp, #0x120") - 
TEST_UNSUPPORTED(__inst_thumb32(0xf2af1d20) " @ subw sp, pc, #0x120") - TEST_UNSUPPORTED(__inst_thumb32(0xf2a01d20) " @ subw sp, r0, #0x120") - - TEST("movw r0, #0") - TEST("movw r0, #0xffff") - TEST("movw lr, #0xffff") - TEST_UNSUPPORTED(__inst_thumb32(0xf2400d00) " @ movw sp, #0") - TEST_UNSUPPORTED(__inst_thumb32(0xf2400f00) " @ movw pc, #0") - - TEST_R("movt r",0, VAL1,", #0") - TEST_R("movt r",0, VAL2,", #0xffff") - TEST_R("movt r",14,VAL1,", #0xffff") - TEST_UNSUPPORTED(__inst_thumb32(0xf2c00d00) " @ movt sp, #0") - TEST_UNSUPPORTED(__inst_thumb32(0xf2c00f00) " @ movt pc, #0") - - TEST_R( "ssat r0, #24, r",0, VAL1,"") - TEST_R( "ssat r14, #24, r",12, VAL2,"") - TEST_R( "ssat r0, #24, r",0, VAL1,", lsl #8") - TEST_R( "ssat r14, #24, r",12, VAL2,", asr #8") - TEST_UNSUPPORTED(__inst_thumb32(0xf30c0d17) " @ ssat sp, #24, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf30c0f17) " @ ssat pc, #24, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf30d0c17) " @ ssat r12, #24, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xf30f0c17) " @ ssat r12, #24, pc") - - TEST_R( "usat r0, #24, r",0, VAL1,"") - TEST_R( "usat r14, #24, r",12, VAL2,"") - TEST_R( "usat r0, #24, r",0, VAL1,", lsl #8") - TEST_R( "usat r14, #24, r",12, VAL2,", asr #8") - TEST_UNSUPPORTED(__inst_thumb32(0xf38c0d17) " @ usat sp, #24, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf38c0f17) " @ usat pc, #24, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf38d0c17) " @ usat r12, #24, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xf38f0c17) " @ usat r12, #24, pc") - - TEST_R( "ssat16 r0, #12, r",0, HH1,"") - TEST_R( "ssat16 r14, #12, r",12, HH2,"") - TEST_UNSUPPORTED(__inst_thumb32(0xf32c0d0b) " @ ssat16 sp, #12, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf32c0f0b) " @ ssat16 pc, #12, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf32d0c0b) " @ ssat16 r12, #12, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xf32f0c0b) " @ ssat16 r12, #12, pc") - - TEST_R( "usat16 r0, #12, r",0, HH1,"") - TEST_R( "usat16 r14, #12, r",12, HH2,"") - TEST_UNSUPPORTED(__inst_thumb32(0xf3ac0d0b) " @ usat16 sp, #12, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf3ac0f0b) " @ usat16 pc, #12, r12") - TEST_UNSUPPORTED(__inst_thumb32(0xf3ad0c0b) " @ usat16 r12, #12, sp") - TEST_UNSUPPORTED(__inst_thumb32(0xf3af0c0b) " @ usat16 r12, #12, pc") - - TEST_R( "sbfx r0, r",0 , VAL1,", #0, #31") - TEST_R( "sbfx r14, r",12, VAL2,", #8, #16") - TEST_R( "sbfx r4, r",10, VAL1,", #16, #15") - TEST_UNSUPPORTED(__inst_thumb32(0xf34c2d0f) " @ sbfx sp, r12, #8, #16") - TEST_UNSUPPORTED(__inst_thumb32(0xf34c2f0f) " @ sbfx pc, r12, #8, #16") - TEST_UNSUPPORTED(__inst_thumb32(0xf34d2c0f) " @ sbfx r12, sp, #8, #16") - TEST_UNSUPPORTED(__inst_thumb32(0xf34f2c0f) " @ sbfx r12, pc, #8, #16") - - TEST_R( "ubfx r0, r",0 , VAL1,", #0, #31") - TEST_R( "ubfx r14, r",12, VAL2,", #8, #16") - TEST_R( "ubfx r4, r",10, VAL1,", #16, #15") - TEST_UNSUPPORTED(__inst_thumb32(0xf3cc2d0f) " @ ubfx sp, r12, #8, #16") - TEST_UNSUPPORTED(__inst_thumb32(0xf3cc2f0f) " @ ubfx pc, r12, #8, #16") - TEST_UNSUPPORTED(__inst_thumb32(0xf3cd2c0f) " @ ubfx r12, sp, #8, #16") - TEST_UNSUPPORTED(__inst_thumb32(0xf3cf2c0f) " @ ubfx r12, pc, #8, #16") - - TEST_R( "bfc r",0, VAL1,", #4, #20") - TEST_R( "bfc r",14,VAL2,", #4, #20") - TEST_R( "bfc r",7, VAL1,", #0, #31") - TEST_R( "bfc r",8, VAL2,", #0, #31") - TEST_UNSUPPORTED(__inst_thumb32(0xf36f0d1e) " @ bfc sp, #0, #31") - TEST_UNSUPPORTED(__inst_thumb32(0xf36f0f1e) " @ bfc pc, #0, #31") - - TEST_RR( "bfi r",0, VAL1,", r",0 , VAL2,", #0, #31") - TEST_RR( "bfi r",12,VAL1,", r",14 , VAL2,", #4, #20") - 
TEST_UNSUPPORTED(__inst_thumb32(0xf36e1d17) " @ bfi sp, r14, #4, #20") - TEST_UNSUPPORTED(__inst_thumb32(0xf36e1f17) " @ bfi pc, r14, #4, #20") - TEST_UNSUPPORTED(__inst_thumb32(0xf36d1e17) " @ bfi r14, sp, #4, #20") - - TEST_GROUP("Branches and miscellaneous control") - -CONDITION_INSTRUCTIONS(22, - TEST_BF("beq.w 2f") - TEST_BB("bne.w 2b") - TEST_BF("bgt.w 2f") - TEST_BB("blt.w 2b") - TEST_BF_X("bpl.w 2f", SPACE_0x1000) -) - - TEST_UNSUPPORTED("msr cpsr, r0") - TEST_UNSUPPORTED("msr cpsr_f, r1") - TEST_UNSUPPORTED("msr spsr, r2") - - TEST_UNSUPPORTED("cpsie.w i") - TEST_UNSUPPORTED("cpsid.w i") - TEST_UNSUPPORTED("cps 0x13") - - TEST_SUPPORTED("yield.w") - TEST("sev.w") - TEST("nop.w") - TEST("wfi.w") - TEST_SUPPORTED("wfe.w") - TEST_UNSUPPORTED("dbg.w #0") - - TEST_UNSUPPORTED("clrex") - TEST_UNSUPPORTED("dsb") - TEST_UNSUPPORTED("dmb") - TEST_UNSUPPORTED("isb") - - TEST_UNSUPPORTED("bxj r0") - - TEST_UNSUPPORTED("subs pc, lr, #4") - - TEST("mrs r0, cpsr") - TEST("mrs r14, cpsr") - TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8d00) " @ mrs sp, spsr") - TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8f00) " @ mrs pc, spsr") - TEST_UNSUPPORTED("mrs r0, spsr") - TEST_UNSUPPORTED("mrs lr, spsr") - - TEST_UNSUPPORTED(__inst_thumb32(0xf7f08000) " @ smc #0") - - TEST_UNSUPPORTED(__inst_thumb32(0xf7f0a000) " @ undefeined") - - TEST_BF( "b.w 2f") - TEST_BB( "b.w 2b") - TEST_BF_X("b.w 2f", SPACE_0x1000) - - TEST_BF( "bl.w 2f") - TEST_BB( "bl.w 2b") - TEST_BB_X("bl.w 2b", SPACE_0x1000) - - TEST_X( "blx __dummy_arm_subroutine", - ".arm \n\t" - ".align \n\t" - ".type __dummy_arm_subroutine, %%function \n\t" - "__dummy_arm_subroutine: \n\t" - "mov r0, pc \n\t" - "bx lr \n\t" - ".thumb \n\t" - ) - TEST( "blx __dummy_arm_subroutine") - - TEST_GROUP("Store single data item") - -#define SINGLE_STORE(size) \ - TEST_RP( "str"size" r",0, VAL1,", [r",11,-1024,", #1024]") \ - TEST_RP( "str"size" r",14,VAL2,", [r",1, -1024,", #1080]") \ - TEST_RP( "str"size" r",0, VAL1,", [r",11,256, ", #-120]") \ - TEST_RP( "str"size" r",14,VAL2,", [r",1, 256, ", #-128]") \ - TEST_RP( "str"size" r",0, VAL1,", [r",11,24, "], #120") \ - TEST_RP( "str"size" r",14,VAL2,", [r",1, 24, "], #128") \ - TEST_RP( "str"size" r",0, VAL1,", [r",11,24, "], #-120") \ - TEST_RP( "str"size" r",14,VAL2,", [r",1, 24, "], #-128") \ - TEST_RP( "str"size" r",0, VAL1,", [r",11,24, ", #120]!") \ - TEST_RP( "str"size" r",14,VAL2,", [r",1, 24, ", #128]!") \ - TEST_RP( "str"size" r",0, VAL1,", [r",11,256, ", #-120]!") \ - TEST_RP( "str"size" r",14,VAL2,", [r",1, 256, ", #-128]!") \ - TEST_RPR("str"size".w r",0, VAL1,", [r",1, 0,", r",2, 4,"]") \ - TEST_RPR("str"size" r",14,VAL2,", [r",10,0,", r",11,4,", lsl #1]") \ - TEST_R( "str"size".w r",7, VAL1,", [sp, #24]") \ - TEST_RP( "str"size".w r",0, VAL2,", [r",0,0, "]") \ - TEST_UNSUPPORTED("str"size"t r0, [r1, #4]") - - SINGLE_STORE("b") - SINGLE_STORE("h") - SINGLE_STORE("") - - TEST("str sp, [sp]") - TEST_UNSUPPORTED(__inst_thumb32(0xf8cfe000) " @ str r14, [pc]") - TEST_UNSUPPORTED(__inst_thumb32(0xf8cef000) " @ str pc, [r14]") - - TEST_GROUP("Advanced SIMD element or structure load/store instructions") - - TEST_UNSUPPORTED(__inst_thumb32(0xf9000000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf92fffff) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf9800000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xf9efffff) "") - - TEST_GROUP("Load single data item and memory hints") - -#define SINGLE_LOAD(size) \ - TEST_P( "ldr"size" r0, [r",11,-1024, ", #1024]") \ - TEST_P( "ldr"size" r14, [r",1, -1024,", #1080]") \ - TEST_P( "ldr"size" r0, 
[r",11,256, ", #-120]") \ - TEST_P( "ldr"size" r14, [r",1, 256, ", #-128]") \ - TEST_P( "ldr"size" r0, [r",11,24, "], #120") \ - TEST_P( "ldr"size" r14, [r",1, 24, "], #128") \ - TEST_P( "ldr"size" r0, [r",11,24, "], #-120") \ - TEST_P( "ldr"size" r14, [r",1,24, "], #-128") \ - TEST_P( "ldr"size" r0, [r",11,24, ", #120]!") \ - TEST_P( "ldr"size" r14, [r",1, 24, ", #128]!") \ - TEST_P( "ldr"size" r0, [r",11,256, ", #-120]!") \ - TEST_P( "ldr"size" r14, [r",1, 256, ", #-128]!") \ - TEST_PR("ldr"size".w r0, [r",1, 0,", r",2, 4,"]") \ - TEST_PR("ldr"size" r14, [r",10,0,", r",11,4,", lsl #1]") \ - TEST_X( "ldr"size".w r0, 3f", \ - ".align 3 \n\t" \ - "3: .word "__stringify(VAL1)) \ - TEST_X( "ldr"size".w r14, 3f", \ - ".align 3 \n\t" \ - "3: .word "__stringify(VAL2)) \ - TEST( "ldr"size".w r7, 3b") \ - TEST( "ldr"size".w r7, [sp, #24]") \ - TEST_P( "ldr"size".w r0, [r",0,0, "]") \ - TEST_UNSUPPORTED("ldr"size"t r0, [r1, #4]") - - SINGLE_LOAD("b") - SINGLE_LOAD("sb") - SINGLE_LOAD("h") - SINGLE_LOAD("sh") - SINGLE_LOAD("") - - TEST_BF_P("ldr pc, [r",14, 15*4,"]") - TEST_P( "ldr sp, [r",14, 13*4,"]") - TEST_BF_R("ldr pc, [sp, r",14, 15*4,"]") - TEST_R( "ldr sp, [sp, r",14, 13*4,"]") - TEST_THUMB_TO_ARM_INTERWORK_P("ldr pc, [r",0,0,", #15*4]") - TEST_SUPPORTED("ldr sp, 99f") - TEST_SUPPORTED("ldr pc, 99f") - - TEST_UNSUPPORTED(__inst_thumb32(0xf854700d) " @ ldr r7, [r4, sp]") - TEST_UNSUPPORTED(__inst_thumb32(0xf854700f) " @ ldr r7, [r4, pc]") - TEST_UNSUPPORTED(__inst_thumb32(0xf814700d) " @ ldrb r7, [r4, sp]") - TEST_UNSUPPORTED(__inst_thumb32(0xf814700f) " @ ldrb r7, [r4, pc]") - TEST_UNSUPPORTED(__inst_thumb32(0xf89fd004) " @ ldrb sp, 99f") - TEST_UNSUPPORTED(__inst_thumb32(0xf814d008) " @ ldrb sp, [r4, r8]") - TEST_UNSUPPORTED(__inst_thumb32(0xf894d000) " @ ldrb sp, [r4]") - - TEST_UNSUPPORTED(__inst_thumb32(0xf8600000) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xf9ffffff) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xf9500000) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xf95fffff) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xf8000800) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xf97ffaff) "") /* Unallocated space */ - - TEST( "pli [pc, #4]") - TEST( "pli [pc, #-4]") - TEST( "pld [pc, #4]") - TEST( "pld [pc, #-4]") - - TEST_P( "pld [r",0,-1024,", #1024]") - TEST( __inst_thumb32(0xf8b0f400) " @ pldw [r0, #1024]") - TEST_P( "pli [r",4, 0b,", #1024]") - TEST_P( "pld [r",7, 120,", #-120]") - TEST( __inst_thumb32(0xf837fc78) " @ pldw [r7, #-120]") - TEST_P( "pli [r",11,120,", #-120]") - TEST( "pld [sp, #0]") - - TEST_PR("pld [r",7, 24, ", r",0, 16,"]") - TEST_PR("pld [r",8, 24, ", r",12,16,", lsl #3]") - TEST_SUPPORTED(__inst_thumb32(0xf837f000) " @ pldw [r7, r0]") - TEST_SUPPORTED(__inst_thumb32(0xf838f03c) " @ pldw [r8, r12, lsl #3]"); - TEST_RR("pli [r",12,0b,", r",0, 16,"]") - TEST_RR("pli [r",0, 0b,", r",12,16,", lsl #3]") - TEST_R( "pld [sp, r",1, 16,"]") - TEST_UNSUPPORTED(__inst_thumb32(0xf817f00d) " @pld [r7, sp]") - TEST_UNSUPPORTED(__inst_thumb32(0xf817f00f) " @pld [r7, pc]") - - TEST_GROUP("Data-processing (register)") - -#define SHIFTS32(op) \ - TEST_RR(op" r0, r",1, VAL1,", r",2, 3, "") \ - TEST_RR(op" r14, r",12,VAL2,", r",11,10,"") - - SHIFTS32("lsl") - SHIFTS32("lsls") - SHIFTS32("lsr") - SHIFTS32("lsrs") - SHIFTS32("asr") - SHIFTS32("asrs") - SHIFTS32("ror") - SHIFTS32("rors") - - TEST_UNSUPPORTED(__inst_thumb32(0xfa01ff02) " @ lsl pc, r1, r2") - 
TEST_UNSUPPORTED(__inst_thumb32(0xfa01fd02) " @ lsl sp, r1, r2") - TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff002) " @ lsl r0, pc, r2") - TEST_UNSUPPORTED(__inst_thumb32(0xfa0df002) " @ lsl r0, sp, r2") - TEST_UNSUPPORTED(__inst_thumb32(0xfa01f00f) " @ lsl r0, r1, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xfa01f00d) " @ lsl r0, r1, sp") - - TEST_RR( "sxtah r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sxtah r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "sxth r8, r",7, HH1,"") - - TEST_UNSUPPORTED(__inst_thumb32(0xfa0fff87) " @ sxth pc, r7"); - TEST_UNSUPPORTED(__inst_thumb32(0xfa0ffd87) " @ sxth sp, r7"); - TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff88f) " @ sxth r8, pc"); - TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff88d) " @ sxth r8, sp"); - - TEST_RR( "uxtah r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uxtah r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "uxth r8, r",7, HH1,"") - - TEST_RR( "sxtab16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "sxtb16 r8, r",7, HH1,"") - - TEST_RR( "uxtab16 r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "uxtb16 r8, r",7, HH1,"") - - TEST_RR( "sxtab r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "sxtab r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "sxtb r8, r",7, HH1,"") - - TEST_RR( "uxtab r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "uxtab r14,r",12, HH2,", r",10,HH1,", ror #8") - TEST_R( "uxtb r8, r",7, HH1,"") - - TEST_UNSUPPORTED(__inst_thumb32(0xfa6000f0) "") - TEST_UNSUPPORTED(__inst_thumb32(0xfa7fffff) "") - -#define PARALLEL_ADD_SUB(op) \ - TEST_RR( op"add16 r0, r",0, HH1,", r",1, HH2,"") \ - TEST_RR( op"add16 r14, r",12,HH2,", r",10,HH1,"") \ - TEST_RR( op"asx r0, r",0, HH1,", r",1, HH2,"") \ - TEST_RR( op"asx r14, r",12,HH2,", r",10,HH1,"") \ - TEST_RR( op"sax r0, r",0, HH1,", r",1, HH2,"") \ - TEST_RR( op"sax r14, r",12,HH2,", r",10,HH1,"") \ - TEST_RR( op"sub16 r0, r",0, HH1,", r",1, HH2,"") \ - TEST_RR( op"sub16 r14, r",12,HH2,", r",10,HH1,"") \ - TEST_RR( op"add8 r0, r",0, HH1,", r",1, HH2,"") \ - TEST_RR( op"add8 r14, r",12,HH2,", r",10,HH1,"") \ - TEST_RR( op"sub8 r0, r",0, HH1,", r",1, HH2,"") \ - TEST_RR( op"sub8 r14, r",12,HH2,", r",10,HH1,"") - - TEST_GROUP("Parallel addition and subtraction, signed") - - PARALLEL_ADD_SUB("s") - PARALLEL_ADD_SUB("q") - PARALLEL_ADD_SUB("sh") - - TEST_GROUP("Parallel addition and subtraction, unsigned") - - PARALLEL_ADD_SUB("u") - PARALLEL_ADD_SUB("uq") - PARALLEL_ADD_SUB("uh") - - TEST_GROUP("Miscellaneous operations") - - TEST_RR("qadd r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR("qadd lr, r",9, VAL2,", r",8, VAL1,"") - TEST_RR("qsub r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR("qsub lr, r",9, VAL2,", r",8, VAL1,"") - TEST_RR("qdadd r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR("qdadd lr, r",9, VAL2,", r",8, VAL1,"") - TEST_RR("qdsub r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR("qdsub lr, r",9, VAL2,", r",8, VAL1,"") - - TEST_R("rev.w r0, r",0, VAL1,"") - TEST_R("rev r14, r",12, VAL2,"") - TEST_R("rev16.w r0, r",0, VAL1,"") - TEST_R("rev16 r14, r",12, VAL2,"") - TEST_R("rbit r0, r",0, VAL1,"") - TEST_R("rbit r14, r",12, VAL2,"") - TEST_R("revsh.w r0, r",0, VAL1,"") - TEST_R("revsh r14, r",12, VAL2,"") - - TEST_UNSUPPORTED(__inst_thumb32(0xfa9cff8c) " @ rev pc, r12"); - TEST_UNSUPPORTED(__inst_thumb32(0xfa9cfd8c) " @ rev sp, r12"); - TEST_UNSUPPORTED(__inst_thumb32(0xfa9ffe8f) " @ rev r14, pc"); - TEST_UNSUPPORTED(__inst_thumb32(0xfa9dfe8d) " @ rev r14, sp"); - - TEST_RR("sel r0, r",0, VAL1,", r",1, VAL2,"") - 
TEST_RR("sel r14, r",12,VAL1,", r",10, VAL2,"") - - TEST_R("clz r0, r",0, 0x0,"") - TEST_R("clz r7, r",14,0x1,"") - TEST_R("clz lr, r",7, 0xffffffff,"") - - TEST_UNSUPPORTED(__inst_thumb32(0xfa80f030) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfaffff7f) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfab0f000) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfaffff7f) "") /* Unallocated space */ - - TEST_GROUP("Multiply, multiply accumulate, and absolute difference operations") - - TEST_RR( "mul r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "mul r7, r",8, VAL2,", r",9, VAL2,"") - TEST_UNSUPPORTED(__inst_thumb32(0xfb08ff09) " @ mul pc, r8, r9") - TEST_UNSUPPORTED(__inst_thumb32(0xfb08fd09) " @ mul sp, r8, r9") - TEST_UNSUPPORTED(__inst_thumb32(0xfb0ff709) " @ mul r7, pc, r9") - TEST_UNSUPPORTED(__inst_thumb32(0xfb0df709) " @ mul r7, sp, r9") - TEST_UNSUPPORTED(__inst_thumb32(0xfb08f70f) " @ mul r7, r8, pc") - TEST_UNSUPPORTED(__inst_thumb32(0xfb08f70d) " @ mul r7, r8, sp") - - TEST_RRR( "mla r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "mla r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_UNSUPPORTED(__inst_thumb32(0xfb08af09) " @ mla pc, r8, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb08ad09) " @ mla sp, r8, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb0fa709) " @ mla r7, pc, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb0da709) " @ mla r7, sp, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb08a70f) " @ mla r7, r8, pc, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb08a70d) " @ mla r7, r8, sp, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb08d709) " @ mla r7, r8, r9, sp"); - - TEST_RRR( "mls r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "mls r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - - TEST_RRR( "smlabb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlabb r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RRR( "smlatb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlatb r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RRR( "smlabt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlabt r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RRR( "smlatt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlatt r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smulbb r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulbb r7, r",8, VAL3,", r",9, VAL1,"") - TEST_RR( "smultb r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smultb r7, r",8, VAL3,", r",9, VAL1,"") - TEST_RR( "smulbt r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulbt r7, r",8, VAL3,", r",9, VAL1,"") - TEST_RR( "smultt r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smultt r7, r",8, VAL3,", r",9, VAL1,"") - - TEST_RRR( "smlad r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smlad r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_RRR( "smladx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smladx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_RR( "smuad r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smuad r14, r",12,HH2,", r",10,HH1,"") - TEST_RR( "smuadx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smuadx r14, r",12,HH2,", r",10,HH1,"") - - TEST_RRR( "smlawb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlawb r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RRR( "smlawt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") - TEST_RRR( "smlawt r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") - TEST_RR( "smulwb r0, r",1, VAL1,", r",2, VAL2,"") - 
TEST_RR( "smulwb r7, r",8, VAL3,", r",9, VAL1,"") - TEST_RR( "smulwt r0, r",1, VAL1,", r",2, VAL2,"") - TEST_RR( "smulwt r7, r",8, VAL3,", r",9, VAL1,"") - - TEST_RRR( "smlsd r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smlsd r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_RRR( "smlsdx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") - TEST_RRR( "smlsdx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") - TEST_RR( "smusd r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smusd r14, r",12,HH2,", r",10,HH1,"") - TEST_RR( "smusdx r0, r",0, HH1,", r",1, HH2,"") - TEST_RR( "smusdx r14, r",12,HH2,", r",10,HH1,"") - - TEST_RRR( "smmla r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmla r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_RRR( "smmlar r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmlar r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_RR( "smmul r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "smmul r14, r",12,VAL2,", r",10,VAL1,"") - TEST_RR( "smmulr r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "smmulr r14, r",12,VAL2,", r",10,VAL1,"") - - TEST_RRR( "smmls r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmls r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - TEST_RRR( "smmlsr r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") - TEST_RRR( "smmlsr r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") - - TEST_RRR( "usada8 r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL3,"") - TEST_RRR( "usada8 r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL3,"") - TEST_RR( "usad8 r0, r",0, VAL1,", r",1, VAL2,"") - TEST_RR( "usad8 r14, r",12,VAL2,", r",10,VAL1,"") - - TEST_UNSUPPORTED(__inst_thumb32(0xfb00f010) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfb0fff1f) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfb70f010) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfb7fff1f) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfb700010) "") /* Unallocated space */ - TEST_UNSUPPORTED(__inst_thumb32(0xfb7fff1f) "") /* Unallocated space */ - - TEST_GROUP("Long multiply, long multiply accumulate, and divide") - - TEST_RR( "smull r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "smull r7, r8, r",9, VAL2,", r",10, VAL1,"") - TEST_UNSUPPORTED(__inst_thumb32(0xfb89f80a) " @ smull pc, r8, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb89d80a) " @ smull sp, r8, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb897f0a) " @ smull r7, pc, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb897d0a) " @ smull r7, sp, r9, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb8f780a) " @ smull r7, r8, pc, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb8d780a) " @ smull r7, r8, sp, r10"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb89780f) " @ smull r7, r8, r9, pc"); - TEST_UNSUPPORTED(__inst_thumb32(0xfb89780d) " @ smull r7, r8, r9, sp"); - - TEST_RR( "umull r0, r1, r",2, VAL1,", r",3, VAL2,"") - TEST_RR( "umull r7, r8, r",9, VAL2,", r",10, VAL1,"") - - TEST_RRRR( "smlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlal r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - - TEST_RRRR( "smlalbb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalbb r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRRR( "smlalbt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlalbt r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRRR( "smlaltb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlaltb r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - 
TEST_RRRR( "smlaltt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "smlaltt r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - - TEST_RRRR( "smlald r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) - TEST_RRRR( "smlald r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) - TEST_RRRR( "smlaldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) - TEST_RRRR( "smlaldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) - - TEST_RRRR( "smlsld r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) - TEST_RRRR( "smlsld r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) - TEST_RRRR( "smlsldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) - TEST_RRRR( "smlsldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) - - TEST_RRRR( "umlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "umlal r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - TEST_RRRR( "umaal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) - TEST_RRRR( "umaal r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) - - TEST_GROUP("Coprocessor instructions") - - TEST_UNSUPPORTED(__inst_thumb32(0xfc000000) "") - TEST_UNSUPPORTED(__inst_thumb32(0xffffffff) "") - - TEST_GROUP("Testing instructions in IT blocks") - - TEST_ITBLOCK("sub.w r0, r0") - - verbose("\n"); -} - diff --git a/arch/arm/kernel/kprobes-test.c b/arch/arm/kernel/kprobes-test.c deleted file mode 100644 index b206d7790c77..000000000000 --- a/arch/arm/kernel/kprobes-test.c +++ /dev/null @@ -1,1713 +0,0 @@ -/* - * arch/arm/kernel/kprobes-test.c - * - * Copyright (C) 2011 Jon Medhurst . - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -/* - * This file contains test code for ARM kprobes. - * - * The top level function run_all_tests() executes tests for all of the - * supported instruction sets: ARM, 16-bit Thumb, and 32-bit Thumb. These tests - * fall into two categories; run_api_tests() checks basic functionality of the - * kprobes API, and run_test_cases() is a comprehensive test for kprobes - * instruction decoding and simulation. - * - * run_test_cases() first checks the kprobes decoding table for self consistency - * (using table_test()) then executes a series of test cases for each of the CPU - * instruction forms. coverage_start() and coverage_end() are used to verify - * that these test cases cover all of the possible combinations of instructions - * described by the kprobes decoding tables. - * - * The individual test cases are in kprobes-test-arm.c and kprobes-test-thumb.c - * which use the macros defined in kprobes-test.h. The rest of this - * documentation will describe the operation of the framework used by these - * test cases. - */ - -/* - * TESTING METHODOLOGY - * ------------------- - * - * The methodology used to test an ARM instruction 'test_insn' is to use - * inline assembler like: - * - * test_before: nop - * test_case: test_insn - * test_after: nop - * - * When the test case is run a kprobe is placed of each nop. The - * post-handler of the test_before probe is used to modify the saved CPU - * register context to that which we require for the test case. The - * pre-handler of the of the test_after probe saves a copy of the CPU - * register context. In this way we can execute test_insn with a specific - * register context and see the results afterwards. 
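The probe-on-nop arrangement described above can be illustrated with a stripped-down, hypothetical example (not part of this patch: the names demo_case, DEMO_R7_VAL and demo_init are invented for illustration, and ARM-mode 4-byte instruction spacing is assumed when computing the probe addresses):

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/kprobes.h>

#define DEMO_R7_VAL	0x12345678

static void __naked demo_case(void)
{
	__asm__ __volatile__ (
		"nop			\n\t"	/* test_before */
		"mov	r0, r7		\n\t"	/* test_case   */
		"nop			\n\t"	/* test_after  */
		"bx	lr		\n\t"
	);
}

/* test_before post-handler: force the register context for the test */
static void __kprobes demo_before_post(struct kprobe *p, struct pt_regs *regs,
				       unsigned long flags)
{
	regs->ARM_r7 = DEMO_R7_VAL;
}

/* test_after pre-handler: capture the result of the probed instruction */
static int __kprobes demo_after_pre(struct kprobe *p, struct pt_regs *regs)
{
	pr_info("r0 after test case = %08lx\n", regs->ARM_r0);
	return 0;
}

static struct kprobe demo_before_kp = { .post_handler = demo_before_post };
static struct kprobe demo_after_kp  = { .pre_handler  = demo_after_pre };

static int __init demo_init(void)
{
	int ret;

	/* ARM encoding assumed: the three instructions are 4 bytes apart */
	demo_before_kp.addr = (kprobe_opcode_t *)demo_case;
	demo_after_kp.addr  = (kprobe_opcode_t *)((uintptr_t)demo_case + 8);

	ret = register_kprobe(&demo_before_kp);
	if (ret)
		return ret;
	ret = register_kprobe(&demo_after_kp);
	if (ret) {
		unregister_kprobe(&demo_before_kp);
		return ret;
	}

	demo_case();		/* r0 should now hold DEMO_R7_VAL */

	unregister_kprobe(&demo_after_kp);
	unregister_kprobe(&demo_before_kp);
	return 0;
}

The real framework builds the same three-instruction shape with the TEST_* macros and layers the probed-versus-unprobed comparison described below on top of this basic arrangement.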
- * - * To actually test the kprobes instruction emulation we perform the above - * step a second time but with an additional kprobe on the test_case - * instruction itself. If the emulation is accurate then the results seen - * by the test_after probe will be identical to the first run which didn't - * have a probe on test_case. - * - * Each test case is run several times with a variety of variations in the - * flags value of stored in CPSR, and for Thumb code, different ITState. - * - * For instructions which can modify PC, a second test_after probe is used - * like this: - * - * test_before: nop - * test_case: test_insn - * test_after: nop - * b test_done - * test_after2: nop - * test_done: - * - * The test case is constructed such that test_insn branches to - * test_after2, or, if testing a conditional instruction, it may just - * continue to test_after. The probes inserted at both locations let us - * determine which happened. A similar approach is used for testing - * backwards branches... - * - * b test_before - * b test_done @ helps to cope with off by 1 branches - * test_after2: nop - * b test_done - * test_before: nop - * test_case: test_insn - * test_after: nop - * test_done: - * - * The macros used to generate the assembler instructions describe above - * are TEST_INSTRUCTION, TEST_BRANCH_F (branch forwards) and TEST_BRANCH_B - * (branch backwards). In these, the local variables numbered 1, 50, 2 and - * 99 represent: test_before, test_case, test_after2 and test_done. - * - * FRAMEWORK - * --------- - * - * Each test case is wrapped between the pair of macros TESTCASE_START and - * TESTCASE_END. As well as performing the inline assembler boilerplate, - * these call out to the kprobes_test_case_start() and - * kprobes_test_case_end() functions which drive the execution of the test - * case. The specific arguments to use for each test case are stored as - * inline data constructed using the various TEST_ARG_* macros. Putting - * this all together, a simple test case may look like: - * - * TESTCASE_START("Testing mov r0, r7") - * TEST_ARG_REG(7, 0x12345678) // Set r7=0x12345678 - * TEST_ARG_END("") - * TEST_INSTRUCTION("mov r0, r7") - * TESTCASE_END - * - * Note, in practice the single convenience macro TEST_R would be used for this - * instead. - * - * The above would expand to assembler looking something like: - * - * @ TESTCASE_START - * bl __kprobes_test_case_start - * .pushsection .rodata - * "10: - * .ascii "mov r0, r7" @ text title for test case - * .byte 0 - * .popsection - * @ start of inline data... - * .word 10b @ pointer to title in .rodata section - * - * @ TEST_ARG_REG - * .byte ARG_TYPE_REG - * .byte 7 - * .short 0 - * .word 0x1234567 - * - * @ TEST_ARG_END - * .byte ARG_TYPE_END - * .byte TEST_ISA @ flags, including ISA being tested - * .short 50f-0f @ offset of 'test_before' - * .short 2f-0f @ offset of 'test_after2' (if relevent) - * .short 99f-0f @ offset of 'test_done' - * @ start of test case code... - * 0: - * .code TEST_ISA @ switch to ISA being tested - * - * @ TEST_INSTRUCTION - * 50: nop @ location for 'test_before' probe - * 1: mov r0, r7 @ the test case instruction 'test_insn' - * nop @ location for 'test_after' probe - * - * // TESTCASE_END - * 2: - * 99: bl __kprobes_test_case_end_##TEST_ISA - * .code NONMAL_ISA - * - * When the above is execute the following happens... - * - * __kprobes_test_case_start() is an assembler wrapper which sets up space - * for a stack buffer and calls the C function kprobes_test_case_start(). 
- * This C function will do some initial processing of the inline data and - * setup some global state. It then inserts the test_before and test_after - * kprobes and returns a value which causes the assembler wrapper to jump - * to the start of the test case code, (local label '0'). - * - * When the test case code executes, the test_before probe will be hit and - * test_before_post_handler will call setup_test_context(). This fills the - * stack buffer and CPU registers with a test pattern and then processes - * the test case arguments. In our example there is one TEST_ARG_REG which - * indicates that R7 should be loaded with the value 0x12345678. - * - * When the test_before probe ends, the test case continues and executes - * the "mov r0, r7" instruction. It then hits the test_after probe and the - * pre-handler for this (test_after_pre_handler) will save a copy of the - * CPU register context. This should now have R0 holding the same value as - * R7. - * - * Finally we get to the call to __kprobes_test_case_end_{32,16}. This is - * an assembler wrapper which switches back to the ISA used by the test - * code and calls the C function kprobes_test_case_end(). - * - * For each run through the test case, test_case_run_count is incremented - * by one. For even runs, kprobes_test_case_end() saves a copy of the - * register and stack buffer contents from the test case just run. It then - * inserts a kprobe on the test case instruction 'test_insn' and returns a - * value to cause the test case code to be re-run. - * - * For odd numbered runs, kprobes_test_case_end() compares the register and - * stack buffer contents to those that were saved on the previous even - * numbered run (the one without the kprobe on test_insn). These should be - * the same if the kprobe instruction simulation routine is correct. - * - * The pair of test case runs is repeated with different combinations of - * flag values in CPSR and, for Thumb, different ITState. This is - * controlled by test_context_cpsr(). - * - * BUILDING TEST CASES - * ------------------- - * - * - * As an aid to building test cases, the stack buffer is initialised with - * some special values: - * - * [SP+13*4] Contains SP+120. This can be used to test instructions - * which load a value into SP. - * - * [SP+15*4] When testing branching instructions using TEST_BRANCH_{F,B}, - * this holds the target address of the branch, 'test_after2'. - * This can be used to test instructions which load a PC value - * from memory. 
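These two special slots are exercised directly by test cases removed earlier in this patch. For example (the two lines below appear verbatim in the Thumb tests above; the trailing commentary is added here for illustration):

	TEST_BF_P("ldr	pc, [r",14, 15*4,"]")	/* r14 -> [SP+15*4], PC gets 'test_after2' */
	TEST_P(   "ldr	sp, [r",14, 13*4,"]")	/* r14 -> [SP+13*4], SP gets SP+120        */

In each case the pointer argument loads r14 with the address of the relevant buffer slot and the probed load then pulls out the planted value; for the SP case, test_after_pre_handler() later restores SP to the stack buffer base so the framework can keep running.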
- */ - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "kprobes.h" -#include "probes-arm.h" -#include "probes-thumb.h" -#include "kprobes-test.h" - - -#define BENCHMARKING 1 - - -/* - * Test basic API - */ - -static bool test_regs_ok; -static int test_func_instance; -static int pre_handler_called; -static int post_handler_called; -static int jprobe_func_called; -static int kretprobe_handler_called; -static int tests_failed; - -#define FUNC_ARG1 0x12345678 -#define FUNC_ARG2 0xabcdef - - -#ifndef CONFIG_THUMB2_KERNEL - -long arm_func(long r0, long r1); - -static void __used __naked __arm_kprobes_test_func(void) -{ - __asm__ __volatile__ ( - ".arm \n\t" - ".type arm_func, %%function \n\t" - "arm_func: \n\t" - "adds r0, r0, r1 \n\t" - "bx lr \n\t" - ".code "NORMAL_ISA /* Back to Thumb if necessary */ - : : : "r0", "r1", "cc" - ); -} - -#else /* CONFIG_THUMB2_KERNEL */ - -long thumb16_func(long r0, long r1); -long thumb32even_func(long r0, long r1); -long thumb32odd_func(long r0, long r1); - -static void __used __naked __thumb_kprobes_test_funcs(void) -{ - __asm__ __volatile__ ( - ".type thumb16_func, %%function \n\t" - "thumb16_func: \n\t" - "adds.n r0, r0, r1 \n\t" - "bx lr \n\t" - - ".align \n\t" - ".type thumb32even_func, %%function \n\t" - "thumb32even_func: \n\t" - "adds.w r0, r0, r1 \n\t" - "bx lr \n\t" - - ".align \n\t" - "nop.n \n\t" - ".type thumb32odd_func, %%function \n\t" - "thumb32odd_func: \n\t" - "adds.w r0, r0, r1 \n\t" - "bx lr \n\t" - - : : : "r0", "r1", "cc" - ); -} - -#endif /* CONFIG_THUMB2_KERNEL */ - - -static int call_test_func(long (*func)(long, long), bool check_test_regs) -{ - long ret; - - ++test_func_instance; - test_regs_ok = false; - - ret = (*func)(FUNC_ARG1, FUNC_ARG2); - if (ret != FUNC_ARG1 + FUNC_ARG2) { - pr_err("FAIL: call_test_func: func returned %lx\n", ret); - return false; - } - - if (check_test_regs && !test_regs_ok) { - pr_err("FAIL: test regs not OK\n"); - return false; - } - - return true; -} - -static int __kprobes pre_handler(struct kprobe *p, struct pt_regs *regs) -{ - pre_handler_called = test_func_instance; - if (regs->ARM_r0 == FUNC_ARG1 && regs->ARM_r1 == FUNC_ARG2) - test_regs_ok = true; - return 0; -} - -static void __kprobes post_handler(struct kprobe *p, struct pt_regs *regs, - unsigned long flags) -{ - post_handler_called = test_func_instance; - if (regs->ARM_r0 != FUNC_ARG1 + FUNC_ARG2 || regs->ARM_r1 != FUNC_ARG2) - test_regs_ok = false; -} - -static struct kprobe the_kprobe = { - .addr = 0, - .pre_handler = pre_handler, - .post_handler = post_handler -}; - -static int test_kprobe(long (*func)(long, long)) -{ - int ret; - - the_kprobe.addr = (kprobe_opcode_t *)func; - ret = register_kprobe(&the_kprobe); - if (ret < 0) { - pr_err("FAIL: register_kprobe failed with %d\n", ret); - return ret; - } - - ret = call_test_func(func, true); - - unregister_kprobe(&the_kprobe); - the_kprobe.flags = 0; /* Clear disable flag to allow reuse */ - - if (!ret) - return -EINVAL; - if (pre_handler_called != test_func_instance) { - pr_err("FAIL: kprobe pre_handler not called\n"); - return -EINVAL; - } - if (post_handler_called != test_func_instance) { - pr_err("FAIL: kprobe post_handler not called\n"); - return -EINVAL; - } - if (!call_test_func(func, false)) - return -EINVAL; - if (pre_handler_called == test_func_instance || - post_handler_called == test_func_instance) { - pr_err("FAIL: probe called after unregistering\n"); - return -EINVAL; - } - - return 0; -} - -static void __kprobes jprobe_func(long r0, 
long r1) -{ - jprobe_func_called = test_func_instance; - if (r0 == FUNC_ARG1 && r1 == FUNC_ARG2) - test_regs_ok = true; - jprobe_return(); -} - -static struct jprobe the_jprobe = { - .entry = jprobe_func, -}; - -static int test_jprobe(long (*func)(long, long)) -{ - int ret; - - the_jprobe.kp.addr = (kprobe_opcode_t *)func; - ret = register_jprobe(&the_jprobe); - if (ret < 0) { - pr_err("FAIL: register_jprobe failed with %d\n", ret); - return ret; - } - - ret = call_test_func(func, true); - - unregister_jprobe(&the_jprobe); - the_jprobe.kp.flags = 0; /* Clear disable flag to allow reuse */ - - if (!ret) - return -EINVAL; - if (jprobe_func_called != test_func_instance) { - pr_err("FAIL: jprobe handler function not called\n"); - return -EINVAL; - } - if (!call_test_func(func, false)) - return -EINVAL; - if (jprobe_func_called == test_func_instance) { - pr_err("FAIL: probe called after unregistering\n"); - return -EINVAL; - } - - return 0; -} - -static int __kprobes -kretprobe_handler(struct kretprobe_instance *ri, struct pt_regs *regs) -{ - kretprobe_handler_called = test_func_instance; - if (regs_return_value(regs) == FUNC_ARG1 + FUNC_ARG2) - test_regs_ok = true; - return 0; -} - -static struct kretprobe the_kretprobe = { - .handler = kretprobe_handler, -}; - -static int test_kretprobe(long (*func)(long, long)) -{ - int ret; - - the_kretprobe.kp.addr = (kprobe_opcode_t *)func; - ret = register_kretprobe(&the_kretprobe); - if (ret < 0) { - pr_err("FAIL: register_kretprobe failed with %d\n", ret); - return ret; - } - - ret = call_test_func(func, true); - - unregister_kretprobe(&the_kretprobe); - the_kretprobe.kp.flags = 0; /* Clear disable flag to allow reuse */ - - if (!ret) - return -EINVAL; - if (kretprobe_handler_called != test_func_instance) { - pr_err("FAIL: kretprobe handler not called\n"); - return -EINVAL; - } - if (!call_test_func(func, false)) - return -EINVAL; - if (jprobe_func_called == test_func_instance) { - pr_err("FAIL: kretprobe called after unregistering\n"); - return -EINVAL; - } - - return 0; -} - -static int run_api_tests(long (*func)(long, long)) -{ - int ret; - - pr_info(" kprobe\n"); - ret = test_kprobe(func); - if (ret < 0) - return ret; - - pr_info(" jprobe\n"); - ret = test_jprobe(func); -#if defined(CONFIG_THUMB2_KERNEL) && !defined(MODULE) - if (ret == -EINVAL) { - pr_err("FAIL: Known longtime bug with jprobe on Thumb kernels\n"); - tests_failed = ret; - ret = 0; - } -#endif - if (ret < 0) - return ret; - - pr_info(" kretprobe\n"); - ret = test_kretprobe(func); - if (ret < 0) - return ret; - - return 0; -} - - -/* - * Benchmarking - */ - -#if BENCHMARKING - -static void __naked benchmark_nop(void) -{ - __asm__ __volatile__ ( - "nop \n\t" - "bx lr" - ); -} - -#ifdef CONFIG_THUMB2_KERNEL -#define wide ".w" -#else -#define wide -#endif - -static void __naked benchmark_pushpop1(void) -{ - __asm__ __volatile__ ( - "stmdb"wide" sp!, {r3-r11,lr} \n\t" - "ldmia"wide" sp!, {r3-r11,pc}" - ); -} - -static void __naked benchmark_pushpop2(void) -{ - __asm__ __volatile__ ( - "stmdb"wide" sp!, {r0-r8,lr} \n\t" - "ldmia"wide" sp!, {r0-r8,pc}" - ); -} - -static void __naked benchmark_pushpop3(void) -{ - __asm__ __volatile__ ( - "stmdb"wide" sp!, {r4,lr} \n\t" - "ldmia"wide" sp!, {r4,pc}" - ); -} - -static void __naked benchmark_pushpop4(void) -{ - __asm__ __volatile__ ( - "stmdb"wide" sp!, {r0,lr} \n\t" - "ldmia"wide" sp!, {r0,pc}" - ); -} - - -#ifdef CONFIG_THUMB2_KERNEL - -static void __naked benchmark_pushpop_thumb(void) -{ - __asm__ __volatile__ ( - "push.n {r0-r7,lr} \n\t" - 
"pop.n {r0-r7,pc}" - ); -} - -#endif - -static int __kprobes -benchmark_pre_handler(struct kprobe *p, struct pt_regs *regs) -{ - return 0; -} - -static int benchmark(void(*fn)(void)) -{ - unsigned n, i, t, t0; - - for (n = 1000; ; n *= 2) { - t0 = sched_clock(); - for (i = n; i > 0; --i) - fn(); - t = sched_clock() - t0; - if (t >= 250000000) - break; /* Stop once we took more than 0.25 seconds */ - } - return t / n; /* Time for one iteration in nanoseconds */ -}; - -static int kprobe_benchmark(void(*fn)(void), unsigned offset) -{ - struct kprobe k = { - .addr = (kprobe_opcode_t *)((uintptr_t)fn + offset), - .pre_handler = benchmark_pre_handler, - }; - - int ret = register_kprobe(&k); - if (ret < 0) { - pr_err("FAIL: register_kprobe failed with %d\n", ret); - return ret; - } - - ret = benchmark(fn); - - unregister_kprobe(&k); - return ret; -}; - -struct benchmarks { - void (*fn)(void); - unsigned offset; - const char *title; -}; - -static int run_benchmarks(void) -{ - int ret; - struct benchmarks list[] = { - {&benchmark_nop, 0, "nop"}, - /* - * benchmark_pushpop{1,3} will have the optimised - * instruction emulation, whilst benchmark_pushpop{2,4} will - * be the equivalent unoptimised instructions. - */ - {&benchmark_pushpop1, 0, "stmdb sp!, {r3-r11,lr}"}, - {&benchmark_pushpop1, 4, "ldmia sp!, {r3-r11,pc}"}, - {&benchmark_pushpop2, 0, "stmdb sp!, {r0-r8,lr}"}, - {&benchmark_pushpop2, 4, "ldmia sp!, {r0-r8,pc}"}, - {&benchmark_pushpop3, 0, "stmdb sp!, {r4,lr}"}, - {&benchmark_pushpop3, 4, "ldmia sp!, {r4,pc}"}, - {&benchmark_pushpop4, 0, "stmdb sp!, {r0,lr}"}, - {&benchmark_pushpop4, 4, "ldmia sp!, {r0,pc}"}, -#ifdef CONFIG_THUMB2_KERNEL - {&benchmark_pushpop_thumb, 0, "push.n {r0-r7,lr}"}, - {&benchmark_pushpop_thumb, 2, "pop.n {r0-r7,pc}"}, -#endif - {0} - }; - - struct benchmarks *b; - for (b = list; b->fn; ++b) { - ret = kprobe_benchmark(b->fn, b->offset); - if (ret < 0) - return ret; - pr_info(" %dns for kprobe %s\n", ret, b->title); - } - - pr_info("\n"); - return 0; -} - -#endif /* BENCHMARKING */ - - -/* - * Decoding table self-consistency tests - */ - -static const int decode_struct_sizes[NUM_DECODE_TYPES] = { - [DECODE_TYPE_TABLE] = sizeof(struct decode_table), - [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom), - [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate), - [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate), - [DECODE_TYPE_OR] = sizeof(struct decode_or), - [DECODE_TYPE_REJECT] = sizeof(struct decode_reject) -}; - -static int table_iter(const union decode_item *table, - int (*fn)(const struct decode_header *, void *), - void *args) -{ - const struct decode_header *h = (struct decode_header *)table; - int result; - - for (;;) { - enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; - - if (type == DECODE_TYPE_END) - return 0; - - result = fn(h, args); - if (result) - return result; - - h = (struct decode_header *) - ((uintptr_t)h + decode_struct_sizes[type]); - - } -} - -static int table_test_fail(const struct decode_header *h, const char* message) -{ - - pr_err("FAIL: kprobes test failure \"%s\" (mask %08x, value %08x)\n", - message, h->mask.bits, h->value.bits); - return -EINVAL; -} - -struct table_test_args { - const union decode_item *root_table; - u32 parent_mask; - u32 parent_value; -}; - -static int table_test_fn(const struct decode_header *h, void *args) -{ - struct table_test_args *a = (struct table_test_args *)args; - enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; - - if (h->value.bits & ~h->mask.bits) - return 
table_test_fail(h, "Match value has bits not in mask"); - - if ((h->mask.bits & a->parent_mask) != a->parent_mask) - return table_test_fail(h, "Mask has bits not in parent mask"); - - if ((h->value.bits ^ a->parent_value) & a->parent_mask) - return table_test_fail(h, "Value is inconsistent with parent"); - - if (type == DECODE_TYPE_TABLE) { - struct decode_table *d = (struct decode_table *)h; - struct table_test_args args2 = *a; - args2.parent_mask = h->mask.bits; - args2.parent_value = h->value.bits; - return table_iter(d->table.table, table_test_fn, &args2); - } - - return 0; -} - -static int table_test(const union decode_item *table) -{ - struct table_test_args args = { - .root_table = table, - .parent_mask = 0, - .parent_value = 0 - }; - return table_iter(args.root_table, table_test_fn, &args); -} - - -/* - * Decoding table test coverage analysis - * - * coverage_start() builds a coverage_table which contains a list of - * coverage_entry's to match each entry in the specified kprobes instruction - * decoding table. - * - * When test cases are run, coverage_add() is called to process each case. - * This looks up the corresponding entry in the coverage_table and sets it as - * being matched, as well as clearing the regs flag appropriate for the test. - * - * After all test cases have been run, coverage_end() is called to check that - * all entries in coverage_table have been matched and that all regs flags are - * cleared. I.e. that all possible combinations of instructions described by - * the kprobes decoding tables have had a test case executed for them. - */ - -bool coverage_fail; - -#define MAX_COVERAGE_ENTRIES 256 - -struct coverage_entry { - const struct decode_header *header; - unsigned regs; - unsigned nesting; - char matched; -}; - -struct coverage_table { - struct coverage_entry *base; - unsigned num_entries; - unsigned nesting; -}; - -struct coverage_table coverage; - -#define COVERAGE_ANY_REG (1<<0) -#define COVERAGE_SP (1<<1) -#define COVERAGE_PC (1<<2) -#define COVERAGE_PCWB (1<<3) - -static const char coverage_register_lookup[16] = { - [REG_TYPE_ANY] = COVERAGE_ANY_REG | COVERAGE_SP | COVERAGE_PC, - [REG_TYPE_SAMEAS16] = COVERAGE_ANY_REG, - [REG_TYPE_SP] = COVERAGE_SP, - [REG_TYPE_PC] = COVERAGE_PC, - [REG_TYPE_NOSP] = COVERAGE_ANY_REG | COVERAGE_SP, - [REG_TYPE_NOSPPC] = COVERAGE_ANY_REG | COVERAGE_SP | COVERAGE_PC, - [REG_TYPE_NOPC] = COVERAGE_ANY_REG | COVERAGE_PC, - [REG_TYPE_NOPCWB] = COVERAGE_ANY_REG | COVERAGE_PC | COVERAGE_PCWB, - [REG_TYPE_NOPCX] = COVERAGE_ANY_REG, - [REG_TYPE_NOSPPCX] = COVERAGE_ANY_REG | COVERAGE_SP, -}; - -unsigned coverage_start_registers(const struct decode_header *h) -{ - unsigned regs = 0; - int i; - for (i = 0; i < 20; i += 4) { - int r = (h->type_regs.bits >> (DECODE_TYPE_BITS + i)) & 0xf; - regs |= coverage_register_lookup[r] << i; - } - return regs; -} - -static int coverage_start_fn(const struct decode_header *h, void *args) -{ - struct coverage_table *coverage = (struct coverage_table *)args; - enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; - struct coverage_entry *entry = coverage->base + coverage->num_entries; - - if (coverage->num_entries == MAX_COVERAGE_ENTRIES - 1) { - pr_err("FAIL: Out of space for test coverage data"); - return -ENOMEM; - } - - ++coverage->num_entries; - - entry->header = h; - entry->regs = coverage_start_registers(h); - entry->nesting = coverage->nesting; - entry->matched = false; - - if (type == DECODE_TYPE_TABLE) { - struct decode_table *d = (struct decode_table *)h; - int ret; - 
++coverage->nesting; - ret = table_iter(d->table.table, coverage_start_fn, coverage); - --coverage->nesting; - return ret; - } - - return 0; -} - -static int coverage_start(const union decode_item *table) -{ - coverage.base = kmalloc(MAX_COVERAGE_ENTRIES * - sizeof(struct coverage_entry), GFP_KERNEL); - coverage.num_entries = 0; - coverage.nesting = 0; - return table_iter(table, coverage_start_fn, &coverage); -} - -static void -coverage_add_registers(struct coverage_entry *entry, kprobe_opcode_t insn) -{ - int regs = entry->header->type_regs.bits >> DECODE_TYPE_BITS; - int i; - for (i = 0; i < 20; i += 4) { - enum decode_reg_type reg_type = (regs >> i) & 0xf; - int reg = (insn >> i) & 0xf; - int flag; - - if (!reg_type) - continue; - - if (reg == 13) - flag = COVERAGE_SP; - else if (reg == 15) - flag = COVERAGE_PC; - else - flag = COVERAGE_ANY_REG; - entry->regs &= ~(flag << i); - - switch (reg_type) { - - case REG_TYPE_NONE: - case REG_TYPE_ANY: - case REG_TYPE_SAMEAS16: - break; - - case REG_TYPE_SP: - if (reg != 13) - return; - break; - - case REG_TYPE_PC: - if (reg != 15) - return; - break; - - case REG_TYPE_NOSP: - if (reg == 13) - return; - break; - - case REG_TYPE_NOSPPC: - case REG_TYPE_NOSPPCX: - if (reg == 13 || reg == 15) - return; - break; - - case REG_TYPE_NOPCWB: - if (!is_writeback(insn)) - break; - if (reg == 15) { - entry->regs &= ~(COVERAGE_PCWB << i); - return; - } - break; - - case REG_TYPE_NOPC: - case REG_TYPE_NOPCX: - if (reg == 15) - return; - break; - } - - } -} - -static void coverage_add(kprobe_opcode_t insn) -{ - struct coverage_entry *entry = coverage.base; - struct coverage_entry *end = coverage.base + coverage.num_entries; - bool matched = false; - unsigned nesting = 0; - - for (; entry < end; ++entry) { - const struct decode_header *h = entry->header; - enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; - - if (entry->nesting > nesting) - continue; /* Skip sub-table we didn't match */ - - if (entry->nesting < nesting) - break; /* End of sub-table we were scanning */ - - if (!matched) { - if ((insn & h->mask.bits) != h->value.bits) - continue; - entry->matched = true; - } - - switch (type) { - - case DECODE_TYPE_TABLE: - ++nesting; - break; - - case DECODE_TYPE_CUSTOM: - case DECODE_TYPE_SIMULATE: - case DECODE_TYPE_EMULATE: - coverage_add_registers(entry, insn); - return; - - case DECODE_TYPE_OR: - matched = true; - break; - - case DECODE_TYPE_REJECT: - default: - return; - } - - } -} - -static void coverage_end(void) -{ - struct coverage_entry *entry = coverage.base; - struct coverage_entry *end = coverage.base + coverage.num_entries; - - for (; entry < end; ++entry) { - u32 mask = entry->header->mask.bits; - u32 value = entry->header->value.bits; - - if (entry->regs) { - pr_err("FAIL: Register test coverage missing for %08x %08x (%05x)\n", - mask, value, entry->regs); - coverage_fail = true; - } - if (!entry->matched) { - pr_err("FAIL: Test coverage entry missing for %08x %08x\n", - mask, value); - coverage_fail = true; - } - } - - kfree(coverage.base); -} - - -/* - * Framework for instruction set test cases - */ - -void __naked __kprobes_test_case_start(void) -{ - __asm__ __volatile__ ( - "stmdb sp!, {r4-r11} \n\t" - "sub sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t" - "bic r0, lr, #1 @ r0 = inline data \n\t" - "mov r1, sp \n\t" - "bl kprobes_test_case_start \n\t" - "bx r0 \n\t" - ); -} - -#ifndef CONFIG_THUMB2_KERNEL - -void __naked __kprobes_test_case_end_32(void) -{ - __asm__ __volatile__ ( - "mov r4, lr \n\t" - "bl kprobes_test_case_end 
\n\t" - "cmp r0, #0 \n\t" - "movne pc, r0 \n\t" - "mov r0, r4 \n\t" - "add sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t" - "ldmia sp!, {r4-r11} \n\t" - "mov pc, r0 \n\t" - ); -} - -#else /* CONFIG_THUMB2_KERNEL */ - -void __naked __kprobes_test_case_end_16(void) -{ - __asm__ __volatile__ ( - "mov r4, lr \n\t" - "bl kprobes_test_case_end \n\t" - "cmp r0, #0 \n\t" - "bxne r0 \n\t" - "mov r0, r4 \n\t" - "add sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t" - "ldmia sp!, {r4-r11} \n\t" - "bx r0 \n\t" - ); -} - -void __naked __kprobes_test_case_end_32(void) -{ - __asm__ __volatile__ ( - ".arm \n\t" - "orr lr, lr, #1 @ will return to Thumb code \n\t" - "ldr pc, 1f \n\t" - "1: \n\t" - ".word __kprobes_test_case_end_16 \n\t" - ); -} - -#endif - - -int kprobe_test_flags; -int kprobe_test_cc_position; - -static int test_try_count; -static int test_pass_count; -static int test_fail_count; - -static struct pt_regs initial_regs; -static struct pt_regs expected_regs; -static struct pt_regs result_regs; - -static u32 expected_memory[TEST_MEMORY_SIZE/sizeof(u32)]; - -static const char *current_title; -static struct test_arg *current_args; -static u32 *current_stack; -static uintptr_t current_branch_target; - -static uintptr_t current_code_start; -static kprobe_opcode_t current_instruction; - - -#define TEST_CASE_PASSED -1 -#define TEST_CASE_FAILED -2 - -static int test_case_run_count; -static bool test_case_is_thumb; -static int test_instance; - -/* - * We ignore the state of the imprecise abort disable flag (CPSR.A) because this - * can change randomly as the kernel doesn't take care to preserve or initialise - * this across context switches. Also, with Security Extentions, the flag may - * not be under control of the kernel; for this reason we ignore the state of - * the FIQ disable flag CPSR.F as well. 
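Condensed into a single hypothetical helper (the actual code clears these bits in the probe handlers below and compares the snapshots with memcmp() in check_test_results(); regs_match() is only illustrative and assumes the surrounding file's includes), the effect of the mask is:

static bool regs_match(const struct pt_regs *a, const struct pt_regs *b)
{
	struct pt_regs x = *a, y = *b;

	x.ARM_cpsr &= ~PSR_IGNORE_BITS;	/* drop CPSR.A and CPSR.F */
	y.ARM_cpsr &= ~PSR_IGNORE_BITS;
	return memcmp(&x, &y, sizeof(x)) == 0;
}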
- */ -#define PSR_IGNORE_BITS (PSR_A_BIT | PSR_F_BIT) - -static unsigned long test_check_cc(int cc, unsigned long cpsr) -{ - int ret = arm_check_condition(cc << 28, cpsr); - - return (ret != ARM_OPCODE_CONDTEST_FAIL); -} - -static int is_last_scenario; -static int probe_should_run; /* 0 = no, 1 = yes, -1 = unknown */ -static int memory_needs_checking; - -static unsigned long test_context_cpsr(int scenario) -{ - unsigned long cpsr; - - probe_should_run = 1; - - /* Default case is that we cycle through 16 combinations of flags */ - cpsr = (scenario & 0xf) << 28; /* N,Z,C,V flags */ - cpsr |= (scenario & 0xf) << 16; /* GE flags */ - cpsr |= (scenario & 0x1) << 27; /* Toggle Q flag */ - - if (!test_case_is_thumb) { - /* Testing ARM code */ - int cc = current_instruction >> 28; - - probe_should_run = test_check_cc(cc, cpsr) != 0; - if (scenario == 15) - is_last_scenario = true; - - } else if (kprobe_test_flags & TEST_FLAG_NO_ITBLOCK) { - /* Testing Thumb code without setting ITSTATE */ - if (kprobe_test_cc_position) { - int cc = (current_instruction >> kprobe_test_cc_position) & 0xf; - probe_should_run = test_check_cc(cc, cpsr) != 0; - } - - if (scenario == 15) - is_last_scenario = true; - - } else if (kprobe_test_flags & TEST_FLAG_FULL_ITBLOCK) { - /* Testing Thumb code with all combinations of ITSTATE */ - unsigned x = (scenario >> 4); - unsigned cond_base = x % 7; /* ITSTATE<7:5> */ - unsigned mask = x / 7 + 2; /* ITSTATE<4:0>, bits reversed */ - - if (mask > 0x1f) { - /* Finish by testing state from instruction 'itt al' */ - cond_base = 7; - mask = 0x4; - if ((scenario & 0xf) == 0xf) - is_last_scenario = true; - } - - cpsr |= cond_base << 13; /* ITSTATE<7:5> */ - cpsr |= (mask & 0x1) << 12; /* ITSTATE<4> */ - cpsr |= (mask & 0x2) << 10; /* ITSTATE<3> */ - cpsr |= (mask & 0x4) << 8; /* ITSTATE<2> */ - cpsr |= (mask & 0x8) << 23; /* ITSTATE<1> */ - cpsr |= (mask & 0x10) << 21; /* ITSTATE<0> */ - - probe_should_run = test_check_cc((cpsr >> 12) & 0xf, cpsr) != 0; - - } else { - /* Testing Thumb code with several combinations of ITSTATE */ - switch (scenario) { - case 16: /* Clear NZCV flags and 'it eq' state (false as Z=0) */ - cpsr = 0x00000800; - probe_should_run = 0; - break; - case 17: /* Set NZCV flags and 'it vc' state (false as V=1) */ - cpsr = 0xf0007800; - probe_should_run = 0; - break; - case 18: /* Clear NZCV flags and 'it ls' state (true as C=0) */ - cpsr = 0x00009800; - break; - case 19: /* Set NZCV flags and 'it cs' state (true as C=1) */ - cpsr = 0xf0002800; - is_last_scenario = true; - break; - } - } - - return cpsr; -} - -static void setup_test_context(struct pt_regs *regs) -{ - int scenario = test_case_run_count>>1; - unsigned long val; - struct test_arg *args; - int i; - - is_last_scenario = false; - memory_needs_checking = false; - - /* Initialise test memory on stack */ - val = (scenario & 1) ? VALM : ~VALM; - for (i = 0; i < TEST_MEMORY_SIZE / sizeof(current_stack[0]); ++i) - current_stack[i] = val + (i << 8); - /* Put target of branch on stack for tests which load PC from memory */ - if (current_branch_target) - current_stack[15] = current_branch_target; - /* Put a value for SP on stack for tests which load SP from memory */ - current_stack[13] = (u32)current_stack + 120; - - /* Initialise register values to their default state */ - val = (scenario & 2) ? 
VALR : ~VALR; - for (i = 0; i < 13; ++i) - regs->uregs[i] = val ^ (i << 8); - regs->ARM_lr = val ^ (14 << 8); - regs->ARM_cpsr &= ~(APSR_MASK | PSR_IT_MASK); - regs->ARM_cpsr |= test_context_cpsr(scenario); - - /* Perform testcase specific register setup */ - args = current_args; - for (; args[0].type != ARG_TYPE_END; ++args) - switch (args[0].type) { - case ARG_TYPE_REG: { - struct test_arg_regptr *arg = - (struct test_arg_regptr *)args; - regs->uregs[arg->reg] = arg->val; - break; - } - case ARG_TYPE_PTR: { - struct test_arg_regptr *arg = - (struct test_arg_regptr *)args; - regs->uregs[arg->reg] = - (unsigned long)current_stack + arg->val; - memory_needs_checking = true; - break; - } - case ARG_TYPE_MEM: { - struct test_arg_mem *arg = (struct test_arg_mem *)args; - current_stack[arg->index] = arg->val; - break; - } - default: - break; - } -} - -struct test_probe { - struct kprobe kprobe; - bool registered; - int hit; -}; - -static void unregister_test_probe(struct test_probe *probe) -{ - if (probe->registered) { - unregister_kprobe(&probe->kprobe); - probe->kprobe.flags = 0; /* Clear disable flag to allow reuse */ - } - probe->registered = false; -} - -static int register_test_probe(struct test_probe *probe) -{ - int ret; - - if (probe->registered) - BUG(); - - ret = register_kprobe(&probe->kprobe); - if (ret >= 0) { - probe->registered = true; - probe->hit = -1; - } - return ret; -} - -static int __kprobes -test_before_pre_handler(struct kprobe *p, struct pt_regs *regs) -{ - container_of(p, struct test_probe, kprobe)->hit = test_instance; - return 0; -} - -static void __kprobes -test_before_post_handler(struct kprobe *p, struct pt_regs *regs, - unsigned long flags) -{ - setup_test_context(regs); - initial_regs = *regs; - initial_regs.ARM_cpsr &= ~PSR_IGNORE_BITS; -} - -static int __kprobes -test_case_pre_handler(struct kprobe *p, struct pt_regs *regs) -{ - container_of(p, struct test_probe, kprobe)->hit = test_instance; - return 0; -} - -static int __kprobes -test_after_pre_handler(struct kprobe *p, struct pt_regs *regs) -{ - if (container_of(p, struct test_probe, kprobe)->hit == test_instance) - return 0; /* Already run for this test instance */ - - result_regs = *regs; - result_regs.ARM_cpsr &= ~PSR_IGNORE_BITS; - - /* Undo any changes done to SP by the test case */ - regs->ARM_sp = (unsigned long)current_stack; - - container_of(p, struct test_probe, kprobe)->hit = test_instance; - return 0; -} - -static struct test_probe test_before_probe = { - .kprobe.pre_handler = test_before_pre_handler, - .kprobe.post_handler = test_before_post_handler, -}; - -static struct test_probe test_case_probe = { - .kprobe.pre_handler = test_case_pre_handler, -}; - -static struct test_probe test_after_probe = { - .kprobe.pre_handler = test_after_pre_handler, -}; - -static struct test_probe test_after2_probe = { - .kprobe.pre_handler = test_after_pre_handler, -}; - -static void test_case_cleanup(void) -{ - unregister_test_probe(&test_before_probe); - unregister_test_probe(&test_case_probe); - unregister_test_probe(&test_after_probe); - unregister_test_probe(&test_after2_probe); -} - -static void print_registers(struct pt_regs *regs) -{ - pr_err("r0 %08lx | r1 %08lx | r2 %08lx | r3 %08lx\n", - regs->ARM_r0, regs->ARM_r1, regs->ARM_r2, regs->ARM_r3); - pr_err("r4 %08lx | r5 %08lx | r6 %08lx | r7 %08lx\n", - regs->ARM_r4, regs->ARM_r5, regs->ARM_r6, regs->ARM_r7); - pr_err("r8 %08lx | r9 %08lx | r10 %08lx | r11 %08lx\n", - regs->ARM_r8, regs->ARM_r9, regs->ARM_r10, regs->ARM_fp); - pr_err("r12 %08lx | sp 
%08lx | lr %08lx | pc %08lx\n", - regs->ARM_ip, regs->ARM_sp, regs->ARM_lr, regs->ARM_pc); - pr_err("cpsr %08lx\n", regs->ARM_cpsr); -} - -static void print_memory(u32 *mem, size_t size) -{ - int i; - for (i = 0; i < size / sizeof(u32); i += 4) - pr_err("%08x %08x %08x %08x\n", mem[i], mem[i+1], - mem[i+2], mem[i+3]); -} - -static size_t expected_memory_size(u32 *sp) -{ - size_t size = sizeof(expected_memory); - int offset = (uintptr_t)sp - (uintptr_t)current_stack; - if (offset > 0) - size -= offset; - return size; -} - -static void test_case_failed(const char *message) -{ - test_case_cleanup(); - - pr_err("FAIL: %s\n", message); - pr_err("FAIL: Test %s\n", current_title); - pr_err("FAIL: Scenario %d\n", test_case_run_count >> 1); -} - -static unsigned long next_instruction(unsigned long pc) -{ -#ifdef CONFIG_THUMB2_KERNEL - if ((pc & 1) && - !is_wide_instruction(__mem_to_opcode_thumb16(*(u16 *)(pc - 1)))) - return pc + 2; - else -#endif - return pc + 4; -} - -static uintptr_t __used kprobes_test_case_start(const char **title, void *stack) -{ - struct test_arg *args; - struct test_arg_end *end_arg; - unsigned long test_code; - - current_title = *title++; - args = (struct test_arg *)title; - current_args = args; - current_stack = stack; - - ++test_try_count; - - while (args->type != ARG_TYPE_END) - ++args; - end_arg = (struct test_arg_end *)args; - - test_code = (unsigned long)(args + 1); /* Code starts after args */ - - test_case_is_thumb = end_arg->flags & ARG_FLAG_THUMB; - if (test_case_is_thumb) - test_code |= 1; - - current_code_start = test_code; - - current_branch_target = 0; - if (end_arg->branch_offset != end_arg->end_offset) - current_branch_target = test_code + end_arg->branch_offset; - - test_code += end_arg->code_offset; - test_before_probe.kprobe.addr = (kprobe_opcode_t *)test_code; - - test_code = next_instruction(test_code); - test_case_probe.kprobe.addr = (kprobe_opcode_t *)test_code; - - if (test_case_is_thumb) { - u16 *p = (u16 *)(test_code & ~1); - current_instruction = __mem_to_opcode_thumb16(p[0]); - if (is_wide_instruction(current_instruction)) { - u16 instr2 = __mem_to_opcode_thumb16(p[1]); - current_instruction = __opcode_thumb32_compose(current_instruction, instr2); - } - } else { - current_instruction = __mem_to_opcode_arm(*(u32 *)test_code); - } - - if (current_title[0] == '.') - verbose("%s\n", current_title); - else - verbose("%s\t@ %0*x\n", current_title, - test_case_is_thumb ? 
4 : 8, - current_instruction); - - test_code = next_instruction(test_code); - test_after_probe.kprobe.addr = (kprobe_opcode_t *)test_code; - - if (kprobe_test_flags & TEST_FLAG_NARROW_INSTR) { - if (!test_case_is_thumb || - is_wide_instruction(current_instruction)) { - test_case_failed("expected 16-bit instruction"); - goto fail; - } - } else { - if (test_case_is_thumb && - !is_wide_instruction(current_instruction)) { - test_case_failed("expected 32-bit instruction"); - goto fail; - } - } - - coverage_add(current_instruction); - - if (end_arg->flags & ARG_FLAG_UNSUPPORTED) { - if (register_test_probe(&test_case_probe) < 0) - goto pass; - test_case_failed("registered probe for unsupported instruction"); - goto fail; - } - - if (end_arg->flags & ARG_FLAG_SUPPORTED) { - if (register_test_probe(&test_case_probe) >= 0) - goto pass; - test_case_failed("couldn't register probe for supported instruction"); - goto fail; - } - - if (register_test_probe(&test_before_probe) < 0) { - test_case_failed("register test_before_probe failed"); - goto fail; - } - if (register_test_probe(&test_after_probe) < 0) { - test_case_failed("register test_after_probe failed"); - goto fail; - } - if (current_branch_target) { - test_after2_probe.kprobe.addr = - (kprobe_opcode_t *)current_branch_target; - if (register_test_probe(&test_after2_probe) < 0) { - test_case_failed("register test_after2_probe failed"); - goto fail; - } - } - - /* Start first run of test case */ - test_case_run_count = 0; - ++test_instance; - return current_code_start; -pass: - test_case_run_count = TEST_CASE_PASSED; - return (uintptr_t)test_after_probe.kprobe.addr; -fail: - test_case_run_count = TEST_CASE_FAILED; - return (uintptr_t)test_after_probe.kprobe.addr; -} - -static bool check_test_results(void) -{ - size_t mem_size = 0; - u32 *mem = 0; - - if (memcmp(&expected_regs, &result_regs, sizeof(expected_regs))) { - test_case_failed("registers differ"); - goto fail; - } - - if (memory_needs_checking) { - mem = (u32 *)result_regs.ARM_sp; - mem_size = expected_memory_size(mem); - if (memcmp(expected_memory, mem, mem_size)) { - test_case_failed("test memory differs"); - goto fail; - } - } - - return true; - -fail: - pr_err("initial_regs:\n"); - print_registers(&initial_regs); - pr_err("expected_regs:\n"); - print_registers(&expected_regs); - pr_err("result_regs:\n"); - print_registers(&result_regs); - - if (mem) { - pr_err("current_stack=%p\n", current_stack); - pr_err("expected_memory:\n"); - print_memory(expected_memory, mem_size); - pr_err("result_memory:\n"); - print_memory(mem, mem_size); - } - - return false; -} - -static uintptr_t __used kprobes_test_case_end(void) -{ - if (test_case_run_count < 0) { - if (test_case_run_count == TEST_CASE_PASSED) - /* kprobes_test_case_start did all the needed testing */ - goto pass; - else - /* kprobes_test_case_start failed */ - goto fail; - } - - if (test_before_probe.hit != test_instance) { - test_case_failed("test_before_handler not run"); - goto fail; - } - - if (test_after_probe.hit != test_instance && - test_after2_probe.hit != test_instance) { - test_case_failed("test_after_handler not run"); - goto fail; - } - - /* - * Even numbered test runs ran without a probe on the test case so - * we can gather reference results. The subsequent odd numbered run - * will have the probe inserted. 
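Concretely, since the scenario number is test_case_run_count >> 1, the sequence for one test case is:

	run 0:  scenario 0, no probe on test_insn  ->  save reference regs/memory
	run 1:  scenario 0, probe on test_insn     ->  compare against reference
	run 2:  scenario 1, no probe               ->  save new reference
	run 3:  scenario 1, probe                  ->  compare
	...

and so on until is_last_scenario is set for a scenario, at which point the odd-numbered run that passes its comparison ends the test case.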
- */ - if ((test_case_run_count & 1) == 0) { - /* Save results from run without probe */ - u32 *mem = (u32 *)result_regs.ARM_sp; - expected_regs = result_regs; - memcpy(expected_memory, mem, expected_memory_size(mem)); - - /* Insert probe onto test case instruction */ - if (register_test_probe(&test_case_probe) < 0) { - test_case_failed("register test_case_probe failed"); - goto fail; - } - } else { - /* Check probe ran as expected */ - if (probe_should_run == 1) { - if (test_case_probe.hit != test_instance) { - test_case_failed("test_case_handler not run"); - goto fail; - } - } else if (probe_should_run == 0) { - if (test_case_probe.hit == test_instance) { - test_case_failed("test_case_handler ran"); - goto fail; - } - } - - /* Remove probe for any subsequent reference run */ - unregister_test_probe(&test_case_probe); - - if (!check_test_results()) - goto fail; - - if (is_last_scenario) - goto pass; - } - - /* Do next test run */ - ++test_case_run_count; - ++test_instance; - return current_code_start; -fail: - ++test_fail_count; - goto end; -pass: - ++test_pass_count; -end: - test_case_cleanup(); - return 0; -} - - -/* - * Top level test functions - */ - -static int run_test_cases(void (*tests)(void), const union decode_item *table) -{ - int ret; - - pr_info(" Check decoding tables\n"); - ret = table_test(table); - if (ret) - return ret; - - pr_info(" Run test cases\n"); - ret = coverage_start(table); - if (ret) - return ret; - - tests(); - - coverage_end(); - return 0; -} - - -static int __init run_all_tests(void) -{ - int ret = 0; - - pr_info("Beginning kprobe tests...\n"); - -#ifndef CONFIG_THUMB2_KERNEL - - pr_info("Probe ARM code\n"); - ret = run_api_tests(arm_func); - if (ret) - goto out; - - pr_info("ARM instruction simulation\n"); - ret = run_test_cases(kprobe_arm_test_cases, probes_decode_arm_table); - if (ret) - goto out; - -#else /* CONFIG_THUMB2_KERNEL */ - - pr_info("Probe 16-bit Thumb code\n"); - ret = run_api_tests(thumb16_func); - if (ret) - goto out; - - pr_info("Probe 32-bit Thumb code, even halfword\n"); - ret = run_api_tests(thumb32even_func); - if (ret) - goto out; - - pr_info("Probe 32-bit Thumb code, odd halfword\n"); - ret = run_api_tests(thumb32odd_func); - if (ret) - goto out; - - pr_info("16-bit Thumb instruction simulation\n"); - ret = run_test_cases(kprobe_thumb16_test_cases, - probes_decode_thumb16_table); - if (ret) - goto out; - - pr_info("32-bit Thumb instruction simulation\n"); - ret = run_test_cases(kprobe_thumb32_test_cases, - probes_decode_thumb32_table); - if (ret) - goto out; -#endif - - pr_info("Total instruction simulation tests=%d, pass=%d fail=%d\n", - test_try_count, test_pass_count, test_fail_count); - if (test_fail_count) { - ret = -EINVAL; - goto out; - } - -#if BENCHMARKING - pr_info("Benchmarks\n"); - ret = run_benchmarks(); - if (ret) - goto out; -#endif - -#if __LINUX_ARM_ARCH__ >= 7 - /* We are able to run all test cases so coverage should be complete */ - if (coverage_fail) { - pr_err("FAIL: Test coverage checks failed\n"); - ret = -EINVAL; - goto out; - } -#endif - -out: - if (ret == 0) - ret = tests_failed; - if (ret == 0) - pr_info("Finished kprobe tests OK\n"); - else - pr_err("kprobe tests failed\n"); - - return ret; -} - - -/* - * Module setup - */ - -#ifdef MODULE - -static void __exit kprobe_test_exit(void) -{ -} - -module_init(run_all_tests) -module_exit(kprobe_test_exit) -MODULE_LICENSE("GPL"); - -#else /* !MODULE */ - -late_initcall(run_all_tests); - -#endif diff --git a/arch/arm/kernel/kprobes-test.h 
b/arch/arm/kernel/kprobes-test.h deleted file mode 100644 index 4430990e90e7..000000000000 --- a/arch/arm/kernel/kprobes-test.h +++ /dev/null @@ -1,435 +0,0 @@ -/* - * arch/arm/kernel/kprobes-test.h - * - * Copyright (C) 2011 Jon Medhurst . - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#define VERBOSE 0 /* Set to '1' for more logging of test cases */ - -#ifdef CONFIG_THUMB2_KERNEL -#define NORMAL_ISA "16" -#else -#define NORMAL_ISA "32" -#endif - - -/* Flags used in kprobe_test_flags */ -#define TEST_FLAG_NO_ITBLOCK (1<<0) -#define TEST_FLAG_FULL_ITBLOCK (1<<1) -#define TEST_FLAG_NARROW_INSTR (1<<2) - -extern int kprobe_test_flags; -extern int kprobe_test_cc_position; - - -#define TEST_MEMORY_SIZE 256 - - -/* - * Test case structures. - * - * The arguments given to test cases can be one of three types. - * - * ARG_TYPE_REG - * Load a register with the given value. - * - * ARG_TYPE_PTR - * Load a register with a pointer into the stack buffer (SP + given value). - * - * ARG_TYPE_MEM - * Store the given value into the stack buffer at [SP+index]. - * - */ - -#define ARG_TYPE_END 0 -#define ARG_TYPE_REG 1 -#define ARG_TYPE_PTR 2 -#define ARG_TYPE_MEM 3 - -#define ARG_FLAG_UNSUPPORTED 0x01 -#define ARG_FLAG_SUPPORTED 0x02 -#define ARG_FLAG_THUMB 0x10 /* Must be 16 so TEST_ISA can be used */ -#define ARG_FLAG_ARM 0x20 /* Must be 32 so TEST_ISA can be used */ - -struct test_arg { - u8 type; /* ARG_TYPE_x */ - u8 _padding[7]; -}; - -struct test_arg_regptr { - u8 type; /* ARG_TYPE_REG or ARG_TYPE_PTR */ - u8 reg; - u8 _padding[2]; - u32 val; -}; - -struct test_arg_mem { - u8 type; /* ARG_TYPE_MEM */ - u8 index; - u8 _padding[2]; - u32 val; -}; - -struct test_arg_end { - u8 type; /* ARG_TYPE_END */ - u8 flags; /* ARG_FLAG_x */ - u16 code_offset; - u16 branch_offset; - u16 end_offset; -}; - - -/* - * Building blocks for test cases. - * - * Each test case is wrapped between TESTCASE_START and TESTCASE_END. - * - * To specify arguments for a test case the TEST_ARG_{REG,PTR,MEM} macros are - * used followed by a terminating TEST_ARG_END. - * - * After this, the instruction to be tested is defined with TEST_INSTRUCTION. - * Or for branches, TEST_BRANCH_B and TEST_BRANCH_F (branch forwards/backwards). - * - * Some specific test cases may make use of other custom constructs. - */ - -#if VERBOSE -#define verbose(fmt, ...) pr_info(fmt, ##__VA_ARGS__) -#else -#define verbose(fmt, ...) -#endif - -#define TEST_GROUP(title) \ - verbose("\n"); \ - verbose(title"\n"); \ - verbose("---------------------------------------------------------\n"); - -#define TESTCASE_START(title) \ - __asm__ __volatile__ ( \ - "bl __kprobes_test_case_start \n\t" \ - ".pushsection .rodata \n\t" \ - "10: \n\t" \ - /* don't use .asciz here as 'title' may be */ \ - /* multiple strings to be concatenated. 
*/ \ - ".ascii "#title" \n\t" \ - ".byte 0 \n\t" \ - ".popsection \n\t" \ - ".word 10b \n\t" - -#define TEST_ARG_REG(reg, val) \ - ".byte "__stringify(ARG_TYPE_REG)" \n\t" \ - ".byte "#reg" \n\t" \ - ".short 0 \n\t" \ - ".word "#val" \n\t" - -#define TEST_ARG_PTR(reg, val) \ - ".byte "__stringify(ARG_TYPE_PTR)" \n\t" \ - ".byte "#reg" \n\t" \ - ".short 0 \n\t" \ - ".word "#val" \n\t" - -#define TEST_ARG_MEM(index, val) \ - ".byte "__stringify(ARG_TYPE_MEM)" \n\t" \ - ".byte "#index" \n\t" \ - ".short 0 \n\t" \ - ".word "#val" \n\t" - -#define TEST_ARG_END(flags) \ - ".byte "__stringify(ARG_TYPE_END)" \n\t" \ - ".byte "TEST_ISA flags" \n\t" \ - ".short 50f-0f \n\t" \ - ".short 2f-0f \n\t" \ - ".short 99f-0f \n\t" \ - ".code "TEST_ISA" \n\t" \ - "0: \n\t" - -#define TEST_INSTRUCTION(instruction) \ - "50: nop \n\t" \ - "1: "instruction" \n\t" \ - " nop \n\t" - -#define TEST_BRANCH_F(instruction) \ - TEST_INSTRUCTION(instruction) \ - " b 99f \n\t" \ - "2: nop \n\t" - -#define TEST_BRANCH_B(instruction) \ - " b 50f \n\t" \ - " b 99f \n\t" \ - "2: nop \n\t" \ - " b 99f \n\t" \ - TEST_INSTRUCTION(instruction) - -#define TEST_BRANCH_FX(instruction, codex) \ - TEST_INSTRUCTION(instruction) \ - " b 99f \n\t" \ - codex" \n\t" \ - " b 99f \n\t" \ - "2: nop \n\t" - -#define TEST_BRANCH_BX(instruction, codex) \ - " b 50f \n\t" \ - " b 99f \n\t" \ - "2: nop \n\t" \ - " b 99f \n\t" \ - codex" \n\t" \ - TEST_INSTRUCTION(instruction) - -#define TESTCASE_END \ - "2: \n\t" \ - "99: \n\t" \ - " bl __kprobes_test_case_end_"TEST_ISA" \n\t" \ - ".code "NORMAL_ISA" \n\t" \ - : : \ - : "r0", "r1", "r2", "r3", "ip", "lr", "memory", "cc" \ - ); - - -/* - * Macros to define test cases. - * - * Those of the form TEST_{R,P,M}* can be used to define test cases - * which take combinations of the three basic types of arguments. E.g. - * - * TEST_R One register argument - * TEST_RR Two register arguments - * TEST_RPR A register, a pointer, then a register argument - * - * For testing instructions which may branch, there are macros TEST_BF_* - * and TEST_BB_* for branching forwards and backwards. - * - * TEST_SUPPORTED and TEST_UNSUPPORTED don't cause the code to be executed, - * the just verify that a kprobe is or is not allowed on the given instruction. 
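As a sketch outside the patch itself: the TEST_R/TEST_RR family below builds both the test-case title and the instruction text by pasting a register name into string fragments with the preprocessor's # operator plus adjacent-literal concatenation. DEMO_TEST_R is a hypothetical, stripped-down stand-in that drops the val argument and all the assembler plumbing of the real macros:

#include <stdio.h>

/* code1 #reg code2 becomes one string literal, e.g. "mov r0, " "r7" "" */
#define DEMO_TEST_R(code1, reg, code2)	code1 #reg code2

int main(void)
{
	puts(DEMO_TEST_R("mov r0, ", r7, ""));	/* prints: mov r0, r7 */
	return 0;
}

The same pasting is what lets TESTCASE_START() reuse the composed text as the test's title.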
- */ - -#define TEST(code) \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code) \ - TESTCASE_END - -#define TEST_UNSUPPORTED(code) \ - TESTCASE_START(code) \ - TEST_ARG_END("|"__stringify(ARG_FLAG_UNSUPPORTED)) \ - TEST_INSTRUCTION(code) \ - TESTCASE_END - -#define TEST_SUPPORTED(code) \ - TESTCASE_START(code) \ - TEST_ARG_END("|"__stringify(ARG_FLAG_SUPPORTED)) \ - TEST_INSTRUCTION(code) \ - TESTCASE_END - -#define TEST_R(code1, reg, val, code2) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_REG(reg, val) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg code2) \ - TESTCASE_END - -#define TEST_RR(code1, reg1, val1, code2, reg2, val2, code3) \ - TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \ - TESTCASE_END - -#define TEST_RRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ - TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_REG(reg3, val3) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TESTCASE_END - -#define TEST_RRRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4, reg4, val4) \ - TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4 #reg4) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_REG(reg3, val3) \ - TEST_ARG_REG(reg4, val4) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4 #reg4) \ - TESTCASE_END - -#define TEST_P(code1, reg1, val1, code2) \ - TESTCASE_START(code1 #reg1 code2) \ - TEST_ARG_PTR(reg1, val1) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2) \ - TESTCASE_END - -#define TEST_PR(code1, reg1, val1, code2, reg2, val2, code3) \ - TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ - TEST_ARG_PTR(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \ - TESTCASE_END - -#define TEST_RP(code1, reg1, val1, code2, reg2, val2, code3) \ - TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_PTR(reg2, val2) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \ - TESTCASE_END - -#define TEST_PRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ - TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TEST_ARG_PTR(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_REG(reg3, val3) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TESTCASE_END - -#define TEST_RPR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ - TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_PTR(reg2, val2) \ - TEST_ARG_REG(reg3, val3) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TESTCASE_END - -#define TEST_RRP(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ - TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_PTR(reg3, val3) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ - TESTCASE_END - -#define TEST_BF_P(code1, reg1, val1, code2) \ - TESTCASE_START(code1 #reg1 code2) \ - TEST_ARG_PTR(reg1, val1) \ - TEST_ARG_END("") \ - TEST_BRANCH_F(code1 #reg1 code2) \ - TESTCASE_END - -#define TEST_BF(code) \ - TESTCASE_START(code) \ - 
TEST_ARG_END("") \ - TEST_BRANCH_F(code) \ - TESTCASE_END - -#define TEST_BB(code) \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - TEST_BRANCH_B(code) \ - TESTCASE_END - -#define TEST_BF_R(code1, reg, val, code2) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_REG(reg, val) \ - TEST_ARG_END("") \ - TEST_BRANCH_F(code1 #reg code2) \ - TESTCASE_END - -#define TEST_BB_R(code1, reg, val, code2) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_REG(reg, val) \ - TEST_ARG_END("") \ - TEST_BRANCH_B(code1 #reg code2) \ - TESTCASE_END - -#define TEST_BF_RR(code1, reg1, val1, code2, reg2, val2, code3) \ - TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_END("") \ - TEST_BRANCH_F(code1 #reg1 code2 #reg2 code3) \ - TESTCASE_END - -#define TEST_BF_X(code, codex) \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - TEST_BRANCH_FX(code, codex) \ - TESTCASE_END - -#define TEST_BB_X(code, codex) \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - TEST_BRANCH_BX(code, codex) \ - TESTCASE_END - -#define TEST_BF_RX(code1, reg, val, code2, codex) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_REG(reg, val) \ - TEST_ARG_END("") \ - TEST_BRANCH_FX(code1 #reg code2, codex) \ - TESTCASE_END - -#define TEST_X(code, codex) \ - TESTCASE_START(code) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code) \ - " b 99f \n\t" \ - " "codex" \n\t" \ - TESTCASE_END - -#define TEST_RX(code1, reg, val, code2, codex) \ - TESTCASE_START(code1 #reg code2) \ - TEST_ARG_REG(reg, val) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 __stringify(reg) code2) \ - " b 99f \n\t" \ - " "codex" \n\t" \ - TESTCASE_END - -#define TEST_RRX(code1, reg1, val1, code2, reg2, val2, code3, codex) \ - TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ - TEST_ARG_REG(reg1, val1) \ - TEST_ARG_REG(reg2, val2) \ - TEST_ARG_END("") \ - TEST_INSTRUCTION(code1 __stringify(reg1) code2 __stringify(reg2) code3) \ - " b 99f \n\t" \ - " "codex" \n\t" \ - TESTCASE_END - - -/* - * Macros for defining space directives spread over multiple lines. - * These are required so the compiler guesses better the length of inline asm - * code and will spill the literal pool early enough to avoid generating PC - * relative loads with out of range offsets. - */ -#define TWICE(x) x x -#define SPACE_0x8 TWICE(".space 4\n\t") -#define SPACE_0x10 TWICE(SPACE_0x8) -#define SPACE_0x20 TWICE(SPACE_0x10) -#define SPACE_0x40 TWICE(SPACE_0x20) -#define SPACE_0x80 TWICE(SPACE_0x40) -#define SPACE_0x100 TWICE(SPACE_0x80) -#define SPACE_0x200 TWICE(SPACE_0x100) -#define SPACE_0x400 TWICE(SPACE_0x200) -#define SPACE_0x800 TWICE(SPACE_0x400) -#define SPACE_0x1000 TWICE(SPACE_0x800) - - -/* Various values used in test cases... */ -#define N(val) (val ^ 0xffffffff) -#define VAL1 0x12345678 -#define VAL2 N(VAL1) -#define VAL3 0xa5f801 -#define VAL4 N(VAL3) -#define VALM 0x456789ab -#define VALR 0xdeaddead -#define HH1 0x0123fecb -#define HH2 0xa9874567 - - -#ifdef CONFIG_THUMB2_KERNEL -void kprobe_thumb16_test_cases(void); -void kprobe_thumb32_test_cases(void); -#else -void kprobe_arm_test_cases(void); -#endif diff --git a/arch/arm/kernel/kprobes-thumb.c b/arch/arm/kernel/kprobes-thumb.c deleted file mode 100644 index 9495d7f3516f..000000000000 --- a/arch/arm/kernel/kprobes-thumb.c +++ /dev/null @@ -1,666 +0,0 @@ -/* - * arch/arm/kernel/kprobes-thumb.c - * - * Copyright (C) 2011 Jon Medhurst . 
- * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#include -#include -#include -#include - -#include "kprobes.h" -#include "probes-thumb.h" - -/* These emulation encodings are functionally equivalent... */ -#define t32_emulate_rd8rn16rm0ra12_noflags \ - t32_emulate_rdlo12rdhi8rn16rm0_noflags - -/* t32 thumb actions */ - -static void __kprobes -t32_simulate_table_branch(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - unsigned long rnv = (rn == 15) ? pc : regs->uregs[rn]; - unsigned long rmv = regs->uregs[rm]; - unsigned int halfwords; - - if (insn & 0x10) /* TBH */ - halfwords = ((u16 *)rnv)[rmv]; - else /* TBB */ - halfwords = ((u8 *)rnv)[rmv]; - - regs->ARM_pc = pc + 2 * halfwords; -} - -static void __kprobes -t32_simulate_mrs(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rd = (insn >> 8) & 0xf; - unsigned long mask = 0xf8ff03df; /* Mask out execution state */ - regs->uregs[rd] = regs->ARM_cpsr & mask; -} - -static void __kprobes -t32_simulate_cond_branch(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc; - - long offset = insn & 0x7ff; /* imm11 */ - offset += (insn & 0x003f0000) >> 5; /* imm6 */ - offset += (insn & 0x00002000) << 4; /* J1 */ - offset += (insn & 0x00000800) << 7; /* J2 */ - offset -= (insn & 0x04000000) >> 7; /* Apply sign bit */ - - regs->ARM_pc = pc + (offset * 2); -} - -static enum probes_insn __kprobes -t32_decode_cond_branch(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - int cc = (insn >> 22) & 0xf; - asi->insn_check_cc = probes_condition_checks[cc]; - asi->insn_handler = t32_simulate_cond_branch; - return INSN_GOOD_NO_SLOT; -} - -static void __kprobes -t32_simulate_branch(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc; - - long offset = insn & 0x7ff; /* imm11 */ - offset += (insn & 0x03ff0000) >> 5; /* imm10 */ - offset += (insn & 0x00002000) << 9; /* J1 */ - offset += (insn & 0x00000800) << 10; /* J2 */ - if (insn & 0x04000000) - offset -= 0x00800000; /* Apply sign bit */ - else - offset ^= 0x00600000; /* Invert J1 and J2 */ - - if (insn & (1 << 14)) { - /* BL or BLX */ - regs->ARM_lr = regs->ARM_pc | 1; - if (!(insn & (1 << 12))) { - /* BLX so switch to ARM mode */ - regs->ARM_cpsr &= ~PSR_T_BIT; - pc &= ~3; - } - } - - regs->ARM_pc = pc + (offset * 2); -} - -static void __kprobes -t32_simulate_ldr_literal(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long addr = regs->ARM_pc & ~3; - int rt = (insn >> 12) & 0xf; - unsigned long rtv; - - long offset = insn & 0xfff; - if (insn & 0x00800000) - addr += offset; - else - addr -= offset; - - if (insn & 0x00400000) { - /* LDR */ - rtv = *(unsigned long *)addr; - if (rt == 15) { - bx_write_pc(rtv, regs); - return; - } - } else if (insn & 0x00200000) { - /* LDRH */ - if (insn & 0x01000000) - rtv = *(s16 *)addr; - else - rtv = *(u16 *)addr; - } else { - /* LDRB */ - if (insn & 0x01000000) - rtv = *(s8 *)addr; - else - rtv = *(u8 *)addr; - } - - regs->uregs[rt] = rtv; -} - -static enum probes_insn __kprobes -t32_decode_ldmstm(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header 
*d) -{ - enum probes_insn ret = kprobe_decode_ldmstm(insn, asi, d); - - /* Fixup modified instruction to have halfwords in correct order...*/ - insn = __mem_to_opcode_arm(asi->insn[0]); - ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(insn >> 16); - ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0xffff); - - return ret; -} - -static void __kprobes -t32_emulate_ldrdstrd(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc & ~3; - int rt1 = (insn >> 12) & 0xf; - int rt2 = (insn >> 8) & 0xf; - int rn = (insn >> 16) & 0xf; - - register unsigned long rt1v asm("r0") = regs->uregs[rt1]; - register unsigned long rt2v asm("r1") = regs->uregs[rt2]; - register unsigned long rnv asm("r2") = (rn == 15) ? pc - : regs->uregs[rn]; - - __asm__ __volatile__ ( - "blx %[fn]" - : "=r" (rt1v), "=r" (rt2v), "=r" (rnv) - : "0" (rt1v), "1" (rt2v), "2" (rnv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - if (rn != 15) - regs->uregs[rn] = rnv; /* Writeback base register */ - regs->uregs[rt1] = rt1v; - regs->uregs[rt2] = rt2v; -} - -static void __kprobes -t32_emulate_ldrstr(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rt = (insn >> 12) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rtv asm("r0") = regs->uregs[rt]; - register unsigned long rnv asm("r2") = regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - - __asm__ __volatile__ ( - "blx %[fn]" - : "=r" (rtv), "=r" (rnv) - : "0" (rtv), "1" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rn] = rnv; /* Writeback base register */ - if (rt == 15) /* Can't be true for a STR as they aren't allowed */ - bx_write_pc(rtv, regs); - else - regs->uregs[rt] = rtv; -} - -static void __kprobes -t32_emulate_rd8rn16rm0_rwflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rd = (insn >> 8) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rdv asm("r1") = regs->uregs[rd]; - register unsigned long rnv asm("r2") = regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - unsigned long cpsr = regs->ARM_cpsr; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[cpsr] \n\t" - "blx %[fn] \n\t" - "mrs %[cpsr], cpsr \n\t" - : "=r" (rdv), [cpsr] "=r" (cpsr) - : "0" (rdv), "r" (rnv), "r" (rmv), - "1" (cpsr), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rd] = rdv; - regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); -} - -static void __kprobes -t32_emulate_rd8pc16_noflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc; - int rd = (insn >> 8) & 0xf; - - register unsigned long rdv asm("r1") = regs->uregs[rd]; - register unsigned long rnv asm("r2") = pc & ~3; - - __asm__ __volatile__ ( - "blx %[fn]" - : "=r" (rdv) - : "0" (rdv), "r" (rnv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rd] = rdv; -} - -static void __kprobes -t32_emulate_rd8rn16_noflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rd = (insn >> 8) & 0xf; - int rn = (insn >> 16) & 0xf; - - register unsigned long rdv asm("r1") = regs->uregs[rd]; - register unsigned long rnv asm("r2") = regs->uregs[rn]; - - __asm__ __volatile__ ( - "blx %[fn]" - : "=r" (rdv) - : "0" (rdv), "r" (rnv), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rd] = 
rdv; -} - -static void __kprobes -t32_emulate_rdlo12rdhi8rn16rm0_noflags(probes_opcode_t insn, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - int rdlo = (insn >> 12) & 0xf; - int rdhi = (insn >> 8) & 0xf; - int rn = (insn >> 16) & 0xf; - int rm = insn & 0xf; - - register unsigned long rdlov asm("r0") = regs->uregs[rdlo]; - register unsigned long rdhiv asm("r1") = regs->uregs[rdhi]; - register unsigned long rnv asm("r2") = regs->uregs[rn]; - register unsigned long rmv asm("r3") = regs->uregs[rm]; - - __asm__ __volatile__ ( - "blx %[fn]" - : "=r" (rdlov), "=r" (rdhiv) - : "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv), - [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - regs->uregs[rdlo] = rdlov; - regs->uregs[rdhi] = rdhiv; -} -/* t16 thumb actions */ - -static void __kprobes -t16_simulate_bxblx(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 2; - int rm = (insn >> 3) & 0xf; - unsigned long rmv = (rm == 15) ? pc : regs->uregs[rm]; - - if (insn & (1 << 7)) /* BLX ? */ - regs->ARM_lr = regs->ARM_pc | 1; - - bx_write_pc(rmv, regs); -} - -static void __kprobes -t16_simulate_ldr_literal(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long *base = (unsigned long *)((regs->ARM_pc + 2) & ~3); - long index = insn & 0xff; - int rt = (insn >> 8) & 0x7; - regs->uregs[rt] = base[index]; -} - -static void __kprobes -t16_simulate_ldrstr_sp_relative(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long* base = (unsigned long *)regs->ARM_sp; - long index = insn & 0xff; - int rt = (insn >> 8) & 0x7; - if (insn & 0x800) /* LDR */ - regs->uregs[rt] = base[index]; - else /* STR */ - base[index] = regs->uregs[rt]; -} - -static void __kprobes -t16_simulate_reladr(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long base = (insn & 0x800) ? regs->ARM_sp - : ((regs->ARM_pc + 2) & ~3); - long offset = insn & 0xff; - int rt = (insn >> 8) & 0x7; - regs->uregs[rt] = base + offset * 4; -} - -static void __kprobes -t16_simulate_add_sp_imm(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - long imm = insn & 0x7f; - if (insn & 0x80) /* SUB */ - regs->ARM_sp -= imm * 4; - else /* ADD */ - regs->ARM_sp += imm * 4; -} - -static void __kprobes -t16_simulate_cbz(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rn = insn & 0x7; - probes_opcode_t nonzero = regs->uregs[rn] ? insn : ~insn; - if (nonzero & 0x800) { - long i = insn & 0x200; - long imm5 = insn & 0xf8; - unsigned long pc = regs->ARM_pc + 2; - regs->ARM_pc = pc + (i >> 3) + (imm5 >> 2); - } -} - -static void __kprobes -t16_simulate_it(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - /* - * The 8 IT state bits are split into two parts in CPSR: - * ITSTATE<1:0> are in CPSR<26:25> - * ITSTATE<7:2> are in CPSR<15:10> - * The new IT state is in the lower byte of insn. 
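For illustration only (nothing here is kernel code): a standalone check of the ITSTATE packing described above and implemented in the lines that follow. ITSTATE<7:2> lands in CPSR<15:10> via a left shift of 8, and ITSTATE<1:0> in CPSR<26:25> via a left shift of 25. DEMO_PSR_IT_MASK mirrors the kernel's PSR_IT_MASK value; the helper name is invented:

#include <assert.h>
#include <stdint.h>

#define DEMO_PSR_IT_MASK	0x0600fc00u	/* bits 26:25 and 15:10 */

static uint32_t pack_itstate(uint32_t cpsr, uint32_t it8)
{
	cpsr &= ~DEMO_PSR_IT_MASK;
	cpsr |= (it8 & 0xfc) << 8;	/* ITSTATE<7:2> -> CPSR<15:10> */
	cpsr |= (it8 & 0x03) << 25;	/* ITSTATE<1:0> -> CPSR<26:25> */
	return cpsr;
}

int main(void)
{
	/* All eight IT bits set must light exactly the mask bits, nothing else. */
	assert(pack_itstate(0, 0xff) == DEMO_PSR_IT_MASK);
	return 0;
}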
- */ - unsigned long cpsr = regs->ARM_cpsr; - cpsr &= ~PSR_IT_MASK; - cpsr |= (insn & 0xfc) << 8; - cpsr |= (insn & 0x03) << 25; - regs->ARM_cpsr = cpsr; -} - -static void __kprobes -t16_singlestep_it(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - regs->ARM_pc += 2; - t16_simulate_it(insn, asi, regs); -} - -static enum probes_insn __kprobes -t16_decode_it(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - asi->insn_singlestep = t16_singlestep_it; - return INSN_GOOD_NO_SLOT; -} - -static void __kprobes -t16_simulate_cond_branch(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 2; - long offset = insn & 0x7f; - offset -= insn & 0x80; /* Apply sign bit */ - regs->ARM_pc = pc + (offset * 2); -} - -static enum probes_insn __kprobes -t16_decode_cond_branch(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - int cc = (insn >> 8) & 0xf; - asi->insn_check_cc = probes_condition_checks[cc]; - asi->insn_handler = t16_simulate_cond_branch; - return INSN_GOOD_NO_SLOT; -} - -static void __kprobes -t16_simulate_branch(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 2; - long offset = insn & 0x3ff; - offset -= insn & 0x400; /* Apply sign bit */ - regs->ARM_pc = pc + (offset * 2); -} - -static unsigned long __kprobes -t16_emulate_loregs(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long oldcpsr = regs->ARM_cpsr; - unsigned long newcpsr; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[oldcpsr] \n\t" - "ldmia %[regs], {r0-r7} \n\t" - "blx %[fn] \n\t" - "stmia %[regs], {r0-r7} \n\t" - "mrs %[newcpsr], cpsr \n\t" - : [newcpsr] "=r" (newcpsr) - : [oldcpsr] "r" (oldcpsr), [regs] "r" (regs), - [fn] "r" (asi->insn_fn) - : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", - "lr", "memory", "cc" - ); - - return (oldcpsr & ~APSR_MASK) | (newcpsr & APSR_MASK); -} - -static void __kprobes -t16_emulate_loregs_rwflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - regs->ARM_cpsr = t16_emulate_loregs(insn, asi, regs); -} - -static void __kprobes -t16_emulate_loregs_noitrwflags(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long cpsr = t16_emulate_loregs(insn, asi, regs); - if (!in_it_block(cpsr)) - regs->ARM_cpsr = cpsr; -} - -static void __kprobes -t16_emulate_hiregs(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - unsigned long pc = regs->ARM_pc + 2; - int rdn = (insn & 0x7) | ((insn & 0x80) >> 4); - int rm = (insn >> 3) & 0xf; - - register unsigned long rdnv asm("r1"); - register unsigned long rmv asm("r0"); - unsigned long cpsr = regs->ARM_cpsr; - - rdnv = (rdn == 15) ? pc : regs->uregs[rdn]; - rmv = (rm == 15) ? 
pc : regs->uregs[rm]; - - __asm__ __volatile__ ( - "msr cpsr_fs, %[cpsr] \n\t" - "blx %[fn] \n\t" - "mrs %[cpsr], cpsr \n\t" - : "=r" (rdnv), [cpsr] "=r" (cpsr) - : "0" (rdnv), "r" (rmv), "1" (cpsr), [fn] "r" (asi->insn_fn) - : "lr", "memory", "cc" - ); - - if (rdn == 15) - rdnv &= ~1; - - regs->uregs[rdn] = rdnv; - regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); -} - -static enum probes_insn __kprobes -t16_decode_hiregs(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - insn &= ~0x00ff; - insn |= 0x001; /* Set Rdn = R1 and Rm = R0 */ - ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(insn); - asi->insn_handler = t16_emulate_hiregs; - return INSN_GOOD; -} - -static void __kprobes -t16_emulate_push(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - __asm__ __volatile__ ( - "ldr r9, [%[regs], #13*4] \n\t" - "ldr r8, [%[regs], #14*4] \n\t" - "ldmia %[regs], {r0-r7} \n\t" - "blx %[fn] \n\t" - "str r9, [%[regs], #13*4] \n\t" - : - : [regs] "r" (regs), [fn] "r" (asi->insn_fn) - : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", - "lr", "memory", "cc" - ); -} - -static enum probes_insn __kprobes -t16_decode_push(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - /* - * To simulate a PUSH we use a Thumb-2 "STMDB R9!, {registers}" - * and call it with R9=SP and LR in the register list represented - * by R8. - */ - /* 1st half STMDB R9!,{} */ - ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(0xe929); - /* 2nd half (register list) */ - ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0x1ff); - asi->insn_handler = t16_emulate_push; - return INSN_GOOD; -} - -static void __kprobes -t16_emulate_pop_nopc(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - __asm__ __volatile__ ( - "ldr r9, [%[regs], #13*4] \n\t" - "ldmia %[regs], {r0-r7} \n\t" - "blx %[fn] \n\t" - "stmia %[regs], {r0-r7} \n\t" - "str r9, [%[regs], #13*4] \n\t" - : - : [regs] "r" (regs), [fn] "r" (asi->insn_fn) - : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9", - "lr", "memory", "cc" - ); -} - -static void __kprobes -t16_emulate_pop_pc(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - register unsigned long pc asm("r8"); - - __asm__ __volatile__ ( - "ldr r9, [%[regs], #13*4] \n\t" - "ldmia %[regs], {r0-r7} \n\t" - "blx %[fn] \n\t" - "stmia %[regs], {r0-r7} \n\t" - "str r9, [%[regs], #13*4] \n\t" - : "=r" (pc) - : [regs] "r" (regs), [fn] "r" (asi->insn_fn) - : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9", - "lr", "memory", "cc" - ); - - bx_write_pc(pc, regs); -} - -static enum probes_insn __kprobes -t16_decode_pop(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - /* - * To simulate a POP we use a Thumb-2 "LDMDB R9!, {registers}" - * and call it with R9=SP and PC in the register list represented - * by R8. - */ - /* 1st half LDMIA R9!,{} */ - ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(0xe8b9); - /* 2nd half (register list) */ - ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0x1ff); - asi->insn_handler = insn & 0x100 ? 
t16_emulate_pop_pc - : t16_emulate_pop_nopc; - return INSN_GOOD; -} - -const union decode_action kprobes_t16_actions[NUM_PROBES_T16_ACTIONS] = { - [PROBES_T16_ADD_SP] = {.handler = t16_simulate_add_sp_imm}, - [PROBES_T16_CBZ] = {.handler = t16_simulate_cbz}, - [PROBES_T16_SIGN_EXTEND] = {.handler = t16_emulate_loregs_rwflags}, - [PROBES_T16_PUSH] = {.decoder = t16_decode_push}, - [PROBES_T16_POP] = {.decoder = t16_decode_pop}, - [PROBES_T16_SEV] = {.handler = probes_emulate_none}, - [PROBES_T16_WFE] = {.handler = probes_simulate_nop}, - [PROBES_T16_IT] = {.decoder = t16_decode_it}, - [PROBES_T16_CMP] = {.handler = t16_emulate_loregs_rwflags}, - [PROBES_T16_ADDSUB] = {.handler = t16_emulate_loregs_noitrwflags}, - [PROBES_T16_LOGICAL] = {.handler = t16_emulate_loregs_noitrwflags}, - [PROBES_T16_LDR_LIT] = {.handler = t16_simulate_ldr_literal}, - [PROBES_T16_BLX] = {.handler = t16_simulate_bxblx}, - [PROBES_T16_HIREGOPS] = {.decoder = t16_decode_hiregs}, - [PROBES_T16_LDRHSTRH] = {.handler = t16_emulate_loregs_rwflags}, - [PROBES_T16_LDRSTR] = {.handler = t16_simulate_ldrstr_sp_relative}, - [PROBES_T16_ADR] = {.handler = t16_simulate_reladr}, - [PROBES_T16_LDMSTM] = {.handler = t16_emulate_loregs_rwflags}, - [PROBES_T16_BRANCH_COND] = {.decoder = t16_decode_cond_branch}, - [PROBES_T16_BRANCH] = {.handler = t16_simulate_branch}, -}; - -const union decode_action kprobes_t32_actions[NUM_PROBES_T32_ACTIONS] = { - [PROBES_T32_LDMSTM] = {.decoder = t32_decode_ldmstm}, - [PROBES_T32_LDRDSTRD] = {.handler = t32_emulate_ldrdstrd}, - [PROBES_T32_TABLE_BRANCH] = {.handler = t32_simulate_table_branch}, - [PROBES_T32_TST] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_MOV] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_ADDSUB] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_LOGICAL] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_CMP] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_ADDWSUBW_PC] = {.handler = t32_emulate_rd8pc16_noflags,}, - [PROBES_T32_ADDWSUBW] = {.handler = t32_emulate_rd8rn16_noflags}, - [PROBES_T32_MOVW] = {.handler = t32_emulate_rd8rn16_noflags}, - [PROBES_T32_SAT] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_BITFIELD] = {.handler = t32_emulate_rd8rn16_noflags}, - [PROBES_T32_SEV] = {.handler = probes_emulate_none}, - [PROBES_T32_WFE] = {.handler = probes_simulate_nop}, - [PROBES_T32_MRS] = {.handler = t32_simulate_mrs}, - [PROBES_T32_BRANCH_COND] = {.decoder = t32_decode_cond_branch}, - [PROBES_T32_BRANCH] = {.handler = t32_simulate_branch}, - [PROBES_T32_PLDI] = {.handler = probes_simulate_nop}, - [PROBES_T32_LDR_LIT] = {.handler = t32_simulate_ldr_literal}, - [PROBES_T32_LDRSTR] = {.handler = t32_emulate_ldrstr}, - [PROBES_T32_SIGN_EXTEND] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_MEDIA] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_REVERSE] = {.handler = t32_emulate_rd8rn16_noflags}, - [PROBES_T32_MUL_ADD] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, - [PROBES_T32_MUL_ADD2] = {.handler = t32_emulate_rd8rn16rm0ra12_noflags}, - [PROBES_T32_MUL_ADD_LONG] = { - .handler = t32_emulate_rdlo12rdhi8rn16rm0_noflags}, -}; diff --git a/arch/arm/kernel/kprobes.c b/arch/arm/kernel/kprobes.c deleted file mode 100644 index 6d644202c8dc..000000000000 --- a/arch/arm/kernel/kprobes.c +++ /dev/null @@ -1,628 +0,0 @@ -/* - * arch/arm/kernel/kprobes.c - * - * Kprobes on ARM - * - * Abhishek Sagar - * Copyright (C) 2006, 2007 Motorola Inc. 
- * - * Nicolas Pitre - * Copyright (C) 2007 Marvell Ltd. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * General Public License for more details. - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "kprobes.h" -#include "probes-arm.h" -#include "probes-thumb.h" -#include "patch.h" - -#define MIN_STACK_SIZE(addr) \ - min((unsigned long)MAX_STACK_SIZE, \ - (unsigned long)current_thread_info() + THREAD_START_SP - (addr)) - -#define flush_insns(addr, size) \ - flush_icache_range((unsigned long)(addr), \ - (unsigned long)(addr) + \ - (size)) - -/* Used as a marker in ARM_pc to note when we're in a jprobe. */ -#define JPROBE_MAGIC_ADDR 0xffffffff - -DEFINE_PER_CPU(struct kprobe *, current_kprobe) = NULL; -DEFINE_PER_CPU(struct kprobe_ctlblk, kprobe_ctlblk); - - -int __kprobes arch_prepare_kprobe(struct kprobe *p) -{ - kprobe_opcode_t insn; - kprobe_opcode_t tmp_insn[MAX_INSN_SIZE]; - unsigned long addr = (unsigned long)p->addr; - bool thumb; - kprobe_decode_insn_t *decode_insn; - const union decode_action *actions; - int is; - - if (in_exception_text(addr)) - return -EINVAL; - -#ifdef CONFIG_THUMB2_KERNEL - thumb = true; - addr &= ~1; /* Bit 0 would normally be set to indicate Thumb code */ - insn = __mem_to_opcode_thumb16(((u16 *)addr)[0]); - if (is_wide_instruction(insn)) { - u16 inst2 = __mem_to_opcode_thumb16(((u16 *)addr)[1]); - insn = __opcode_thumb32_compose(insn, inst2); - decode_insn = thumb32_probes_decode_insn; - actions = kprobes_t32_actions; - } else { - decode_insn = thumb16_probes_decode_insn; - actions = kprobes_t16_actions; - } -#else /* !CONFIG_THUMB2_KERNEL */ - thumb = false; - if (addr & 0x3) - return -EINVAL; - insn = __mem_to_opcode_arm(*p->addr); - decode_insn = arm_probes_decode_insn; - actions = kprobes_arm_actions; -#endif - - p->opcode = insn; - p->ainsn.insn = tmp_insn; - - switch ((*decode_insn)(insn, &p->ainsn, true, actions)) { - case INSN_REJECTED: /* not supported */ - return -EINVAL; - - case INSN_GOOD: /* instruction uses slot */ - p->ainsn.insn = get_insn_slot(); - if (!p->ainsn.insn) - return -ENOMEM; - for (is = 0; is < MAX_INSN_SIZE; ++is) - p->ainsn.insn[is] = tmp_insn[is]; - flush_insns(p->ainsn.insn, - sizeof(p->ainsn.insn[0]) * MAX_INSN_SIZE); - p->ainsn.insn_fn = (probes_insn_fn_t *) - ((uintptr_t)p->ainsn.insn | thumb); - break; - - case INSN_GOOD_NO_SLOT: /* instruction doesn't need insn slot */ - p->ainsn.insn = NULL; - break; - } - - return 0; -} - -void __kprobes arch_arm_kprobe(struct kprobe *p) -{ - unsigned int brkp; - void *addr; - - if (IS_ENABLED(CONFIG_THUMB2_KERNEL)) { - /* Remove any Thumb flag */ - addr = (void *)((uintptr_t)p->addr & ~1); - - if (is_wide_instruction(p->opcode)) - brkp = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION; - else - brkp = KPROBE_THUMB16_BREAKPOINT_INSTRUCTION; - } else { - kprobe_opcode_t insn = p->opcode; - - addr = p->addr; - brkp = KPROBE_ARM_BREAKPOINT_INSTRUCTION; - - if (insn >= 0xe0000000) - brkp |= 0xe0000000; /* Unconditional instruction */ - else - brkp |= insn & 0xf0000000; /* Copy condition from insn */ - } - - patch_text(addr, brkp); -} - -/* - * The actual disarming is done 
here on each CPU and synchronized using - * stop_machine. This synchronization is necessary on SMP to avoid removing - * a probe between the moment the 'Undefined Instruction' exception is raised - * and the moment the exception handler reads the faulting instruction from - * memory. It is also needed to atomically set the two half-words of a 32-bit - * Thumb breakpoint. - */ -int __kprobes __arch_disarm_kprobe(void *p) -{ - struct kprobe *kp = p; - void *addr = (void *)((uintptr_t)kp->addr & ~1); - - __patch_text(addr, kp->opcode); - - return 0; -} - -void __kprobes arch_disarm_kprobe(struct kprobe *p) -{ - stop_machine(__arch_disarm_kprobe, p, cpu_online_mask); -} - -void __kprobes arch_remove_kprobe(struct kprobe *p) -{ - if (p->ainsn.insn) { - free_insn_slot(p->ainsn.insn, 0); - p->ainsn.insn = NULL; - } -} - -static void __kprobes save_previous_kprobe(struct kprobe_ctlblk *kcb) -{ - kcb->prev_kprobe.kp = kprobe_running(); - kcb->prev_kprobe.status = kcb->kprobe_status; -} - -static void __kprobes restore_previous_kprobe(struct kprobe_ctlblk *kcb) -{ - __this_cpu_write(current_kprobe, kcb->prev_kprobe.kp); - kcb->kprobe_status = kcb->prev_kprobe.status; -} - -static void __kprobes set_current_kprobe(struct kprobe *p) -{ - __this_cpu_write(current_kprobe, p); -} - -static void __kprobes -singlestep_skip(struct kprobe *p, struct pt_regs *regs) -{ -#ifdef CONFIG_THUMB2_KERNEL - regs->ARM_cpsr = it_advance(regs->ARM_cpsr); - if (is_wide_instruction(p->opcode)) - regs->ARM_pc += 4; - else - regs->ARM_pc += 2; -#else - regs->ARM_pc += 4; -#endif -} - -static inline void __kprobes -singlestep(struct kprobe *p, struct pt_regs *regs, struct kprobe_ctlblk *kcb) -{ - p->ainsn.insn_singlestep(p->opcode, &p->ainsn, regs); -} - -/* - * Called with IRQs disabled. IRQs must remain disabled from that point - * all the way until processing this kprobe is complete. The current - * kprobes implementation cannot process more than one nested level of - * kprobe, and that level is reserved for user kprobe handlers, so we can't - * risk encountering a new kprobe in an interrupt handler. - */ -void __kprobes kprobe_handler(struct pt_regs *regs) -{ - struct kprobe *p, *cur; - struct kprobe_ctlblk *kcb; - - kcb = get_kprobe_ctlblk(); - cur = kprobe_running(); - -#ifdef CONFIG_THUMB2_KERNEL - /* - * First look for a probe which was registered using an address with - * bit 0 set, this is the usual situation for pointers to Thumb code. - * If not found, fallback to looking for one with bit 0 clear. - */ - p = get_kprobe((kprobe_opcode_t *)(regs->ARM_pc | 1)); - if (!p) - p = get_kprobe((kprobe_opcode_t *)regs->ARM_pc); - -#else /* ! CONFIG_THUMB2_KERNEL */ - p = get_kprobe((kprobe_opcode_t *)regs->ARM_pc); -#endif - - if (p) { - if (cur) { - /* Kprobe is pending, so we're recursing. */ - switch (kcb->kprobe_status) { - case KPROBE_HIT_ACTIVE: - case KPROBE_HIT_SSDONE: - /* A pre- or post-handler probe got us here. */ - kprobes_inc_nmissed_count(p); - save_previous_kprobe(kcb); - set_current_kprobe(p); - kcb->kprobe_status = KPROBE_REENTER; - singlestep(p, regs, kcb); - restore_previous_kprobe(kcb); - break; - default: - /* impossible cases */ - BUG(); - } - } else if (p->ainsn.insn_check_cc(regs->ARM_cpsr)) { - /* Probe hit and conditional execution check ok. */ - set_current_kprobe(p); - kcb->kprobe_status = KPROBE_HIT_ACTIVE; - - /* - * If we have no pre-handler or it returned 0, we - * continue with normal processing. 
If we have a - * pre-handler and it returned non-zero, it prepped - * for calling the break_handler below on re-entry, - * so get out doing nothing more here. - */ - if (!p->pre_handler || !p->pre_handler(p, regs)) { - kcb->kprobe_status = KPROBE_HIT_SS; - singlestep(p, regs, kcb); - if (p->post_handler) { - kcb->kprobe_status = KPROBE_HIT_SSDONE; - p->post_handler(p, regs, 0); - } - reset_current_kprobe(); - } - } else { - /* - * Probe hit but conditional execution check failed, - * so just skip the instruction and continue as if - * nothing had happened. - */ - singlestep_skip(p, regs); - } - } else if (cur) { - /* We probably hit a jprobe. Call its break handler. */ - if (cur->break_handler && cur->break_handler(cur, regs)) { - kcb->kprobe_status = KPROBE_HIT_SS; - singlestep(cur, regs, kcb); - if (cur->post_handler) { - kcb->kprobe_status = KPROBE_HIT_SSDONE; - cur->post_handler(cur, regs, 0); - } - } - reset_current_kprobe(); - } else { - /* - * The probe was removed and a race is in progress. - * There is nothing we can do about it. Let's restart - * the instruction. By the time we can restart, the - * real instruction will be there. - */ - } -} - -static int __kprobes kprobe_trap_handler(struct pt_regs *regs, unsigned int instr) -{ - unsigned long flags; - local_irq_save(flags); - kprobe_handler(regs); - local_irq_restore(flags); - return 0; -} - -int __kprobes kprobe_fault_handler(struct pt_regs *regs, unsigned int fsr) -{ - struct kprobe *cur = kprobe_running(); - struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); - - switch (kcb->kprobe_status) { - case KPROBE_HIT_SS: - case KPROBE_REENTER: - /* - * We are here because the instruction being single - * stepped caused a page fault. We reset the current - * kprobe and the PC to point back to the probe address - * and allow the page fault handler to continue as a - * normal page fault. - */ - regs->ARM_pc = (long)cur->addr; - if (kcb->kprobe_status == KPROBE_REENTER) { - restore_previous_kprobe(kcb); - } else { - reset_current_kprobe(); - } - break; - - case KPROBE_HIT_ACTIVE: - case KPROBE_HIT_SSDONE: - /* - * We increment the nmissed count for accounting, - * we can also use npre/npostfault count for accounting - * these specific fault cases. - */ - kprobes_inc_nmissed_count(cur); - - /* - * We come here because instructions in the pre/post - * handler caused the page_fault, this could happen - * if handler tries to access user space by - * copy_from_user(), get_user() etc. Let the - * user-specified handler try to fix it. - */ - if (cur->fault_handler && cur->fault_handler(cur, regs, fsr)) - return 1; - break; - - default: - break; - } - - return 0; -} - -int __kprobes kprobe_exceptions_notify(struct notifier_block *self, - unsigned long val, void *data) -{ - /* - * notify_die() is currently never called on ARM, - * so this callback is currently empty. - */ - return NOTIFY_DONE; -} - -/* - * When a retprobed function returns, trampoline_handler() is called, - * calling the kretprobe's handler. We construct a struct pt_regs to - * give a view of registers r0-r11 to the user return-handler. This is - * not a complete pt_regs structure, but that should be plenty sufficient - * for kretprobe handlers which should normally be interested in r0 only - * anyway. 
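Again purely illustrative: the return-probe machinery below swaps the probed function's saved return address for kretprobe_trampoline and later hands control back to the real caller. A userspace sketch of just that diversion; the demo_* names are invented, plain data stands in for ARM_lr, and it tracks a single pending return where the kernel keeps a per-task hash list:

#include <stdint.h>
#include <stdio.h>

static uintptr_t real_return;			/* stashed original return address */

static uintptr_t demo_trampoline(void)
{
	printf("return probe fired\n");
	return real_return;			/* where execution really resumes */
}

static void demo_prepare_kretprobe(uintptr_t *lr)
{
	real_return = *lr;			/* remember the genuine caller */
	*lr = (uintptr_t)demo_trampoline;	/* divert the return path */
}

int main(void)
{
	uintptr_t lr = 0x8000;			/* pretend saved return address */
	demo_prepare_kretprobe(&lr);
	printf("lr diverted to %#lx, trampoline resumes at %#lx\n",
	       (unsigned long)lr, (unsigned long)demo_trampoline());
	return 0;
}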
- */ -void __naked __kprobes kretprobe_trampoline(void) -{ - __asm__ __volatile__ ( - "stmdb sp!, {r0 - r11} \n\t" - "mov r0, sp \n\t" - "bl trampoline_handler \n\t" - "mov lr, r0 \n\t" - "ldmia sp!, {r0 - r11} \n\t" -#ifdef CONFIG_THUMB2_KERNEL - "bx lr \n\t" -#else - "mov pc, lr \n\t" -#endif - : : : "memory"); -} - -/* Called from kretprobe_trampoline */ -static __used __kprobes void *trampoline_handler(struct pt_regs *regs) -{ - struct kretprobe_instance *ri = NULL; - struct hlist_head *head, empty_rp; - struct hlist_node *tmp; - unsigned long flags, orig_ret_address = 0; - unsigned long trampoline_address = (unsigned long)&kretprobe_trampoline; - - INIT_HLIST_HEAD(&empty_rp); - kretprobe_hash_lock(current, &head, &flags); - - /* - * It is possible to have multiple instances associated with a given - * task either because multiple functions in the call path have - * a return probe installed on them, and/or more than one return - * probe was registered for a target function. - * - * We can handle this because: - * - instances are always inserted at the head of the list - * - when multiple return probes are registered for the same - * function, the first instance's ret_addr will point to the - * real return address, and all the rest will point to - * kretprobe_trampoline - */ - hlist_for_each_entry_safe(ri, tmp, head, hlist) { - if (ri->task != current) - /* another task is sharing our hash bucket */ - continue; - - if (ri->rp && ri->rp->handler) { - __this_cpu_write(current_kprobe, &ri->rp->kp); - get_kprobe_ctlblk()->kprobe_status = KPROBE_HIT_ACTIVE; - ri->rp->handler(ri, regs); - __this_cpu_write(current_kprobe, NULL); - } - - orig_ret_address = (unsigned long)ri->ret_addr; - recycle_rp_inst(ri, &empty_rp); - - if (orig_ret_address != trampoline_address) - /* - * This is the real return address. Any other - * instances associated with this task are for - * other calls deeper on the call stack - */ - break; - } - - kretprobe_assert(ri, orig_ret_address, trampoline_address); - kretprobe_hash_unlock(current, &flags); - - hlist_for_each_entry_safe(ri, tmp, &empty_rp, hlist) { - hlist_del(&ri->hlist); - kfree(ri); - } - - return (void *)orig_ret_address; -} - -void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri, - struct pt_regs *regs) -{ - ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr; - - /* Replace the return addr with trampoline addr. */ - regs->ARM_lr = (unsigned long)&kretprobe_trampoline; -} - -int __kprobes setjmp_pre_handler(struct kprobe *p, struct pt_regs *regs) -{ - struct jprobe *jp = container_of(p, struct jprobe, kp); - struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); - long sp_addr = regs->ARM_sp; - long cpsr; - - kcb->jprobe_saved_regs = *regs; - memcpy(kcb->jprobes_stack, (void *)sp_addr, MIN_STACK_SIZE(sp_addr)); - regs->ARM_pc = (long)jp->entry; - - cpsr = regs->ARM_cpsr | PSR_I_BIT; -#ifdef CONFIG_THUMB2_KERNEL - /* Set correct Thumb state in cpsr */ - if (regs->ARM_pc & 1) - cpsr |= PSR_T_BIT; - else - cpsr &= ~PSR_T_BIT; -#endif - regs->ARM_cpsr = cpsr; - - preempt_disable(); - return 1; -} - -void __kprobes jprobe_return(void) -{ - struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); - - __asm__ __volatile__ ( - /* - * Setup an empty pt_regs. Fill SP and PC fields as - * they're needed by longjmp_break_handler. - * - * We allocate some slack between the original SP and start of - * our fabricated regs. To be precise we want to have worst case - * covered which is STMFD with all 16 regs so we allocate 2 * - * sizeof(struct_pt_regs)). 
- * - * This is to prevent any simulated instruction from writing - * over the regs when they are accessing the stack. - */ -#ifdef CONFIG_THUMB2_KERNEL - "sub r0, %0, %1 \n\t" - "mov sp, r0 \n\t" -#else - "sub sp, %0, %1 \n\t" -#endif - "ldr r0, ="__stringify(JPROBE_MAGIC_ADDR)"\n\t" - "str %0, [sp, %2] \n\t" - "str r0, [sp, %3] \n\t" - "mov r0, sp \n\t" - "bl kprobe_handler \n\t" - - /* - * Return to the context saved by setjmp_pre_handler - * and restored by longjmp_break_handler. - */ -#ifdef CONFIG_THUMB2_KERNEL - "ldr lr, [sp, %2] \n\t" /* lr = saved sp */ - "ldrd r0, r1, [sp, %5] \n\t" /* r0,r1 = saved lr,pc */ - "ldr r2, [sp, %4] \n\t" /* r2 = saved psr */ - "stmdb lr!, {r0, r1, r2} \n\t" /* push saved lr and */ - /* rfe context */ - "ldmia sp, {r0 - r12} \n\t" - "mov sp, lr \n\t" - "ldr lr, [sp], #4 \n\t" - "rfeia sp! \n\t" -#else - "ldr r0, [sp, %4] \n\t" - "msr cpsr_cxsf, r0 \n\t" - "ldmia sp, {r0 - pc} \n\t" -#endif - : - : "r" (kcb->jprobe_saved_regs.ARM_sp), - "I" (sizeof(struct pt_regs) * 2), - "J" (offsetof(struct pt_regs, ARM_sp)), - "J" (offsetof(struct pt_regs, ARM_pc)), - "J" (offsetof(struct pt_regs, ARM_cpsr)), - "J" (offsetof(struct pt_regs, ARM_lr)) - : "memory", "cc"); -} - -int __kprobes longjmp_break_handler(struct kprobe *p, struct pt_regs *regs) -{ - struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); - long stack_addr = kcb->jprobe_saved_regs.ARM_sp; - long orig_sp = regs->ARM_sp; - struct jprobe *jp = container_of(p, struct jprobe, kp); - - if (regs->ARM_pc == JPROBE_MAGIC_ADDR) { - if (orig_sp != stack_addr) { - struct pt_regs *saved_regs = - (struct pt_regs *)kcb->jprobe_saved_regs.ARM_sp; - printk("current sp %lx does not match saved sp %lx\n", - orig_sp, stack_addr); - printk("Saved registers for jprobe %p\n", jp); - show_regs(saved_regs); - printk("Current registers\n"); - show_regs(regs); - BUG(); - } - *regs = kcb->jprobe_saved_regs; - memcpy((void *)stack_addr, kcb->jprobes_stack, - MIN_STACK_SIZE(stack_addr)); - preempt_enable_no_resched(); - return 1; - } - return 0; -} - -int __kprobes arch_trampoline_kprobe(struct kprobe *p) -{ - return 0; -} - -#ifdef CONFIG_THUMB2_KERNEL - -static struct undef_hook kprobes_thumb16_break_hook = { - .instr_mask = 0xffff, - .instr_val = KPROBE_THUMB16_BREAKPOINT_INSTRUCTION, - .cpsr_mask = MODE_MASK, - .cpsr_val = SVC_MODE, - .fn = kprobe_trap_handler, -}; - -static struct undef_hook kprobes_thumb32_break_hook = { - .instr_mask = 0xffffffff, - .instr_val = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION, - .cpsr_mask = MODE_MASK, - .cpsr_val = SVC_MODE, - .fn = kprobe_trap_handler, -}; - -#else /* !CONFIG_THUMB2_KERNEL */ - -static struct undef_hook kprobes_arm_break_hook = { - .instr_mask = 0x0fffffff, - .instr_val = KPROBE_ARM_BREAKPOINT_INSTRUCTION, - .cpsr_mask = MODE_MASK, - .cpsr_val = SVC_MODE, - .fn = kprobe_trap_handler, -}; - -#endif /* !CONFIG_THUMB2_KERNEL */ - -int __init arch_init_kprobes() -{ - arm_probes_decode_init(); -#ifdef CONFIG_THUMB2_KERNEL - register_undef_hook(&kprobes_thumb16_break_hook); - register_undef_hook(&kprobes_thumb32_break_hook); -#else - register_undef_hook(&kprobes_arm_break_hook); -#endif - return 0; -} diff --git a/arch/arm/kernel/kprobes.h b/arch/arm/kernel/kprobes.h deleted file mode 100644 index 9a2712ecefc3..000000000000 --- a/arch/arm/kernel/kprobes.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * arch/arm/kernel/kprobes.h - * - * Copyright (C) 2011 Jon Medhurst . - * - * Some contents moved here from arch/arm/include/asm/kprobes.h which is - * Copyright (C) 2006, 2007 Motorola Inc. 
- * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * General Public License for more details. - */ - -#ifndef _ARM_KERNEL_KPROBES_H -#define _ARM_KERNEL_KPROBES_H - -#include "probes.h" - -/* - * These undefined instructions must be unique and - * reserved solely for kprobes' use. - */ -#define KPROBE_ARM_BREAKPOINT_INSTRUCTION 0x07f001f8 -#define KPROBE_THUMB16_BREAKPOINT_INSTRUCTION 0xde18 -#define KPROBE_THUMB32_BREAKPOINT_INSTRUCTION 0xf7f0a018 - -enum probes_insn __kprobes -kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *h); - -typedef enum probes_insn (kprobe_decode_insn_t)(probes_opcode_t, - struct arch_probes_insn *, - bool, - const union decode_action *); - -#ifdef CONFIG_THUMB2_KERNEL - -extern const union decode_action kprobes_t32_actions[]; -extern const union decode_action kprobes_t16_actions[]; - -#else /* !CONFIG_THUMB2_KERNEL */ - -extern const union decode_action kprobes_arm_actions[]; - -#endif - -#endif /* _ARM_KERNEL_KPROBES_H */ diff --git a/arch/arm/kernel/patch.c b/arch/arm/kernel/patch.c index 5038960e3c55..69bda1a5707e 100644 --- a/arch/arm/kernel/patch.c +++ b/arch/arm/kernel/patch.c @@ -8,8 +8,7 @@ #include #include #include - -#include "patch.h" +#include struct patch { void *addr; diff --git a/arch/arm/kernel/patch.h b/arch/arm/kernel/patch.h deleted file mode 100644 index 77e054c2f6cd..000000000000 --- a/arch/arm/kernel/patch.h +++ /dev/null @@ -1,17 +0,0 @@ -#ifndef _ARM_KERNEL_PATCH_H -#define _ARM_KERNEL_PATCH_H - -void patch_text(void *addr, unsigned int insn); -void __patch_text_real(void *addr, unsigned int insn, bool remap); - -static inline void __patch_text(void *addr, unsigned int insn) -{ - __patch_text_real(addr, insn, true); -} - -static inline void __patch_text_early(void *addr, unsigned int insn) -{ - __patch_text_real(addr, insn, false); -} - -#endif diff --git a/arch/arm/kernel/probes-arm.c b/arch/arm/kernel/probes-arm.c deleted file mode 100644 index 8eaef81d8344..000000000000 --- a/arch/arm/kernel/probes-arm.c +++ /dev/null @@ -1,734 +0,0 @@ -/* - * arch/arm/kernel/probes-arm.c - * - * Some code moved here from arch/arm/kernel/kprobes-arm.c - * - * Copyright (C) 2006, 2007 Motorola Inc. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * General Public License for more details. - */ - -#include -#include -#include -#include - -#include "probes.h" -#include "probes-arm.h" - -#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit))))) - -#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25) - -/* - * To avoid the complications of mimicing single-stepping on a - * processor without a Next-PC or a single-step mode, and to - * avoid having to deal with the side-effects of boosting, we - * simulate or emulate (almost) all ARM instructions. 
- * - * "Simulation" is where the instruction's behavior is duplicated in - * C code. "Emulation" is where the original instruction is rewritten - * and executed, often by altering its registers. - * - * By having all behavior of the kprobe'd instruction completed before - * returning from the kprobe_handler(), all locks (scheduler and - * interrupt) can safely be released. There is no need for secondary - * breakpoints, no race with MP or preemptable kernels, nor having to - * clean up resources counts at a later time impacting overall system - * performance. By rewriting the instruction, only the minimum registers - * need to be loaded and saved back optimizing performance. - * - * Calling the insnslot_*_rwflags version of a function doesn't hurt - * anything even when the CPSR flags aren't updated by the - * instruction. It's just a little slower in return for saving - * a little space by not having a duplicate function that doesn't - * update the flags. (The same optimization can be said for - * instructions that do or don't perform register writeback) - * Also, instructions can either read the flags, only write the - * flags, or read and write the flags. To save combinations - * rather than for sheer performance, flag functions just assume - * read and write of flags. - */ - -void __kprobes simulate_bbl(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - long iaddr = (long) regs->ARM_pc - 4; - int disp = branch_displacement(insn); - - if (insn & (1 << 24)) - regs->ARM_lr = iaddr + 4; - - regs->ARM_pc = iaddr + 8 + disp; -} - -void __kprobes simulate_blx1(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - long iaddr = (long) regs->ARM_pc - 4; - int disp = branch_displacement(insn); - - regs->ARM_lr = iaddr + 4; - regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2); - regs->ARM_cpsr |= PSR_T_BIT; -} - -void __kprobes simulate_blx2bx(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rm = insn & 0xf; - long rmv = regs->uregs[rm]; - - if (insn & (1 << 5)) - regs->ARM_lr = (long) regs->ARM_pc; - - regs->ARM_pc = rmv & ~0x1; - regs->ARM_cpsr &= ~PSR_T_BIT; - if (rmv & 0x1) - regs->ARM_cpsr |= PSR_T_BIT; -} - -void __kprobes simulate_mrs(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - int rd = (insn >> 12) & 0xf; - unsigned long mask = 0xf8ff03df; /* Mask out execution state */ - regs->uregs[rd] = regs->ARM_cpsr & mask; -} - -void __kprobes simulate_mov_ipsp(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - regs->uregs[12] = regs->uregs[13]; -} - -/* - * For the instruction masking and comparisons in all the "space_*" - * functions below, Do _not_ rearrange the order of tests unless - * you're very, very sure of what you are doing. For the sake of - * efficiency, the masks for some tests sometimes assume other test - * have been done prior to them so the number of patterns to test - * for an instruction set can be as broad as possible to reduce the - * number of tests needed. 
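One more illustrative sketch, not part of the patch: the decode tables that follow match an instruction by checking (insn & mask) == value against each entry in order, which is why the comment above warns against reordering the tests. demo_decode_entry and demo_decode are invented names, and the real union decode_item entries also carry handler and register-check information:

#include <stdint.h>
#include <stdio.h>

struct demo_decode_entry {
	uint32_t mask;
	uint32_t value;
	const char *name;
};

static const struct demo_decode_entry demo_table[] = {
	/* memory hint / PLD{,W,I} (immediate) group */
	{ 0xfe300000, 0xf4100000, "preload (immediate)" },
	/* BLX (immediate)  1111 101x ... */
	{ 0xfe000000, 0xfa000000, "BLX (immediate)" },
	{ 0, 0, NULL },				/* table terminator (DECODE_END) */
};

static const char *demo_decode(uint32_t insn)
{
	for (const struct demo_decode_entry *e = demo_table; e->mask; e++)
		if ((insn & e->mask) == e->value)
			return e->name;
	return "unhandled";
}

int main(void)
{
	printf("%s\n", demo_decode(0xfa000000));	/* BLX #0          */
	printf("%s\n", demo_decode(0xf5d0f000));	/* PLD [r0]        */
	return 0;
}

The DECODE_REJECT entries seen in these tables use the same mask/value match but mark the instruction as unsuitable for probing instead of selecting a handler.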
- */ - -static const union decode_item arm_1111_table[] = { - /* Unconditional instructions */ - - /* memory hint 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx */ - /* PLDI (immediate) 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx */ - /* PLDW (immediate) 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx */ - /* PLD (immediate) 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx */ - DECODE_SIMULATE (0xfe300000, 0xf4100000, PROBES_PRELOAD_IMM), - - /* memory hint 1111 0110 x001 xxxx xxxx xxxx xxx0 xxxx */ - /* PLDI (register) 1111 0110 x101 xxxx xxxx xxxx xxx0 xxxx */ - /* PLDW (register) 1111 0111 x001 xxxx xxxx xxxx xxx0 xxxx */ - /* PLD (register) 1111 0111 x101 xxxx xxxx xxxx xxx0 xxxx */ - DECODE_SIMULATE (0xfe300010, 0xf6100000, PROBES_PRELOAD_REG), - - /* BLX (immediate) 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx */ - DECODE_SIMULATE (0xfe000000, 0xfa000000, PROBES_BRANCH_IMM), - - /* CPS 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */ - /* SETEND 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */ - /* SRS 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */ - /* RFE 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */ - - /* Coprocessor instructions... */ - /* MCRR2 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx */ - /* MRRC2 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx */ - /* LDC2 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */ - /* STC2 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */ - /* CDP2 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */ - /* MCR2 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */ - /* MRC2 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */ - - /* Other unallocated instructions... */ - DECODE_END -}; - -static const union decode_item arm_cccc_0001_0xx0____0xxx_table[] = { - /* Miscellaneous instructions */ - - /* MRS cpsr cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */ - DECODE_SIMULATEX(0x0ff000f0, 0x01000000, PROBES_MRS, - REGS(0, NOPC, 0, 0, 0)), - - /* BX cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */ - DECODE_SIMULATE (0x0ff000f0, 0x01200010, PROBES_BRANCH_REG), - - /* BLX (register) cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */ - DECODE_SIMULATEX(0x0ff000f0, 0x01200030, PROBES_BRANCH_REG, - REGS(0, 0, 0, 0, NOPC)), - - /* CLZ cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */ - DECODE_EMULATEX (0x0ff000f0, 0x01600010, PROBES_CLZ, - REGS(0, NOPC, 0, 0, NOPC)), - - /* QADD cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx */ - /* QSUB cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx */ - /* QDADD cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx */ - /* QDSUB cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx */ - DECODE_EMULATEX (0x0f9000f0, 0x01000050, PROBES_SATURATING_ARITHMETIC, - REGS(NOPC, NOPC, 0, 0, NOPC)), - - /* BXJ cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */ - /* MSR cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */ - /* MRS spsr cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */ - /* BKPT 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */ - /* SMC cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */ - /* And unallocated instructions... 
*/ - DECODE_END -}; - -static const union decode_item arm_cccc_0001_0xx0____1xx0_table[] = { - /* Halfword multiply and multiply-accumulate */ - - /* SMLALxy cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */ - DECODE_EMULATEX (0x0ff00090, 0x01400080, PROBES_MUL1, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - /* SMULWy cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */ - DECODE_OR (0x0ff000b0, 0x012000a0), - /* SMULxy cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */ - DECODE_EMULATEX (0x0ff00090, 0x01600080, PROBES_MUL2, - REGS(NOPC, 0, NOPC, 0, NOPC)), - - /* SMLAxy cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx */ - DECODE_OR (0x0ff00090, 0x01000080), - /* SMLAWy cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx */ - DECODE_EMULATEX (0x0ff000b0, 0x01200080, PROBES_MUL2, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - DECODE_END -}; - -static const union decode_item arm_cccc_0000_____1001_table[] = { - /* Multiply and multiply-accumulate */ - - /* MUL cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx */ - /* MULS cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx */ - DECODE_EMULATEX (0x0fe000f0, 0x00000090, PROBES_MUL2, - REGS(NOPC, 0, NOPC, 0, NOPC)), - - /* MLA cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx */ - /* MLAS cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx */ - DECODE_OR (0x0fe000f0, 0x00200090), - /* MLS cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx */ - DECODE_EMULATEX (0x0ff000f0, 0x00600090, PROBES_MUL2, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - /* UMAAL cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx */ - DECODE_OR (0x0ff000f0, 0x00400090), - /* UMULL cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx */ - /* UMULLS cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx */ - /* UMLAL cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx */ - /* UMLALS cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx */ - /* SMULL cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx */ - /* SMULLS cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx */ - /* SMLAL cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx */ - /* SMLALS cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx */ - DECODE_EMULATEX (0x0f8000f0, 0x00800090, PROBES_MUL1, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - DECODE_END -}; - -static const union decode_item arm_cccc_0001_____1001_table[] = { - /* Synchronization primitives */ - -#if __LINUX_ARM_ARCH__ < 6 - /* Deprecated on ARMv6 and may be UNDEFINED on v7 */ - /* SMP/SWPB cccc 0001 0x00 xxxx xxxx xxxx 1001 xxxx */ - DECODE_EMULATEX (0x0fb000f0, 0x01000090, PROBES_SWP, - REGS(NOPC, NOPC, 0, 0, NOPC)), -#endif - /* LDREX/STREX{,D,B,H} cccc 0001 1xxx xxxx xxxx xxxx 1001 xxxx */ - /* And unallocated instructions... */ - DECODE_END -}; - -static const union decode_item arm_cccc_000x_____1xx1_table[] = { - /* Extra load/store instructions */ - - /* STRHT cccc 0000 xx10 xxxx xxxx xxxx 1011 xxxx */ - /* ??? cccc 0000 xx10 xxxx xxxx xxxx 11x1 xxxx */ - /* LDRHT cccc 0000 xx11 xxxx xxxx xxxx 1011 xxxx */ - /* LDRSBT cccc 0000 xx11 xxxx xxxx xxxx 1101 xxxx */ - /* LDRSHT cccc 0000 xx11 xxxx xxxx xxxx 1111 xxxx */ - DECODE_REJECT (0x0f200090, 0x00200090), - - /* LDRD/STRD lr,pc,{... 
cccc 000x x0x0 xxxx 111x xxxx 1101 xxxx */ - DECODE_REJECT (0x0e10e0d0, 0x0000e0d0), - - /* LDRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1101 xxxx */ - /* STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx */ - DECODE_EMULATEX (0x0e5000d0, 0x000000d0, PROBES_LDRSTRD, - REGS(NOPCWB, NOPCX, 0, 0, NOPC)), - - /* LDRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1101 xxxx */ - /* STRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1111 xxxx */ - DECODE_EMULATEX (0x0e5000d0, 0x004000d0, PROBES_LDRSTRD, - REGS(NOPCWB, NOPCX, 0, 0, 0)), - - /* STRH (register) cccc 000x x0x0 xxxx xxxx xxxx 1011 xxxx */ - DECODE_EMULATEX (0x0e5000f0, 0x000000b0, PROBES_STORE_EXTRA, - REGS(NOPCWB, NOPC, 0, 0, NOPC)), - - /* LDRH (register) cccc 000x x0x1 xxxx xxxx xxxx 1011 xxxx */ - /* LDRSB (register) cccc 000x x0x1 xxxx xxxx xxxx 1101 xxxx */ - /* LDRSH (register) cccc 000x x0x1 xxxx xxxx xxxx 1111 xxxx */ - DECODE_EMULATEX (0x0e500090, 0x00100090, PROBES_LOAD_EXTRA, - REGS(NOPCWB, NOPC, 0, 0, NOPC)), - - /* STRH (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1011 xxxx */ - DECODE_EMULATEX (0x0e5000f0, 0x004000b0, PROBES_STORE_EXTRA, - REGS(NOPCWB, NOPC, 0, 0, 0)), - - /* LDRH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1011 xxxx */ - /* LDRSB (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1101 xxxx */ - /* LDRSH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1111 xxxx */ - DECODE_EMULATEX (0x0e500090, 0x00500090, PROBES_LOAD_EXTRA, - REGS(NOPCWB, NOPC, 0, 0, 0)), - - DECODE_END -}; - -static const union decode_item arm_cccc_000x_table[] = { - /* Data-processing (register) */ - - /* S PC, ... cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx */ - DECODE_REJECT (0x0e10f000, 0x0010f000), - - /* MOV IP, SP 1110 0001 1010 0000 1100 0000 0000 1101 */ - DECODE_SIMULATE (0xffffffff, 0xe1a0c00d, PROBES_MOV_IP_SP), - - /* TST (register) cccc 0001 0001 xxxx xxxx xxxx xxx0 xxxx */ - /* TEQ (register) cccc 0001 0011 xxxx xxxx xxxx xxx0 xxxx */ - /* CMP (register) cccc 0001 0101 xxxx xxxx xxxx xxx0 xxxx */ - /* CMN (register) cccc 0001 0111 xxxx xxxx xxxx xxx0 xxxx */ - DECODE_EMULATEX (0x0f900010, 0x01100000, PROBES_DATA_PROCESSING_REG, - REGS(ANY, 0, 0, 0, ANY)), - - /* MOV (register) cccc 0001 101x xxxx xxxx xxxx xxx0 xxxx */ - /* MVN (register) cccc 0001 111x xxxx xxxx xxxx xxx0 xxxx */ - DECODE_EMULATEX (0x0fa00010, 0x01a00000, PROBES_DATA_PROCESSING_REG, - REGS(0, ANY, 0, 0, ANY)), - - /* AND (register) cccc 0000 000x xxxx xxxx xxxx xxx0 xxxx */ - /* EOR (register) cccc 0000 001x xxxx xxxx xxxx xxx0 xxxx */ - /* SUB (register) cccc 0000 010x xxxx xxxx xxxx xxx0 xxxx */ - /* RSB (register) cccc 0000 011x xxxx xxxx xxxx xxx0 xxxx */ - /* ADD (register) cccc 0000 100x xxxx xxxx xxxx xxx0 xxxx */ - /* ADC (register) cccc 0000 101x xxxx xxxx xxxx xxx0 xxxx */ - /* SBC (register) cccc 0000 110x xxxx xxxx xxxx xxx0 xxxx */ - /* RSC (register) cccc 0000 111x xxxx xxxx xxxx xxx0 xxxx */ - /* ORR (register) cccc 0001 100x xxxx xxxx xxxx xxx0 xxxx */ - /* BIC (register) cccc 0001 110x xxxx xxxx xxxx xxx0 xxxx */ - DECODE_EMULATEX (0x0e000010, 0x00000000, PROBES_DATA_PROCESSING_REG, - REGS(ANY, ANY, 0, 0, ANY)), - - /* TST (reg-shift reg) cccc 0001 0001 xxxx xxxx xxxx 0xx1 xxxx */ - /* TEQ (reg-shift reg) cccc 0001 0011 xxxx xxxx xxxx 0xx1 xxxx */ - /* CMP (reg-shift reg) cccc 0001 0101 xxxx xxxx xxxx 0xx1 xxxx */ - /* CMN (reg-shift reg) cccc 0001 0111 xxxx xxxx xxxx 0xx1 xxxx */ - DECODE_EMULATEX (0x0f900090, 0x01100010, PROBES_DATA_PROCESSING_REG, - REGS(NOPC, 0, NOPC, 0, NOPC)), - - /* MOV (reg-shift reg) cccc 0001 101x xxxx xxxx xxxx 0xx1 xxxx */ - /* MVN 
(reg-shift reg) cccc 0001 111x xxxx xxxx xxxx 0xx1 xxxx */ - DECODE_EMULATEX (0x0fa00090, 0x01a00010, PROBES_DATA_PROCESSING_REG, - REGS(0, NOPC, NOPC, 0, NOPC)), - - /* AND (reg-shift reg) cccc 0000 000x xxxx xxxx xxxx 0xx1 xxxx */ - /* EOR (reg-shift reg) cccc 0000 001x xxxx xxxx xxxx 0xx1 xxxx */ - /* SUB (reg-shift reg) cccc 0000 010x xxxx xxxx xxxx 0xx1 xxxx */ - /* RSB (reg-shift reg) cccc 0000 011x xxxx xxxx xxxx 0xx1 xxxx */ - /* ADD (reg-shift reg) cccc 0000 100x xxxx xxxx xxxx 0xx1 xxxx */ - /* ADC (reg-shift reg) cccc 0000 101x xxxx xxxx xxxx 0xx1 xxxx */ - /* SBC (reg-shift reg) cccc 0000 110x xxxx xxxx xxxx 0xx1 xxxx */ - /* RSC (reg-shift reg) cccc 0000 111x xxxx xxxx xxxx 0xx1 xxxx */ - /* ORR (reg-shift reg) cccc 0001 100x xxxx xxxx xxxx 0xx1 xxxx */ - /* BIC (reg-shift reg) cccc 0001 110x xxxx xxxx xxxx 0xx1 xxxx */ - DECODE_EMULATEX (0x0e000090, 0x00000010, PROBES_DATA_PROCESSING_REG, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - DECODE_END -}; - -static const union decode_item arm_cccc_001x_table[] = { - /* Data-processing (immediate) */ - - /* MOVW cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */ - /* MOVT cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0fb00000, 0x03000000, PROBES_DATA_PROCESSING_IMM, - REGS(0, NOPC, 0, 0, 0)), - - /* YIELD cccc 0011 0010 0000 xxxx xxxx 0000 0001 */ - DECODE_OR (0x0fff00ff, 0x03200001), - /* SEV cccc 0011 0010 0000 xxxx xxxx 0000 0100 */ - DECODE_EMULATE (0x0fff00ff, 0x03200004, PROBES_EMULATE_NONE), - /* NOP cccc 0011 0010 0000 xxxx xxxx 0000 0000 */ - /* WFE cccc 0011 0010 0000 xxxx xxxx 0000 0010 */ - /* WFI cccc 0011 0010 0000 xxxx xxxx 0000 0011 */ - DECODE_SIMULATE (0x0fff00fc, 0x03200000, PROBES_SIMULATE_NOP), - /* DBG cccc 0011 0010 0000 xxxx xxxx ffff xxxx */ - /* unallocated hints cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */ - /* MSR (immediate) cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0x0fb00000, 0x03200000), - - /* S PC, ... 
cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx */ - DECODE_REJECT (0x0e10f000, 0x0210f000), - - /* TST (immediate) cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx */ - /* TEQ (immediate) cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx */ - /* CMP (immediate) cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx */ - /* CMN (immediate) cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0f900000, 0x03100000, PROBES_DATA_PROCESSING_IMM, - REGS(ANY, 0, 0, 0, 0)), - - /* MOV (immediate) cccc 0011 101x xxxx xxxx xxxx xxxx xxxx */ - /* MVN (immediate) cccc 0011 111x xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0fa00000, 0x03a00000, PROBES_DATA_PROCESSING_IMM, - REGS(0, ANY, 0, 0, 0)), - - /* AND (immediate) cccc 0010 000x xxxx xxxx xxxx xxxx xxxx */ - /* EOR (immediate) cccc 0010 001x xxxx xxxx xxxx xxxx xxxx */ - /* SUB (immediate) cccc 0010 010x xxxx xxxx xxxx xxxx xxxx */ - /* RSB (immediate) cccc 0010 011x xxxx xxxx xxxx xxxx xxxx */ - /* ADD (immediate) cccc 0010 100x xxxx xxxx xxxx xxxx xxxx */ - /* ADC (immediate) cccc 0010 101x xxxx xxxx xxxx xxxx xxxx */ - /* SBC (immediate) cccc 0010 110x xxxx xxxx xxxx xxxx xxxx */ - /* RSC (immediate) cccc 0010 111x xxxx xxxx xxxx xxxx xxxx */ - /* ORR (immediate) cccc 0011 100x xxxx xxxx xxxx xxxx xxxx */ - /* BIC (immediate) cccc 0011 110x xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0e000000, 0x02000000, PROBES_DATA_PROCESSING_IMM, - REGS(ANY, ANY, 0, 0, 0)), - - DECODE_END -}; - -static const union decode_item arm_cccc_0110_____xxx1_table[] = { - /* Media instructions */ - - /* SEL cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx */ - DECODE_EMULATEX (0x0ff000f0, 0x068000b0, PROBES_SATURATE, - REGS(NOPC, NOPC, 0, 0, NOPC)), - - /* SSAT cccc 0110 101x xxxx xxxx xxxx xx01 xxxx */ - /* USAT cccc 0110 111x xxxx xxxx xxxx xx01 xxxx */ - DECODE_OR(0x0fa00030, 0x06a00010), - /* SSAT16 cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx */ - /* USAT16 cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx */ - DECODE_EMULATEX (0x0fb000f0, 0x06a00030, PROBES_SATURATE, - REGS(0, NOPC, 0, 0, NOPC)), - - /* REV cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */ - /* REV16 cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */ - /* RBIT cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */ - /* REVSH cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */ - DECODE_EMULATEX (0x0fb00070, 0x06b00030, PROBES_REV, - REGS(0, NOPC, 0, 0, NOPC)), - - /* ??? cccc 0110 0x00 xxxx xxxx xxxx xxx1 xxxx */ - DECODE_REJECT (0x0fb00010, 0x06000010), - /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1011 xxxx */ - DECODE_REJECT (0x0f8000f0, 0x060000b0), - /* ??? 
cccc 0110 0xxx xxxx xxxx xxxx 1101 xxxx */ - DECODE_REJECT (0x0f8000f0, 0x060000d0), - /* SADD16 cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx */ - /* SADDSUBX cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx */ - /* SSUBADDX cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx */ - /* SSUB16 cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx */ - /* SADD8 cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx */ - /* SSUB8 cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx */ - /* QADD16 cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx */ - /* QADDSUBX cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx */ - /* QSUBADDX cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx */ - /* QSUB16 cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx */ - /* QADD8 cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx */ - /* QSUB8 cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx */ - /* SHADD16 cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx */ - /* SHADDSUBX cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx */ - /* SHSUBADDX cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx */ - /* SHSUB16 cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx */ - /* SHADD8 cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx */ - /* SHSUB8 cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx */ - /* UADD16 cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx */ - /* UADDSUBX cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx */ - /* USUBADDX cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx */ - /* USUB16 cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx */ - /* UADD8 cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx */ - /* USUB8 cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx */ - /* UQADD16 cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx */ - /* UQADDSUBX cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx */ - /* UQSUBADDX cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx */ - /* UQSUB16 cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx */ - /* UQADD8 cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx */ - /* UQSUB8 cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx */ - /* UHADD16 cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx */ - /* UHADDSUBX cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx */ - /* UHSUBADDX cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx */ - /* UHSUB16 cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx */ - /* UHADD8 cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx */ - /* UHSUB8 cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx */ - DECODE_EMULATEX (0x0f800010, 0x06000010, PROBES_MMI, - REGS(NOPC, NOPC, 0, 0, NOPC)), - - /* PKHBT cccc 0110 1000 xxxx xxxx xxxx x001 xxxx */ - /* PKHTB cccc 0110 1000 xxxx xxxx xxxx x101 xxxx */ - DECODE_EMULATEX (0x0ff00030, 0x06800010, PROBES_PACK, - REGS(NOPC, NOPC, 0, 0, NOPC)), - - /* ??? cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx */ - /* ??? 
cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx */ - DECODE_REJECT (0x0fb000f0, 0x06900070), - - /* SXTB16 cccc 0110 1000 1111 xxxx xxxx 0111 xxxx */ - /* SXTB cccc 0110 1010 1111 xxxx xxxx 0111 xxxx */ - /* SXTH cccc 0110 1011 1111 xxxx xxxx 0111 xxxx */ - /* UXTB16 cccc 0110 1100 1111 xxxx xxxx 0111 xxxx */ - /* UXTB cccc 0110 1110 1111 xxxx xxxx 0111 xxxx */ - /* UXTH cccc 0110 1111 1111 xxxx xxxx 0111 xxxx */ - DECODE_EMULATEX (0x0f8f00f0, 0x068f0070, PROBES_EXTEND, - REGS(0, NOPC, 0, 0, NOPC)), - - /* SXTAB16 cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx */ - /* SXTAB cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx */ - /* SXTAH cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx */ - /* UXTAB16 cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx */ - /* UXTAB cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx */ - /* UXTAH cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx */ - DECODE_EMULATEX (0x0f8000f0, 0x06800070, PROBES_EXTEND_ADD, - REGS(NOPCX, NOPC, 0, 0, NOPC)), - - DECODE_END -}; - -static const union decode_item arm_cccc_0111_____xxx1_table[] = { - /* Media instructions */ - - /* UNDEFINED cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */ - DECODE_REJECT (0x0ff000f0, 0x07f000f0), - - /* SMLALD cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */ - /* SMLSLD cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */ - DECODE_EMULATEX (0x0ff00090, 0x07400010, PROBES_MUL_ADD_LONG, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - /* SMUAD cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx */ - /* SMUSD cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx */ - DECODE_OR (0x0ff0f090, 0x0700f010), - /* SMMUL cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx */ - DECODE_OR (0x0ff0f0d0, 0x0750f010), - /* USAD8 cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */ - DECODE_EMULATEX (0x0ff0f0f0, 0x0780f010, PROBES_MUL_ADD, - REGS(NOPC, 0, NOPC, 0, NOPC)), - - /* SMLAD cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx */ - /* SMLSD cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx */ - DECODE_OR (0x0ff00090, 0x07000010), - /* SMMLA cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx */ - DECODE_OR (0x0ff000d0, 0x07500010), - /* USADA8 cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */ - DECODE_EMULATEX (0x0ff000f0, 0x07800010, PROBES_MUL_ADD, - REGS(NOPC, NOPCX, NOPC, 0, NOPC)), - - /* SMMLS cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx */ - DECODE_EMULATEX (0x0ff000d0, 0x075000d0, PROBES_MUL_ADD, - REGS(NOPC, NOPC, NOPC, 0, NOPC)), - - /* SBFX cccc 0111 101x xxxx xxxx xxxx x101 xxxx */ - /* UBFX cccc 0111 111x xxxx xxxx xxxx x101 xxxx */ - DECODE_EMULATEX (0x0fa00070, 0x07a00050, PROBES_BITFIELD, - REGS(0, NOPC, 0, 0, NOPC)), - - /* BFC cccc 0111 110x xxxx xxxx xxxx x001 1111 */ - DECODE_EMULATEX (0x0fe0007f, 0x07c0001f, PROBES_BITFIELD, - REGS(0, NOPC, 0, 0, 0)), - - /* BFI cccc 0111 110x xxxx xxxx xxxx x001 xxxx */ - DECODE_EMULATEX (0x0fe00070, 0x07c00010, PROBES_BITFIELD, - REGS(0, NOPC, 0, 0, NOPCX)), - - DECODE_END -}; - -static const union decode_item arm_cccc_01xx_table[] = { - /* Load/store word and unsigned byte */ - - /* LDRB/STRB pc,[...] 
cccc 01xx x0xx xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0x0c40f000, 0x0440f000), - - /* STRT cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */ - /* LDRT cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */ - /* STRBT cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */ - /* LDRBT cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0x0d200000, 0x04200000), - - /* STR (immediate) cccc 010x x0x0 xxxx xxxx xxxx xxxx xxxx */ - /* STRB (immediate) cccc 010x x1x0 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0e100000, 0x04000000, PROBES_STORE, - REGS(NOPCWB, ANY, 0, 0, 0)), - - /* LDR (immediate) cccc 010x x0x1 xxxx xxxx xxxx xxxx xxxx */ - /* LDRB (immediate) cccc 010x x1x1 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0e100000, 0x04100000, PROBES_LOAD, - REGS(NOPCWB, ANY, 0, 0, 0)), - - /* STR (register) cccc 011x x0x0 xxxx xxxx xxxx xxxx xxxx */ - /* STRB (register) cccc 011x x1x0 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0e100000, 0x06000000, PROBES_STORE, - REGS(NOPCWB, ANY, 0, 0, NOPC)), - - /* LDR (register) cccc 011x x0x1 xxxx xxxx xxxx xxxx xxxx */ - /* LDRB (register) cccc 011x x1x1 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0e100000, 0x06100000, PROBES_LOAD, - REGS(NOPCWB, ANY, 0, 0, NOPC)), - - DECODE_END -}; - -static const union decode_item arm_cccc_100x_table[] = { - /* Block data transfer instructions */ - - /* LDM cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */ - /* STM cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */ - DECODE_CUSTOM (0x0e400000, 0x08000000, PROBES_LDMSTM), - - /* STM (user registers) cccc 100x x1x0 xxxx xxxx xxxx xxxx xxxx */ - /* LDM (user registers) cccc 100x x1x1 xxxx 0xxx xxxx xxxx xxxx */ - /* LDM (exception ret) cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */ - DECODE_END -}; - -const union decode_item probes_decode_arm_table[] = { - /* - * Unconditional instructions - * 1111 xxxx xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xf0000000, 0xf0000000, arm_1111_table), - - /* - * Miscellaneous instructions - * cccc 0001 0xx0 xxxx xxxx xxxx 0xxx xxxx - */ - DECODE_TABLE (0x0f900080, 0x01000000, arm_cccc_0001_0xx0____0xxx_table), - - /* - * Halfword multiply and multiply-accumulate - * cccc 0001 0xx0 xxxx xxxx xxxx 1xx0 xxxx - */ - DECODE_TABLE (0x0f900090, 0x01000080, arm_cccc_0001_0xx0____1xx0_table), - - /* - * Multiply and multiply-accumulate - * cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx - */ - DECODE_TABLE (0x0f0000f0, 0x00000090, arm_cccc_0000_____1001_table), - - /* - * Synchronization primitives - * cccc 0001 xxxx xxxx xxxx xxxx 1001 xxxx - */ - DECODE_TABLE (0x0f0000f0, 0x01000090, arm_cccc_0001_____1001_table), - - /* - * Extra load/store instructions - * cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx - */ - DECODE_TABLE (0x0e000090, 0x00000090, arm_cccc_000x_____1xx1_table), - - /* - * Data-processing (register) - * cccc 000x xxxx xxxx xxxx xxxx xxx0 xxxx - * Data-processing (register-shifted register) - * cccc 000x xxxx xxxx xxxx xxxx 0xx1 xxxx - */ - DECODE_TABLE (0x0e000000, 0x00000000, arm_cccc_000x_table), - - /* - * Data-processing (immediate) - * cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0x0e000000, 0x02000000, arm_cccc_001x_table), - - /* - * Media instructions - * cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx - */ - DECODE_TABLE (0x0f000010, 0x06000010, arm_cccc_0110_____xxx1_table), - DECODE_TABLE (0x0f000010, 0x07000010, arm_cccc_0111_____xxx1_table), - - /* - * Load/store word and unsigned byte - * cccc 01xx xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0x0c000000, 0x04000000, arm_cccc_01xx_table), - - /* - * Block data transfer 
instructions - * cccc 100x xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0x0e000000, 0x08000000, arm_cccc_100x_table), - - /* B cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */ - /* BL cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */ - DECODE_SIMULATE (0x0e000000, 0x0a000000, PROBES_BRANCH), - - /* - * Supervisor Call, and coprocessor instructions - */ - - /* MCRR cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx */ - /* MRRC cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx */ - /* LDC cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */ - /* STC cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */ - /* CDP cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */ - /* MCR cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */ - /* MRC cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */ - /* SVC cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0x0c000000, 0x0c000000), - - DECODE_END -}; -#ifdef CONFIG_ARM_KPROBES_TEST_MODULE -EXPORT_SYMBOL_GPL(probes_decode_arm_table); -#endif - -static void __kprobes arm_singlestep(probes_opcode_t insn, - struct arch_probes_insn *asi, struct pt_regs *regs) -{ - regs->ARM_pc += 4; - asi->insn_handler(insn, asi, regs); -} - -/* Return: - * INSN_REJECTED If instruction is one not allowed to kprobe, - * INSN_GOOD If instruction is supported and uses instruction slot, - * INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot. - * - * For instructions we don't want to kprobe (INSN_REJECTED return result): - * These are generally ones that modify the processor state making - * them "hard" to simulate such as switches processor modes or - * make accesses in alternate modes. Any of these could be simulated - * if the work was put into it, but low return considering they - * should also be very rare. - */ -enum probes_insn __kprobes -arm_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions) -{ - asi->insn_singlestep = arm_singlestep; - asi->insn_check_cc = probes_condition_checks[insn>>28]; - return probes_decode_insn(insn, asi, probes_decode_arm_table, false, - emulate, actions); -} diff --git a/arch/arm/kernel/probes-arm.h b/arch/arm/kernel/probes-arm.h deleted file mode 100644 index ace6572f6e26..000000000000 --- a/arch/arm/kernel/probes-arm.h +++ /dev/null @@ -1,73 +0,0 @@ -/* - * arch/arm/kernel/probes-arm.h - * - * Copyright 2013 Linaro Ltd. - * Written by: David A. Long - * - * The code contained herein is licensed under the GNU General Public - * License. 
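The top-level table above reads as a first-match list of (mask, value) tests: DECODE_TABLE entries descend into a sub-table, while the other entry types end the walk. A simplified, hypothetical sketch of that routing follows (struct tbl_entry and route() are invented names; the authoritative loop is probes_decode_insn() in probes.c):

    #include <stdint.h>

    struct tbl_entry {
            uint32_t mask, value;
            const struct tbl_entry *sub;    /* non-NULL for a nested table */
            int action;                     /* terminal PROBES_* action    */
    };

    /* Walk a table top to bottom; the first matching entry wins. */
    static int route(const struct tbl_entry *e, uint32_t insn)
    {
            for (; e->mask != 0; e++) {
                    if ((insn & e->mask) != e->value)
                            continue;
                    if (e->sub)
                            return route(e->sub, insn);  /* nested table */
                    return e->action;
            }
            return -1;                      /* no match: reject the probe  */
    }

For example, a BL encoding (bits 27-25 = 101) satisfies (insn & 0x0e000000) == 0x0a000000 only at the B/BL entry above, so it resolves to PROBES_BRANCH.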
You may obtain a copy of the GNU General Public License - * Version 2 or later at the following locations: - * - * http://www.opensource.org/licenses/gpl-license.html - * http://www.gnu.org/copyleft/gpl.html - */ - -#ifndef _ARM_KERNEL_PROBES_ARM_H -#define _ARM_KERNEL_PROBES_ARM_H - -enum probes_arm_action { - PROBES_EMULATE_NONE, - PROBES_SIMULATE_NOP, - PROBES_PRELOAD_IMM, - PROBES_PRELOAD_REG, - PROBES_BRANCH_IMM, - PROBES_BRANCH_REG, - PROBES_MRS, - PROBES_CLZ, - PROBES_SATURATING_ARITHMETIC, - PROBES_MUL1, - PROBES_MUL2, - PROBES_SWP, - PROBES_LDRSTRD, - PROBES_LOAD, - PROBES_STORE, - PROBES_LOAD_EXTRA, - PROBES_STORE_EXTRA, - PROBES_MOV_IP_SP, - PROBES_DATA_PROCESSING_REG, - PROBES_DATA_PROCESSING_IMM, - PROBES_MOV_HALFWORD, - PROBES_SEV, - PROBES_WFE, - PROBES_SATURATE, - PROBES_REV, - PROBES_MMI, - PROBES_PACK, - PROBES_EXTEND, - PROBES_EXTEND_ADD, - PROBES_MUL_ADD_LONG, - PROBES_MUL_ADD, - PROBES_BITFIELD, - PROBES_BRANCH, - PROBES_LDMSTM, - NUM_PROBES_ARM_ACTIONS -}; - -void __kprobes simulate_bbl(probes_opcode_t opcode, - struct arch_probes_insn *asi, struct pt_regs *regs); -void __kprobes simulate_blx1(probes_opcode_t opcode, - struct arch_probes_insn *asi, struct pt_regs *regs); -void __kprobes simulate_blx2bx(probes_opcode_t opcode, - struct arch_probes_insn *asi, struct pt_regs *regs); -void __kprobes simulate_mrs(probes_opcode_t opcode, - struct arch_probes_insn *asi, struct pt_regs *regs); -void __kprobes simulate_mov_ipsp(probes_opcode_t opcode, - struct arch_probes_insn *asi, struct pt_regs *regs); - -extern const union decode_item probes_decode_arm_table[]; - -enum probes_insn arm_probes_decode_insn(probes_opcode_t, - struct arch_probes_insn *, bool emulate, - const union decode_action *actions); - -#endif diff --git a/arch/arm/kernel/probes-thumb.c b/arch/arm/kernel/probes-thumb.c deleted file mode 100644 index 4131351e812f..000000000000 --- a/arch/arm/kernel/probes-thumb.c +++ /dev/null @@ -1,882 +0,0 @@ -/* - * arch/arm/kernel/probes-thumb.c - * - * Copyright (C) 2011 Jon Medhurst . - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. 
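The enum above only names what should happen to each decoded instruction; the concrete handlers are supplied per client (kprobes vs. uprobes) through the decode_action array passed to arm_probes_decode_insn(). A hypothetical, non-kernel sketch of that indirection, with field and type names invented:

    typedef void action_fn(unsigned int insn);

    /* One handler slot per PROBES_* value; kprobes and uprobes would each
     * fill in their own table and share the same decoder. */
    struct action_table {
            action_fn *handler[NUM_PROBES_ARM_ACTIONS];
    };

    static void run_action(const struct action_table *t,
                           enum probes_arm_action action, unsigned int insn)
    {
            t->handler[action](insn);       /* picked once, at decode time */
    }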
- */ - -#include -#include -#include - -#include "probes.h" -#include "probes-thumb.h" - - -static const union decode_item t32_table_1110_100x_x0xx[] = { - /* Load/store multiple instructions */ - - /* Rn is PC 1110 100x x0xx 1111 xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfe4f0000, 0xe80f0000), - - /* SRS 1110 1000 00x0 xxxx xxxx xxxx xxxx xxxx */ - /* RFE 1110 1000 00x1 xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xffc00000, 0xe8000000), - /* SRS 1110 1001 10x0 xxxx xxxx xxxx xxxx xxxx */ - /* RFE 1110 1001 10x1 xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xffc00000, 0xe9800000), - - /* STM Rn, {...pc} 1110 100x x0x0 xxxx 1xxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfe508000, 0xe8008000), - /* LDM Rn, {...lr,pc} 1110 100x x0x1 xxxx 11xx xxxx xxxx xxxx */ - DECODE_REJECT (0xfe50c000, 0xe810c000), - /* LDM/STM Rn, {...sp} 1110 100x x0xx xxxx xx1x xxxx xxxx xxxx */ - DECODE_REJECT (0xfe402000, 0xe8002000), - - /* STMIA 1110 1000 10x0 xxxx xxxx xxxx xxxx xxxx */ - /* LDMIA 1110 1000 10x1 xxxx xxxx xxxx xxxx xxxx */ - /* STMDB 1110 1001 00x0 xxxx xxxx xxxx xxxx xxxx */ - /* LDMDB 1110 1001 00x1 xxxx xxxx xxxx xxxx xxxx */ - DECODE_CUSTOM (0xfe400000, 0xe8000000, PROBES_T32_LDMSTM), - - DECODE_END -}; - -static const union decode_item t32_table_1110_100x_x1xx[] = { - /* Load/store dual, load/store exclusive, table branch */ - - /* STRD (immediate) 1110 1000 x110 xxxx xxxx xxxx xxxx xxxx */ - /* LDRD (immediate) 1110 1000 x111 xxxx xxxx xxxx xxxx xxxx */ - DECODE_OR (0xff600000, 0xe8600000), - /* STRD (immediate) 1110 1001 x1x0 xxxx xxxx xxxx xxxx xxxx */ - /* LDRD (immediate) 1110 1001 x1x1 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xff400000, 0xe9400000, PROBES_T32_LDRDSTRD, - REGS(NOPCWB, NOSPPC, NOSPPC, 0, 0)), - - /* TBB 1110 1000 1101 xxxx xxxx xxxx 0000 xxxx */ - /* TBH 1110 1000 1101 xxxx xxxx xxxx 0001 xxxx */ - DECODE_SIMULATEX(0xfff000e0, 0xe8d00000, PROBES_T32_TABLE_BRANCH, - REGS(NOSP, 0, 0, 0, NOSPPC)), - - /* STREX 1110 1000 0100 xxxx xxxx xxxx xxxx xxxx */ - /* LDREX 1110 1000 0101 xxxx xxxx xxxx xxxx xxxx */ - /* STREXB 1110 1000 1100 xxxx xxxx xxxx 0100 xxxx */ - /* STREXH 1110 1000 1100 xxxx xxxx xxxx 0101 xxxx */ - /* STREXD 1110 1000 1100 xxxx xxxx xxxx 0111 xxxx */ - /* LDREXB 1110 1000 1101 xxxx xxxx xxxx 0100 xxxx */ - /* LDREXH 1110 1000 1101 xxxx xxxx xxxx 0101 xxxx */ - /* LDREXD 1110 1000 1101 xxxx xxxx xxxx 0111 xxxx */ - /* And unallocated instructions... */ - DECODE_END -}; - -static const union decode_item t32_table_1110_101x[] = { - /* Data-processing (shifted register) */ - - /* TST 1110 1010 0001 xxxx xxxx 1111 xxxx xxxx */ - /* TEQ 1110 1010 1001 xxxx xxxx 1111 xxxx xxxx */ - DECODE_EMULATEX (0xff700f00, 0xea100f00, PROBES_T32_TST, - REGS(NOSPPC, 0, 0, 0, NOSPPC)), - - /* CMN 1110 1011 0001 xxxx xxxx 1111 xxxx xxxx */ - DECODE_OR (0xfff00f00, 0xeb100f00), - /* CMP 1110 1011 1011 xxxx xxxx 1111 xxxx xxxx */ - DECODE_EMULATEX (0xfff00f00, 0xebb00f00, PROBES_T32_TST, - REGS(NOPC, 0, 0, 0, NOSPPC)), - - /* MOV 1110 1010 010x 1111 xxxx xxxx xxxx xxxx */ - /* MVN 1110 1010 011x 1111 xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xffcf0000, 0xea4f0000, PROBES_T32_MOV, - REGS(0, 0, NOSPPC, 0, NOSPPC)), - - /* ??? 1110 1010 101x xxxx xxxx xxxx xxxx xxxx */ - /* ??? 1110 1010 111x xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xffa00000, 0xeaa00000), - /* ??? 1110 1011 001x xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xffe00000, 0xeb200000), - /* ??? 1110 1011 100x xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xffe00000, 0xeb800000), - /* ??? 
1110 1011 111x xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xffe00000, 0xebe00000), - - /* ADD/SUB SP, SP, Rm, LSL #0..3 */ - /* 1110 1011 x0xx 1101 x000 1101 xx00 xxxx */ - DECODE_EMULATEX (0xff4f7f30, 0xeb0d0d00, PROBES_T32_ADDSUB, - REGS(SP, 0, SP, 0, NOSPPC)), - - /* ADD/SUB SP, SP, Rm, shift */ - /* 1110 1011 x0xx 1101 xxxx 1101 xxxx xxxx */ - DECODE_REJECT (0xff4f0f00, 0xeb0d0d00), - - /* ADD/SUB Rd, SP, Rm, shift */ - /* 1110 1011 x0xx 1101 xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xff4f0000, 0xeb0d0000, PROBES_T32_ADDSUB, - REGS(SP, 0, NOPC, 0, NOSPPC)), - - /* AND 1110 1010 000x xxxx xxxx xxxx xxxx xxxx */ - /* BIC 1110 1010 001x xxxx xxxx xxxx xxxx xxxx */ - /* ORR 1110 1010 010x xxxx xxxx xxxx xxxx xxxx */ - /* ORN 1110 1010 011x xxxx xxxx xxxx xxxx xxxx */ - /* EOR 1110 1010 100x xxxx xxxx xxxx xxxx xxxx */ - /* PKH 1110 1010 110x xxxx xxxx xxxx xxxx xxxx */ - /* ADD 1110 1011 000x xxxx xxxx xxxx xxxx xxxx */ - /* ADC 1110 1011 010x xxxx xxxx xxxx xxxx xxxx */ - /* SBC 1110 1011 011x xxxx xxxx xxxx xxxx xxxx */ - /* SUB 1110 1011 101x xxxx xxxx xxxx xxxx xxxx */ - /* RSB 1110 1011 110x xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfe000000, 0xea000000, PROBES_T32_LOGICAL, - REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)), - - DECODE_END -}; - -static const union decode_item t32_table_1111_0x0x___0[] = { - /* Data-processing (modified immediate) */ - - /* TST 1111 0x00 0001 xxxx 0xxx 1111 xxxx xxxx */ - /* TEQ 1111 0x00 1001 xxxx 0xxx 1111 xxxx xxxx */ - DECODE_EMULATEX (0xfb708f00, 0xf0100f00, PROBES_T32_TST, - REGS(NOSPPC, 0, 0, 0, 0)), - - /* CMN 1111 0x01 0001 xxxx 0xxx 1111 xxxx xxxx */ - DECODE_OR (0xfbf08f00, 0xf1100f00), - /* CMP 1111 0x01 1011 xxxx 0xxx 1111 xxxx xxxx */ - DECODE_EMULATEX (0xfbf08f00, 0xf1b00f00, PROBES_T32_CMP, - REGS(NOPC, 0, 0, 0, 0)), - - /* MOV 1111 0x00 010x 1111 0xxx xxxx xxxx xxxx */ - /* MVN 1111 0x00 011x 1111 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfbcf8000, 0xf04f0000, PROBES_T32_MOV, - REGS(0, 0, NOSPPC, 0, 0)), - - /* ??? 1111 0x00 101x xxxx 0xxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfbe08000, 0xf0a00000), - /* ??? 1111 0x00 110x xxxx 0xxx xxxx xxxx xxxx */ - /* ??? 1111 0x00 111x xxxx 0xxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfbc08000, 0xf0c00000), - /* ??? 1111 0x01 001x xxxx 0xxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfbe08000, 0xf1200000), - /* ??? 1111 0x01 100x xxxx 0xxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfbe08000, 0xf1800000), - /* ??? 
1111 0x01 111x xxxx 0xxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfbe08000, 0xf1e00000), - - /* ADD Rd, SP, #imm 1111 0x01 000x 1101 0xxx xxxx xxxx xxxx */ - /* SUB Rd, SP, #imm 1111 0x01 101x 1101 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfb4f8000, 0xf10d0000, PROBES_T32_ADDSUB, - REGS(SP, 0, NOPC, 0, 0)), - - /* AND 1111 0x00 000x xxxx 0xxx xxxx xxxx xxxx */ - /* BIC 1111 0x00 001x xxxx 0xxx xxxx xxxx xxxx */ - /* ORR 1111 0x00 010x xxxx 0xxx xxxx xxxx xxxx */ - /* ORN 1111 0x00 011x xxxx 0xxx xxxx xxxx xxxx */ - /* EOR 1111 0x00 100x xxxx 0xxx xxxx xxxx xxxx */ - /* ADD 1111 0x01 000x xxxx 0xxx xxxx xxxx xxxx */ - /* ADC 1111 0x01 010x xxxx 0xxx xxxx xxxx xxxx */ - /* SBC 1111 0x01 011x xxxx 0xxx xxxx xxxx xxxx */ - /* SUB 1111 0x01 101x xxxx 0xxx xxxx xxxx xxxx */ - /* RSB 1111 0x01 110x xxxx 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfa008000, 0xf0000000, PROBES_T32_LOGICAL, - REGS(NOSPPC, 0, NOSPPC, 0, 0)), - - DECODE_END -}; - -static const union decode_item t32_table_1111_0x1x___0[] = { - /* Data-processing (plain binary immediate) */ - - /* ADDW Rd, PC, #imm 1111 0x10 0000 1111 0xxx xxxx xxxx xxxx */ - DECODE_OR (0xfbff8000, 0xf20f0000), - /* SUBW Rd, PC, #imm 1111 0x10 1010 1111 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfbff8000, 0xf2af0000, PROBES_T32_ADDWSUBW_PC, - REGS(PC, 0, NOSPPC, 0, 0)), - - /* ADDW SP, SP, #imm 1111 0x10 0000 1101 0xxx 1101 xxxx xxxx */ - DECODE_OR (0xfbff8f00, 0xf20d0d00), - /* SUBW SP, SP, #imm 1111 0x10 1010 1101 0xxx 1101 xxxx xxxx */ - DECODE_EMULATEX (0xfbff8f00, 0xf2ad0d00, PROBES_T32_ADDWSUBW, - REGS(SP, 0, SP, 0, 0)), - - /* ADDW 1111 0x10 0000 xxxx 0xxx xxxx xxxx xxxx */ - DECODE_OR (0xfbf08000, 0xf2000000), - /* SUBW 1111 0x10 1010 xxxx 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfbf08000, 0xf2a00000, PROBES_T32_ADDWSUBW, - REGS(NOPCX, 0, NOSPPC, 0, 0)), - - /* MOVW 1111 0x10 0100 xxxx 0xxx xxxx xxxx xxxx */ - /* MOVT 1111 0x10 1100 xxxx 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfb708000, 0xf2400000, PROBES_T32_MOVW, - REGS(0, 0, NOSPPC, 0, 0)), - - /* SSAT16 1111 0x11 0010 xxxx 0000 xxxx 00xx xxxx */ - /* SSAT 1111 0x11 00x0 xxxx 0xxx xxxx xxxx xxxx */ - /* USAT16 1111 0x11 1010 xxxx 0000 xxxx 00xx xxxx */ - /* USAT 1111 0x11 10x0 xxxx 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfb508000, 0xf3000000, PROBES_T32_SAT, - REGS(NOSPPC, 0, NOSPPC, 0, 0)), - - /* SFBX 1111 0x11 0100 xxxx 0xxx xxxx xxxx xxxx */ - /* UFBX 1111 0x11 1100 xxxx 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfb708000, 0xf3400000, PROBES_T32_BITFIELD, - REGS(NOSPPC, 0, NOSPPC, 0, 0)), - - /* BFC 1111 0x11 0110 1111 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfbff8000, 0xf36f0000, PROBES_T32_BITFIELD, - REGS(0, 0, NOSPPC, 0, 0)), - - /* BFI 1111 0x11 0110 xxxx 0xxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfbf08000, 0xf3600000, PROBES_T32_BITFIELD, - REGS(NOSPPCX, 0, NOSPPC, 0, 0)), - - DECODE_END -}; - -static const union decode_item t32_table_1111_0xxx___1[] = { - /* Branches and miscellaneous control */ - - /* YIELD 1111 0011 1010 xxxx 10x0 x000 0000 0001 */ - DECODE_OR (0xfff0d7ff, 0xf3a08001), - /* SEV 1111 0011 1010 xxxx 10x0 x000 0000 0100 */ - DECODE_EMULATE (0xfff0d7ff, 0xf3a08004, PROBES_T32_SEV), - /* NOP 1111 0011 1010 xxxx 10x0 x000 0000 0000 */ - /* WFE 1111 0011 1010 xxxx 10x0 x000 0000 0010 */ - /* WFI 1111 0011 1010 xxxx 10x0 x000 0000 0011 */ - DECODE_SIMULATE (0xfff0d7fc, 0xf3a08000, PROBES_T32_WFE), - - /* MRS Rd, CPSR 1111 0011 1110 xxxx 10x0 xxxx xxxx xxxx */ - DECODE_SIMULATEX(0xfff0d000, 0xf3e08000, PROBES_T32_MRS, - REGS(0, 0, NOSPPC, 0, 0)), - - /* - * 
Unsupported instructions - * 1111 0x11 1xxx xxxx 10x0 xxxx xxxx xxxx - * - * MSR 1111 0011 100x xxxx 10x0 xxxx xxxx xxxx - * DBG hint 1111 0011 1010 xxxx 10x0 x000 1111 xxxx - * Unallocated hints 1111 0011 1010 xxxx 10x0 x000 xxxx xxxx - * CPS 1111 0011 1010 xxxx 10x0 xxxx xxxx xxxx - * CLREX/DSB/DMB/ISB 1111 0011 1011 xxxx 10x0 xxxx xxxx xxxx - * BXJ 1111 0011 1100 xxxx 10x0 xxxx xxxx xxxx - * SUBS PC,LR,# 1111 0011 1101 xxxx 10x0 xxxx xxxx xxxx - * MRS Rd, SPSR 1111 0011 1111 xxxx 10x0 xxxx xxxx xxxx - * SMC 1111 0111 1111 xxxx 1000 xxxx xxxx xxxx - * UNDEFINED 1111 0111 1111 xxxx 1010 xxxx xxxx xxxx - * ??? 1111 0111 1xxx xxxx 1010 xxxx xxxx xxxx - */ - DECODE_REJECT (0xfb80d000, 0xf3808000), - - /* Bcc 1111 0xxx xxxx xxxx 10x0 xxxx xxxx xxxx */ - DECODE_CUSTOM (0xf800d000, 0xf0008000, PROBES_T32_BRANCH_COND), - - /* BLX 1111 0xxx xxxx xxxx 11x0 xxxx xxxx xxx0 */ - DECODE_OR (0xf800d001, 0xf000c000), - /* B 1111 0xxx xxxx xxxx 10x1 xxxx xxxx xxxx */ - /* BL 1111 0xxx xxxx xxxx 11x1 xxxx xxxx xxxx */ - DECODE_SIMULATE (0xf8009000, 0xf0009000, PROBES_T32_BRANCH), - - DECODE_END -}; - -static const union decode_item t32_table_1111_100x_x0x1__1111[] = { - /* Memory hints */ - - /* PLD (literal) 1111 1000 x001 1111 1111 xxxx xxxx xxxx */ - /* PLI (literal) 1111 1001 x001 1111 1111 xxxx xxxx xxxx */ - DECODE_SIMULATE (0xfe7ff000, 0xf81ff000, PROBES_T32_PLDI), - - /* PLD{W} (immediate) 1111 1000 10x1 xxxx 1111 xxxx xxxx xxxx */ - DECODE_OR (0xffd0f000, 0xf890f000), - /* PLD{W} (immediate) 1111 1000 00x1 xxxx 1111 1100 xxxx xxxx */ - DECODE_OR (0xffd0ff00, 0xf810fc00), - /* PLI (immediate) 1111 1001 1001 xxxx 1111 xxxx xxxx xxxx */ - DECODE_OR (0xfff0f000, 0xf990f000), - /* PLI (immediate) 1111 1001 0001 xxxx 1111 1100 xxxx xxxx */ - DECODE_SIMULATEX(0xfff0ff00, 0xf910fc00, PROBES_T32_PLDI, - REGS(NOPCX, 0, 0, 0, 0)), - - /* PLD{W} (register) 1111 1000 00x1 xxxx 1111 0000 00xx xxxx */ - DECODE_OR (0xffd0ffc0, 0xf810f000), - /* PLI (register) 1111 1001 0001 xxxx 1111 0000 00xx xxxx */ - DECODE_SIMULATEX(0xfff0ffc0, 0xf910f000, PROBES_T32_PLDI, - REGS(NOPCX, 0, 0, 0, NOSPPC)), - - /* Other unallocated instructions... */ - DECODE_END -}; - -static const union decode_item t32_table_1111_100x[] = { - /* Store/Load single data item */ - - /* ??? 1111 100x x11x xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfe600000, 0xf8600000), - - /* ??? 1111 1001 0101 xxxx xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xfff00000, 0xf9500000), - - /* ??? 1111 100x 0xxx xxxx xxxx 10x0 xxxx xxxx */ - DECODE_REJECT (0xfe800d00, 0xf8000800), - - /* STRBT 1111 1000 0000 xxxx xxxx 1110 xxxx xxxx */ - /* STRHT 1111 1000 0010 xxxx xxxx 1110 xxxx xxxx */ - /* STRT 1111 1000 0100 xxxx xxxx 1110 xxxx xxxx */ - /* LDRBT 1111 1000 0001 xxxx xxxx 1110 xxxx xxxx */ - /* LDRSBT 1111 1001 0001 xxxx xxxx 1110 xxxx xxxx */ - /* LDRHT 1111 1000 0011 xxxx xxxx 1110 xxxx xxxx */ - /* LDRSHT 1111 1001 0011 xxxx xxxx 1110 xxxx xxxx */ - /* LDRT 1111 1000 0101 xxxx xxxx 1110 xxxx xxxx */ - DECODE_REJECT (0xfe800f00, 0xf8000e00), - - /* STR{,B,H} Rn,[PC...] 1111 1000 xxx0 1111 xxxx xxxx xxxx xxxx */ - DECODE_REJECT (0xff1f0000, 0xf80f0000), - - /* STR{,B,H} PC,[Rn...] 
1111 1000 xxx0 xxxx 1111 xxxx xxxx xxxx */ - DECODE_REJECT (0xff10f000, 0xf800f000), - - /* LDR (literal) 1111 1000 x101 1111 xxxx xxxx xxxx xxxx */ - DECODE_SIMULATEX(0xff7f0000, 0xf85f0000, PROBES_T32_LDR_LIT, - REGS(PC, ANY, 0, 0, 0)), - - /* STR (immediate) 1111 1000 0100 xxxx xxxx 1xxx xxxx xxxx */ - /* LDR (immediate) 1111 1000 0101 xxxx xxxx 1xxx xxxx xxxx */ - DECODE_OR (0xffe00800, 0xf8400800), - /* STR (immediate) 1111 1000 1100 xxxx xxxx xxxx xxxx xxxx */ - /* LDR (immediate) 1111 1000 1101 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xffe00000, 0xf8c00000, PROBES_T32_LDRSTR, - REGS(NOPCX, ANY, 0, 0, 0)), - - /* STR (register) 1111 1000 0100 xxxx xxxx 0000 00xx xxxx */ - /* LDR (register) 1111 1000 0101 xxxx xxxx 0000 00xx xxxx */ - DECODE_EMULATEX (0xffe00fc0, 0xf8400000, PROBES_T32_LDRSTR, - REGS(NOPCX, ANY, 0, 0, NOSPPC)), - - /* LDRB (literal) 1111 1000 x001 1111 xxxx xxxx xxxx xxxx */ - /* LDRSB (literal) 1111 1001 x001 1111 xxxx xxxx xxxx xxxx */ - /* LDRH (literal) 1111 1000 x011 1111 xxxx xxxx xxxx xxxx */ - /* LDRSH (literal) 1111 1001 x011 1111 xxxx xxxx xxxx xxxx */ - DECODE_SIMULATEX(0xfe5f0000, 0xf81f0000, PROBES_T32_LDR_LIT, - REGS(PC, NOSPPCX, 0, 0, 0)), - - /* STRB (immediate) 1111 1000 0000 xxxx xxxx 1xxx xxxx xxxx */ - /* STRH (immediate) 1111 1000 0010 xxxx xxxx 1xxx xxxx xxxx */ - /* LDRB (immediate) 1111 1000 0001 xxxx xxxx 1xxx xxxx xxxx */ - /* LDRSB (immediate) 1111 1001 0001 xxxx xxxx 1xxx xxxx xxxx */ - /* LDRH (immediate) 1111 1000 0011 xxxx xxxx 1xxx xxxx xxxx */ - /* LDRSH (immediate) 1111 1001 0011 xxxx xxxx 1xxx xxxx xxxx */ - DECODE_OR (0xfec00800, 0xf8000800), - /* STRB (immediate) 1111 1000 1000 xxxx xxxx xxxx xxxx xxxx */ - /* STRH (immediate) 1111 1000 1010 xxxx xxxx xxxx xxxx xxxx */ - /* LDRB (immediate) 1111 1000 1001 xxxx xxxx xxxx xxxx xxxx */ - /* LDRSB (immediate) 1111 1001 1001 xxxx xxxx xxxx xxxx xxxx */ - /* LDRH (immediate) 1111 1000 1011 xxxx xxxx xxxx xxxx xxxx */ - /* LDRSH (immediate) 1111 1001 1011 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0xfec00000, 0xf8800000, PROBES_T32_LDRSTR, - REGS(NOPCX, NOSPPCX, 0, 0, 0)), - - /* STRB (register) 1111 1000 0000 xxxx xxxx 0000 00xx xxxx */ - /* STRH (register) 1111 1000 0010 xxxx xxxx 0000 00xx xxxx */ - /* LDRB (register) 1111 1000 0001 xxxx xxxx 0000 00xx xxxx */ - /* LDRSB (register) 1111 1001 0001 xxxx xxxx 0000 00xx xxxx */ - /* LDRH (register) 1111 1000 0011 xxxx xxxx 0000 00xx xxxx */ - /* LDRSH (register) 1111 1001 0011 xxxx xxxx 0000 00xx xxxx */ - DECODE_EMULATEX (0xfe800fc0, 0xf8000000, PROBES_T32_LDRSTR, - REGS(NOPCX, NOSPPCX, 0, 0, NOSPPC)), - - /* Other unallocated instructions... */ - DECODE_END -}; - -static const union decode_item t32_table_1111_1010___1111[] = { - /* Data-processing (register) */ - - /* ??? 1111 1010 011x xxxx 1111 xxxx 1xxx xxxx */ - DECODE_REJECT (0xffe0f080, 0xfa60f080), - - /* SXTH 1111 1010 0000 1111 1111 xxxx 1xxx xxxx */ - /* UXTH 1111 1010 0001 1111 1111 xxxx 1xxx xxxx */ - /* SXTB16 1111 1010 0010 1111 1111 xxxx 1xxx xxxx */ - /* UXTB16 1111 1010 0011 1111 1111 xxxx 1xxx xxxx */ - /* SXTB 1111 1010 0100 1111 1111 xxxx 1xxx xxxx */ - /* UXTB 1111 1010 0101 1111 1111 xxxx 1xxx xxxx */ - DECODE_EMULATEX (0xff8ff080, 0xfa0ff080, PROBES_T32_SIGN_EXTEND, - REGS(0, 0, NOSPPC, 0, NOSPPC)), - - - /* ??? 1111 1010 1xxx xxxx 1111 xxxx 0x11 xxxx */ - DECODE_REJECT (0xff80f0b0, 0xfa80f030), - /* ??? 
1111 1010 1x11 xxxx 1111 xxxx 0xxx xxxx */ - DECODE_REJECT (0xffb0f080, 0xfab0f000), - - /* SADD16 1111 1010 1001 xxxx 1111 xxxx 0000 xxxx */ - /* SASX 1111 1010 1010 xxxx 1111 xxxx 0000 xxxx */ - /* SSAX 1111 1010 1110 xxxx 1111 xxxx 0000 xxxx */ - /* SSUB16 1111 1010 1101 xxxx 1111 xxxx 0000 xxxx */ - /* SADD8 1111 1010 1000 xxxx 1111 xxxx 0000 xxxx */ - /* SSUB8 1111 1010 1100 xxxx 1111 xxxx 0000 xxxx */ - - /* QADD16 1111 1010 1001 xxxx 1111 xxxx 0001 xxxx */ - /* QASX 1111 1010 1010 xxxx 1111 xxxx 0001 xxxx */ - /* QSAX 1111 1010 1110 xxxx 1111 xxxx 0001 xxxx */ - /* QSUB16 1111 1010 1101 xxxx 1111 xxxx 0001 xxxx */ - /* QADD8 1111 1010 1000 xxxx 1111 xxxx 0001 xxxx */ - /* QSUB8 1111 1010 1100 xxxx 1111 xxxx 0001 xxxx */ - - /* SHADD16 1111 1010 1001 xxxx 1111 xxxx 0010 xxxx */ - /* SHASX 1111 1010 1010 xxxx 1111 xxxx 0010 xxxx */ - /* SHSAX 1111 1010 1110 xxxx 1111 xxxx 0010 xxxx */ - /* SHSUB16 1111 1010 1101 xxxx 1111 xxxx 0010 xxxx */ - /* SHADD8 1111 1010 1000 xxxx 1111 xxxx 0010 xxxx */ - /* SHSUB8 1111 1010 1100 xxxx 1111 xxxx 0010 xxxx */ - - /* UADD16 1111 1010 1001 xxxx 1111 xxxx 0100 xxxx */ - /* UASX 1111 1010 1010 xxxx 1111 xxxx 0100 xxxx */ - /* USAX 1111 1010 1110 xxxx 1111 xxxx 0100 xxxx */ - /* USUB16 1111 1010 1101 xxxx 1111 xxxx 0100 xxxx */ - /* UADD8 1111 1010 1000 xxxx 1111 xxxx 0100 xxxx */ - /* USUB8 1111 1010 1100 xxxx 1111 xxxx 0100 xxxx */ - - /* UQADD16 1111 1010 1001 xxxx 1111 xxxx 0101 xxxx */ - /* UQASX 1111 1010 1010 xxxx 1111 xxxx 0101 xxxx */ - /* UQSAX 1111 1010 1110 xxxx 1111 xxxx 0101 xxxx */ - /* UQSUB16 1111 1010 1101 xxxx 1111 xxxx 0101 xxxx */ - /* UQADD8 1111 1010 1000 xxxx 1111 xxxx 0101 xxxx */ - /* UQSUB8 1111 1010 1100 xxxx 1111 xxxx 0101 xxxx */ - - /* UHADD16 1111 1010 1001 xxxx 1111 xxxx 0110 xxxx */ - /* UHASX 1111 1010 1010 xxxx 1111 xxxx 0110 xxxx */ - /* UHSAX 1111 1010 1110 xxxx 1111 xxxx 0110 xxxx */ - /* UHSUB16 1111 1010 1101 xxxx 1111 xxxx 0110 xxxx */ - /* UHADD8 1111 1010 1000 xxxx 1111 xxxx 0110 xxxx */ - /* UHSUB8 1111 1010 1100 xxxx 1111 xxxx 0110 xxxx */ - DECODE_OR (0xff80f080, 0xfa80f000), - - /* SXTAH 1111 1010 0000 xxxx 1111 xxxx 1xxx xxxx */ - /* UXTAH 1111 1010 0001 xxxx 1111 xxxx 1xxx xxxx */ - /* SXTAB16 1111 1010 0010 xxxx 1111 xxxx 1xxx xxxx */ - /* UXTAB16 1111 1010 0011 xxxx 1111 xxxx 1xxx xxxx */ - /* SXTAB 1111 1010 0100 xxxx 1111 xxxx 1xxx xxxx */ - /* UXTAB 1111 1010 0101 xxxx 1111 xxxx 1xxx xxxx */ - DECODE_OR (0xff80f080, 0xfa00f080), - - /* QADD 1111 1010 1000 xxxx 1111 xxxx 1000 xxxx */ - /* QDADD 1111 1010 1000 xxxx 1111 xxxx 1001 xxxx */ - /* QSUB 1111 1010 1000 xxxx 1111 xxxx 1010 xxxx */ - /* QDSUB 1111 1010 1000 xxxx 1111 xxxx 1011 xxxx */ - DECODE_OR (0xfff0f0c0, 0xfa80f080), - - /* SEL 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */ - DECODE_OR (0xfff0f0f0, 0xfaa0f080), - - /* LSL 1111 1010 000x xxxx 1111 xxxx 0000 xxxx */ - /* LSR 1111 1010 001x xxxx 1111 xxxx 0000 xxxx */ - /* ASR 1111 1010 010x xxxx 1111 xxxx 0000 xxxx */ - /* ROR 1111 1010 011x xxxx 1111 xxxx 0000 xxxx */ - DECODE_EMULATEX (0xff80f0f0, 0xfa00f000, PROBES_T32_MEDIA, - REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)), - - /* CLZ 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */ - DECODE_OR (0xfff0f0f0, 0xfab0f080), - - /* REV 1111 1010 1001 xxxx 1111 xxxx 1000 xxxx */ - /* REV16 1111 1010 1001 xxxx 1111 xxxx 1001 xxxx */ - /* RBIT 1111 1010 1001 xxxx 1111 xxxx 1010 xxxx */ - /* REVSH 1111 1010 1001 xxxx 1111 xxxx 1011 xxxx */ - DECODE_EMULATEX (0xfff0f0c0, 0xfa90f080, PROBES_T32_REVERSE, - REGS(NOSPPC, 0, NOSPPC, 0, SAMEAS16)), - - /* Other 
unallocated instructions... */ - DECODE_END -}; - -static const union decode_item t32_table_1111_1011_0[] = { - /* Multiply, multiply accumulate, and absolute difference */ - - /* ??? 1111 1011 0000 xxxx 1111 xxxx 0001 xxxx */ - DECODE_REJECT (0xfff0f0f0, 0xfb00f010), - /* ??? 1111 1011 0111 xxxx 1111 xxxx 0001 xxxx */ - DECODE_REJECT (0xfff0f0f0, 0xfb70f010), - - /* SMULxy 1111 1011 0001 xxxx 1111 xxxx 00xx xxxx */ - DECODE_OR (0xfff0f0c0, 0xfb10f000), - /* MUL 1111 1011 0000 xxxx 1111 xxxx 0000 xxxx */ - /* SMUAD{X} 1111 1011 0010 xxxx 1111 xxxx 000x xxxx */ - /* SMULWy 1111 1011 0011 xxxx 1111 xxxx 000x xxxx */ - /* SMUSD{X} 1111 1011 0100 xxxx 1111 xxxx 000x xxxx */ - /* SMMUL{R} 1111 1011 0101 xxxx 1111 xxxx 000x xxxx */ - /* USAD8 1111 1011 0111 xxxx 1111 xxxx 0000 xxxx */ - DECODE_EMULATEX (0xff80f0e0, 0xfb00f000, PROBES_T32_MUL_ADD, - REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)), - - /* ??? 1111 1011 0111 xxxx xxxx xxxx 0001 xxxx */ - DECODE_REJECT (0xfff000f0, 0xfb700010), - - /* SMLAxy 1111 1011 0001 xxxx xxxx xxxx 00xx xxxx */ - DECODE_OR (0xfff000c0, 0xfb100000), - /* MLA 1111 1011 0000 xxxx xxxx xxxx 0000 xxxx */ - /* MLS 1111 1011 0000 xxxx xxxx xxxx 0001 xxxx */ - /* SMLAD{X} 1111 1011 0010 xxxx xxxx xxxx 000x xxxx */ - /* SMLAWy 1111 1011 0011 xxxx xxxx xxxx 000x xxxx */ - /* SMLSD{X} 1111 1011 0100 xxxx xxxx xxxx 000x xxxx */ - /* SMMLA{R} 1111 1011 0101 xxxx xxxx xxxx 000x xxxx */ - /* SMMLS{R} 1111 1011 0110 xxxx xxxx xxxx 000x xxxx */ - /* USADA8 1111 1011 0111 xxxx xxxx xxxx 0000 xxxx */ - DECODE_EMULATEX (0xff8000c0, 0xfb000000, PROBES_T32_MUL_ADD2, - REGS(NOSPPC, NOSPPCX, NOSPPC, 0, NOSPPC)), - - /* Other unallocated instructions... */ - DECODE_END -}; - -static const union decode_item t32_table_1111_1011_1[] = { - /* Long multiply, long multiply accumulate, and divide */ - - /* UMAAL 1111 1011 1110 xxxx xxxx xxxx 0110 xxxx */ - DECODE_OR (0xfff000f0, 0xfbe00060), - /* SMLALxy 1111 1011 1100 xxxx xxxx xxxx 10xx xxxx */ - DECODE_OR (0xfff000c0, 0xfbc00080), - /* SMLALD{X} 1111 1011 1100 xxxx xxxx xxxx 110x xxxx */ - /* SMLSLD{X} 1111 1011 1101 xxxx xxxx xxxx 110x xxxx */ - DECODE_OR (0xffe000e0, 0xfbc000c0), - /* SMULL 1111 1011 1000 xxxx xxxx xxxx 0000 xxxx */ - /* UMULL 1111 1011 1010 xxxx xxxx xxxx 0000 xxxx */ - /* SMLAL 1111 1011 1100 xxxx xxxx xxxx 0000 xxxx */ - /* UMLAL 1111 1011 1110 xxxx xxxx xxxx 0000 xxxx */ - DECODE_EMULATEX (0xff9000f0, 0xfb800000, PROBES_T32_MUL_ADD_LONG, - REGS(NOSPPC, NOSPPC, NOSPPC, 0, NOSPPC)), - - /* SDIV 1111 1011 1001 xxxx xxxx xxxx 1111 xxxx */ - /* UDIV 1111 1011 1011 xxxx xxxx xxxx 1111 xxxx */ - /* Other unallocated instructions... 
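Many of the groups above chain one or more DECODE_OR patterns into a following terminal entry, which can be read as "any of these encodings takes the register checks and action of the entry that closes the group". A simplified, assumed reading of that behaviour (types and constants invented; the real loop is probes_decode_insn() in probes.c):

    #include <stdint.h>

    enum { TYPE_END, TYPE_OR, TYPE_TERMINAL };

    struct entry { int type; uint32_t mask, value; int action; };

    static int decode(const struct entry *e, uint32_t insn)
    {
            int matched = 0;

            for (;; e++) {
                    if (e->type == TYPE_END)
                            return -1;               /* rejected          */
                    if (!matched && (insn & e->mask) != e->value)
                            continue;                /* try next pattern  */
                    matched = (e->type == TYPE_OR);  /* OR defers action  */
                    if (!matched)
                            return e->action;        /* terminal entry    */
            }
    }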
*/ - DECODE_END -}; - -const union decode_item probes_decode_thumb32_table[] = { - - /* - * Load/store multiple instructions - * 1110 100x x0xx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xfe400000, 0xe8000000, t32_table_1110_100x_x0xx), - - /* - * Load/store dual, load/store exclusive, table branch - * 1110 100x x1xx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xfe400000, 0xe8400000, t32_table_1110_100x_x1xx), - - /* - * Data-processing (shifted register) - * 1110 101x xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xfe000000, 0xea000000, t32_table_1110_101x), - - /* - * Coprocessor instructions - * 1110 11xx xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_REJECT (0xfc000000, 0xec000000), - - /* - * Data-processing (modified immediate) - * 1111 0x0x xxxx xxxx 0xxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xfa008000, 0xf0000000, t32_table_1111_0x0x___0), - - /* - * Data-processing (plain binary immediate) - * 1111 0x1x xxxx xxxx 0xxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xfa008000, 0xf2000000, t32_table_1111_0x1x___0), - - /* - * Branches and miscellaneous control - * 1111 0xxx xxxx xxxx 1xxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xf8008000, 0xf0008000, t32_table_1111_0xxx___1), - - /* - * Advanced SIMD element or structure load/store instructions - * 1111 1001 xxx0 xxxx xxxx xxxx xxxx xxxx - */ - DECODE_REJECT (0xff100000, 0xf9000000), - - /* - * Memory hints - * 1111 100x x0x1 xxxx 1111 xxxx xxxx xxxx - */ - DECODE_TABLE (0xfe50f000, 0xf810f000, t32_table_1111_100x_x0x1__1111), - - /* - * Store single data item - * 1111 1000 xxx0 xxxx xxxx xxxx xxxx xxxx - * Load single data items - * 1111 100x xxx1 xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xfe000000, 0xf8000000, t32_table_1111_100x), - - /* - * Data-processing (register) - * 1111 1010 xxxx xxxx 1111 xxxx xxxx xxxx - */ - DECODE_TABLE (0xff00f000, 0xfa00f000, t32_table_1111_1010___1111), - - /* - * Multiply, multiply accumulate, and absolute difference - * 1111 1011 0xxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xff800000, 0xfb000000, t32_table_1111_1011_0), - - /* - * Long multiply, long multiply accumulate, and divide - * 1111 1011 1xxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_TABLE (0xff800000, 0xfb800000, t32_table_1111_1011_1), - - /* - * Coprocessor instructions - * 1111 11xx xxxx xxxx xxxx xxxx xxxx xxxx - */ - DECODE_END -}; -#ifdef CONFIG_ARM_KPROBES_TEST_MODULE -EXPORT_SYMBOL_GPL(probes_decode_thumb32_table); -#endif - -static const union decode_item t16_table_1011[] = { - /* Miscellaneous 16-bit instructions */ - - /* ADD (SP plus immediate) 1011 0000 0xxx xxxx */ - /* SUB (SP minus immediate) 1011 0000 1xxx xxxx */ - DECODE_SIMULATE (0xff00, 0xb000, PROBES_T16_ADD_SP), - - /* CBZ 1011 00x1 xxxx xxxx */ - /* CBNZ 1011 10x1 xxxx xxxx */ - DECODE_SIMULATE (0xf500, 0xb100, PROBES_T16_CBZ), - - /* SXTH 1011 0010 00xx xxxx */ - /* SXTB 1011 0010 01xx xxxx */ - /* UXTH 1011 0010 10xx xxxx */ - /* UXTB 1011 0010 11xx xxxx */ - /* REV 1011 1010 00xx xxxx */ - /* REV16 1011 1010 01xx xxxx */ - /* ??? 
1011 1010 10xx xxxx */ - /* REVSH 1011 1010 11xx xxxx */ - DECODE_REJECT (0xffc0, 0xba80), - DECODE_EMULATE (0xf500, 0xb000, PROBES_T16_SIGN_EXTEND), - - /* PUSH 1011 010x xxxx xxxx */ - DECODE_CUSTOM (0xfe00, 0xb400, PROBES_T16_PUSH), - /* POP 1011 110x xxxx xxxx */ - DECODE_CUSTOM (0xfe00, 0xbc00, PROBES_T16_POP), - - /* - * If-Then, and hints - * 1011 1111 xxxx xxxx - */ - - /* YIELD 1011 1111 0001 0000 */ - DECODE_OR (0xffff, 0xbf10), - /* SEV 1011 1111 0100 0000 */ - DECODE_EMULATE (0xffff, 0xbf40, PROBES_T16_SEV), - /* NOP 1011 1111 0000 0000 */ - /* WFE 1011 1111 0010 0000 */ - /* WFI 1011 1111 0011 0000 */ - DECODE_SIMULATE (0xffcf, 0xbf00, PROBES_T16_WFE), - /* Unassigned hints 1011 1111 xxxx 0000 */ - DECODE_REJECT (0xff0f, 0xbf00), - /* IT 1011 1111 xxxx xxxx */ - DECODE_CUSTOM (0xff00, 0xbf00, PROBES_T16_IT), - - /* SETEND 1011 0110 010x xxxx */ - /* CPS 1011 0110 011x xxxx */ - /* BKPT 1011 1110 xxxx xxxx */ - /* And unallocated instructions... */ - DECODE_END -}; - -const union decode_item probes_decode_thumb16_table[] = { - - /* - * Shift (immediate), add, subtract, move, and compare - * 00xx xxxx xxxx xxxx - */ - - /* CMP (immediate) 0010 1xxx xxxx xxxx */ - DECODE_EMULATE (0xf800, 0x2800, PROBES_T16_CMP), - - /* ADD (register) 0001 100x xxxx xxxx */ - /* SUB (register) 0001 101x xxxx xxxx */ - /* LSL (immediate) 0000 0xxx xxxx xxxx */ - /* LSR (immediate) 0000 1xxx xxxx xxxx */ - /* ASR (immediate) 0001 0xxx xxxx xxxx */ - /* ADD (immediate, Thumb) 0001 110x xxxx xxxx */ - /* SUB (immediate, Thumb) 0001 111x xxxx xxxx */ - /* MOV (immediate) 0010 0xxx xxxx xxxx */ - /* ADD (immediate, Thumb) 0011 0xxx xxxx xxxx */ - /* SUB (immediate, Thumb) 0011 1xxx xxxx xxxx */ - DECODE_EMULATE (0xc000, 0x0000, PROBES_T16_ADDSUB), - - /* - * 16-bit Thumb data-processing instructions - * 0100 00xx xxxx xxxx - */ - - /* TST (register) 0100 0010 00xx xxxx */ - DECODE_EMULATE (0xffc0, 0x4200, PROBES_T16_CMP), - /* CMP (register) 0100 0010 10xx xxxx */ - /* CMN (register) 0100 0010 11xx xxxx */ - DECODE_EMULATE (0xff80, 0x4280, PROBES_T16_CMP), - /* AND (register) 0100 0000 00xx xxxx */ - /* EOR (register) 0100 0000 01xx xxxx */ - /* LSL (register) 0100 0000 10xx xxxx */ - /* LSR (register) 0100 0000 11xx xxxx */ - /* ASR (register) 0100 0001 00xx xxxx */ - /* ADC (register) 0100 0001 01xx xxxx */ - /* SBC (register) 0100 0001 10xx xxxx */ - /* ROR (register) 0100 0001 11xx xxxx */ - /* RSB (immediate) 0100 0010 01xx xxxx */ - /* ORR (register) 0100 0011 00xx xxxx */ - /* MUL 0100 0011 00xx xxxx */ - /* BIC (register) 0100 0011 10xx xxxx */ - /* MVN (register) 0100 0011 10xx xxxx */ - DECODE_EMULATE (0xfc00, 0x4000, PROBES_T16_LOGICAL), - - /* - * Special data instructions and branch and exchange - * 0100 01xx xxxx xxxx - */ - - /* BLX pc 0100 0111 1111 1xxx */ - DECODE_REJECT (0xfff8, 0x47f8), - - /* BX (register) 0100 0111 0xxx xxxx */ - /* BLX (register) 0100 0111 1xxx xxxx */ - DECODE_SIMULATE (0xff00, 0x4700, PROBES_T16_BLX), - - /* ADD pc, pc 0100 0100 1111 1111 */ - DECODE_REJECT (0xffff, 0x44ff), - - /* ADD (register) 0100 0100 xxxx xxxx */ - /* CMP (register) 0100 0101 xxxx xxxx */ - /* MOV (register) 0100 0110 xxxx xxxx */ - DECODE_CUSTOM (0xfc00, 0x4400, PROBES_T16_HIREGOPS), - - /* - * Load from Literal Pool - * LDR (literal) 0100 1xxx xxxx xxxx - */ - DECODE_SIMULATE (0xf800, 0x4800, PROBES_T16_LDR_LIT), - - /* - * 16-bit Thumb Load/store instructions - * 0101 xxxx xxxx xxxx - * 011x xxxx xxxx xxxx - * 100x xxxx xxxx xxxx - */ - - /* STR (register) 0101 000x xxxx xxxx */ - /* 
STRH (register) 0101 001x xxxx xxxx */ - /* STRB (register) 0101 010x xxxx xxxx */ - /* LDRSB (register) 0101 011x xxxx xxxx */ - /* LDR (register) 0101 100x xxxx xxxx */ - /* LDRH (register) 0101 101x xxxx xxxx */ - /* LDRB (register) 0101 110x xxxx xxxx */ - /* LDRSH (register) 0101 111x xxxx xxxx */ - /* STR (immediate, Thumb) 0110 0xxx xxxx xxxx */ - /* LDR (immediate, Thumb) 0110 1xxx xxxx xxxx */ - /* STRB (immediate, Thumb) 0111 0xxx xxxx xxxx */ - /* LDRB (immediate, Thumb) 0111 1xxx xxxx xxxx */ - DECODE_EMULATE (0xc000, 0x4000, PROBES_T16_LDRHSTRH), - /* STRH (immediate, Thumb) 1000 0xxx xxxx xxxx */ - /* LDRH (immediate, Thumb) 1000 1xxx xxxx xxxx */ - DECODE_EMULATE (0xf000, 0x8000, PROBES_T16_LDRHSTRH), - /* STR (immediate, Thumb) 1001 0xxx xxxx xxxx */ - /* LDR (immediate, Thumb) 1001 1xxx xxxx xxxx */ - DECODE_SIMULATE (0xf000, 0x9000, PROBES_T16_LDRSTR), - - /* - * Generate PC-/SP-relative address - * ADR (literal) 1010 0xxx xxxx xxxx - * ADD (SP plus immediate) 1010 1xxx xxxx xxxx - */ - DECODE_SIMULATE (0xf000, 0xa000, PROBES_T16_ADR), - - /* - * Miscellaneous 16-bit instructions - * 1011 xxxx xxxx xxxx - */ - DECODE_TABLE (0xf000, 0xb000, t16_table_1011), - - /* STM 1100 0xxx xxxx xxxx */ - /* LDM 1100 1xxx xxxx xxxx */ - DECODE_EMULATE (0xf000, 0xc000, PROBES_T16_LDMSTM), - - /* - * Conditional branch, and Supervisor Call - */ - - /* Permanently UNDEFINED 1101 1110 xxxx xxxx */ - /* SVC 1101 1111 xxxx xxxx */ - DECODE_REJECT (0xfe00, 0xde00), - - /* Conditional branch 1101 xxxx xxxx xxxx */ - DECODE_CUSTOM (0xf000, 0xd000, PROBES_T16_BRANCH_COND), - - /* - * Unconditional branch - * B 1110 0xxx xxxx xxxx - */ - DECODE_SIMULATE (0xf800, 0xe000, PROBES_T16_BRANCH), - - DECODE_END -}; -#ifdef CONFIG_ARM_KPROBES_TEST_MODULE -EXPORT_SYMBOL_GPL(probes_decode_thumb16_table); -#endif - -static unsigned long __kprobes thumb_check_cc(unsigned long cpsr) -{ - if (unlikely(in_it_block(cpsr))) - return probes_condition_checks[current_cond(cpsr)](cpsr); - return true; -} - -static void __kprobes thumb16_singlestep(probes_opcode_t opcode, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - regs->ARM_pc += 2; - asi->insn_handler(opcode, asi, regs); - regs->ARM_cpsr = it_advance(regs->ARM_cpsr); -} - -static void __kprobes thumb32_singlestep(probes_opcode_t opcode, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ - regs->ARM_pc += 4; - asi->insn_handler(opcode, asi, regs); - regs->ARM_cpsr = it_advance(regs->ARM_cpsr); -} - -enum probes_insn __kprobes -thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions) -{ - asi->insn_singlestep = thumb16_singlestep; - asi->insn_check_cc = thumb_check_cc; - return probes_decode_insn(insn, asi, probes_decode_thumb16_table, true, - emulate, actions); -} - -enum probes_insn __kprobes -thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions) -{ - asi->insn_singlestep = thumb32_singlestep; - asi->insn_check_cc = thumb_check_cc; - return probes_decode_insn(insn, asi, probes_decode_thumb32_table, true, - emulate, actions); -} diff --git a/arch/arm/kernel/probes-thumb.h b/arch/arm/kernel/probes-thumb.h deleted file mode 100644 index 7c6f6ebe514f..000000000000 --- a/arch/arm/kernel/probes-thumb.h +++ /dev/null @@ -1,97 +0,0 @@ -/* - * arch/arm/kernel/probes-thumb.h - * - * Copyright 2013 Linaro Ltd. - * Written by: David A. 
Long - * - * The code contained herein is licensed under the GNU General Public - * License. You may obtain a copy of the GNU General Public License - * Version 2 or later at the following locations: - * - * http://www.opensource.org/licenses/gpl-license.html - * http://www.gnu.org/copyleft/gpl.html - */ - -#ifndef _ARM_KERNEL_PROBES_THUMB_H -#define _ARM_KERNEL_PROBES_THUMB_H - -/* - * True if current instruction is in an IT block. - */ -#define in_it_block(cpsr) ((cpsr & 0x06000c00) != 0x00000000) - -/* - * Return the condition code to check for the currently executing instruction. - * This is in ITSTATE<7:4> which is in CPSR<15:12> but is only valid if - * in_it_block returns true. - */ -#define current_cond(cpsr) ((cpsr >> 12) & 0xf) - -enum probes_t32_action { - PROBES_T32_EMULATE_NONE, - PROBES_T32_SIMULATE_NOP, - PROBES_T32_LDMSTM, - PROBES_T32_LDRDSTRD, - PROBES_T32_TABLE_BRANCH, - PROBES_T32_TST, - PROBES_T32_CMP, - PROBES_T32_MOV, - PROBES_T32_ADDSUB, - PROBES_T32_LOGICAL, - PROBES_T32_ADDWSUBW_PC, - PROBES_T32_ADDWSUBW, - PROBES_T32_MOVW, - PROBES_T32_SAT, - PROBES_T32_BITFIELD, - PROBES_T32_SEV, - PROBES_T32_WFE, - PROBES_T32_MRS, - PROBES_T32_BRANCH_COND, - PROBES_T32_BRANCH, - PROBES_T32_PLDI, - PROBES_T32_LDR_LIT, - PROBES_T32_LDRSTR, - PROBES_T32_SIGN_EXTEND, - PROBES_T32_MEDIA, - PROBES_T32_REVERSE, - PROBES_T32_MUL_ADD, - PROBES_T32_MUL_ADD2, - PROBES_T32_MUL_ADD_LONG, - NUM_PROBES_T32_ACTIONS -}; - -enum probes_t16_action { - PROBES_T16_ADD_SP, - PROBES_T16_CBZ, - PROBES_T16_SIGN_EXTEND, - PROBES_T16_PUSH, - PROBES_T16_POP, - PROBES_T16_SEV, - PROBES_T16_WFE, - PROBES_T16_IT, - PROBES_T16_CMP, - PROBES_T16_ADDSUB, - PROBES_T16_LOGICAL, - PROBES_T16_BLX, - PROBES_T16_HIREGOPS, - PROBES_T16_LDR_LIT, - PROBES_T16_LDRHSTRH, - PROBES_T16_LDRSTR, - PROBES_T16_ADR, - PROBES_T16_LDMSTM, - PROBES_T16_BRANCH_COND, - PROBES_T16_BRANCH, - NUM_PROBES_T16_ACTIONS -}; - -extern const union decode_item probes_decode_thumb32_table[]; -extern const union decode_item probes_decode_thumb16_table[]; - -enum probes_insn __kprobes -thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions); -enum probes_insn __kprobes -thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions); - -#endif diff --git a/arch/arm/kernel/probes.c b/arch/arm/kernel/probes.c deleted file mode 100644 index a8ab540d7e73..000000000000 --- a/arch/arm/kernel/probes.c +++ /dev/null @@ -1,456 +0,0 @@ -/* - * arch/arm/kernel/probes.c - * - * Copyright (C) 2011 Jon Medhurst . - * - * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is - * Copyright (C) 2006, 2007 Motorola Inc. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#include -#include -#include -#include -#include - -#include "probes.h" - - -#ifndef find_str_pc_offset - -/* - * For STR and STM instructions, an ARM core may choose to use either - * a +8 or a +12 displacement from the current instruction's address. - * Whichever value is chosen for a given core, it must be the same for - * both instructions and may not change. This function measures it. 
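To make the +8/+12 measurement performed just below concrete, a worked example with invented addresses:

    /*
     * Suppose the "sub %[ret], pc, #4" below sits at 0x8000, so the
     * "str pc, ..." is at 0x8004:
     *
     *   sub ret, pc, #4    ; pc reads as 0x8000 + 8, so ret = 0x8004,
     *                      ;   i.e. the address of the str itself
     *   str pc, [addr]     ; a +8 core stores 0x8004 + 8 = 0x800c
     *   ldr scr, [addr]    ; scr = 0x800c
     *   sub ret, scr, ret  ; ret = 0x800c - 0x8004 = 8 -> str_pc_offset
     *
     * On a +12 core the stored value would be 0x8010 and the result 12.
     */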
- */ - -int str_pc_offset; - -void __init find_str_pc_offset(void) -{ - int addr, scratch, ret; - - __asm__ ( - "sub %[ret], pc, #4 \n\t" - "str pc, %[addr] \n\t" - "ldr %[scr], %[addr] \n\t" - "sub %[ret], %[scr], %[ret] \n\t" - : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr)); - - str_pc_offset = ret; -} - -#endif /* !find_str_pc_offset */ - - -#ifndef test_load_write_pc_interworking - -bool load_write_pc_interworks; - -void __init test_load_write_pc_interworking(void) -{ - int arch = cpu_architecture(); - BUG_ON(arch == CPU_ARCH_UNKNOWN); - load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T; -} - -#endif /* !test_load_write_pc_interworking */ - - -#ifndef test_alu_write_pc_interworking - -bool alu_write_pc_interworks; - -void __init test_alu_write_pc_interworking(void) -{ - int arch = cpu_architecture(); - BUG_ON(arch == CPU_ARCH_UNKNOWN); - alu_write_pc_interworks = arch >= CPU_ARCH_ARMv7; -} - -#endif /* !test_alu_write_pc_interworking */ - - -void __init arm_probes_decode_init(void) -{ - find_str_pc_offset(); - test_load_write_pc_interworking(); - test_alu_write_pc_interworking(); -} - - -static unsigned long __kprobes __check_eq(unsigned long cpsr) -{ - return cpsr & PSR_Z_BIT; -} - -static unsigned long __kprobes __check_ne(unsigned long cpsr) -{ - return (~cpsr) & PSR_Z_BIT; -} - -static unsigned long __kprobes __check_cs(unsigned long cpsr) -{ - return cpsr & PSR_C_BIT; -} - -static unsigned long __kprobes __check_cc(unsigned long cpsr) -{ - return (~cpsr) & PSR_C_BIT; -} - -static unsigned long __kprobes __check_mi(unsigned long cpsr) -{ - return cpsr & PSR_N_BIT; -} - -static unsigned long __kprobes __check_pl(unsigned long cpsr) -{ - return (~cpsr) & PSR_N_BIT; -} - -static unsigned long __kprobes __check_vs(unsigned long cpsr) -{ - return cpsr & PSR_V_BIT; -} - -static unsigned long __kprobes __check_vc(unsigned long cpsr) -{ - return (~cpsr) & PSR_V_BIT; -} - -static unsigned long __kprobes __check_hi(unsigned long cpsr) -{ - cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */ - return cpsr & PSR_C_BIT; -} - -static unsigned long __kprobes __check_ls(unsigned long cpsr) -{ - cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */ - return (~cpsr) & PSR_C_BIT; -} - -static unsigned long __kprobes __check_ge(unsigned long cpsr) -{ - cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ - return (~cpsr) & PSR_N_BIT; -} - -static unsigned long __kprobes __check_lt(unsigned long cpsr) -{ - cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ - return cpsr & PSR_N_BIT; -} - -static unsigned long __kprobes __check_gt(unsigned long cpsr) -{ - unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ - temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */ - return (~temp) & PSR_N_BIT; -} - -static unsigned long __kprobes __check_le(unsigned long cpsr) -{ - unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ - temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */ - return temp & PSR_N_BIT; -} - -static unsigned long __kprobes __check_al(unsigned long cpsr) -{ - return true; -} - -probes_check_cc * const probes_condition_checks[16] = { - &__check_eq, &__check_ne, &__check_cs, &__check_cc, - &__check_mi, &__check_pl, &__check_vs, &__check_vc, - &__check_hi, &__check_ls, &__check_ge, &__check_lt, - &__check_gt, &__check_le, &__check_al, &__check_al -}; - - -void __kprobes probes_simulate_nop(probes_opcode_t opcode, - struct arch_probes_insn *asi, - struct pt_regs *regs) -{ -} - -void __kprobes probes_emulate_none(probes_opcode_t opcode, - struct arch_probes_insn 
*asi, - struct pt_regs *regs) -{ - asi->insn_fn(); -} - -/* - * Prepare an instruction slot to receive an instruction for emulating. - * This is done by placing a subroutine return after the location where the - * instruction will be placed. We also modify ARM instructions to be - * unconditional as the condition code will already be checked before any - * emulation handler is called. - */ -static probes_opcode_t __kprobes -prepare_emulated_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool thumb) -{ -#ifdef CONFIG_THUMB2_KERNEL - if (thumb) { - u16 *thumb_insn = (u16 *)asi->insn; - /* Thumb bx lr */ - thumb_insn[1] = __opcode_to_mem_thumb16(0x4770); - thumb_insn[2] = __opcode_to_mem_thumb16(0x4770); - return insn; - } - asi->insn[1] = __opcode_to_mem_arm(0xe12fff1e); /* ARM bx lr */ -#else - asi->insn[1] = __opcode_to_mem_arm(0xe1a0f00e); /* mov pc, lr */ -#endif - /* Make an ARM instruction unconditional */ - if (insn < 0xe0000000) - insn = (insn | 0xe0000000) & ~0x10000000; - return insn; -} - -/* - * Write a (probably modified) instruction into the slot previously prepared by - * prepare_emulated_insn - */ -static void __kprobes -set_emulated_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool thumb) -{ -#ifdef CONFIG_THUMB2_KERNEL - if (thumb) { - u16 *ip = (u16 *)asi->insn; - if (is_wide_instruction(insn)) - *ip++ = __opcode_to_mem_thumb16(insn >> 16); - *ip++ = __opcode_to_mem_thumb16(insn); - return; - } -#endif - asi->insn[0] = __opcode_to_mem_arm(insn); -} - -/* - * When we modify the register numbers encoded in an instruction to be emulated, - * the new values come from this define. For ARM and 32-bit Thumb instructions - * this gives... - * - * bit position 16 12 8 4 0 - * ---------------+---+---+---+---+---+ - * register r2 r0 r1 -- r3 - */ -#define INSN_NEW_BITS 0x00020103 - -/* Each nibble has same value as that at INSN_NEW_BITS bit 16 */ -#define INSN_SAMEAS16_BITS 0x22222222 - -/* - * Validate and modify each of the registers encoded in an instruction. - * - * Each nibble in regs contains a value from enum decode_reg_type. For each - * non-zero value, the corresponding nibble in pinsn is validated and modified - * according to the type. - */ -static bool __kprobes decode_regs(probes_opcode_t *pinsn, u32 regs, bool modify) -{ - probes_opcode_t insn = *pinsn; - probes_opcode_t mask = 0xf; /* Start at least significant nibble */ - - for (; regs != 0; regs >>= 4, mask <<= 4) { - - probes_opcode_t new_bits = INSN_NEW_BITS; - - switch (regs & 0xf) { - - case REG_TYPE_NONE: - /* Nibble not a register, skip to next */ - continue; - - case REG_TYPE_ANY: - /* Any register is allowed */ - break; - - case REG_TYPE_SAMEAS16: - /* Replace register with same as at bit position 16 */ - new_bits = INSN_SAMEAS16_BITS; - break; - - case REG_TYPE_SP: - /* Only allow SP (R13) */ - if ((insn ^ 0xdddddddd) & mask) - goto reject; - break; - - case REG_TYPE_PC: - /* Only allow PC (R15) */ - if ((insn ^ 0xffffffff) & mask) - goto reject; - break; - - case REG_TYPE_NOSP: - /* Reject SP (R13) */ - if (((insn ^ 0xdddddddd) & mask) == 0) - goto reject; - break; - - case REG_TYPE_NOSPPC: - case REG_TYPE_NOSPPCX: - /* Reject SP and PC (R13 and R15) */ - if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0) - goto reject; - break; - - case REG_TYPE_NOPCWB: - if (!is_writeback(insn)) - break; /* No writeback, so any register is OK */ - /* fall through... 
*/ - case REG_TYPE_NOPC: - case REG_TYPE_NOPCX: - /* Reject PC (R15) */ - if (((insn ^ 0xffffffff) & mask) == 0) - goto reject; - break; - } - - /* Replace value of nibble with new register number... */ - insn &= ~mask; - insn |= new_bits & mask; - } - - if (modify) - *pinsn = insn; - - return true; - -reject: - return false; -} - -static const int decode_struct_sizes[NUM_DECODE_TYPES] = { - [DECODE_TYPE_TABLE] = sizeof(struct decode_table), - [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom), - [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate), - [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate), - [DECODE_TYPE_OR] = sizeof(struct decode_or), - [DECODE_TYPE_REJECT] = sizeof(struct decode_reject) -}; - -/* - * probes_decode_insn operates on data tables in order to decode an ARM - * architecture instruction onto which a kprobe has been placed. - * - * These instruction decoding tables are a concatenation of entries each - * of which consist of one of the following structs: - * - * decode_table - * decode_custom - * decode_simulate - * decode_emulate - * decode_or - * decode_reject - * - * Each of these starts with a struct decode_header which has the following - * fields: - * - * type_regs - * mask - * value - * - * The least significant DECODE_TYPE_BITS of type_regs contains a value - * from enum decode_type, this indicates which of the decode_* structs - * the entry contains. The value DECODE_TYPE_END indicates the end of the - * table. - * - * When the table is parsed, each entry is checked in turn to see if it - * matches the instruction to be decoded using the test: - * - * (insn & mask) == value - * - * If no match is found before the end of the table is reached then decoding - * fails with INSN_REJECTED. - * - * When a match is found, decode_regs() is called to validate and modify each - * of the registers encoded in the instruction; the data it uses to do this - * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding - * to fail with INSN_REJECTED. - * - * Once the instruction has passed the above tests, further processing - * depends on the type of the table entry's decode struct. 
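To make the table machinery concrete, here is a minimal caller-side sketch (illustration only, not part of the patch; the table, the EXAMPLE_NOP index and example_actions[] are hypothetical, while the macros, types and probes_simulate_nop are the ones declared in probes.h):

enum { EXAMPLE_NOP };

static const union decode_action example_actions[] = {
        [EXAMPLE_NOP] = {.handler = probes_simulate_nop},
};

static const union decode_item example_table[] = {
        /* NOP  cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
        DECODE_SIMULATE (0x0fff00ff, 0x03200000, EXAMPLE_NOP),
        DECODE_END
};

Decoding 0xe320f000 (NOP) against example_table matches the first entry, sets asi->insn_handler to probes_simulate_nop and returns INSN_GOOD_NO_SLOT; any other opcode falls through to DECODE_END and is rejected with INSN_REJECTED.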
- * - */ -int __kprobes -probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - const union decode_item *table, bool thumb, - bool emulate, const union decode_action *actions) -{ - const struct decode_header *h = (struct decode_header *)table; - const struct decode_header *next; - bool matched = false; - - if (emulate) - insn = prepare_emulated_insn(insn, asi, thumb); - - for (;; h = next) { - enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; - u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS; - - if (type == DECODE_TYPE_END) - return INSN_REJECTED; - - next = (struct decode_header *) - ((uintptr_t)h + decode_struct_sizes[type]); - - if (!matched && (insn & h->mask.bits) != h->value.bits) - continue; - - if (!decode_regs(&insn, regs, emulate)) - return INSN_REJECTED; - - switch (type) { - - case DECODE_TYPE_TABLE: { - struct decode_table *d = (struct decode_table *)h; - next = (struct decode_header *)d->table.table; - break; - } - - case DECODE_TYPE_CUSTOM: { - struct decode_custom *d = (struct decode_custom *)h; - return actions[d->decoder.action].decoder(insn, asi, h); - } - - case DECODE_TYPE_SIMULATE: { - struct decode_simulate *d = (struct decode_simulate *)h; - asi->insn_handler = actions[d->handler.action].handler; - return INSN_GOOD_NO_SLOT; - } - - case DECODE_TYPE_EMULATE: { - struct decode_emulate *d = (struct decode_emulate *)h; - - if (!emulate) - return actions[d->handler.action].decoder(insn, - asi, h); - - asi->insn_handler = actions[d->handler.action].handler; - set_emulated_insn(insn, asi, thumb); - return INSN_GOOD; - } - - case DECODE_TYPE_OR: - matched = true; - break; - - case DECODE_TYPE_REJECT: - default: - return INSN_REJECTED; - } - } -} diff --git a/arch/arm/kernel/probes.h b/arch/arm/kernel/probes.h deleted file mode 100644 index dba9f2466a93..000000000000 --- a/arch/arm/kernel/probes.h +++ /dev/null @@ -1,407 +0,0 @@ -/* - * arch/arm/kernel/probes.h - * - * Copyright (C) 2011 Jon Medhurst . - * - * Some contents moved here from arch/arm/include/asm/kprobes.h which is - * Copyright (C) 2006, 2007 Motorola Inc. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * General Public License for more details. - */ - -#ifndef _ARM_KERNEL_PROBES_H -#define _ARM_KERNEL_PROBES_H - -#include -#include -#include - -void __init arm_probes_decode_init(void); - -extern probes_check_cc * const probes_condition_checks[16]; - -#if __LINUX_ARM_ARCH__ >= 7 - -/* str_pc_offset is architecturally defined from ARMv7 onwards */ -#define str_pc_offset 8 -#define find_str_pc_offset() - -#else /* __LINUX_ARM_ARCH__ < 7 */ - -/* We need a run-time check to determine str_pc_offset */ -extern int str_pc_offset; -void __init find_str_pc_offset(void); - -#endif - - -/* - * Update ITSTATE after normal execution of an IT block instruction. 
- * - * The 8 IT state bits are split into two parts in CPSR: - * ITSTATE<1:0> are in CPSR<26:25> - * ITSTATE<7:2> are in CPSR<15:10> - */ -static inline unsigned long it_advance(unsigned long cpsr) - { - if ((cpsr & 0x06000400) == 0) { - /* ITSTATE<2:0> == 0 means end of IT block, so clear IT state */ - cpsr &= ~PSR_IT_MASK; - } else { - /* We need to shift left ITSTATE<4:0> */ - const unsigned long mask = 0x06001c00; /* Mask ITSTATE<4:0> */ - unsigned long it = cpsr & mask; - it <<= 1; - it |= it >> (27 - 10); /* Carry ITSTATE<2> to correct place */ - it &= mask; - cpsr &= ~mask; - cpsr |= it; - } - return cpsr; -} - -static inline void __kprobes bx_write_pc(long pcv, struct pt_regs *regs) -{ - long cpsr = regs->ARM_cpsr; - if (pcv & 0x1) { - cpsr |= PSR_T_BIT; - pcv &= ~0x1; - } else { - cpsr &= ~PSR_T_BIT; - pcv &= ~0x2; /* Avoid UNPREDICTABLE address allignment */ - } - regs->ARM_cpsr = cpsr; - regs->ARM_pc = pcv; -} - - -#if __LINUX_ARM_ARCH__ >= 6 - -/* Kernels built for >= ARMv6 should never run on <= ARMv5 hardware, so... */ -#define load_write_pc_interworks true -#define test_load_write_pc_interworking() - -#else /* __LINUX_ARM_ARCH__ < 6 */ - -/* We need run-time testing to determine if load_write_pc() should interwork. */ -extern bool load_write_pc_interworks; -void __init test_load_write_pc_interworking(void); - -#endif - -static inline void __kprobes load_write_pc(long pcv, struct pt_regs *regs) -{ - if (load_write_pc_interworks) - bx_write_pc(pcv, regs); - else - regs->ARM_pc = pcv; -} - - -#if __LINUX_ARM_ARCH__ >= 7 - -#define alu_write_pc_interworks true -#define test_alu_write_pc_interworking() - -#elif __LINUX_ARM_ARCH__ <= 5 - -/* Kernels built for <= ARMv5 should never run on >= ARMv6 hardware, so... */ -#define alu_write_pc_interworks false -#define test_alu_write_pc_interworking() - -#else /* __LINUX_ARM_ARCH__ == 6 */ - -/* We could be an ARMv6 binary on ARMv7 hardware so we need a run-time check. */ -extern bool alu_write_pc_interworks; -void __init test_alu_write_pc_interworking(void); - -#endif /* __LINUX_ARM_ARCH__ == 6 */ - -static inline void __kprobes alu_write_pc(long pcv, struct pt_regs *regs) -{ - if (alu_write_pc_interworks) - bx_write_pc(pcv, regs); - else - regs->ARM_pc = pcv; -} - - -/* - * Test if load/store instructions writeback the address register. - * if P (bit 24) == 0 or W (bit 21) == 1 - */ -#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000) - -/* - * The following definitions and macros are used to build instruction - * decoding tables for use by probes_decode_insn. - * - * These tables are a concatenation of entries each of which consist of one of - * the decode_* structs. All of the fields in every type of decode structure - * are of the union type decode_item, therefore the entire decode table can be - * viewed as an array of these and declared like: - * - * static const union decode_item table_name[] = {}; - * - * In order to construct each entry in the table, macros are used to - * initialise a number of sequential decode_item values in a layout which - * matches the relevant struct. E.g. DECODE_SIMULATE initialise a struct - * decode_simulate by initialising four decode_item objects like this... - * - * {.bits = _type}, - * {.bits = _mask}, - * {.bits = _value}, - * {.action = _handler}, - * - * Initialising a specified member of the union means that the compiler - * will produce a warning if the argument is of an incorrect type. 
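Concretely (illustration only), given the DECODE_HEADER and DECODE_SIMULATEX definitions further down in this header, an entry such as DECODE_SIMULATE(0x0fff00ff, 0x03200000, PROBES_SIMULATE_NOP), where PROBES_SIMULATE_NOP is one of the ARM action indices, expands to four consecutive decode_item initialisers:

        {.bits = DECODE_TYPE_SIMULATE},         /* type; _regs == 0, so no register info */
        {.bits = 0x0fff00ff},                   /* mask  */
        {.bits = 0x03200000},                   /* value */
        {.action = PROBES_SIMULATE_NOP},        /* index into the decode_action array */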
- * - * Below is a list of each of the macros used to initialise entries and a - * description of the action performed when that entry is matched to an - * instruction. A match is found when (instruction & mask) == value. - * - * DECODE_TABLE(mask, value, table) - * Instruction decoding jumps to parsing the new sub-table 'table'. - * - * DECODE_CUSTOM(mask, value, decoder) - * The value of 'decoder' is used as an index into the array of - * action functions, and the retrieved decoder function is invoked - * to complete decoding of the instruction. - * - * DECODE_SIMULATE(mask, value, handler) - * The probes instruction handler is set to the value found by - * indexing into the action array using the value of 'handler'. This - * will be used to simulate the instruction when the probe is hit. - * Decoding returns with INSN_GOOD_NO_SLOT. - * - * DECODE_EMULATE(mask, value, handler) - * The probes instruction handler is set to the value found by - * indexing into the action array using the value of 'handler'. This - * will be used to emulate the instruction when the probe is hit. The - * modified instruction (see below) is placed in the probes instruction - * slot so it may be called by the emulation code. Decoding returns - * with INSN_GOOD. - * - * DECODE_REJECT(mask, value) - * Instruction decoding fails with INSN_REJECTED - * - * DECODE_OR(mask, value) - * This allows the mask/value test of multiple table entries to be - * logically ORed. Once an 'or' entry is matched the decoding action to - * be performed is that of the next entry which isn't an 'or'. E.g. - * - * DECODE_OR (mask1, value1) - * DECODE_OR (mask2, value2) - * DECODE_SIMULATE (mask3, value3, simulation_handler) - * - * This means that if any of the three mask/value pairs match the - * instruction being decoded, then 'simulation_handler' will be used - * for it. - * - * Both the SIMULATE and EMULATE macros have a second form which take an - * additional 'regs' argument. - * - * DECODE_SIMULATEX(mask, value, handler, regs) - * DECODE_EMULATEX (mask, value, handler, regs) - * - * These are used to specify what kind of CPU register is encoded in each of the - * least significant 5 nibbles of the instruction being decoded. The regs value - * is specified using the REGS macro, this takes any of the REG_TYPE_* values - * from enum decode_reg_type as arguments; only the '*' part of the name is - * given. E.g. - * - * REGS(0, ANY, NOPC, 0, ANY) - * - * This indicates an instruction is encoded like: - * - * bits 19..16 ignore - * bits 15..12 any register allowed here - * bits 11.. 8 any register except PC allowed here - * bits 7.. 4 ignore - * bits 3.. 0 any register allowed here - * - * This register specification is checked after a decode table entry is found to - * match an instruction (through the mask/value test). Any invalid register then - * found in the instruction will cause decoding to fail with INSN_REJECTED. In - * the above example this would happen if bits 11..8 of the instruction were - * 1111, indicating R15 or PC. - * - * As well as checking for legal combinations of registers, this data is also - * used to modify the registers encoded in the instructions so that an - * emulation routines can use it. (See decode_regs() and INSN_NEW_BITS.) 
- * - * Here is a real example which matches ARM instructions of the form - * "AND ,,, " - * - * DECODE_EMULATEX (0x0e000090, 0x00000010, PROBES_DATA_PROCESSING_REG, - * REGS(ANY, ANY, NOPC, 0, ANY)), - * ^ ^ ^ ^ - * Rn Rd Rs Rm - * - * Decoding the instruction "AND R4, R5, R6, ASL R15" will be rejected because - * Rs == R15 - * - * Decoding the instruction "AND R4, R5, R6, ASL R7" will be accepted and the - * instruction will be modified to "AND R0, R2, R3, ASL R1" and then placed into - * the kprobes instruction slot. This can then be called later by the handler - * function emulate_rd12rn16rm0rs8_rwflags (a pointer to which is retrieved from - * the indicated slot in the action array), in order to simulate the instruction. - */ - -enum decode_type { - DECODE_TYPE_END, - DECODE_TYPE_TABLE, - DECODE_TYPE_CUSTOM, - DECODE_TYPE_SIMULATE, - DECODE_TYPE_EMULATE, - DECODE_TYPE_OR, - DECODE_TYPE_REJECT, - NUM_DECODE_TYPES /* Must be last enum */ -}; - -#define DECODE_TYPE_BITS 4 -#define DECODE_TYPE_MASK ((1 << DECODE_TYPE_BITS) - 1) - -enum decode_reg_type { - REG_TYPE_NONE = 0, /* Not a register, ignore */ - REG_TYPE_ANY, /* Any register allowed */ - REG_TYPE_SAMEAS16, /* Register should be same as that at bits 19..16 */ - REG_TYPE_SP, /* Register must be SP */ - REG_TYPE_PC, /* Register must be PC */ - REG_TYPE_NOSP, /* Register must not be SP */ - REG_TYPE_NOSPPC, /* Register must not be SP or PC */ - REG_TYPE_NOPC, /* Register must not be PC */ - REG_TYPE_NOPCWB, /* No PC if load/store write-back flag also set */ - - /* The following types are used when the encoding for PC indicates - * another instruction form. This distiction only matters for test - * case coverage checks. - */ - REG_TYPE_NOPCX, /* Register must not be PC */ - REG_TYPE_NOSPPCX, /* Register must not be SP or PC */ - - /* Alias to allow '0' arg to be used in REGS macro. 
*/ - REG_TYPE_0 = REG_TYPE_NONE -}; - -#define REGS(r16, r12, r8, r4, r0) \ - (((REG_TYPE_##r16) << 16) + \ - ((REG_TYPE_##r12) << 12) + \ - ((REG_TYPE_##r8) << 8) + \ - ((REG_TYPE_##r4) << 4) + \ - (REG_TYPE_##r0)) - -union decode_item { - u32 bits; - const union decode_item *table; - int action; -}; - -struct decode_header; -typedef enum probes_insn (probes_custom_decode_t)(probes_opcode_t, - struct arch_probes_insn *, - const struct decode_header *); - -union decode_action { - probes_insn_handler_t *handler; - probes_custom_decode_t *decoder; -}; - -#define DECODE_END \ - {.bits = DECODE_TYPE_END} - - -struct decode_header { - union decode_item type_regs; - union decode_item mask; - union decode_item value; -}; - -#define DECODE_HEADER(_type, _mask, _value, _regs) \ - {.bits = (_type) | ((_regs) << DECODE_TYPE_BITS)}, \ - {.bits = (_mask)}, \ - {.bits = (_value)} - - -struct decode_table { - struct decode_header header; - union decode_item table; -}; - -#define DECODE_TABLE(_mask, _value, _table) \ - DECODE_HEADER(DECODE_TYPE_TABLE, _mask, _value, 0), \ - {.table = (_table)} - - -struct decode_custom { - struct decode_header header; - union decode_item decoder; -}; - -#define DECODE_CUSTOM(_mask, _value, _decoder) \ - DECODE_HEADER(DECODE_TYPE_CUSTOM, _mask, _value, 0), \ - {.action = (_decoder)} - - -struct decode_simulate { - struct decode_header header; - union decode_item handler; -}; - -#define DECODE_SIMULATEX(_mask, _value, _handler, _regs) \ - DECODE_HEADER(DECODE_TYPE_SIMULATE, _mask, _value, _regs), \ - {.action = (_handler)} - -#define DECODE_SIMULATE(_mask, _value, _handler) \ - DECODE_SIMULATEX(_mask, _value, _handler, 0) - - -struct decode_emulate { - struct decode_header header; - union decode_item handler; -}; - -#define DECODE_EMULATEX(_mask, _value, _handler, _regs) \ - DECODE_HEADER(DECODE_TYPE_EMULATE, _mask, _value, _regs), \ - {.action = (_handler)} - -#define DECODE_EMULATE(_mask, _value, _handler) \ - DECODE_EMULATEX(_mask, _value, _handler, 0) - - -struct decode_or { - struct decode_header header; -}; - -#define DECODE_OR(_mask, _value) \ - DECODE_HEADER(DECODE_TYPE_OR, _mask, _value, 0) - -enum probes_insn { - INSN_REJECTED, - INSN_GOOD, - INSN_GOOD_NO_SLOT -}; - -struct decode_reject { - struct decode_header header; -}; - -#define DECODE_REJECT(_mask, _value) \ - DECODE_HEADER(DECODE_TYPE_REJECT, _mask, _value, 0) - -probes_insn_handler_t probes_simulate_nop; -probes_insn_handler_t probes_emulate_none; - -int __kprobes -probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - const union decode_item *table, bool thumb, bool emulate, - const union decode_action *actions); - -#endif diff --git a/arch/arm/kernel/uprobes-arm.c b/arch/arm/kernel/uprobes-arm.c deleted file mode 100644 index d3b655ff17da..000000000000 --- a/arch/arm/kernel/uprobes-arm.c +++ /dev/null @@ -1,234 +0,0 @@ -/* - * Copyright (C) 2012 Rabin Vincent - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. 
- */ - -#include -#include -#include -#include -#include -#include - -#include "probes.h" -#include "probes-arm.h" -#include "uprobes.h" - -static int uprobes_substitute_pc(unsigned long *pinsn, u32 oregs) -{ - probes_opcode_t insn = __mem_to_opcode_arm(*pinsn); - probes_opcode_t temp; - probes_opcode_t mask; - int freereg; - u32 free = 0xffff; - u32 regs; - - for (regs = oregs; regs; regs >>= 4, insn >>= 4) { - if ((regs & 0xf) == REG_TYPE_NONE) - continue; - - free &= ~(1 << (insn & 0xf)); - } - - /* No PC, no problem */ - if (free & (1 << 15)) - return 15; - - if (!free) - return -1; - - /* - * fls instead of ffs ensures that for "ldrd r0, r1, [pc]" we would - * pick LR instead of R1. - */ - freereg = free = fls(free) - 1; - - temp = __mem_to_opcode_arm(*pinsn); - insn = temp; - regs = oregs; - mask = 0xf; - - for (; regs; regs >>= 4, mask <<= 4, free <<= 4, temp >>= 4) { - if ((regs & 0xf) == REG_TYPE_NONE) - continue; - - if ((temp & 0xf) != 15) - continue; - - insn &= ~mask; - insn |= free & mask; - } - - *pinsn = __opcode_to_mem_arm(insn); - return freereg; -} - -static void uprobe_set_pc(struct arch_uprobe *auprobe, - struct arch_uprobe_task *autask, - struct pt_regs *regs) -{ - u32 pcreg = auprobe->pcreg; - - autask->backup = regs->uregs[pcreg]; - regs->uregs[pcreg] = regs->ARM_pc + 8; -} - -static void uprobe_unset_pc(struct arch_uprobe *auprobe, - struct arch_uprobe_task *autask, - struct pt_regs *regs) -{ - /* PC will be taken care of by common code */ - regs->uregs[auprobe->pcreg] = autask->backup; -} - -static void uprobe_aluwrite_pc(struct arch_uprobe *auprobe, - struct arch_uprobe_task *autask, - struct pt_regs *regs) -{ - u32 pcreg = auprobe->pcreg; - - alu_write_pc(regs->uregs[pcreg], regs); - regs->uregs[pcreg] = autask->backup; -} - -static void uprobe_write_pc(struct arch_uprobe *auprobe, - struct arch_uprobe_task *autask, - struct pt_regs *regs) -{ - u32 pcreg = auprobe->pcreg; - - load_write_pc(regs->uregs[pcreg], regs); - regs->uregs[pcreg] = autask->backup; -} - -enum probes_insn -decode_pc_ro(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe, - asi); - struct decode_emulate *decode = (struct decode_emulate *) d; - u32 regs = decode->header.type_regs.bits >> DECODE_TYPE_BITS; - int reg; - - reg = uprobes_substitute_pc(&auprobe->ixol[0], regs); - if (reg == 15) - return INSN_GOOD; - - if (reg == -1) - return INSN_REJECTED; - - auprobe->pcreg = reg; - auprobe->prehandler = uprobe_set_pc; - auprobe->posthandler = uprobe_unset_pc; - - return INSN_GOOD; -} - -enum probes_insn -decode_wb_pc(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d, bool alu) -{ - struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe, - asi); - enum probes_insn ret = decode_pc_ro(insn, asi, d); - - if (((insn >> 12) & 0xf) == 15) - auprobe->posthandler = alu ? 
uprobe_aluwrite_pc - : uprobe_write_pc; - - return ret; -} - -enum probes_insn -decode_rd12rn16rm0rs8_rwflags(probes_opcode_t insn, - struct arch_probes_insn *asi, - const struct decode_header *d) -{ - return decode_wb_pc(insn, asi, d, true); -} - -enum probes_insn -decode_ldr(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d) -{ - return decode_wb_pc(insn, asi, d, false); -} - -enum probes_insn -uprobe_decode_ldmstm(probes_opcode_t insn, - struct arch_probes_insn *asi, - const struct decode_header *d) -{ - struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe, - asi); - unsigned reglist = insn & 0xffff; - int rn = (insn >> 16) & 0xf; - int lbit = insn & (1 << 20); - unsigned used = reglist | (1 << rn); - - if (rn == 15) - return INSN_REJECTED; - - if (!(used & (1 << 15))) - return INSN_GOOD; - - if (used & (1 << 14)) - return INSN_REJECTED; - - /* Use LR instead of PC */ - insn ^= 0xc000; - - auprobe->pcreg = 14; - auprobe->ixol[0] = __opcode_to_mem_arm(insn); - - auprobe->prehandler = uprobe_set_pc; - if (lbit) - auprobe->posthandler = uprobe_write_pc; - else - auprobe->posthandler = uprobe_unset_pc; - - return INSN_GOOD; -} - -const union decode_action uprobes_probes_actions[] = { - [PROBES_EMULATE_NONE] = {.handler = probes_simulate_nop}, - [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop}, - [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop}, - [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop}, - [PROBES_BRANCH_IMM] = {.handler = simulate_blx1}, - [PROBES_MRS] = {.handler = simulate_mrs}, - [PROBES_BRANCH_REG] = {.handler = simulate_blx2bx}, - [PROBES_CLZ] = {.handler = probes_simulate_nop}, - [PROBES_SATURATING_ARITHMETIC] = {.handler = probes_simulate_nop}, - [PROBES_MUL1] = {.handler = probes_simulate_nop}, - [PROBES_MUL2] = {.handler = probes_simulate_nop}, - [PROBES_SWP] = {.handler = probes_simulate_nop}, - [PROBES_LDRSTRD] = {.decoder = decode_pc_ro}, - [PROBES_LOAD_EXTRA] = {.decoder = decode_pc_ro}, - [PROBES_LOAD] = {.decoder = decode_ldr}, - [PROBES_STORE_EXTRA] = {.decoder = decode_pc_ro}, - [PROBES_STORE] = {.decoder = decode_pc_ro}, - [PROBES_MOV_IP_SP] = {.handler = simulate_mov_ipsp}, - [PROBES_DATA_PROCESSING_REG] = { - .decoder = decode_rd12rn16rm0rs8_rwflags}, - [PROBES_DATA_PROCESSING_IMM] = { - .decoder = decode_rd12rn16rm0rs8_rwflags}, - [PROBES_MOV_HALFWORD] = {.handler = probes_simulate_nop}, - [PROBES_SEV] = {.handler = probes_simulate_nop}, - [PROBES_WFE] = {.handler = probes_simulate_nop}, - [PROBES_SATURATE] = {.handler = probes_simulate_nop}, - [PROBES_REV] = {.handler = probes_simulate_nop}, - [PROBES_MMI] = {.handler = probes_simulate_nop}, - [PROBES_PACK] = {.handler = probes_simulate_nop}, - [PROBES_EXTEND] = {.handler = probes_simulate_nop}, - [PROBES_EXTEND_ADD] = {.handler = probes_simulate_nop}, - [PROBES_MUL_ADD_LONG] = {.handler = probes_simulate_nop}, - [PROBES_MUL_ADD] = {.handler = probes_simulate_nop}, - [PROBES_BITFIELD] = {.handler = probes_simulate_nop}, - [PROBES_BRANCH] = {.handler = simulate_bbl}, - [PROBES_LDMSTM] = {.decoder = uprobe_decode_ldmstm} -}; diff --git a/arch/arm/kernel/uprobes.c b/arch/arm/kernel/uprobes.c deleted file mode 100644 index 56adf9c1fde0..000000000000 --- a/arch/arm/kernel/uprobes.c +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright (C) 2012 Rabin Vincent - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. 
- */ - -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include "probes.h" -#include "probes-arm.h" -#include "uprobes.h" - -#define UPROBE_TRAP_NR UINT_MAX - -bool is_swbp_insn(uprobe_opcode_t *insn) -{ - return (__mem_to_opcode_arm(*insn) & 0x0fffffff) == - (UPROBE_SWBP_ARM_INSN & 0x0fffffff); -} - -int set_swbp(struct arch_uprobe *auprobe, struct mm_struct *mm, - unsigned long vaddr) -{ - return uprobe_write_opcode(mm, vaddr, - __opcode_to_mem_arm(auprobe->bpinsn)); -} - -bool arch_uprobe_ignore(struct arch_uprobe *auprobe, struct pt_regs *regs) -{ - if (!auprobe->asi.insn_check_cc(regs->ARM_cpsr)) { - regs->ARM_pc += 4; - return true; - } - - return false; -} - -bool arch_uprobe_skip_sstep(struct arch_uprobe *auprobe, struct pt_regs *regs) -{ - probes_opcode_t opcode; - - if (!auprobe->simulate) - return false; - - opcode = __mem_to_opcode_arm(*(unsigned int *) auprobe->insn); - - auprobe->asi.insn_singlestep(opcode, &auprobe->asi, regs); - - return true; -} - -unsigned long -arch_uretprobe_hijack_return_addr(unsigned long trampoline_vaddr, - struct pt_regs *regs) -{ - unsigned long orig_ret_vaddr; - - orig_ret_vaddr = regs->ARM_lr; - /* Replace the return addr with trampoline addr */ - regs->ARM_lr = trampoline_vaddr; - return orig_ret_vaddr; -} - -int arch_uprobe_analyze_insn(struct arch_uprobe *auprobe, struct mm_struct *mm, - unsigned long addr) -{ - unsigned int insn; - unsigned int bpinsn; - enum probes_insn ret; - - /* Thumb not yet support */ - if (addr & 0x3) - return -EINVAL; - - insn = __mem_to_opcode_arm(*(unsigned int *)auprobe->insn); - auprobe->ixol[0] = __opcode_to_mem_arm(insn); - auprobe->ixol[1] = __opcode_to_mem_arm(UPROBE_SS_ARM_INSN); - - ret = arm_probes_decode_insn(insn, &auprobe->asi, false, - uprobes_probes_actions); - switch (ret) { - case INSN_REJECTED: - return -EINVAL; - - case INSN_GOOD_NO_SLOT: - auprobe->simulate = true; - break; - - case INSN_GOOD: - default: - break; - } - - bpinsn = UPROBE_SWBP_ARM_INSN & 0x0fffffff; - if (insn >= 0xe0000000) - bpinsn |= 0xe0000000; /* Unconditional instruction */ - else - bpinsn |= insn & 0xf0000000; /* Copy condition from insn */ - - auprobe->bpinsn = bpinsn; - - return 0; -} - -void arch_uprobe_copy_ixol(struct page *page, unsigned long vaddr, - void *src, unsigned long len) -{ - void *xol_page_kaddr = kmap_atomic(page); - void *dst = xol_page_kaddr + (vaddr & ~PAGE_MASK); - - preempt_disable(); - - /* Initialize the slot */ - memcpy(dst, src, len); - - /* flush caches (dcache/icache) */ - flush_uprobe_xol_access(page, vaddr, dst, len); - - preempt_enable(); - - kunmap_atomic(xol_page_kaddr); -} - - -int arch_uprobe_pre_xol(struct arch_uprobe *auprobe, struct pt_regs *regs) -{ - struct uprobe_task *utask = current->utask; - - if (auprobe->prehandler) - auprobe->prehandler(auprobe, &utask->autask, regs); - - utask->autask.saved_trap_no = current->thread.trap_no; - current->thread.trap_no = UPROBE_TRAP_NR; - regs->ARM_pc = utask->xol_vaddr; - - return 0; -} - -int arch_uprobe_post_xol(struct arch_uprobe *auprobe, struct pt_regs *regs) -{ - struct uprobe_task *utask = current->utask; - - WARN_ON_ONCE(current->thread.trap_no != UPROBE_TRAP_NR); - - current->thread.trap_no = utask->autask.saved_trap_no; - regs->ARM_pc = utask->vaddr + 4; - - if (auprobe->posthandler) - auprobe->posthandler(auprobe, &utask->autask, regs); - - return 0; -} - -bool arch_uprobe_xol_was_trapped(struct task_struct *t) -{ - if (t->thread.trap_no != UPROBE_TRAP_NR) - return true; - - return false; 
-} - -void arch_uprobe_abort_xol(struct arch_uprobe *auprobe, struct pt_regs *regs) -{ - struct uprobe_task *utask = current->utask; - - current->thread.trap_no = utask->autask.saved_trap_no; - instruction_pointer_set(regs, utask->vaddr); -} - -int arch_uprobe_exception_notify(struct notifier_block *self, - unsigned long val, void *data) -{ - return NOTIFY_DONE; -} - -static int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr) -{ - unsigned long flags; - - local_irq_save(flags); - instr &= 0x0fffffff; - if (instr == (UPROBE_SWBP_ARM_INSN & 0x0fffffff)) - uprobe_pre_sstep_notifier(regs); - else if (instr == (UPROBE_SS_ARM_INSN & 0x0fffffff)) - uprobe_post_sstep_notifier(regs); - local_irq_restore(flags); - - return 0; -} - -unsigned long uprobe_get_swbp_addr(struct pt_regs *regs) -{ - return instruction_pointer(regs); -} - -static struct undef_hook uprobes_arm_break_hook = { - .instr_mask = 0x0fffffff, - .instr_val = (UPROBE_SWBP_ARM_INSN & 0x0fffffff), - .cpsr_mask = MODE_MASK, - .cpsr_val = USR_MODE, - .fn = uprobe_trap_handler, -}; - -static struct undef_hook uprobes_arm_ss_hook = { - .instr_mask = 0x0fffffff, - .instr_val = (UPROBE_SS_ARM_INSN & 0x0fffffff), - .cpsr_mask = MODE_MASK, - .cpsr_val = USR_MODE, - .fn = uprobe_trap_handler, -}; - -static int arch_uprobes_init(void) -{ - register_undef_hook(&uprobes_arm_break_hook); - register_undef_hook(&uprobes_arm_ss_hook); - - return 0; -} -device_initcall(arch_uprobes_init); diff --git a/arch/arm/kernel/uprobes.h b/arch/arm/kernel/uprobes.h deleted file mode 100644 index 1d0c12dfbd03..000000000000 --- a/arch/arm/kernel/uprobes.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (C) 2012 Rabin Vincent - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#ifndef __ARM_KERNEL_UPROBES_H -#define __ARM_KERNEL_UPROBES_H - -enum probes_insn uprobe_decode_ldmstm(probes_opcode_t insn, - struct arch_probes_insn *asi, - const struct decode_header *d); - -enum probes_insn decode_ldr(probes_opcode_t insn, - struct arch_probes_insn *asi, - const struct decode_header *d); - -enum probes_insn -decode_rd12rn16rm0rs8_rwflags(probes_opcode_t insn, - struct arch_probes_insn *asi, - const struct decode_header *d); - -enum probes_insn -decode_wb_pc(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d, bool alu); - -enum probes_insn -decode_pc_ro(probes_opcode_t insn, struct arch_probes_insn *asi, - const struct decode_header *d); - -extern const union decode_action uprobes_probes_actions[]; - -#endif diff --git a/arch/arm/probes/Makefile b/arch/arm/probes/Makefile new file mode 100644 index 000000000000..aa1f8590dcdd --- /dev/null +++ b/arch/arm/probes/Makefile @@ -0,0 +1,7 @@ +obj-$(CONFIG_UPROBES) += decode.o decode-arm.o uprobes/ +obj-$(CONFIG_KPROBES) += decode.o kprobes/ +ifdef CONFIG_THUMB2_KERNEL +obj-$(CONFIG_KPROBES) += decode-thumb.o +else +obj-$(CONFIG_KPROBES) += decode-arm.o +endif diff --git a/arch/arm/probes/decode-arm.c b/arch/arm/probes/decode-arm.c new file mode 100644 index 000000000000..e39cc75952f2 --- /dev/null +++ b/arch/arm/probes/decode-arm.c @@ -0,0 +1,735 @@ +/* + * + * arch/arm/probes/decode-arm.c + * + * Some code moved here from arch/arm/kernel/kprobes-arm.c + * + * Copyright (C) 2006, 2007 Motorola Inc. 
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ + +#include +#include +#include +#include + +#include "decode.h" +#include "decode-arm.h" + +#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit))))) + +#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25) + +/* + * To avoid the complications of mimicing single-stepping on a + * processor without a Next-PC or a single-step mode, and to + * avoid having to deal with the side-effects of boosting, we + * simulate or emulate (almost) all ARM instructions. + * + * "Simulation" is where the instruction's behavior is duplicated in + * C code. "Emulation" is where the original instruction is rewritten + * and executed, often by altering its registers. + * + * By having all behavior of the kprobe'd instruction completed before + * returning from the kprobe_handler(), all locks (scheduler and + * interrupt) can safely be released. There is no need for secondary + * breakpoints, no race with MP or preemptable kernels, nor having to + * clean up resources counts at a later time impacting overall system + * performance. By rewriting the instruction, only the minimum registers + * need to be loaded and saved back optimizing performance. + * + * Calling the insnslot_*_rwflags version of a function doesn't hurt + * anything even when the CPSR flags aren't updated by the + * instruction. It's just a little slower in return for saving + * a little space by not having a duplicate function that doesn't + * update the flags. (The same optimization can be said for + * instructions that do or don't perform register writeback) + * Also, instructions can either read the flags, only write the + * flags, or read and write the flags. To save combinations + * rather than for sheer performance, flag functions just assume + * read and write of flags. 
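As a quick worked check of the sign_extend/branch_displacement macros above (illustration only, not part of the patch): 0xeafffffe is the encoding of "b ." (a branch to itself).

static void __maybe_unused branch_displacement_example(void)
{
        probes_opcode_t insn = 0xeafffffe;      /* b . */
        int disp = branch_displacement(insn);   /* sign-extended: -8 */

        /*
         * simulate_bbl() below then computes
         *   new_pc = iaddr + 8 + disp == iaddr,
         * i.e. the branch lands back on the probed instruction.
         */
        (void)disp;
}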
+ */ + +void __kprobes simulate_bbl(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + long iaddr = (long) regs->ARM_pc - 4; + int disp = branch_displacement(insn); + + if (insn & (1 << 24)) + regs->ARM_lr = iaddr + 4; + + regs->ARM_pc = iaddr + 8 + disp; +} + +void __kprobes simulate_blx1(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + long iaddr = (long) regs->ARM_pc - 4; + int disp = branch_displacement(insn); + + regs->ARM_lr = iaddr + 4; + regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2); + regs->ARM_cpsr |= PSR_T_BIT; +} + +void __kprobes simulate_blx2bx(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rm = insn & 0xf; + long rmv = regs->uregs[rm]; + + if (insn & (1 << 5)) + regs->ARM_lr = (long) regs->ARM_pc; + + regs->ARM_pc = rmv & ~0x1; + regs->ARM_cpsr &= ~PSR_T_BIT; + if (rmv & 0x1) + regs->ARM_cpsr |= PSR_T_BIT; +} + +void __kprobes simulate_mrs(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rd = (insn >> 12) & 0xf; + unsigned long mask = 0xf8ff03df; /* Mask out execution state */ + regs->uregs[rd] = regs->ARM_cpsr & mask; +} + +void __kprobes simulate_mov_ipsp(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + regs->uregs[12] = regs->uregs[13]; +} + +/* + * For the instruction masking and comparisons in all the "space_*" + * functions below, Do _not_ rearrange the order of tests unless + * you're very, very sure of what you are doing. For the sake of + * efficiency, the masks for some tests sometimes assume other test + * have been done prior to them so the number of patterns to test + * for an instruction set can be as broad as possible to reduce the + * number of tests needed. + */ + +static const union decode_item arm_1111_table[] = { + /* Unconditional instructions */ + + /* memory hint 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx */ + /* PLDI (immediate) 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx */ + /* PLDW (immediate) 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx */ + /* PLD (immediate) 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx */ + DECODE_SIMULATE (0xfe300000, 0xf4100000, PROBES_PRELOAD_IMM), + + /* memory hint 1111 0110 x001 xxxx xxxx xxxx xxx0 xxxx */ + /* PLDI (register) 1111 0110 x101 xxxx xxxx xxxx xxx0 xxxx */ + /* PLDW (register) 1111 0111 x001 xxxx xxxx xxxx xxx0 xxxx */ + /* PLD (register) 1111 0111 x101 xxxx xxxx xxxx xxx0 xxxx */ + DECODE_SIMULATE (0xfe300010, 0xf6100000, PROBES_PRELOAD_REG), + + /* BLX (immediate) 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx */ + DECODE_SIMULATE (0xfe000000, 0xfa000000, PROBES_BRANCH_IMM), + + /* CPS 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */ + /* SETEND 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */ + /* SRS 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */ + /* RFE 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */ + + /* Coprocessor instructions... */ + /* MCRR2 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx */ + /* MRRC2 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx */ + /* LDC2 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */ + /* STC2 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */ + /* CDP2 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */ + /* MCR2 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */ + /* MRC2 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */ + + /* Other unallocated instructions... 
*/ + DECODE_END +}; + +static const union decode_item arm_cccc_0001_0xx0____0xxx_table[] = { + /* Miscellaneous instructions */ + + /* MRS cpsr cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */ + DECODE_SIMULATEX(0x0ff000f0, 0x01000000, PROBES_MRS, + REGS(0, NOPC, 0, 0, 0)), + + /* BX cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */ + DECODE_SIMULATE (0x0ff000f0, 0x01200010, PROBES_BRANCH_REG), + + /* BLX (register) cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */ + DECODE_SIMULATEX(0x0ff000f0, 0x01200030, PROBES_BRANCH_REG, + REGS(0, 0, 0, 0, NOPC)), + + /* CLZ cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */ + DECODE_EMULATEX (0x0ff000f0, 0x01600010, PROBES_CLZ, + REGS(0, NOPC, 0, 0, NOPC)), + + /* QADD cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx */ + /* QSUB cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx */ + /* QDADD cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx */ + /* QDSUB cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx */ + DECODE_EMULATEX (0x0f9000f0, 0x01000050, PROBES_SATURATING_ARITHMETIC, + REGS(NOPC, NOPC, 0, 0, NOPC)), + + /* BXJ cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */ + /* MSR cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */ + /* MRS spsr cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */ + /* BKPT 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */ + /* SMC cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */ + /* And unallocated instructions... */ + DECODE_END +}; + +static const union decode_item arm_cccc_0001_0xx0____1xx0_table[] = { + /* Halfword multiply and multiply-accumulate */ + + /* SMLALxy cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */ + DECODE_EMULATEX (0x0ff00090, 0x01400080, PROBES_MUL1, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + /* SMULWy cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */ + DECODE_OR (0x0ff000b0, 0x012000a0), + /* SMULxy cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */ + DECODE_EMULATEX (0x0ff00090, 0x01600080, PROBES_MUL2, + REGS(NOPC, 0, NOPC, 0, NOPC)), + + /* SMLAxy cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx */ + DECODE_OR (0x0ff00090, 0x01000080), + /* SMLAWy cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx */ + DECODE_EMULATEX (0x0ff000b0, 0x01200080, PROBES_MUL2, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + DECODE_END +}; + +static const union decode_item arm_cccc_0000_____1001_table[] = { + /* Multiply and multiply-accumulate */ + + /* MUL cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx */ + /* MULS cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx */ + DECODE_EMULATEX (0x0fe000f0, 0x00000090, PROBES_MUL2, + REGS(NOPC, 0, NOPC, 0, NOPC)), + + /* MLA cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx */ + /* MLAS cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx */ + DECODE_OR (0x0fe000f0, 0x00200090), + /* MLS cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx */ + DECODE_EMULATEX (0x0ff000f0, 0x00600090, PROBES_MUL2, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + /* UMAAL cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx */ + DECODE_OR (0x0ff000f0, 0x00400090), + /* UMULL cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx */ + /* UMULLS cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx */ + /* UMLAL cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx */ + /* UMLALS cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx */ + /* SMULL cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx */ + /* SMULLS cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx */ + /* SMLAL cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx */ + /* SMLALS cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx */ + DECODE_EMULATEX (0x0f8000f0, 0x00800090, PROBES_MUL1, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + DECODE_END +}; + +static const union decode_item arm_cccc_0001_____1001_table[] = { + /* Synchronization primitives */ + +#if __LINUX_ARM_ARCH__ < 6 + /* Deprecated on ARMv6 and may be UNDEFINED on v7 */ + /* SMP/SWPB cccc 0001 0x00 
xxxx xxxx xxxx 1001 xxxx */ + DECODE_EMULATEX (0x0fb000f0, 0x01000090, PROBES_SWP, + REGS(NOPC, NOPC, 0, 0, NOPC)), +#endif + /* LDREX/STREX{,D,B,H} cccc 0001 1xxx xxxx xxxx xxxx 1001 xxxx */ + /* And unallocated instructions... */ + DECODE_END +}; + +static const union decode_item arm_cccc_000x_____1xx1_table[] = { + /* Extra load/store instructions */ + + /* STRHT cccc 0000 xx10 xxxx xxxx xxxx 1011 xxxx */ + /* ??? cccc 0000 xx10 xxxx xxxx xxxx 11x1 xxxx */ + /* LDRHT cccc 0000 xx11 xxxx xxxx xxxx 1011 xxxx */ + /* LDRSBT cccc 0000 xx11 xxxx xxxx xxxx 1101 xxxx */ + /* LDRSHT cccc 0000 xx11 xxxx xxxx xxxx 1111 xxxx */ + DECODE_REJECT (0x0f200090, 0x00200090), + + /* LDRD/STRD lr,pc,{... cccc 000x x0x0 xxxx 111x xxxx 1101 xxxx */ + DECODE_REJECT (0x0e10e0d0, 0x0000e0d0), + + /* LDRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1101 xxxx */ + /* STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx */ + DECODE_EMULATEX (0x0e5000d0, 0x000000d0, PROBES_LDRSTRD, + REGS(NOPCWB, NOPCX, 0, 0, NOPC)), + + /* LDRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1101 xxxx */ + /* STRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1111 xxxx */ + DECODE_EMULATEX (0x0e5000d0, 0x004000d0, PROBES_LDRSTRD, + REGS(NOPCWB, NOPCX, 0, 0, 0)), + + /* STRH (register) cccc 000x x0x0 xxxx xxxx xxxx 1011 xxxx */ + DECODE_EMULATEX (0x0e5000f0, 0x000000b0, PROBES_STORE_EXTRA, + REGS(NOPCWB, NOPC, 0, 0, NOPC)), + + /* LDRH (register) cccc 000x x0x1 xxxx xxxx xxxx 1011 xxxx */ + /* LDRSB (register) cccc 000x x0x1 xxxx xxxx xxxx 1101 xxxx */ + /* LDRSH (register) cccc 000x x0x1 xxxx xxxx xxxx 1111 xxxx */ + DECODE_EMULATEX (0x0e500090, 0x00100090, PROBES_LOAD_EXTRA, + REGS(NOPCWB, NOPC, 0, 0, NOPC)), + + /* STRH (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1011 xxxx */ + DECODE_EMULATEX (0x0e5000f0, 0x004000b0, PROBES_STORE_EXTRA, + REGS(NOPCWB, NOPC, 0, 0, 0)), + + /* LDRH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1011 xxxx */ + /* LDRSB (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1101 xxxx */ + /* LDRSH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1111 xxxx */ + DECODE_EMULATEX (0x0e500090, 0x00500090, PROBES_LOAD_EXTRA, + REGS(NOPCWB, NOPC, 0, 0, 0)), + + DECODE_END +}; + +static const union decode_item arm_cccc_000x_table[] = { + /* Data-processing (register) */ + + /* S PC, ... 
cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx */ + DECODE_REJECT (0x0e10f000, 0x0010f000), + + /* MOV IP, SP 1110 0001 1010 0000 1100 0000 0000 1101 */ + DECODE_SIMULATE (0xffffffff, 0xe1a0c00d, PROBES_MOV_IP_SP), + + /* TST (register) cccc 0001 0001 xxxx xxxx xxxx xxx0 xxxx */ + /* TEQ (register) cccc 0001 0011 xxxx xxxx xxxx xxx0 xxxx */ + /* CMP (register) cccc 0001 0101 xxxx xxxx xxxx xxx0 xxxx */ + /* CMN (register) cccc 0001 0111 xxxx xxxx xxxx xxx0 xxxx */ + DECODE_EMULATEX (0x0f900010, 0x01100000, PROBES_DATA_PROCESSING_REG, + REGS(ANY, 0, 0, 0, ANY)), + + /* MOV (register) cccc 0001 101x xxxx xxxx xxxx xxx0 xxxx */ + /* MVN (register) cccc 0001 111x xxxx xxxx xxxx xxx0 xxxx */ + DECODE_EMULATEX (0x0fa00010, 0x01a00000, PROBES_DATA_PROCESSING_REG, + REGS(0, ANY, 0, 0, ANY)), + + /* AND (register) cccc 0000 000x xxxx xxxx xxxx xxx0 xxxx */ + /* EOR (register) cccc 0000 001x xxxx xxxx xxxx xxx0 xxxx */ + /* SUB (register) cccc 0000 010x xxxx xxxx xxxx xxx0 xxxx */ + /* RSB (register) cccc 0000 011x xxxx xxxx xxxx xxx0 xxxx */ + /* ADD (register) cccc 0000 100x xxxx xxxx xxxx xxx0 xxxx */ + /* ADC (register) cccc 0000 101x xxxx xxxx xxxx xxx0 xxxx */ + /* SBC (register) cccc 0000 110x xxxx xxxx xxxx xxx0 xxxx */ + /* RSC (register) cccc 0000 111x xxxx xxxx xxxx xxx0 xxxx */ + /* ORR (register) cccc 0001 100x xxxx xxxx xxxx xxx0 xxxx */ + /* BIC (register) cccc 0001 110x xxxx xxxx xxxx xxx0 xxxx */ + DECODE_EMULATEX (0x0e000010, 0x00000000, PROBES_DATA_PROCESSING_REG, + REGS(ANY, ANY, 0, 0, ANY)), + + /* TST (reg-shift reg) cccc 0001 0001 xxxx xxxx xxxx 0xx1 xxxx */ + /* TEQ (reg-shift reg) cccc 0001 0011 xxxx xxxx xxxx 0xx1 xxxx */ + /* CMP (reg-shift reg) cccc 0001 0101 xxxx xxxx xxxx 0xx1 xxxx */ + /* CMN (reg-shift reg) cccc 0001 0111 xxxx xxxx xxxx 0xx1 xxxx */ + DECODE_EMULATEX (0x0f900090, 0x01100010, PROBES_DATA_PROCESSING_REG, + REGS(NOPC, 0, NOPC, 0, NOPC)), + + /* MOV (reg-shift reg) cccc 0001 101x xxxx xxxx xxxx 0xx1 xxxx */ + /* MVN (reg-shift reg) cccc 0001 111x xxxx xxxx xxxx 0xx1 xxxx */ + DECODE_EMULATEX (0x0fa00090, 0x01a00010, PROBES_DATA_PROCESSING_REG, + REGS(0, NOPC, NOPC, 0, NOPC)), + + /* AND (reg-shift reg) cccc 0000 000x xxxx xxxx xxxx 0xx1 xxxx */ + /* EOR (reg-shift reg) cccc 0000 001x xxxx xxxx xxxx 0xx1 xxxx */ + /* SUB (reg-shift reg) cccc 0000 010x xxxx xxxx xxxx 0xx1 xxxx */ + /* RSB (reg-shift reg) cccc 0000 011x xxxx xxxx xxxx 0xx1 xxxx */ + /* ADD (reg-shift reg) cccc 0000 100x xxxx xxxx xxxx 0xx1 xxxx */ + /* ADC (reg-shift reg) cccc 0000 101x xxxx xxxx xxxx 0xx1 xxxx */ + /* SBC (reg-shift reg) cccc 0000 110x xxxx xxxx xxxx 0xx1 xxxx */ + /* RSC (reg-shift reg) cccc 0000 111x xxxx xxxx xxxx 0xx1 xxxx */ + /* ORR (reg-shift reg) cccc 0001 100x xxxx xxxx xxxx 0xx1 xxxx */ + /* BIC (reg-shift reg) cccc 0001 110x xxxx xxxx xxxx 0xx1 xxxx */ + DECODE_EMULATEX (0x0e000090, 0x00000010, PROBES_DATA_PROCESSING_REG, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + DECODE_END +}; + +static const union decode_item arm_cccc_001x_table[] = { + /* Data-processing (immediate) */ + + /* MOVW cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */ + /* MOVT cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0fb00000, 0x03000000, PROBES_DATA_PROCESSING_IMM, + REGS(0, NOPC, 0, 0, 0)), + + /* YIELD cccc 0011 0010 0000 xxxx xxxx 0000 0001 */ + DECODE_OR (0x0fff00ff, 0x03200001), + /* SEV cccc 0011 0010 0000 xxxx xxxx 0000 0100 */ + DECODE_EMULATE (0x0fff00ff, 0x03200004, PROBES_EMULATE_NONE), + /* NOP cccc 0011 0010 0000 xxxx xxxx 0000 0000 */ + /* WFE cccc 0011 0010 0000 xxxx xxxx 
0000 0010 */ + /* WFI cccc 0011 0010 0000 xxxx xxxx 0000 0011 */ + DECODE_SIMULATE (0x0fff00fc, 0x03200000, PROBES_SIMULATE_NOP), + /* DBG cccc 0011 0010 0000 xxxx xxxx ffff xxxx */ + /* unallocated hints cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */ + /* MSR (immediate) cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0x0fb00000, 0x03200000), + + /* S PC, ... cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx */ + DECODE_REJECT (0x0e10f000, 0x0210f000), + + /* TST (immediate) cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx */ + /* TEQ (immediate) cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx */ + /* CMP (immediate) cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx */ + /* CMN (immediate) cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0f900000, 0x03100000, PROBES_DATA_PROCESSING_IMM, + REGS(ANY, 0, 0, 0, 0)), + + /* MOV (immediate) cccc 0011 101x xxxx xxxx xxxx xxxx xxxx */ + /* MVN (immediate) cccc 0011 111x xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0fa00000, 0x03a00000, PROBES_DATA_PROCESSING_IMM, + REGS(0, ANY, 0, 0, 0)), + + /* AND (immediate) cccc 0010 000x xxxx xxxx xxxx xxxx xxxx */ + /* EOR (immediate) cccc 0010 001x xxxx xxxx xxxx xxxx xxxx */ + /* SUB (immediate) cccc 0010 010x xxxx xxxx xxxx xxxx xxxx */ + /* RSB (immediate) cccc 0010 011x xxxx xxxx xxxx xxxx xxxx */ + /* ADD (immediate) cccc 0010 100x xxxx xxxx xxxx xxxx xxxx */ + /* ADC (immediate) cccc 0010 101x xxxx xxxx xxxx xxxx xxxx */ + /* SBC (immediate) cccc 0010 110x xxxx xxxx xxxx xxxx xxxx */ + /* RSC (immediate) cccc 0010 111x xxxx xxxx xxxx xxxx xxxx */ + /* ORR (immediate) cccc 0011 100x xxxx xxxx xxxx xxxx xxxx */ + /* BIC (immediate) cccc 0011 110x xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0e000000, 0x02000000, PROBES_DATA_PROCESSING_IMM, + REGS(ANY, ANY, 0, 0, 0)), + + DECODE_END +}; + +static const union decode_item arm_cccc_0110_____xxx1_table[] = { + /* Media instructions */ + + /* SEL cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx */ + DECODE_EMULATEX (0x0ff000f0, 0x068000b0, PROBES_SATURATE, + REGS(NOPC, NOPC, 0, 0, NOPC)), + + /* SSAT cccc 0110 101x xxxx xxxx xxxx xx01 xxxx */ + /* USAT cccc 0110 111x xxxx xxxx xxxx xx01 xxxx */ + DECODE_OR(0x0fa00030, 0x06a00010), + /* SSAT16 cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx */ + /* USAT16 cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx */ + DECODE_EMULATEX (0x0fb000f0, 0x06a00030, PROBES_SATURATE, + REGS(0, NOPC, 0, 0, NOPC)), + + /* REV cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */ + /* REV16 cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */ + /* RBIT cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */ + /* REVSH cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */ + DECODE_EMULATEX (0x0fb00070, 0x06b00030, PROBES_REV, + REGS(0, NOPC, 0, 0, NOPC)), + + /* ??? cccc 0110 0x00 xxxx xxxx xxxx xxx1 xxxx */ + DECODE_REJECT (0x0fb00010, 0x06000010), + /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1011 xxxx */ + DECODE_REJECT (0x0f8000f0, 0x060000b0), + /* ??? 
cccc 0110 0xxx xxxx xxxx xxxx 1101 xxxx */ + DECODE_REJECT (0x0f8000f0, 0x060000d0), + /* SADD16 cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx */ + /* SADDSUBX cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx */ + /* SSUBADDX cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx */ + /* SSUB16 cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx */ + /* SADD8 cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx */ + /* SSUB8 cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx */ + /* QADD16 cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx */ + /* QADDSUBX cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx */ + /* QSUBADDX cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx */ + /* QSUB16 cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx */ + /* QADD8 cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx */ + /* QSUB8 cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx */ + /* SHADD16 cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx */ + /* SHADDSUBX cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx */ + /* SHSUBADDX cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx */ + /* SHSUB16 cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx */ + /* SHADD8 cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx */ + /* SHSUB8 cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx */ + /* UADD16 cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx */ + /* UADDSUBX cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx */ + /* USUBADDX cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx */ + /* USUB16 cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx */ + /* UADD8 cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx */ + /* USUB8 cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx */ + /* UQADD16 cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx */ + /* UQADDSUBX cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx */ + /* UQSUBADDX cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx */ + /* UQSUB16 cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx */ + /* UQADD8 cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx */ + /* UQSUB8 cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx */ + /* UHADD16 cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx */ + /* UHADDSUBX cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx */ + /* UHSUBADDX cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx */ + /* UHSUB16 cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx */ + /* UHADD8 cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx */ + /* UHSUB8 cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx */ + DECODE_EMULATEX (0x0f800010, 0x06000010, PROBES_MMI, + REGS(NOPC, NOPC, 0, 0, NOPC)), + + /* PKHBT cccc 0110 1000 xxxx xxxx xxxx x001 xxxx */ + /* PKHTB cccc 0110 1000 xxxx xxxx xxxx x101 xxxx */ + DECODE_EMULATEX (0x0ff00030, 0x06800010, PROBES_PACK, + REGS(NOPC, NOPC, 0, 0, NOPC)), + + /* ??? cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx */ + /* ??? 
cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx */ + DECODE_REJECT (0x0fb000f0, 0x06900070), + + /* SXTB16 cccc 0110 1000 1111 xxxx xxxx 0111 xxxx */ + /* SXTB cccc 0110 1010 1111 xxxx xxxx 0111 xxxx */ + /* SXTH cccc 0110 1011 1111 xxxx xxxx 0111 xxxx */ + /* UXTB16 cccc 0110 1100 1111 xxxx xxxx 0111 xxxx */ + /* UXTB cccc 0110 1110 1111 xxxx xxxx 0111 xxxx */ + /* UXTH cccc 0110 1111 1111 xxxx xxxx 0111 xxxx */ + DECODE_EMULATEX (0x0f8f00f0, 0x068f0070, PROBES_EXTEND, + REGS(0, NOPC, 0, 0, NOPC)), + + /* SXTAB16 cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx */ + /* SXTAB cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx */ + /* SXTAH cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx */ + /* UXTAB16 cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx */ + /* UXTAB cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx */ + /* UXTAH cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx */ + DECODE_EMULATEX (0x0f8000f0, 0x06800070, PROBES_EXTEND_ADD, + REGS(NOPCX, NOPC, 0, 0, NOPC)), + + DECODE_END +}; + +static const union decode_item arm_cccc_0111_____xxx1_table[] = { + /* Media instructions */ + + /* UNDEFINED cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */ + DECODE_REJECT (0x0ff000f0, 0x07f000f0), + + /* SMLALD cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */ + /* SMLSLD cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */ + DECODE_EMULATEX (0x0ff00090, 0x07400010, PROBES_MUL_ADD_LONG, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + /* SMUAD cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx */ + /* SMUSD cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx */ + DECODE_OR (0x0ff0f090, 0x0700f010), + /* SMMUL cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx */ + DECODE_OR (0x0ff0f0d0, 0x0750f010), + /* USAD8 cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */ + DECODE_EMULATEX (0x0ff0f0f0, 0x0780f010, PROBES_MUL_ADD, + REGS(NOPC, 0, NOPC, 0, NOPC)), + + /* SMLAD cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx */ + /* SMLSD cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx */ + DECODE_OR (0x0ff00090, 0x07000010), + /* SMMLA cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx */ + DECODE_OR (0x0ff000d0, 0x07500010), + /* USADA8 cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */ + DECODE_EMULATEX (0x0ff000f0, 0x07800010, PROBES_MUL_ADD, + REGS(NOPC, NOPCX, NOPC, 0, NOPC)), + + /* SMMLS cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx */ + DECODE_EMULATEX (0x0ff000d0, 0x075000d0, PROBES_MUL_ADD, + REGS(NOPC, NOPC, NOPC, 0, NOPC)), + + /* SBFX cccc 0111 101x xxxx xxxx xxxx x101 xxxx */ + /* UBFX cccc 0111 111x xxxx xxxx xxxx x101 xxxx */ + DECODE_EMULATEX (0x0fa00070, 0x07a00050, PROBES_BITFIELD, + REGS(0, NOPC, 0, 0, NOPC)), + + /* BFC cccc 0111 110x xxxx xxxx xxxx x001 1111 */ + DECODE_EMULATEX (0x0fe0007f, 0x07c0001f, PROBES_BITFIELD, + REGS(0, NOPC, 0, 0, 0)), + + /* BFI cccc 0111 110x xxxx xxxx xxxx x001 xxxx */ + DECODE_EMULATEX (0x0fe00070, 0x07c00010, PROBES_BITFIELD, + REGS(0, NOPC, 0, 0, NOPCX)), + + DECODE_END +}; + +static const union decode_item arm_cccc_01xx_table[] = { + /* Load/store word and unsigned byte */ + + /* LDRB/STRB pc,[...] 
cccc 01xx x0xx xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0x0c40f000, 0x0440f000), + + /* STRT cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */ + /* LDRT cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */ + /* STRBT cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */ + /* LDRBT cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0x0d200000, 0x04200000), + + /* STR (immediate) cccc 010x x0x0 xxxx xxxx xxxx xxxx xxxx */ + /* STRB (immediate) cccc 010x x1x0 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0e100000, 0x04000000, PROBES_STORE, + REGS(NOPCWB, ANY, 0, 0, 0)), + + /* LDR (immediate) cccc 010x x0x1 xxxx xxxx xxxx xxxx xxxx */ + /* LDRB (immediate) cccc 010x x1x1 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0e100000, 0x04100000, PROBES_LOAD, + REGS(NOPCWB, ANY, 0, 0, 0)), + + /* STR (register) cccc 011x x0x0 xxxx xxxx xxxx xxxx xxxx */ + /* STRB (register) cccc 011x x1x0 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0e100000, 0x06000000, PROBES_STORE, + REGS(NOPCWB, ANY, 0, 0, NOPC)), + + /* LDR (register) cccc 011x x0x1 xxxx xxxx xxxx xxxx xxxx */ + /* LDRB (register) cccc 011x x1x1 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0x0e100000, 0x06100000, PROBES_LOAD, + REGS(NOPCWB, ANY, 0, 0, NOPC)), + + DECODE_END +}; + +static const union decode_item arm_cccc_100x_table[] = { + /* Block data transfer instructions */ + + /* LDM cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */ + /* STM cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */ + DECODE_CUSTOM (0x0e400000, 0x08000000, PROBES_LDMSTM), + + /* STM (user registers) cccc 100x x1x0 xxxx xxxx xxxx xxxx xxxx */ + /* LDM (user registers) cccc 100x x1x1 xxxx 0xxx xxxx xxxx xxxx */ + /* LDM (exception ret) cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */ + DECODE_END +}; + +const union decode_item probes_decode_arm_table[] = { + /* + * Unconditional instructions + * 1111 xxxx xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xf0000000, 0xf0000000, arm_1111_table), + + /* + * Miscellaneous instructions + * cccc 0001 0xx0 xxxx xxxx xxxx 0xxx xxxx + */ + DECODE_TABLE (0x0f900080, 0x01000000, arm_cccc_0001_0xx0____0xxx_table), + + /* + * Halfword multiply and multiply-accumulate + * cccc 0001 0xx0 xxxx xxxx xxxx 1xx0 xxxx + */ + DECODE_TABLE (0x0f900090, 0x01000080, arm_cccc_0001_0xx0____1xx0_table), + + /* + * Multiply and multiply-accumulate + * cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx + */ + DECODE_TABLE (0x0f0000f0, 0x00000090, arm_cccc_0000_____1001_table), + + /* + * Synchronization primitives + * cccc 0001 xxxx xxxx xxxx xxxx 1001 xxxx + */ + DECODE_TABLE (0x0f0000f0, 0x01000090, arm_cccc_0001_____1001_table), + + /* + * Extra load/store instructions + * cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx + */ + DECODE_TABLE (0x0e000090, 0x00000090, arm_cccc_000x_____1xx1_table), + + /* + * Data-processing (register) + * cccc 000x xxxx xxxx xxxx xxxx xxx0 xxxx + * Data-processing (register-shifted register) + * cccc 000x xxxx xxxx xxxx xxxx 0xx1 xxxx + */ + DECODE_TABLE (0x0e000000, 0x00000000, arm_cccc_000x_table), + + /* + * Data-processing (immediate) + * cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0x0e000000, 0x02000000, arm_cccc_001x_table), + + /* + * Media instructions + * cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx + */ + DECODE_TABLE (0x0f000010, 0x06000010, arm_cccc_0110_____xxx1_table), + DECODE_TABLE (0x0f000010, 0x07000010, arm_cccc_0111_____xxx1_table), + + /* + * Load/store word and unsigned byte + * cccc 01xx xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0x0c000000, 0x04000000, arm_cccc_01xx_table), + + /* + * Block data transfer 
instructions + * cccc 100x xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0x0e000000, 0x08000000, arm_cccc_100x_table), + + /* B cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */ + /* BL cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */ + DECODE_SIMULATE (0x0e000000, 0x0a000000, PROBES_BRANCH), + + /* + * Supervisor Call, and coprocessor instructions + */ + + /* MCRR cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx */ + /* MRRC cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx */ + /* LDC cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */ + /* STC cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */ + /* CDP cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */ + /* MCR cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */ + /* MRC cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */ + /* SVC cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0x0c000000, 0x0c000000), + + DECODE_END +}; +#ifdef CONFIG_ARM_KPROBES_TEST_MODULE +EXPORT_SYMBOL_GPL(probes_decode_arm_table); +#endif + +static void __kprobes arm_singlestep(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + regs->ARM_pc += 4; + asi->insn_handler(insn, asi, regs); +} + +/* Return: + * INSN_REJECTED If instruction is one not allowed to kprobe, + * INSN_GOOD If instruction is supported and uses instruction slot, + * INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot. + * + * For instructions we don't want to kprobe (INSN_REJECTED return result): + * These are generally ones that modify the processor state making + * them "hard" to simulate such as switches processor modes or + * make accesses in alternate modes. Any of these could be simulated + * if the work was put into it, but low return considering they + * should also be very rare. + */ +enum probes_insn __kprobes +arm_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool emulate, const union decode_action *actions) +{ + asi->insn_singlestep = arm_singlestep; + asi->insn_check_cc = probes_condition_checks[insn>>28]; + return probes_decode_insn(insn, asi, probes_decode_arm_table, false, + emulate, actions); +} diff --git a/arch/arm/probes/decode-arm.h b/arch/arm/probes/decode-arm.h new file mode 100644 index 000000000000..9c56b40d6a57 --- /dev/null +++ b/arch/arm/probes/decode-arm.h @@ -0,0 +1,75 @@ +/* + * arch/arm/probes/decode-arm.h + * + * Copyright 2013 Linaro Ltd. + * Written by: David A. Long + * + * The code contained herein is licensed under the GNU General Public + * License. 
You may obtain a copy of the GNU General Public License + * Version 2 or later at the following locations: + * + * http://www.opensource.org/licenses/gpl-license.html + * http://www.gnu.org/copyleft/gpl.html + */ + +#ifndef _ARM_KERNEL_PROBES_ARM_H +#define _ARM_KERNEL_PROBES_ARM_H + +#include "decode.h" + +enum probes_arm_action { + PROBES_EMULATE_NONE, + PROBES_SIMULATE_NOP, + PROBES_PRELOAD_IMM, + PROBES_PRELOAD_REG, + PROBES_BRANCH_IMM, + PROBES_BRANCH_REG, + PROBES_MRS, + PROBES_CLZ, + PROBES_SATURATING_ARITHMETIC, + PROBES_MUL1, + PROBES_MUL2, + PROBES_SWP, + PROBES_LDRSTRD, + PROBES_LOAD, + PROBES_STORE, + PROBES_LOAD_EXTRA, + PROBES_STORE_EXTRA, + PROBES_MOV_IP_SP, + PROBES_DATA_PROCESSING_REG, + PROBES_DATA_PROCESSING_IMM, + PROBES_MOV_HALFWORD, + PROBES_SEV, + PROBES_WFE, + PROBES_SATURATE, + PROBES_REV, + PROBES_MMI, + PROBES_PACK, + PROBES_EXTEND, + PROBES_EXTEND_ADD, + PROBES_MUL_ADD_LONG, + PROBES_MUL_ADD, + PROBES_BITFIELD, + PROBES_BRANCH, + PROBES_LDMSTM, + NUM_PROBES_ARM_ACTIONS +}; + +void __kprobes simulate_bbl(probes_opcode_t opcode, + struct arch_probes_insn *asi, struct pt_regs *regs); +void __kprobes simulate_blx1(probes_opcode_t opcode, + struct arch_probes_insn *asi, struct pt_regs *regs); +void __kprobes simulate_blx2bx(probes_opcode_t opcode, + struct arch_probes_insn *asi, struct pt_regs *regs); +void __kprobes simulate_mrs(probes_opcode_t opcode, + struct arch_probes_insn *asi, struct pt_regs *regs); +void __kprobes simulate_mov_ipsp(probes_opcode_t opcode, + struct arch_probes_insn *asi, struct pt_regs *regs); + +extern const union decode_item probes_decode_arm_table[]; + +enum probes_insn arm_probes_decode_insn(probes_opcode_t, + struct arch_probes_insn *, bool emulate, + const union decode_action *actions); + +#endif diff --git a/arch/arm/probes/decode-thumb.c b/arch/arm/probes/decode-thumb.c new file mode 100644 index 000000000000..2f0453a895dc --- /dev/null +++ b/arch/arm/probes/decode-thumb.c @@ -0,0 +1,882 @@ +/* + * arch/arm/probes/decode-thumb.c + * + * Copyright (C) 2011 Jon Medhurst . + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. 
+ */ + +#include +#include +#include + +#include "decode.h" +#include "decode-thumb.h" + + +static const union decode_item t32_table_1110_100x_x0xx[] = { + /* Load/store multiple instructions */ + + /* Rn is PC 1110 100x x0xx 1111 xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfe4f0000, 0xe80f0000), + + /* SRS 1110 1000 00x0 xxxx xxxx xxxx xxxx xxxx */ + /* RFE 1110 1000 00x1 xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xffc00000, 0xe8000000), + /* SRS 1110 1001 10x0 xxxx xxxx xxxx xxxx xxxx */ + /* RFE 1110 1001 10x1 xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xffc00000, 0xe9800000), + + /* STM Rn, {...pc} 1110 100x x0x0 xxxx 1xxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfe508000, 0xe8008000), + /* LDM Rn, {...lr,pc} 1110 100x x0x1 xxxx 11xx xxxx xxxx xxxx */ + DECODE_REJECT (0xfe50c000, 0xe810c000), + /* LDM/STM Rn, {...sp} 1110 100x x0xx xxxx xx1x xxxx xxxx xxxx */ + DECODE_REJECT (0xfe402000, 0xe8002000), + + /* STMIA 1110 1000 10x0 xxxx xxxx xxxx xxxx xxxx */ + /* LDMIA 1110 1000 10x1 xxxx xxxx xxxx xxxx xxxx */ + /* STMDB 1110 1001 00x0 xxxx xxxx xxxx xxxx xxxx */ + /* LDMDB 1110 1001 00x1 xxxx xxxx xxxx xxxx xxxx */ + DECODE_CUSTOM (0xfe400000, 0xe8000000, PROBES_T32_LDMSTM), + + DECODE_END +}; + +static const union decode_item t32_table_1110_100x_x1xx[] = { + /* Load/store dual, load/store exclusive, table branch */ + + /* STRD (immediate) 1110 1000 x110 xxxx xxxx xxxx xxxx xxxx */ + /* LDRD (immediate) 1110 1000 x111 xxxx xxxx xxxx xxxx xxxx */ + DECODE_OR (0xff600000, 0xe8600000), + /* STRD (immediate) 1110 1001 x1x0 xxxx xxxx xxxx xxxx xxxx */ + /* LDRD (immediate) 1110 1001 x1x1 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xff400000, 0xe9400000, PROBES_T32_LDRDSTRD, + REGS(NOPCWB, NOSPPC, NOSPPC, 0, 0)), + + /* TBB 1110 1000 1101 xxxx xxxx xxxx 0000 xxxx */ + /* TBH 1110 1000 1101 xxxx xxxx xxxx 0001 xxxx */ + DECODE_SIMULATEX(0xfff000e0, 0xe8d00000, PROBES_T32_TABLE_BRANCH, + REGS(NOSP, 0, 0, 0, NOSPPC)), + + /* STREX 1110 1000 0100 xxxx xxxx xxxx xxxx xxxx */ + /* LDREX 1110 1000 0101 xxxx xxxx xxxx xxxx xxxx */ + /* STREXB 1110 1000 1100 xxxx xxxx xxxx 0100 xxxx */ + /* STREXH 1110 1000 1100 xxxx xxxx xxxx 0101 xxxx */ + /* STREXD 1110 1000 1100 xxxx xxxx xxxx 0111 xxxx */ + /* LDREXB 1110 1000 1101 xxxx xxxx xxxx 0100 xxxx */ + /* LDREXH 1110 1000 1101 xxxx xxxx xxxx 0101 xxxx */ + /* LDREXD 1110 1000 1101 xxxx xxxx xxxx 0111 xxxx */ + /* And unallocated instructions... */ + DECODE_END +}; + +static const union decode_item t32_table_1110_101x[] = { + /* Data-processing (shifted register) */ + + /* TST 1110 1010 0001 xxxx xxxx 1111 xxxx xxxx */ + /* TEQ 1110 1010 1001 xxxx xxxx 1111 xxxx xxxx */ + DECODE_EMULATEX (0xff700f00, 0xea100f00, PROBES_T32_TST, + REGS(NOSPPC, 0, 0, 0, NOSPPC)), + + /* CMN 1110 1011 0001 xxxx xxxx 1111 xxxx xxxx */ + DECODE_OR (0xfff00f00, 0xeb100f00), + /* CMP 1110 1011 1011 xxxx xxxx 1111 xxxx xxxx */ + DECODE_EMULATEX (0xfff00f00, 0xebb00f00, PROBES_T32_TST, + REGS(NOPC, 0, 0, 0, NOSPPC)), + + /* MOV 1110 1010 010x 1111 xxxx xxxx xxxx xxxx */ + /* MVN 1110 1010 011x 1111 xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xffcf0000, 0xea4f0000, PROBES_T32_MOV, + REGS(0, 0, NOSPPC, 0, NOSPPC)), + + /* ??? 1110 1010 101x xxxx xxxx xxxx xxxx xxxx */ + /* ??? 1110 1010 111x xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xffa00000, 0xeaa00000), + /* ??? 1110 1011 001x xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xffe00000, 0xeb200000), + /* ??? 1110 1011 100x xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xffe00000, 0xeb800000), + /* ??? 
1110 1011 111x xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xffe00000, 0xebe00000), + + /* ADD/SUB SP, SP, Rm, LSL #0..3 */ + /* 1110 1011 x0xx 1101 x000 1101 xx00 xxxx */ + DECODE_EMULATEX (0xff4f7f30, 0xeb0d0d00, PROBES_T32_ADDSUB, + REGS(SP, 0, SP, 0, NOSPPC)), + + /* ADD/SUB SP, SP, Rm, shift */ + /* 1110 1011 x0xx 1101 xxxx 1101 xxxx xxxx */ + DECODE_REJECT (0xff4f0f00, 0xeb0d0d00), + + /* ADD/SUB Rd, SP, Rm, shift */ + /* 1110 1011 x0xx 1101 xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xff4f0000, 0xeb0d0000, PROBES_T32_ADDSUB, + REGS(SP, 0, NOPC, 0, NOSPPC)), + + /* AND 1110 1010 000x xxxx xxxx xxxx xxxx xxxx */ + /* BIC 1110 1010 001x xxxx xxxx xxxx xxxx xxxx */ + /* ORR 1110 1010 010x xxxx xxxx xxxx xxxx xxxx */ + /* ORN 1110 1010 011x xxxx xxxx xxxx xxxx xxxx */ + /* EOR 1110 1010 100x xxxx xxxx xxxx xxxx xxxx */ + /* PKH 1110 1010 110x xxxx xxxx xxxx xxxx xxxx */ + /* ADD 1110 1011 000x xxxx xxxx xxxx xxxx xxxx */ + /* ADC 1110 1011 010x xxxx xxxx xxxx xxxx xxxx */ + /* SBC 1110 1011 011x xxxx xxxx xxxx xxxx xxxx */ + /* SUB 1110 1011 101x xxxx xxxx xxxx xxxx xxxx */ + /* RSB 1110 1011 110x xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfe000000, 0xea000000, PROBES_T32_LOGICAL, + REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)), + + DECODE_END +}; + +static const union decode_item t32_table_1111_0x0x___0[] = { + /* Data-processing (modified immediate) */ + + /* TST 1111 0x00 0001 xxxx 0xxx 1111 xxxx xxxx */ + /* TEQ 1111 0x00 1001 xxxx 0xxx 1111 xxxx xxxx */ + DECODE_EMULATEX (0xfb708f00, 0xf0100f00, PROBES_T32_TST, + REGS(NOSPPC, 0, 0, 0, 0)), + + /* CMN 1111 0x01 0001 xxxx 0xxx 1111 xxxx xxxx */ + DECODE_OR (0xfbf08f00, 0xf1100f00), + /* CMP 1111 0x01 1011 xxxx 0xxx 1111 xxxx xxxx */ + DECODE_EMULATEX (0xfbf08f00, 0xf1b00f00, PROBES_T32_CMP, + REGS(NOPC, 0, 0, 0, 0)), + + /* MOV 1111 0x00 010x 1111 0xxx xxxx xxxx xxxx */ + /* MVN 1111 0x00 011x 1111 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfbcf8000, 0xf04f0000, PROBES_T32_MOV, + REGS(0, 0, NOSPPC, 0, 0)), + + /* ??? 1111 0x00 101x xxxx 0xxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfbe08000, 0xf0a00000), + /* ??? 1111 0x00 110x xxxx 0xxx xxxx xxxx xxxx */ + /* ??? 1111 0x00 111x xxxx 0xxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfbc08000, 0xf0c00000), + /* ??? 1111 0x01 001x xxxx 0xxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfbe08000, 0xf1200000), + /* ??? 1111 0x01 100x xxxx 0xxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfbe08000, 0xf1800000), + /* ??? 
1111 0x01 111x xxxx 0xxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfbe08000, 0xf1e00000), + + /* ADD Rd, SP, #imm 1111 0x01 000x 1101 0xxx xxxx xxxx xxxx */ + /* SUB Rd, SP, #imm 1111 0x01 101x 1101 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfb4f8000, 0xf10d0000, PROBES_T32_ADDSUB, + REGS(SP, 0, NOPC, 0, 0)), + + /* AND 1111 0x00 000x xxxx 0xxx xxxx xxxx xxxx */ + /* BIC 1111 0x00 001x xxxx 0xxx xxxx xxxx xxxx */ + /* ORR 1111 0x00 010x xxxx 0xxx xxxx xxxx xxxx */ + /* ORN 1111 0x00 011x xxxx 0xxx xxxx xxxx xxxx */ + /* EOR 1111 0x00 100x xxxx 0xxx xxxx xxxx xxxx */ + /* ADD 1111 0x01 000x xxxx 0xxx xxxx xxxx xxxx */ + /* ADC 1111 0x01 010x xxxx 0xxx xxxx xxxx xxxx */ + /* SBC 1111 0x01 011x xxxx 0xxx xxxx xxxx xxxx */ + /* SUB 1111 0x01 101x xxxx 0xxx xxxx xxxx xxxx */ + /* RSB 1111 0x01 110x xxxx 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfa008000, 0xf0000000, PROBES_T32_LOGICAL, + REGS(NOSPPC, 0, NOSPPC, 0, 0)), + + DECODE_END +}; + +static const union decode_item t32_table_1111_0x1x___0[] = { + /* Data-processing (plain binary immediate) */ + + /* ADDW Rd, PC, #imm 1111 0x10 0000 1111 0xxx xxxx xxxx xxxx */ + DECODE_OR (0xfbff8000, 0xf20f0000), + /* SUBW Rd, PC, #imm 1111 0x10 1010 1111 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfbff8000, 0xf2af0000, PROBES_T32_ADDWSUBW_PC, + REGS(PC, 0, NOSPPC, 0, 0)), + + /* ADDW SP, SP, #imm 1111 0x10 0000 1101 0xxx 1101 xxxx xxxx */ + DECODE_OR (0xfbff8f00, 0xf20d0d00), + /* SUBW SP, SP, #imm 1111 0x10 1010 1101 0xxx 1101 xxxx xxxx */ + DECODE_EMULATEX (0xfbff8f00, 0xf2ad0d00, PROBES_T32_ADDWSUBW, + REGS(SP, 0, SP, 0, 0)), + + /* ADDW 1111 0x10 0000 xxxx 0xxx xxxx xxxx xxxx */ + DECODE_OR (0xfbf08000, 0xf2000000), + /* SUBW 1111 0x10 1010 xxxx 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfbf08000, 0xf2a00000, PROBES_T32_ADDWSUBW, + REGS(NOPCX, 0, NOSPPC, 0, 0)), + + /* MOVW 1111 0x10 0100 xxxx 0xxx xxxx xxxx xxxx */ + /* MOVT 1111 0x10 1100 xxxx 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfb708000, 0xf2400000, PROBES_T32_MOVW, + REGS(0, 0, NOSPPC, 0, 0)), + + /* SSAT16 1111 0x11 0010 xxxx 0000 xxxx 00xx xxxx */ + /* SSAT 1111 0x11 00x0 xxxx 0xxx xxxx xxxx xxxx */ + /* USAT16 1111 0x11 1010 xxxx 0000 xxxx 00xx xxxx */ + /* USAT 1111 0x11 10x0 xxxx 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfb508000, 0xf3000000, PROBES_T32_SAT, + REGS(NOSPPC, 0, NOSPPC, 0, 0)), + + /* SFBX 1111 0x11 0100 xxxx 0xxx xxxx xxxx xxxx */ + /* UFBX 1111 0x11 1100 xxxx 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfb708000, 0xf3400000, PROBES_T32_BITFIELD, + REGS(NOSPPC, 0, NOSPPC, 0, 0)), + + /* BFC 1111 0x11 0110 1111 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfbff8000, 0xf36f0000, PROBES_T32_BITFIELD, + REGS(0, 0, NOSPPC, 0, 0)), + + /* BFI 1111 0x11 0110 xxxx 0xxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfbf08000, 0xf3600000, PROBES_T32_BITFIELD, + REGS(NOSPPCX, 0, NOSPPC, 0, 0)), + + DECODE_END +}; + +static const union decode_item t32_table_1111_0xxx___1[] = { + /* Branches and miscellaneous control */ + + /* YIELD 1111 0011 1010 xxxx 10x0 x000 0000 0001 */ + DECODE_OR (0xfff0d7ff, 0xf3a08001), + /* SEV 1111 0011 1010 xxxx 10x0 x000 0000 0100 */ + DECODE_EMULATE (0xfff0d7ff, 0xf3a08004, PROBES_T32_SEV), + /* NOP 1111 0011 1010 xxxx 10x0 x000 0000 0000 */ + /* WFE 1111 0011 1010 xxxx 10x0 x000 0000 0010 */ + /* WFI 1111 0011 1010 xxxx 10x0 x000 0000 0011 */ + DECODE_SIMULATE (0xfff0d7fc, 0xf3a08000, PROBES_T32_WFE), + + /* MRS Rd, CPSR 1111 0011 1110 xxxx 10x0 xxxx xxxx xxxx */ + DECODE_SIMULATEX(0xfff0d000, 0xf3e08000, PROBES_T32_MRS, + REGS(0, 0, NOSPPC, 0, 0)), + + /* + * 
Unsupported instructions + * 1111 0x11 1xxx xxxx 10x0 xxxx xxxx xxxx + * + * MSR 1111 0011 100x xxxx 10x0 xxxx xxxx xxxx + * DBG hint 1111 0011 1010 xxxx 10x0 x000 1111 xxxx + * Unallocated hints 1111 0011 1010 xxxx 10x0 x000 xxxx xxxx + * CPS 1111 0011 1010 xxxx 10x0 xxxx xxxx xxxx + * CLREX/DSB/DMB/ISB 1111 0011 1011 xxxx 10x0 xxxx xxxx xxxx + * BXJ 1111 0011 1100 xxxx 10x0 xxxx xxxx xxxx + * SUBS PC,LR,# 1111 0011 1101 xxxx 10x0 xxxx xxxx xxxx + * MRS Rd, SPSR 1111 0011 1111 xxxx 10x0 xxxx xxxx xxxx + * SMC 1111 0111 1111 xxxx 1000 xxxx xxxx xxxx + * UNDEFINED 1111 0111 1111 xxxx 1010 xxxx xxxx xxxx + * ??? 1111 0111 1xxx xxxx 1010 xxxx xxxx xxxx + */ + DECODE_REJECT (0xfb80d000, 0xf3808000), + + /* Bcc 1111 0xxx xxxx xxxx 10x0 xxxx xxxx xxxx */ + DECODE_CUSTOM (0xf800d000, 0xf0008000, PROBES_T32_BRANCH_COND), + + /* BLX 1111 0xxx xxxx xxxx 11x0 xxxx xxxx xxx0 */ + DECODE_OR (0xf800d001, 0xf000c000), + /* B 1111 0xxx xxxx xxxx 10x1 xxxx xxxx xxxx */ + /* BL 1111 0xxx xxxx xxxx 11x1 xxxx xxxx xxxx */ + DECODE_SIMULATE (0xf8009000, 0xf0009000, PROBES_T32_BRANCH), + + DECODE_END +}; + +static const union decode_item t32_table_1111_100x_x0x1__1111[] = { + /* Memory hints */ + + /* PLD (literal) 1111 1000 x001 1111 1111 xxxx xxxx xxxx */ + /* PLI (literal) 1111 1001 x001 1111 1111 xxxx xxxx xxxx */ + DECODE_SIMULATE (0xfe7ff000, 0xf81ff000, PROBES_T32_PLDI), + + /* PLD{W} (immediate) 1111 1000 10x1 xxxx 1111 xxxx xxxx xxxx */ + DECODE_OR (0xffd0f000, 0xf890f000), + /* PLD{W} (immediate) 1111 1000 00x1 xxxx 1111 1100 xxxx xxxx */ + DECODE_OR (0xffd0ff00, 0xf810fc00), + /* PLI (immediate) 1111 1001 1001 xxxx 1111 xxxx xxxx xxxx */ + DECODE_OR (0xfff0f000, 0xf990f000), + /* PLI (immediate) 1111 1001 0001 xxxx 1111 1100 xxxx xxxx */ + DECODE_SIMULATEX(0xfff0ff00, 0xf910fc00, PROBES_T32_PLDI, + REGS(NOPCX, 0, 0, 0, 0)), + + /* PLD{W} (register) 1111 1000 00x1 xxxx 1111 0000 00xx xxxx */ + DECODE_OR (0xffd0ffc0, 0xf810f000), + /* PLI (register) 1111 1001 0001 xxxx 1111 0000 00xx xxxx */ + DECODE_SIMULATEX(0xfff0ffc0, 0xf910f000, PROBES_T32_PLDI, + REGS(NOPCX, 0, 0, 0, NOSPPC)), + + /* Other unallocated instructions... */ + DECODE_END +}; + +static const union decode_item t32_table_1111_100x[] = { + /* Store/Load single data item */ + + /* ??? 1111 100x x11x xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfe600000, 0xf8600000), + + /* ??? 1111 1001 0101 xxxx xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xfff00000, 0xf9500000), + + /* ??? 1111 100x 0xxx xxxx xxxx 10x0 xxxx xxxx */ + DECODE_REJECT (0xfe800d00, 0xf8000800), + + /* STRBT 1111 1000 0000 xxxx xxxx 1110 xxxx xxxx */ + /* STRHT 1111 1000 0010 xxxx xxxx 1110 xxxx xxxx */ + /* STRT 1111 1000 0100 xxxx xxxx 1110 xxxx xxxx */ + /* LDRBT 1111 1000 0001 xxxx xxxx 1110 xxxx xxxx */ + /* LDRSBT 1111 1001 0001 xxxx xxxx 1110 xxxx xxxx */ + /* LDRHT 1111 1000 0011 xxxx xxxx 1110 xxxx xxxx */ + /* LDRSHT 1111 1001 0011 xxxx xxxx 1110 xxxx xxxx */ + /* LDRT 1111 1000 0101 xxxx xxxx 1110 xxxx xxxx */ + DECODE_REJECT (0xfe800f00, 0xf8000e00), + + /* STR{,B,H} Rn,[PC...] 1111 1000 xxx0 1111 xxxx xxxx xxxx xxxx */ + DECODE_REJECT (0xff1f0000, 0xf80f0000), + + /* STR{,B,H} PC,[Rn...] 
1111 1000 xxx0 xxxx 1111 xxxx xxxx xxxx */ + DECODE_REJECT (0xff10f000, 0xf800f000), + + /* LDR (literal) 1111 1000 x101 1111 xxxx xxxx xxxx xxxx */ + DECODE_SIMULATEX(0xff7f0000, 0xf85f0000, PROBES_T32_LDR_LIT, + REGS(PC, ANY, 0, 0, 0)), + + /* STR (immediate) 1111 1000 0100 xxxx xxxx 1xxx xxxx xxxx */ + /* LDR (immediate) 1111 1000 0101 xxxx xxxx 1xxx xxxx xxxx */ + DECODE_OR (0xffe00800, 0xf8400800), + /* STR (immediate) 1111 1000 1100 xxxx xxxx xxxx xxxx xxxx */ + /* LDR (immediate) 1111 1000 1101 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xffe00000, 0xf8c00000, PROBES_T32_LDRSTR, + REGS(NOPCX, ANY, 0, 0, 0)), + + /* STR (register) 1111 1000 0100 xxxx xxxx 0000 00xx xxxx */ + /* LDR (register) 1111 1000 0101 xxxx xxxx 0000 00xx xxxx */ + DECODE_EMULATEX (0xffe00fc0, 0xf8400000, PROBES_T32_LDRSTR, + REGS(NOPCX, ANY, 0, 0, NOSPPC)), + + /* LDRB (literal) 1111 1000 x001 1111 xxxx xxxx xxxx xxxx */ + /* LDRSB (literal) 1111 1001 x001 1111 xxxx xxxx xxxx xxxx */ + /* LDRH (literal) 1111 1000 x011 1111 xxxx xxxx xxxx xxxx */ + /* LDRSH (literal) 1111 1001 x011 1111 xxxx xxxx xxxx xxxx */ + DECODE_SIMULATEX(0xfe5f0000, 0xf81f0000, PROBES_T32_LDR_LIT, + REGS(PC, NOSPPCX, 0, 0, 0)), + + /* STRB (immediate) 1111 1000 0000 xxxx xxxx 1xxx xxxx xxxx */ + /* STRH (immediate) 1111 1000 0010 xxxx xxxx 1xxx xxxx xxxx */ + /* LDRB (immediate) 1111 1000 0001 xxxx xxxx 1xxx xxxx xxxx */ + /* LDRSB (immediate) 1111 1001 0001 xxxx xxxx 1xxx xxxx xxxx */ + /* LDRH (immediate) 1111 1000 0011 xxxx xxxx 1xxx xxxx xxxx */ + /* LDRSH (immediate) 1111 1001 0011 xxxx xxxx 1xxx xxxx xxxx */ + DECODE_OR (0xfec00800, 0xf8000800), + /* STRB (immediate) 1111 1000 1000 xxxx xxxx xxxx xxxx xxxx */ + /* STRH (immediate) 1111 1000 1010 xxxx xxxx xxxx xxxx xxxx */ + /* LDRB (immediate) 1111 1000 1001 xxxx xxxx xxxx xxxx xxxx */ + /* LDRSB (immediate) 1111 1001 1001 xxxx xxxx xxxx xxxx xxxx */ + /* LDRH (immediate) 1111 1000 1011 xxxx xxxx xxxx xxxx xxxx */ + /* LDRSH (immediate) 1111 1001 1011 xxxx xxxx xxxx xxxx xxxx */ + DECODE_EMULATEX (0xfec00000, 0xf8800000, PROBES_T32_LDRSTR, + REGS(NOPCX, NOSPPCX, 0, 0, 0)), + + /* STRB (register) 1111 1000 0000 xxxx xxxx 0000 00xx xxxx */ + /* STRH (register) 1111 1000 0010 xxxx xxxx 0000 00xx xxxx */ + /* LDRB (register) 1111 1000 0001 xxxx xxxx 0000 00xx xxxx */ + /* LDRSB (register) 1111 1001 0001 xxxx xxxx 0000 00xx xxxx */ + /* LDRH (register) 1111 1000 0011 xxxx xxxx 0000 00xx xxxx */ + /* LDRSH (register) 1111 1001 0011 xxxx xxxx 0000 00xx xxxx */ + DECODE_EMULATEX (0xfe800fc0, 0xf8000000, PROBES_T32_LDRSTR, + REGS(NOPCX, NOSPPCX, 0, 0, NOSPPC)), + + /* Other unallocated instructions... */ + DECODE_END +}; + +static const union decode_item t32_table_1111_1010___1111[] = { + /* Data-processing (register) */ + + /* ??? 1111 1010 011x xxxx 1111 xxxx 1xxx xxxx */ + DECODE_REJECT (0xffe0f080, 0xfa60f080), + + /* SXTH 1111 1010 0000 1111 1111 xxxx 1xxx xxxx */ + /* UXTH 1111 1010 0001 1111 1111 xxxx 1xxx xxxx */ + /* SXTB16 1111 1010 0010 1111 1111 xxxx 1xxx xxxx */ + /* UXTB16 1111 1010 0011 1111 1111 xxxx 1xxx xxxx */ + /* SXTB 1111 1010 0100 1111 1111 xxxx 1xxx xxxx */ + /* UXTB 1111 1010 0101 1111 1111 xxxx 1xxx xxxx */ + DECODE_EMULATEX (0xff8ff080, 0xfa0ff080, PROBES_T32_SIGN_EXTEND, + REGS(0, 0, NOSPPC, 0, NOSPPC)), + + + /* ??? 1111 1010 1xxx xxxx 1111 xxxx 0x11 xxxx */ + DECODE_REJECT (0xff80f0b0, 0xfa80f030), + /* ??? 
1111 1010 1x11 xxxx 1111 xxxx 0xxx xxxx */ + DECODE_REJECT (0xffb0f080, 0xfab0f000), + + /* SADD16 1111 1010 1001 xxxx 1111 xxxx 0000 xxxx */ + /* SASX 1111 1010 1010 xxxx 1111 xxxx 0000 xxxx */ + /* SSAX 1111 1010 1110 xxxx 1111 xxxx 0000 xxxx */ + /* SSUB16 1111 1010 1101 xxxx 1111 xxxx 0000 xxxx */ + /* SADD8 1111 1010 1000 xxxx 1111 xxxx 0000 xxxx */ + /* SSUB8 1111 1010 1100 xxxx 1111 xxxx 0000 xxxx */ + + /* QADD16 1111 1010 1001 xxxx 1111 xxxx 0001 xxxx */ + /* QASX 1111 1010 1010 xxxx 1111 xxxx 0001 xxxx */ + /* QSAX 1111 1010 1110 xxxx 1111 xxxx 0001 xxxx */ + /* QSUB16 1111 1010 1101 xxxx 1111 xxxx 0001 xxxx */ + /* QADD8 1111 1010 1000 xxxx 1111 xxxx 0001 xxxx */ + /* QSUB8 1111 1010 1100 xxxx 1111 xxxx 0001 xxxx */ + + /* SHADD16 1111 1010 1001 xxxx 1111 xxxx 0010 xxxx */ + /* SHASX 1111 1010 1010 xxxx 1111 xxxx 0010 xxxx */ + /* SHSAX 1111 1010 1110 xxxx 1111 xxxx 0010 xxxx */ + /* SHSUB16 1111 1010 1101 xxxx 1111 xxxx 0010 xxxx */ + /* SHADD8 1111 1010 1000 xxxx 1111 xxxx 0010 xxxx */ + /* SHSUB8 1111 1010 1100 xxxx 1111 xxxx 0010 xxxx */ + + /* UADD16 1111 1010 1001 xxxx 1111 xxxx 0100 xxxx */ + /* UASX 1111 1010 1010 xxxx 1111 xxxx 0100 xxxx */ + /* USAX 1111 1010 1110 xxxx 1111 xxxx 0100 xxxx */ + /* USUB16 1111 1010 1101 xxxx 1111 xxxx 0100 xxxx */ + /* UADD8 1111 1010 1000 xxxx 1111 xxxx 0100 xxxx */ + /* USUB8 1111 1010 1100 xxxx 1111 xxxx 0100 xxxx */ + + /* UQADD16 1111 1010 1001 xxxx 1111 xxxx 0101 xxxx */ + /* UQASX 1111 1010 1010 xxxx 1111 xxxx 0101 xxxx */ + /* UQSAX 1111 1010 1110 xxxx 1111 xxxx 0101 xxxx */ + /* UQSUB16 1111 1010 1101 xxxx 1111 xxxx 0101 xxxx */ + /* UQADD8 1111 1010 1000 xxxx 1111 xxxx 0101 xxxx */ + /* UQSUB8 1111 1010 1100 xxxx 1111 xxxx 0101 xxxx */ + + /* UHADD16 1111 1010 1001 xxxx 1111 xxxx 0110 xxxx */ + /* UHASX 1111 1010 1010 xxxx 1111 xxxx 0110 xxxx */ + /* UHSAX 1111 1010 1110 xxxx 1111 xxxx 0110 xxxx */ + /* UHSUB16 1111 1010 1101 xxxx 1111 xxxx 0110 xxxx */ + /* UHADD8 1111 1010 1000 xxxx 1111 xxxx 0110 xxxx */ + /* UHSUB8 1111 1010 1100 xxxx 1111 xxxx 0110 xxxx */ + DECODE_OR (0xff80f080, 0xfa80f000), + + /* SXTAH 1111 1010 0000 xxxx 1111 xxxx 1xxx xxxx */ + /* UXTAH 1111 1010 0001 xxxx 1111 xxxx 1xxx xxxx */ + /* SXTAB16 1111 1010 0010 xxxx 1111 xxxx 1xxx xxxx */ + /* UXTAB16 1111 1010 0011 xxxx 1111 xxxx 1xxx xxxx */ + /* SXTAB 1111 1010 0100 xxxx 1111 xxxx 1xxx xxxx */ + /* UXTAB 1111 1010 0101 xxxx 1111 xxxx 1xxx xxxx */ + DECODE_OR (0xff80f080, 0xfa00f080), + + /* QADD 1111 1010 1000 xxxx 1111 xxxx 1000 xxxx */ + /* QDADD 1111 1010 1000 xxxx 1111 xxxx 1001 xxxx */ + /* QSUB 1111 1010 1000 xxxx 1111 xxxx 1010 xxxx */ + /* QDSUB 1111 1010 1000 xxxx 1111 xxxx 1011 xxxx */ + DECODE_OR (0xfff0f0c0, 0xfa80f080), + + /* SEL 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */ + DECODE_OR (0xfff0f0f0, 0xfaa0f080), + + /* LSL 1111 1010 000x xxxx 1111 xxxx 0000 xxxx */ + /* LSR 1111 1010 001x xxxx 1111 xxxx 0000 xxxx */ + /* ASR 1111 1010 010x xxxx 1111 xxxx 0000 xxxx */ + /* ROR 1111 1010 011x xxxx 1111 xxxx 0000 xxxx */ + DECODE_EMULATEX (0xff80f0f0, 0xfa00f000, PROBES_T32_MEDIA, + REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)), + + /* CLZ 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */ + DECODE_OR (0xfff0f0f0, 0xfab0f080), + + /* REV 1111 1010 1001 xxxx 1111 xxxx 1000 xxxx */ + /* REV16 1111 1010 1001 xxxx 1111 xxxx 1001 xxxx */ + /* RBIT 1111 1010 1001 xxxx 1111 xxxx 1010 xxxx */ + /* REVSH 1111 1010 1001 xxxx 1111 xxxx 1011 xxxx */ + DECODE_EMULATEX (0xfff0f0c0, 0xfa90f080, PROBES_T32_REVERSE, + REGS(NOSPPC, 0, NOSPPC, 0, SAMEAS16)), + + /* Other 
unallocated instructions... */ + DECODE_END +}; + +static const union decode_item t32_table_1111_1011_0[] = { + /* Multiply, multiply accumulate, and absolute difference */ + + /* ??? 1111 1011 0000 xxxx 1111 xxxx 0001 xxxx */ + DECODE_REJECT (0xfff0f0f0, 0xfb00f010), + /* ??? 1111 1011 0111 xxxx 1111 xxxx 0001 xxxx */ + DECODE_REJECT (0xfff0f0f0, 0xfb70f010), + + /* SMULxy 1111 1011 0001 xxxx 1111 xxxx 00xx xxxx */ + DECODE_OR (0xfff0f0c0, 0xfb10f000), + /* MUL 1111 1011 0000 xxxx 1111 xxxx 0000 xxxx */ + /* SMUAD{X} 1111 1011 0010 xxxx 1111 xxxx 000x xxxx */ + /* SMULWy 1111 1011 0011 xxxx 1111 xxxx 000x xxxx */ + /* SMUSD{X} 1111 1011 0100 xxxx 1111 xxxx 000x xxxx */ + /* SMMUL{R} 1111 1011 0101 xxxx 1111 xxxx 000x xxxx */ + /* USAD8 1111 1011 0111 xxxx 1111 xxxx 0000 xxxx */ + DECODE_EMULATEX (0xff80f0e0, 0xfb00f000, PROBES_T32_MUL_ADD, + REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)), + + /* ??? 1111 1011 0111 xxxx xxxx xxxx 0001 xxxx */ + DECODE_REJECT (0xfff000f0, 0xfb700010), + + /* SMLAxy 1111 1011 0001 xxxx xxxx xxxx 00xx xxxx */ + DECODE_OR (0xfff000c0, 0xfb100000), + /* MLA 1111 1011 0000 xxxx xxxx xxxx 0000 xxxx */ + /* MLS 1111 1011 0000 xxxx xxxx xxxx 0001 xxxx */ + /* SMLAD{X} 1111 1011 0010 xxxx xxxx xxxx 000x xxxx */ + /* SMLAWy 1111 1011 0011 xxxx xxxx xxxx 000x xxxx */ + /* SMLSD{X} 1111 1011 0100 xxxx xxxx xxxx 000x xxxx */ + /* SMMLA{R} 1111 1011 0101 xxxx xxxx xxxx 000x xxxx */ + /* SMMLS{R} 1111 1011 0110 xxxx xxxx xxxx 000x xxxx */ + /* USADA8 1111 1011 0111 xxxx xxxx xxxx 0000 xxxx */ + DECODE_EMULATEX (0xff8000c0, 0xfb000000, PROBES_T32_MUL_ADD2, + REGS(NOSPPC, NOSPPCX, NOSPPC, 0, NOSPPC)), + + /* Other unallocated instructions... */ + DECODE_END +}; + +static const union decode_item t32_table_1111_1011_1[] = { + /* Long multiply, long multiply accumulate, and divide */ + + /* UMAAL 1111 1011 1110 xxxx xxxx xxxx 0110 xxxx */ + DECODE_OR (0xfff000f0, 0xfbe00060), + /* SMLALxy 1111 1011 1100 xxxx xxxx xxxx 10xx xxxx */ + DECODE_OR (0xfff000c0, 0xfbc00080), + /* SMLALD{X} 1111 1011 1100 xxxx xxxx xxxx 110x xxxx */ + /* SMLSLD{X} 1111 1011 1101 xxxx xxxx xxxx 110x xxxx */ + DECODE_OR (0xffe000e0, 0xfbc000c0), + /* SMULL 1111 1011 1000 xxxx xxxx xxxx 0000 xxxx */ + /* UMULL 1111 1011 1010 xxxx xxxx xxxx 0000 xxxx */ + /* SMLAL 1111 1011 1100 xxxx xxxx xxxx 0000 xxxx */ + /* UMLAL 1111 1011 1110 xxxx xxxx xxxx 0000 xxxx */ + DECODE_EMULATEX (0xff9000f0, 0xfb800000, PROBES_T32_MUL_ADD_LONG, + REGS(NOSPPC, NOSPPC, NOSPPC, 0, NOSPPC)), + + /* SDIV 1111 1011 1001 xxxx xxxx xxxx 1111 xxxx */ + /* UDIV 1111 1011 1011 xxxx xxxx xxxx 1111 xxxx */ + /* Other unallocated instructions... 
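+ * (SDIV and UDIV have no matching entry above, so for those encodings the + * scan falls through to DECODE_END and probes_decode_insn() returns + * INSN_REJECTED)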
*/ + DECODE_END +}; + +const union decode_item probes_decode_thumb32_table[] = { + + /* + * Load/store multiple instructions + * 1110 100x x0xx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xfe400000, 0xe8000000, t32_table_1110_100x_x0xx), + + /* + * Load/store dual, load/store exclusive, table branch + * 1110 100x x1xx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xfe400000, 0xe8400000, t32_table_1110_100x_x1xx), + + /* + * Data-processing (shifted register) + * 1110 101x xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xfe000000, 0xea000000, t32_table_1110_101x), + + /* + * Coprocessor instructions + * 1110 11xx xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_REJECT (0xfc000000, 0xec000000), + + /* + * Data-processing (modified immediate) + * 1111 0x0x xxxx xxxx 0xxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xfa008000, 0xf0000000, t32_table_1111_0x0x___0), + + /* + * Data-processing (plain binary immediate) + * 1111 0x1x xxxx xxxx 0xxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xfa008000, 0xf2000000, t32_table_1111_0x1x___0), + + /* + * Branches and miscellaneous control + * 1111 0xxx xxxx xxxx 1xxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xf8008000, 0xf0008000, t32_table_1111_0xxx___1), + + /* + * Advanced SIMD element or structure load/store instructions + * 1111 1001 xxx0 xxxx xxxx xxxx xxxx xxxx + */ + DECODE_REJECT (0xff100000, 0xf9000000), + + /* + * Memory hints + * 1111 100x x0x1 xxxx 1111 xxxx xxxx xxxx + */ + DECODE_TABLE (0xfe50f000, 0xf810f000, t32_table_1111_100x_x0x1__1111), + + /* + * Store single data item + * 1111 1000 xxx0 xxxx xxxx xxxx xxxx xxxx + * Load single data items + * 1111 100x xxx1 xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xfe000000, 0xf8000000, t32_table_1111_100x), + + /* + * Data-processing (register) + * 1111 1010 xxxx xxxx 1111 xxxx xxxx xxxx + */ + DECODE_TABLE (0xff00f000, 0xfa00f000, t32_table_1111_1010___1111), + + /* + * Multiply, multiply accumulate, and absolute difference + * 1111 1011 0xxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xff800000, 0xfb000000, t32_table_1111_1011_0), + + /* + * Long multiply, long multiply accumulate, and divide + * 1111 1011 1xxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_TABLE (0xff800000, 0xfb800000, t32_table_1111_1011_1), + + /* + * Coprocessor instructions + * 1111 11xx xxxx xxxx xxxx xxxx xxxx xxxx + */ + DECODE_END +}; +#ifdef CONFIG_ARM_KPROBES_TEST_MODULE +EXPORT_SYMBOL_GPL(probes_decode_thumb32_table); +#endif + +static const union decode_item t16_table_1011[] = { + /* Miscellaneous 16-bit instructions */ + + /* ADD (SP plus immediate) 1011 0000 0xxx xxxx */ + /* SUB (SP minus immediate) 1011 0000 1xxx xxxx */ + DECODE_SIMULATE (0xff00, 0xb000, PROBES_T16_ADD_SP), + + /* CBZ 1011 00x1 xxxx xxxx */ + /* CBNZ 1011 10x1 xxxx xxxx */ + DECODE_SIMULATE (0xf500, 0xb100, PROBES_T16_CBZ), + + /* SXTH 1011 0010 00xx xxxx */ + /* SXTB 1011 0010 01xx xxxx */ + /* UXTH 1011 0010 10xx xxxx */ + /* UXTB 1011 0010 11xx xxxx */ + /* REV 1011 1010 00xx xxxx */ + /* REV16 1011 1010 01xx xxxx */ + /* ??? 
1011 1010 10xx xxxx */ + /* REVSH 1011 1010 11xx xxxx */ + DECODE_REJECT (0xffc0, 0xba80), + DECODE_EMULATE (0xf500, 0xb000, PROBES_T16_SIGN_EXTEND), + + /* PUSH 1011 010x xxxx xxxx */ + DECODE_CUSTOM (0xfe00, 0xb400, PROBES_T16_PUSH), + /* POP 1011 110x xxxx xxxx */ + DECODE_CUSTOM (0xfe00, 0xbc00, PROBES_T16_POP), + + /* + * If-Then, and hints + * 1011 1111 xxxx xxxx + */ + + /* YIELD 1011 1111 0001 0000 */ + DECODE_OR (0xffff, 0xbf10), + /* SEV 1011 1111 0100 0000 */ + DECODE_EMULATE (0xffff, 0xbf40, PROBES_T16_SEV), + /* NOP 1011 1111 0000 0000 */ + /* WFE 1011 1111 0010 0000 */ + /* WFI 1011 1111 0011 0000 */ + DECODE_SIMULATE (0xffcf, 0xbf00, PROBES_T16_WFE), + /* Unassigned hints 1011 1111 xxxx 0000 */ + DECODE_REJECT (0xff0f, 0xbf00), + /* IT 1011 1111 xxxx xxxx */ + DECODE_CUSTOM (0xff00, 0xbf00, PROBES_T16_IT), + + /* SETEND 1011 0110 010x xxxx */ + /* CPS 1011 0110 011x xxxx */ + /* BKPT 1011 1110 xxxx xxxx */ + /* And unallocated instructions... */ + DECODE_END +}; + +const union decode_item probes_decode_thumb16_table[] = { + + /* + * Shift (immediate), add, subtract, move, and compare + * 00xx xxxx xxxx xxxx + */ + + /* CMP (immediate) 0010 1xxx xxxx xxxx */ + DECODE_EMULATE (0xf800, 0x2800, PROBES_T16_CMP), + + /* ADD (register) 0001 100x xxxx xxxx */ + /* SUB (register) 0001 101x xxxx xxxx */ + /* LSL (immediate) 0000 0xxx xxxx xxxx */ + /* LSR (immediate) 0000 1xxx xxxx xxxx */ + /* ASR (immediate) 0001 0xxx xxxx xxxx */ + /* ADD (immediate, Thumb) 0001 110x xxxx xxxx */ + /* SUB (immediate, Thumb) 0001 111x xxxx xxxx */ + /* MOV (immediate) 0010 0xxx xxxx xxxx */ + /* ADD (immediate, Thumb) 0011 0xxx xxxx xxxx */ + /* SUB (immediate, Thumb) 0011 1xxx xxxx xxxx */ + DECODE_EMULATE (0xc000, 0x0000, PROBES_T16_ADDSUB), + + /* + * 16-bit Thumb data-processing instructions + * 0100 00xx xxxx xxxx + */ + + /* TST (register) 0100 0010 00xx xxxx */ + DECODE_EMULATE (0xffc0, 0x4200, PROBES_T16_CMP), + /* CMP (register) 0100 0010 10xx xxxx */ + /* CMN (register) 0100 0010 11xx xxxx */ + DECODE_EMULATE (0xff80, 0x4280, PROBES_T16_CMP), + /* AND (register) 0100 0000 00xx xxxx */ + /* EOR (register) 0100 0000 01xx xxxx */ + /* LSL (register) 0100 0000 10xx xxxx */ + /* LSR (register) 0100 0000 11xx xxxx */ + /* ASR (register) 0100 0001 00xx xxxx */ + /* ADC (register) 0100 0001 01xx xxxx */ + /* SBC (register) 0100 0001 10xx xxxx */ + /* ROR (register) 0100 0001 11xx xxxx */ + /* RSB (immediate) 0100 0010 01xx xxxx */ + /* ORR (register) 0100 0011 00xx xxxx */ + /* MUL 0100 0011 00xx xxxx */ + /* BIC (register) 0100 0011 10xx xxxx */ + /* MVN (register) 0100 0011 10xx xxxx */ + DECODE_EMULATE (0xfc00, 0x4000, PROBES_T16_LOGICAL), + + /* + * Special data instructions and branch and exchange + * 0100 01xx xxxx xxxx + */ + + /* BLX pc 0100 0111 1111 1xxx */ + DECODE_REJECT (0xfff8, 0x47f8), + + /* BX (register) 0100 0111 0xxx xxxx */ + /* BLX (register) 0100 0111 1xxx xxxx */ + DECODE_SIMULATE (0xff00, 0x4700, PROBES_T16_BLX), + + /* ADD pc, pc 0100 0100 1111 1111 */ + DECODE_REJECT (0xffff, 0x44ff), + + /* ADD (register) 0100 0100 xxxx xxxx */ + /* CMP (register) 0100 0101 xxxx xxxx */ + /* MOV (register) 0100 0110 xxxx xxxx */ + DECODE_CUSTOM (0xfc00, 0x4400, PROBES_T16_HIREGOPS), + + /* + * Load from Literal Pool + * LDR (literal) 0100 1xxx xxxx xxxx + */ + DECODE_SIMULATE (0xf800, 0x4800, PROBES_T16_LDR_LIT), + + /* + * 16-bit Thumb Load/store instructions + * 0101 xxxx xxxx xxxx + * 011x xxxx xxxx xxxx + * 100x xxxx xxxx xxxx + */ + + /* STR (register) 0101 000x xxxx xxxx */ + /* 
STRH (register) 0101 001x xxxx xxxx */ + /* STRB (register) 0101 010x xxxx xxxx */ + /* LDRSB (register) 0101 011x xxxx xxxx */ + /* LDR (register) 0101 100x xxxx xxxx */ + /* LDRH (register) 0101 101x xxxx xxxx */ + /* LDRB (register) 0101 110x xxxx xxxx */ + /* LDRSH (register) 0101 111x xxxx xxxx */ + /* STR (immediate, Thumb) 0110 0xxx xxxx xxxx */ + /* LDR (immediate, Thumb) 0110 1xxx xxxx xxxx */ + /* STRB (immediate, Thumb) 0111 0xxx xxxx xxxx */ + /* LDRB (immediate, Thumb) 0111 1xxx xxxx xxxx */ + DECODE_EMULATE (0xc000, 0x4000, PROBES_T16_LDRHSTRH), + /* STRH (immediate, Thumb) 1000 0xxx xxxx xxxx */ + /* LDRH (immediate, Thumb) 1000 1xxx xxxx xxxx */ + DECODE_EMULATE (0xf000, 0x8000, PROBES_T16_LDRHSTRH), + /* STR (immediate, Thumb) 1001 0xxx xxxx xxxx */ + /* LDR (immediate, Thumb) 1001 1xxx xxxx xxxx */ + DECODE_SIMULATE (0xf000, 0x9000, PROBES_T16_LDRSTR), + + /* + * Generate PC-/SP-relative address + * ADR (literal) 1010 0xxx xxxx xxxx + * ADD (SP plus immediate) 1010 1xxx xxxx xxxx + */ + DECODE_SIMULATE (0xf000, 0xa000, PROBES_T16_ADR), + + /* + * Miscellaneous 16-bit instructions + * 1011 xxxx xxxx xxxx + */ + DECODE_TABLE (0xf000, 0xb000, t16_table_1011), + + /* STM 1100 0xxx xxxx xxxx */ + /* LDM 1100 1xxx xxxx xxxx */ + DECODE_EMULATE (0xf000, 0xc000, PROBES_T16_LDMSTM), + + /* + * Conditional branch, and Supervisor Call + */ + + /* Permanently UNDEFINED 1101 1110 xxxx xxxx */ + /* SVC 1101 1111 xxxx xxxx */ + DECODE_REJECT (0xfe00, 0xde00), + + /* Conditional branch 1101 xxxx xxxx xxxx */ + DECODE_CUSTOM (0xf000, 0xd000, PROBES_T16_BRANCH_COND), + + /* + * Unconditional branch + * B 1110 0xxx xxxx xxxx + */ + DECODE_SIMULATE (0xf800, 0xe000, PROBES_T16_BRANCH), + + DECODE_END +}; +#ifdef CONFIG_ARM_KPROBES_TEST_MODULE +EXPORT_SYMBOL_GPL(probes_decode_thumb16_table); +#endif + +static unsigned long __kprobes thumb_check_cc(unsigned long cpsr) +{ + if (unlikely(in_it_block(cpsr))) + return probes_condition_checks[current_cond(cpsr)](cpsr); + return true; +} + +static void __kprobes thumb16_singlestep(probes_opcode_t opcode, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + regs->ARM_pc += 2; + asi->insn_handler(opcode, asi, regs); + regs->ARM_cpsr = it_advance(regs->ARM_cpsr); +} + +static void __kprobes thumb32_singlestep(probes_opcode_t opcode, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + regs->ARM_pc += 4; + asi->insn_handler(opcode, asi, regs); + regs->ARM_cpsr = it_advance(regs->ARM_cpsr); +} + +enum probes_insn __kprobes +thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool emulate, const union decode_action *actions) +{ + asi->insn_singlestep = thumb16_singlestep; + asi->insn_check_cc = thumb_check_cc; + return probes_decode_insn(insn, asi, probes_decode_thumb16_table, true, + emulate, actions); +} + +enum probes_insn __kprobes +thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool emulate, const union decode_action *actions) +{ + asi->insn_singlestep = thumb32_singlestep; + asi->insn_check_cc = thumb_check_cc; + return probes_decode_insn(insn, asi, probes_decode_thumb32_table, true, + emulate, actions); +} diff --git a/arch/arm/probes/decode-thumb.h b/arch/arm/probes/decode-thumb.h new file mode 100644 index 000000000000..039013c7131d --- /dev/null +++ b/arch/arm/probes/decode-thumb.h @@ -0,0 +1,99 @@ +/* + * arch/arm/probes/decode-thumb.h + * + * Copyright 2013 Linaro Ltd. + * Written by: David A. 
Long + * + * The code contained herein is licensed under the GNU General Public + * License. You may obtain a copy of the GNU General Public License + * Version 2 or later at the following locations: + * + * http://www.opensource.org/licenses/gpl-license.html + * http://www.gnu.org/copyleft/gpl.html + */ + +#ifndef _ARM_KERNEL_PROBES_THUMB_H +#define _ARM_KERNEL_PROBES_THUMB_H + +#include "decode.h" + +/* + * True if current instruction is in an IT block. + */ +#define in_it_block(cpsr) ((cpsr & 0x06000c00) != 0x00000000) + +/* + * Return the condition code to check for the currently executing instruction. + * This is in ITSTATE<7:4> which is in CPSR<15:12> but is only valid if + * in_it_block returns true. + */ +#define current_cond(cpsr) ((cpsr >> 12) & 0xf) + +enum probes_t32_action { + PROBES_T32_EMULATE_NONE, + PROBES_T32_SIMULATE_NOP, + PROBES_T32_LDMSTM, + PROBES_T32_LDRDSTRD, + PROBES_T32_TABLE_BRANCH, + PROBES_T32_TST, + PROBES_T32_CMP, + PROBES_T32_MOV, + PROBES_T32_ADDSUB, + PROBES_T32_LOGICAL, + PROBES_T32_ADDWSUBW_PC, + PROBES_T32_ADDWSUBW, + PROBES_T32_MOVW, + PROBES_T32_SAT, + PROBES_T32_BITFIELD, + PROBES_T32_SEV, + PROBES_T32_WFE, + PROBES_T32_MRS, + PROBES_T32_BRANCH_COND, + PROBES_T32_BRANCH, + PROBES_T32_PLDI, + PROBES_T32_LDR_LIT, + PROBES_T32_LDRSTR, + PROBES_T32_SIGN_EXTEND, + PROBES_T32_MEDIA, + PROBES_T32_REVERSE, + PROBES_T32_MUL_ADD, + PROBES_T32_MUL_ADD2, + PROBES_T32_MUL_ADD_LONG, + NUM_PROBES_T32_ACTIONS +}; + +enum probes_t16_action { + PROBES_T16_ADD_SP, + PROBES_T16_CBZ, + PROBES_T16_SIGN_EXTEND, + PROBES_T16_PUSH, + PROBES_T16_POP, + PROBES_T16_SEV, + PROBES_T16_WFE, + PROBES_T16_IT, + PROBES_T16_CMP, + PROBES_T16_ADDSUB, + PROBES_T16_LOGICAL, + PROBES_T16_BLX, + PROBES_T16_HIREGOPS, + PROBES_T16_LDR_LIT, + PROBES_T16_LDRHSTRH, + PROBES_T16_LDRSTR, + PROBES_T16_ADR, + PROBES_T16_LDMSTM, + PROBES_T16_BRANCH_COND, + PROBES_T16_BRANCH, + NUM_PROBES_T16_ACTIONS +}; + +extern const union decode_item probes_decode_thumb32_table[]; +extern const union decode_item probes_decode_thumb16_table[]; + +enum probes_insn __kprobes +thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool emulate, const union decode_action *actions); +enum probes_insn __kprobes +thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool emulate, const union decode_action *actions); + +#endif diff --git a/arch/arm/probes/decode.c b/arch/arm/probes/decode.c new file mode 100644 index 000000000000..3b05d5742359 --- /dev/null +++ b/arch/arm/probes/decode.c @@ -0,0 +1,456 @@ +/* + * arch/arm/probes/decode.c + * + * Copyright (C) 2011 Jon Medhurst . + * + * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is + * Copyright (C) 2006, 2007 Motorola Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include +#include +#include + +#include "decode.h" + + +#ifndef find_str_pc_offset + +/* + * For STR and STM instructions, an ARM core may choose to use either + * a +8 or a +12 displacement from the current instruction's address. + * Whichever value is chosen for a given core, it must be the same for + * both instructions and may not change. This function measures it. 
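+ * + * For example, on a core where the "str pc" below stores the address of + * the str instruction plus 8, the subtraction leaves str_pc_offset == 8, + * the value that decode.h hard-codes from ARMv7 onwards; other cores may + * measure 12 instead.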
+ */ + +int str_pc_offset; + +void __init find_str_pc_offset(void) +{ + int addr, scratch, ret; + + __asm__ ( + "sub %[ret], pc, #4 \n\t" + "str pc, %[addr] \n\t" + "ldr %[scr], %[addr] \n\t" + "sub %[ret], %[scr], %[ret] \n\t" + : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr)); + + str_pc_offset = ret; +} + +#endif /* !find_str_pc_offset */ + + +#ifndef test_load_write_pc_interworking + +bool load_write_pc_interworks; + +void __init test_load_write_pc_interworking(void) +{ + int arch = cpu_architecture(); + BUG_ON(arch == CPU_ARCH_UNKNOWN); + load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T; +} + +#endif /* !test_load_write_pc_interworking */ + + +#ifndef test_alu_write_pc_interworking + +bool alu_write_pc_interworks; + +void __init test_alu_write_pc_interworking(void) +{ + int arch = cpu_architecture(); + BUG_ON(arch == CPU_ARCH_UNKNOWN); + alu_write_pc_interworks = arch >= CPU_ARCH_ARMv7; +} + +#endif /* !test_alu_write_pc_interworking */ + + +void __init arm_probes_decode_init(void) +{ + find_str_pc_offset(); + test_load_write_pc_interworking(); + test_alu_write_pc_interworking(); +} + + +static unsigned long __kprobes __check_eq(unsigned long cpsr) +{ + return cpsr & PSR_Z_BIT; +} + +static unsigned long __kprobes __check_ne(unsigned long cpsr) +{ + return (~cpsr) & PSR_Z_BIT; +} + +static unsigned long __kprobes __check_cs(unsigned long cpsr) +{ + return cpsr & PSR_C_BIT; +} + +static unsigned long __kprobes __check_cc(unsigned long cpsr) +{ + return (~cpsr) & PSR_C_BIT; +} + +static unsigned long __kprobes __check_mi(unsigned long cpsr) +{ + return cpsr & PSR_N_BIT; +} + +static unsigned long __kprobes __check_pl(unsigned long cpsr) +{ + return (~cpsr) & PSR_N_BIT; +} + +static unsigned long __kprobes __check_vs(unsigned long cpsr) +{ + return cpsr & PSR_V_BIT; +} + +static unsigned long __kprobes __check_vc(unsigned long cpsr) +{ + return (~cpsr) & PSR_V_BIT; +} + +static unsigned long __kprobes __check_hi(unsigned long cpsr) +{ + cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */ + return cpsr & PSR_C_BIT; +} + +static unsigned long __kprobes __check_ls(unsigned long cpsr) +{ + cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */ + return (~cpsr) & PSR_C_BIT; +} + +static unsigned long __kprobes __check_ge(unsigned long cpsr) +{ + cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ + return (~cpsr) & PSR_N_BIT; +} + +static unsigned long __kprobes __check_lt(unsigned long cpsr) +{ + cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ + return cpsr & PSR_N_BIT; +} + +static unsigned long __kprobes __check_gt(unsigned long cpsr) +{ + unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ + temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */ + return (~temp) & PSR_N_BIT; +} + +static unsigned long __kprobes __check_le(unsigned long cpsr) +{ + unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */ + temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */ + return temp & PSR_N_BIT; +} + +static unsigned long __kprobes __check_al(unsigned long cpsr) +{ + return true; +} + +probes_check_cc * const probes_condition_checks[16] = { + &__check_eq, &__check_ne, &__check_cs, &__check_cc, + &__check_mi, &__check_pl, &__check_vs, &__check_vc, + &__check_hi, &__check_ls, &__check_ge, &__check_lt, + &__check_gt, &__check_le, &__check_al, &__check_al +}; + + +void __kprobes probes_simulate_nop(probes_opcode_t opcode, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ +} + +void __kprobes probes_emulate_none(probes_opcode_t opcode, + struct arch_probes_insn 
*asi, + struct pt_regs *regs) +{ + asi->insn_fn(); +} + +/* + * Prepare an instruction slot to receive an instruction for emulating. + * This is done by placing a subroutine return after the location where the + * instruction will be placed. We also modify ARM instructions to be + * unconditional as the condition code will already be checked before any + * emulation handler is called. + */ +static probes_opcode_t __kprobes +prepare_emulated_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool thumb) +{ +#ifdef CONFIG_THUMB2_KERNEL + if (thumb) { + u16 *thumb_insn = (u16 *)asi->insn; + /* Thumb bx lr */ + thumb_insn[1] = __opcode_to_mem_thumb16(0x4770); + thumb_insn[2] = __opcode_to_mem_thumb16(0x4770); + return insn; + } + asi->insn[1] = __opcode_to_mem_arm(0xe12fff1e); /* ARM bx lr */ +#else + asi->insn[1] = __opcode_to_mem_arm(0xe1a0f00e); /* mov pc, lr */ +#endif + /* Make an ARM instruction unconditional */ + if (insn < 0xe0000000) + insn = (insn | 0xe0000000) & ~0x10000000; + return insn; +} + +/* + * Write a (probably modified) instruction into the slot previously prepared by + * prepare_emulated_insn + */ +static void __kprobes +set_emulated_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + bool thumb) +{ +#ifdef CONFIG_THUMB2_KERNEL + if (thumb) { + u16 *ip = (u16 *)asi->insn; + if (is_wide_instruction(insn)) + *ip++ = __opcode_to_mem_thumb16(insn >> 16); + *ip++ = __opcode_to_mem_thumb16(insn); + return; + } +#endif + asi->insn[0] = __opcode_to_mem_arm(insn); +} + +/* + * When we modify the register numbers encoded in an instruction to be emulated, + * the new values come from this define. For ARM and 32-bit Thumb instructions + * this gives... + * + * bit position 16 12 8 4 0 + * ---------------+---+---+---+---+---+ + * register r2 r0 r1 -- r3 + */ +#define INSN_NEW_BITS 0x00020103 + +/* Each nibble has same value as that at INSN_NEW_BITS bit 16 */ +#define INSN_SAMEAS16_BITS 0x22222222 + +/* + * Validate and modify each of the registers encoded in an instruction. + * + * Each nibble in regs contains a value from enum decode_reg_type. For each + * non-zero value, the corresponding nibble in pinsn is validated and modified + * according to the type. + */ +static bool __kprobes decode_regs(probes_opcode_t *pinsn, u32 regs, bool modify) +{ + probes_opcode_t insn = *pinsn; + probes_opcode_t mask = 0xf; /* Start at least significant nibble */ + + for (; regs != 0; regs >>= 4, mask <<= 4) { + + probes_opcode_t new_bits = INSN_NEW_BITS; + + switch (regs & 0xf) { + + case REG_TYPE_NONE: + /* Nibble not a register, skip to next */ + continue; + + case REG_TYPE_ANY: + /* Any register is allowed */ + break; + + case REG_TYPE_SAMEAS16: + /* Replace register with same as at bit position 16 */ + new_bits = INSN_SAMEAS16_BITS; + break; + + case REG_TYPE_SP: + /* Only allow SP (R13) */ + if ((insn ^ 0xdddddddd) & mask) + goto reject; + break; + + case REG_TYPE_PC: + /* Only allow PC (R15) */ + if ((insn ^ 0xffffffff) & mask) + goto reject; + break; + + case REG_TYPE_NOSP: + /* Reject SP (R13) */ + if (((insn ^ 0xdddddddd) & mask) == 0) + goto reject; + break; + + case REG_TYPE_NOSPPC: + case REG_TYPE_NOSPPCX: + /* Reject SP and PC (R13 and R15) */ + if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0) + goto reject; + break; + + case REG_TYPE_NOPCWB: + if (!is_writeback(insn)) + break; /* No writeback, so any register is OK */ + /* fall through... 
*/ + case REG_TYPE_NOPC: + case REG_TYPE_NOPCX: + /* Reject PC (R15) */ + if (((insn ^ 0xffffffff) & mask) == 0) + goto reject; + break; + } + + /* Replace value of nibble with new register number... */ + insn &= ~mask; + insn |= new_bits & mask; + } + + if (modify) + *pinsn = insn; + + return true; + +reject: + return false; +} + +static const int decode_struct_sizes[NUM_DECODE_TYPES] = { + [DECODE_TYPE_TABLE] = sizeof(struct decode_table), + [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom), + [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate), + [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate), + [DECODE_TYPE_OR] = sizeof(struct decode_or), + [DECODE_TYPE_REJECT] = sizeof(struct decode_reject) +}; + +/* + * probes_decode_insn operates on data tables in order to decode an ARM + * architecture instruction onto which a kprobe has been placed. + * + * These instruction decoding tables are a concatenation of entries each + * of which consist of one of the following structs: + * + * decode_table + * decode_custom + * decode_simulate + * decode_emulate + * decode_or + * decode_reject + * + * Each of these starts with a struct decode_header which has the following + * fields: + * + * type_regs + * mask + * value + * + * The least significant DECODE_TYPE_BITS of type_regs contains a value + * from enum decode_type, this indicates which of the decode_* structs + * the entry contains. The value DECODE_TYPE_END indicates the end of the + * table. + * + * When the table is parsed, each entry is checked in turn to see if it + * matches the instruction to be decoded using the test: + * + * (insn & mask) == value + * + * If no match is found before the end of the table is reached then decoding + * fails with INSN_REJECTED. + * + * When a match is found, decode_regs() is called to validate and modify each + * of the registers encoded in the instruction; the data it uses to do this + * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding + * to fail with INSN_REJECTED. + * + * Once the instruction has passed the above tests, further processing + * depends on the type of the table entry's decode struct. 
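+ * + * For example, decoding the ARM NOP encoding 0xe320f000 (the + * "cccc 0011 0010 0000 xxxx xxxx 0000 0000" pattern with cond == 1110) + * against probes_decode_arm_table first matches the DECODE_TABLE entry + * with mask 0x0e000000 and value 0x02000000, descending into the + * cccc 001x sub-table; there the DECODE_SIMULATE entry with mask + * 0x0fff00fc and value 0x03200000 matches, the handler for the + * PROBES_SIMULATE_NOP action is installed as asi->insn_handler, and + * decoding returns INSN_GOOD_NO_SLOT.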
+ * + */ +int __kprobes +probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + const union decode_item *table, bool thumb, + bool emulate, const union decode_action *actions) +{ + const struct decode_header *h = (struct decode_header *)table; + const struct decode_header *next; + bool matched = false; + + if (emulate) + insn = prepare_emulated_insn(insn, asi, thumb); + + for (;; h = next) { + enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; + u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS; + + if (type == DECODE_TYPE_END) + return INSN_REJECTED; + + next = (struct decode_header *) + ((uintptr_t)h + decode_struct_sizes[type]); + + if (!matched && (insn & h->mask.bits) != h->value.bits) + continue; + + if (!decode_regs(&insn, regs, emulate)) + return INSN_REJECTED; + + switch (type) { + + case DECODE_TYPE_TABLE: { + struct decode_table *d = (struct decode_table *)h; + next = (struct decode_header *)d->table.table; + break; + } + + case DECODE_TYPE_CUSTOM: { + struct decode_custom *d = (struct decode_custom *)h; + return actions[d->decoder.action].decoder(insn, asi, h); + } + + case DECODE_TYPE_SIMULATE: { + struct decode_simulate *d = (struct decode_simulate *)h; + asi->insn_handler = actions[d->handler.action].handler; + return INSN_GOOD_NO_SLOT; + } + + case DECODE_TYPE_EMULATE: { + struct decode_emulate *d = (struct decode_emulate *)h; + + if (!emulate) + return actions[d->handler.action].decoder(insn, + asi, h); + + asi->insn_handler = actions[d->handler.action].handler; + set_emulated_insn(insn, asi, thumb); + return INSN_GOOD; + } + + case DECODE_TYPE_OR: + matched = true; + break; + + case DECODE_TYPE_REJECT: + default: + return INSN_REJECTED; + } + } +} diff --git a/arch/arm/probes/decode.h b/arch/arm/probes/decode.h new file mode 100644 index 000000000000..1d0b53169080 --- /dev/null +++ b/arch/arm/probes/decode.h @@ -0,0 +1,407 @@ +/* + * arch/arm/probes/decode.h + * + * Copyright (C) 2011 Jon Medhurst . + * + * Some contents moved here from arch/arm/include/asm/kprobes.h which is + * Copyright (C) 2006, 2007 Motorola Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ + +#ifndef _ARM_KERNEL_PROBES_H +#define _ARM_KERNEL_PROBES_H + +#include +#include +#include + +void __init arm_probes_decode_init(void); + +extern probes_check_cc * const probes_condition_checks[16]; + +#if __LINUX_ARM_ARCH__ >= 7 + +/* str_pc_offset is architecturally defined from ARMv7 onwards */ +#define str_pc_offset 8 +#define find_str_pc_offset() + +#else /* __LINUX_ARM_ARCH__ < 7 */ + +/* We need a run-time check to determine str_pc_offset */ +extern int str_pc_offset; +void __init find_str_pc_offset(void); + +#endif + + +/* + * Update ITSTATE after normal execution of an IT block instruction. 
+ * + * The 8 IT state bits are split into two parts in CPSR: + * ITSTATE<1:0> are in CPSR<26:25> + * ITSTATE<7:2> are in CPSR<15:10> + */ +static inline unsigned long it_advance(unsigned long cpsr) + { + if ((cpsr & 0x06000400) == 0) { + /* ITSTATE<2:0> == 0 means end of IT block, so clear IT state */ + cpsr &= ~PSR_IT_MASK; + } else { + /* We need to shift left ITSTATE<4:0> */ + const unsigned long mask = 0x06001c00; /* Mask ITSTATE<4:0> */ + unsigned long it = cpsr & mask; + it <<= 1; + it |= it >> (27 - 10); /* Carry ITSTATE<2> to correct place */ + it &= mask; + cpsr &= ~mask; + cpsr |= it; + } + return cpsr; +} + +static inline void __kprobes bx_write_pc(long pcv, struct pt_regs *regs) +{ + long cpsr = regs->ARM_cpsr; + if (pcv & 0x1) { + cpsr |= PSR_T_BIT; + pcv &= ~0x1; + } else { + cpsr &= ~PSR_T_BIT; + pcv &= ~0x2; /* Avoid UNPREDICTABLE address allignment */ + } + regs->ARM_cpsr = cpsr; + regs->ARM_pc = pcv; +} + + +#if __LINUX_ARM_ARCH__ >= 6 + +/* Kernels built for >= ARMv6 should never run on <= ARMv5 hardware, so... */ +#define load_write_pc_interworks true +#define test_load_write_pc_interworking() + +#else /* __LINUX_ARM_ARCH__ < 6 */ + +/* We need run-time testing to determine if load_write_pc() should interwork. */ +extern bool load_write_pc_interworks; +void __init test_load_write_pc_interworking(void); + +#endif + +static inline void __kprobes load_write_pc(long pcv, struct pt_regs *regs) +{ + if (load_write_pc_interworks) + bx_write_pc(pcv, regs); + else + regs->ARM_pc = pcv; +} + + +#if __LINUX_ARM_ARCH__ >= 7 + +#define alu_write_pc_interworks true +#define test_alu_write_pc_interworking() + +#elif __LINUX_ARM_ARCH__ <= 5 + +/* Kernels built for <= ARMv5 should never run on >= ARMv6 hardware, so... */ +#define alu_write_pc_interworks false +#define test_alu_write_pc_interworking() + +#else /* __LINUX_ARM_ARCH__ == 6 */ + +/* We could be an ARMv6 binary on ARMv7 hardware so we need a run-time check. */ +extern bool alu_write_pc_interworks; +void __init test_alu_write_pc_interworking(void); + +#endif /* __LINUX_ARM_ARCH__ == 6 */ + +static inline void __kprobes alu_write_pc(long pcv, struct pt_regs *regs) +{ + if (alu_write_pc_interworks) + bx_write_pc(pcv, regs); + else + regs->ARM_pc = pcv; +} + + +/* + * Test if load/store instructions writeback the address register. + * if P (bit 24) == 0 or W (bit 21) == 1 + */ +#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000) + +/* + * The following definitions and macros are used to build instruction + * decoding tables for use by probes_decode_insn. + * + * These tables are a concatenation of entries each of which consist of one of + * the decode_* structs. All of the fields in every type of decode structure + * are of the union type decode_item, therefore the entire decode table can be + * viewed as an array of these and declared like: + * + * static const union decode_item table_name[] = {}; + * + * In order to construct each entry in the table, macros are used to + * initialise a number of sequential decode_item values in a layout which + * matches the relevant struct. E.g. DECODE_SIMULATE initialise a struct + * decode_simulate by initialising four decode_item objects like this... + * + * {.bits = _type}, + * {.bits = _mask}, + * {.bits = _value}, + * {.action = _handler}, + * + * Initialising a specified member of the union means that the compiler + * will produce a warning if the argument is of an incorrect type. 
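+ *
+ * Read against the struct definitions later in this file, those four
+ * initialisers fill in, in order, header.type_regs, header.mask,
+ * header.value and handler, where 'header' is the struct decode_header
+ * embedded at the start of struct decode_simulate.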
+ * + * Below is a list of each of the macros used to initialise entries and a + * description of the action performed when that entry is matched to an + * instruction. A match is found when (instruction & mask) == value. + * + * DECODE_TABLE(mask, value, table) + * Instruction decoding jumps to parsing the new sub-table 'table'. + * + * DECODE_CUSTOM(mask, value, decoder) + * The value of 'decoder' is used as an index into the array of + * action functions, and the retrieved decoder function is invoked + * to complete decoding of the instruction. + * + * DECODE_SIMULATE(mask, value, handler) + * The probes instruction handler is set to the value found by + * indexing into the action array using the value of 'handler'. This + * will be used to simulate the instruction when the probe is hit. + * Decoding returns with INSN_GOOD_NO_SLOT. + * + * DECODE_EMULATE(mask, value, handler) + * The probes instruction handler is set to the value found by + * indexing into the action array using the value of 'handler'. This + * will be used to emulate the instruction when the probe is hit. The + * modified instruction (see below) is placed in the probes instruction + * slot so it may be called by the emulation code. Decoding returns + * with INSN_GOOD. + * + * DECODE_REJECT(mask, value) + * Instruction decoding fails with INSN_REJECTED + * + * DECODE_OR(mask, value) + * This allows the mask/value test of multiple table entries to be + * logically ORed. Once an 'or' entry is matched the decoding action to + * be performed is that of the next entry which isn't an 'or'. E.g. + * + * DECODE_OR (mask1, value1) + * DECODE_OR (mask2, value2) + * DECODE_SIMULATE (mask3, value3, simulation_handler) + * + * This means that if any of the three mask/value pairs match the + * instruction being decoded, then 'simulation_handler' will be used + * for it. + * + * Both the SIMULATE and EMULATE macros have a second form which take an + * additional 'regs' argument. + * + * DECODE_SIMULATEX(mask, value, handler, regs) + * DECODE_EMULATEX (mask, value, handler, regs) + * + * These are used to specify what kind of CPU register is encoded in each of the + * least significant 5 nibbles of the instruction being decoded. The regs value + * is specified using the REGS macro, this takes any of the REG_TYPE_* values + * from enum decode_reg_type as arguments; only the '*' part of the name is + * given. E.g. + * + * REGS(0, ANY, NOPC, 0, ANY) + * + * This indicates an instruction is encoded like: + * + * bits 19..16 ignore + * bits 15..12 any register allowed here + * bits 11.. 8 any register except PC allowed here + * bits 7.. 4 ignore + * bits 3.. 0 any register allowed here + * + * This register specification is checked after a decode table entry is found to + * match an instruction (through the mask/value test). Any invalid register then + * found in the instruction will cause decoding to fail with INSN_REJECTED. In + * the above example this would happen if bits 11..8 of the instruction were + * 1111, indicating R15 or PC. + * + * As well as checking for legal combinations of registers, this data is also + * used to modify the registers encoded in the instructions so that an + * emulation routines can use it. (See decode_regs() and INSN_NEW_BITS.) 
+ * + * Here is a real example which matches ARM instructions of the form + * "AND ,,, " + * + * DECODE_EMULATEX (0x0e000090, 0x00000010, PROBES_DATA_PROCESSING_REG, + * REGS(ANY, ANY, NOPC, 0, ANY)), + * ^ ^ ^ ^ + * Rn Rd Rs Rm + * + * Decoding the instruction "AND R4, R5, R6, ASL R15" will be rejected because + * Rs == R15 + * + * Decoding the instruction "AND R4, R5, R6, ASL R7" will be accepted and the + * instruction will be modified to "AND R0, R2, R3, ASL R1" and then placed into + * the kprobes instruction slot. This can then be called later by the handler + * function emulate_rd12rn16rm0rs8_rwflags (a pointer to which is retrieved from + * the indicated slot in the action array), in order to simulate the instruction. + */ + +enum decode_type { + DECODE_TYPE_END, + DECODE_TYPE_TABLE, + DECODE_TYPE_CUSTOM, + DECODE_TYPE_SIMULATE, + DECODE_TYPE_EMULATE, + DECODE_TYPE_OR, + DECODE_TYPE_REJECT, + NUM_DECODE_TYPES /* Must be last enum */ +}; + +#define DECODE_TYPE_BITS 4 +#define DECODE_TYPE_MASK ((1 << DECODE_TYPE_BITS) - 1) + +enum decode_reg_type { + REG_TYPE_NONE = 0, /* Not a register, ignore */ + REG_TYPE_ANY, /* Any register allowed */ + REG_TYPE_SAMEAS16, /* Register should be same as that at bits 19..16 */ + REG_TYPE_SP, /* Register must be SP */ + REG_TYPE_PC, /* Register must be PC */ + REG_TYPE_NOSP, /* Register must not be SP */ + REG_TYPE_NOSPPC, /* Register must not be SP or PC */ + REG_TYPE_NOPC, /* Register must not be PC */ + REG_TYPE_NOPCWB, /* No PC if load/store write-back flag also set */ + + /* The following types are used when the encoding for PC indicates + * another instruction form. This distiction only matters for test + * case coverage checks. + */ + REG_TYPE_NOPCX, /* Register must not be PC */ + REG_TYPE_NOSPPCX, /* Register must not be SP or PC */ + + /* Alias to allow '0' arg to be used in REGS macro. 
*/ + REG_TYPE_0 = REG_TYPE_NONE +}; + +#define REGS(r16, r12, r8, r4, r0) \ + (((REG_TYPE_##r16) << 16) + \ + ((REG_TYPE_##r12) << 12) + \ + ((REG_TYPE_##r8) << 8) + \ + ((REG_TYPE_##r4) << 4) + \ + (REG_TYPE_##r0)) + +union decode_item { + u32 bits; + const union decode_item *table; + int action; +}; + +struct decode_header; +typedef enum probes_insn (probes_custom_decode_t)(probes_opcode_t, + struct arch_probes_insn *, + const struct decode_header *); + +union decode_action { + probes_insn_handler_t *handler; + probes_custom_decode_t *decoder; +}; + +#define DECODE_END \ + {.bits = DECODE_TYPE_END} + + +struct decode_header { + union decode_item type_regs; + union decode_item mask; + union decode_item value; +}; + +#define DECODE_HEADER(_type, _mask, _value, _regs) \ + {.bits = (_type) | ((_regs) << DECODE_TYPE_BITS)}, \ + {.bits = (_mask)}, \ + {.bits = (_value)} + + +struct decode_table { + struct decode_header header; + union decode_item table; +}; + +#define DECODE_TABLE(_mask, _value, _table) \ + DECODE_HEADER(DECODE_TYPE_TABLE, _mask, _value, 0), \ + {.table = (_table)} + + +struct decode_custom { + struct decode_header header; + union decode_item decoder; +}; + +#define DECODE_CUSTOM(_mask, _value, _decoder) \ + DECODE_HEADER(DECODE_TYPE_CUSTOM, _mask, _value, 0), \ + {.action = (_decoder)} + + +struct decode_simulate { + struct decode_header header; + union decode_item handler; +}; + +#define DECODE_SIMULATEX(_mask, _value, _handler, _regs) \ + DECODE_HEADER(DECODE_TYPE_SIMULATE, _mask, _value, _regs), \ + {.action = (_handler)} + +#define DECODE_SIMULATE(_mask, _value, _handler) \ + DECODE_SIMULATEX(_mask, _value, _handler, 0) + + +struct decode_emulate { + struct decode_header header; + union decode_item handler; +}; + +#define DECODE_EMULATEX(_mask, _value, _handler, _regs) \ + DECODE_HEADER(DECODE_TYPE_EMULATE, _mask, _value, _regs), \ + {.action = (_handler)} + +#define DECODE_EMULATE(_mask, _value, _handler) \ + DECODE_EMULATEX(_mask, _value, _handler, 0) + + +struct decode_or { + struct decode_header header; +}; + +#define DECODE_OR(_mask, _value) \ + DECODE_HEADER(DECODE_TYPE_OR, _mask, _value, 0) + +enum probes_insn { + INSN_REJECTED, + INSN_GOOD, + INSN_GOOD_NO_SLOT +}; + +struct decode_reject { + struct decode_header header; +}; + +#define DECODE_REJECT(_mask, _value) \ + DECODE_HEADER(DECODE_TYPE_REJECT, _mask, _value, 0) + +probes_insn_handler_t probes_simulate_nop; +probes_insn_handler_t probes_emulate_none; + +int __kprobes +probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, + const union decode_item *table, bool thumb, bool emulate, + const union decode_action *actions); + +#endif diff --git a/arch/arm/probes/kprobes/Makefile b/arch/arm/probes/kprobes/Makefile new file mode 100644 index 000000000000..eb38a428ecd6 --- /dev/null +++ b/arch/arm/probes/kprobes/Makefile @@ -0,0 +1,11 @@ +obj-$(CONFIG_KPROBES) += core.o actions-common.o +obj-$(CONFIG_ARM_KPROBES_TEST) += test-kprobes.o +test-kprobes-objs := test-core.o + +ifdef CONFIG_THUMB2_KERNEL +obj-$(CONFIG_KPROBES) += actions-thumb.o +test-kprobes-objs += test-thumb.o +else +obj-$(CONFIG_KPROBES) += actions-arm.o +test-kprobes-objs += test-arm.o +endif diff --git a/arch/arm/probes/kprobes/actions-arm.c b/arch/arm/probes/kprobes/actions-arm.c new file mode 100644 index 000000000000..8797879f7b3a --- /dev/null +++ b/arch/arm/probes/kprobes/actions-arm.c @@ -0,0 +1,343 @@ +/* + * arch/arm/probes/kprobes/actions-arm.c + * + * Copyright (C) 2006, 2007 Motorola Inc. 
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ + +/* + * We do not have hardware single-stepping on ARM, This + * effort is further complicated by the ARM not having a + * "next PC" register. Instructions that change the PC + * can't be safely single-stepped in a MP environment, so + * we have a lot of work to do: + * + * In the prepare phase: + * *) If it is an instruction that does anything + * with the CPU mode, we reject it for a kprobe. + * (This is out of laziness rather than need. The + * instructions could be simulated.) + * + * *) Otherwise, decode the instruction rewriting its + * registers to take fixed, ordered registers and + * setting a handler for it to run the instruction. + * + * In the execution phase by an instruction's handler: + * + * *) If the PC is written to by the instruction, the + * instruction must be fully simulated in software. + * + * *) Otherwise, a modified form of the instruction is + * directly executed. Its handler calls the + * instruction in insn[0]. In insn[1] is a + * "mov pc, lr" to return. + * + * Before calling, load up the reordered registers + * from the original instruction's registers. If one + * of the original input registers is the PC, compute + * and adjust the appropriate input register. + * + * After call completes, copy the output registers to + * the original instruction's original registers. + * + * We don't use a real breakpoint instruction since that + * would have us in the kernel go from SVC mode to SVC + * mode losing the link register. Instead we use an + * undefined instruction. To simplify processing, the + * undefined instruction used for kprobes must be reserved + * exclusively for kprobes use. + * + * TODO: ifdef out some instruction decoding based on architecture. + */ + +#include +#include +#include + +#include "../decode-arm.h" +#include "core.h" + +#if __LINUX_ARM_ARCH__ >= 6 +#define BLX(reg) "blx "reg" \n\t" +#else +#define BLX(reg) "mov lr, pc \n\t" \ + "mov pc, "reg" \n\t" +#endif + +static void __kprobes +emulate_ldrdstrd(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 4; + int rt = (insn >> 12) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rtv asm("r0") = regs->uregs[rt]; + register unsigned long rt2v asm("r1") = regs->uregs[rt+1]; + register unsigned long rnv asm("r2") = (rn == 15) ? pc + : regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + + __asm__ __volatile__ ( + BLX("%[fn]") + : "=r" (rtv), "=r" (rt2v), "=r" (rnv) + : "0" (rtv), "1" (rt2v), "2" (rnv), "r" (rmv), + [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rt] = rtv; + regs->uregs[rt+1] = rt2v; + if (is_writeback(insn)) + regs->uregs[rn] = rnv; +} + +static void __kprobes +emulate_ldr(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 4; + int rt = (insn >> 12) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rtv asm("r0"); + register unsigned long rnv asm("r2") = (rn == 15) ? 
pc + : regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + + __asm__ __volatile__ ( + BLX("%[fn]") + : "=r" (rtv), "=r" (rnv) + : "1" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + if (rt == 15) + load_write_pc(rtv, regs); + else + regs->uregs[rt] = rtv; + + if (is_writeback(insn)) + regs->uregs[rn] = rnv; +} + +static void __kprobes +emulate_str(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long rtpc = regs->ARM_pc - 4 + str_pc_offset; + unsigned long rnpc = regs->ARM_pc + 4; + int rt = (insn >> 12) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rtv asm("r0") = (rt == 15) ? rtpc + : regs->uregs[rt]; + register unsigned long rnv asm("r2") = (rn == 15) ? rnpc + : regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + + __asm__ __volatile__ ( + BLX("%[fn]") + : "=r" (rnv) + : "r" (rtv), "0" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + if (is_writeback(insn)) + regs->uregs[rn] = rnv; +} + +static void __kprobes +emulate_rd12rn16rm0rs8_rwflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 4; + int rd = (insn >> 12) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + int rs = (insn >> 8) & 0xf; + + register unsigned long rdv asm("r0") = regs->uregs[rd]; + register unsigned long rnv asm("r2") = (rn == 15) ? pc + : regs->uregs[rn]; + register unsigned long rmv asm("r3") = (rm == 15) ? pc + : regs->uregs[rm]; + register unsigned long rsv asm("r1") = regs->uregs[rs]; + unsigned long cpsr = regs->ARM_cpsr; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[cpsr] \n\t" + BLX("%[fn]") + "mrs %[cpsr], cpsr \n\t" + : "=r" (rdv), [cpsr] "=r" (cpsr) + : "0" (rdv), "r" (rnv), "r" (rmv), "r" (rsv), + "1" (cpsr), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + if (rd == 15) + alu_write_pc(rdv, regs); + else + regs->uregs[rd] = rdv; + regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); +} + +static void __kprobes +emulate_rd12rn16rm0_rwflags_nopc(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rd = (insn >> 12) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rdv asm("r0") = regs->uregs[rd]; + register unsigned long rnv asm("r2") = regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + unsigned long cpsr = regs->ARM_cpsr; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[cpsr] \n\t" + BLX("%[fn]") + "mrs %[cpsr], cpsr \n\t" + : "=r" (rdv), [cpsr] "=r" (cpsr) + : "0" (rdv), "r" (rnv), "r" (rmv), + "1" (cpsr), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rd] = rdv; + regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); +} + +static void __kprobes +emulate_rd16rn12rm0rs8_rwflags_nopc(probes_opcode_t insn, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + int rd = (insn >> 16) & 0xf; + int rn = (insn >> 12) & 0xf; + int rm = insn & 0xf; + int rs = (insn >> 8) & 0xf; + + register unsigned long rdv asm("r2") = regs->uregs[rd]; + register unsigned long rnv asm("r0") = regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + register unsigned long rsv asm("r1") = regs->uregs[rs]; + unsigned long cpsr = regs->ARM_cpsr; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[cpsr] \n\t" + BLX("%[fn]") + "mrs %[cpsr], cpsr \n\t" + : "=r" (rdv), [cpsr] "=r" (cpsr) + : "0" (rdv), "r" 
(rnv), "r" (rmv), "r" (rsv), + "1" (cpsr), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rd] = rdv; + regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); +} + +static void __kprobes +emulate_rd12rm0_noflags_nopc(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rd = (insn >> 12) & 0xf; + int rm = insn & 0xf; + + register unsigned long rdv asm("r0") = regs->uregs[rd]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + + __asm__ __volatile__ ( + BLX("%[fn]") + : "=r" (rdv) + : "0" (rdv), "r" (rmv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rd] = rdv; +} + +static void __kprobes +emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(probes_opcode_t insn, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + int rdlo = (insn >> 12) & 0xf; + int rdhi = (insn >> 16) & 0xf; + int rn = insn & 0xf; + int rm = (insn >> 8) & 0xf; + + register unsigned long rdlov asm("r0") = regs->uregs[rdlo]; + register unsigned long rdhiv asm("r2") = regs->uregs[rdhi]; + register unsigned long rnv asm("r3") = regs->uregs[rn]; + register unsigned long rmv asm("r1") = regs->uregs[rm]; + unsigned long cpsr = regs->ARM_cpsr; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[cpsr] \n\t" + BLX("%[fn]") + "mrs %[cpsr], cpsr \n\t" + : "=r" (rdlov), "=r" (rdhiv), [cpsr] "=r" (cpsr) + : "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv), + "2" (cpsr), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rdlo] = rdlov; + regs->uregs[rdhi] = rdhiv; + regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); +} + +const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = { + [PROBES_EMULATE_NONE] = {.handler = probes_emulate_none}, + [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop}, + [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop}, + [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop}, + [PROBES_BRANCH_IMM] = {.handler = simulate_blx1}, + [PROBES_MRS] = {.handler = simulate_mrs}, + [PROBES_BRANCH_REG] = {.handler = simulate_blx2bx}, + [PROBES_CLZ] = {.handler = emulate_rd12rm0_noflags_nopc}, + [PROBES_SATURATING_ARITHMETIC] = { + .handler = emulate_rd12rn16rm0_rwflags_nopc}, + [PROBES_MUL1] = {.handler = emulate_rdlo12rdhi16rn0rm8_rwflags_nopc}, + [PROBES_MUL2] = {.handler = emulate_rd16rn12rm0rs8_rwflags_nopc}, + [PROBES_SWP] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, + [PROBES_LDRSTRD] = {.handler = emulate_ldrdstrd}, + [PROBES_LOAD_EXTRA] = {.handler = emulate_ldr}, + [PROBES_LOAD] = {.handler = emulate_ldr}, + [PROBES_STORE_EXTRA] = {.handler = emulate_str}, + [PROBES_STORE] = {.handler = emulate_str}, + [PROBES_MOV_IP_SP] = {.handler = simulate_mov_ipsp}, + [PROBES_DATA_PROCESSING_REG] = { + .handler = emulate_rd12rn16rm0rs8_rwflags}, + [PROBES_DATA_PROCESSING_IMM] = { + .handler = emulate_rd12rn16rm0rs8_rwflags}, + [PROBES_MOV_HALFWORD] = {.handler = emulate_rd12rm0_noflags_nopc}, + [PROBES_SEV] = {.handler = probes_emulate_none}, + [PROBES_WFE] = {.handler = probes_simulate_nop}, + [PROBES_SATURATE] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, + [PROBES_REV] = {.handler = emulate_rd12rm0_noflags_nopc}, + [PROBES_MMI] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, + [PROBES_PACK] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, + [PROBES_EXTEND] = {.handler = emulate_rd12rm0_noflags_nopc}, + [PROBES_EXTEND_ADD] = {.handler = emulate_rd12rn16rm0_rwflags_nopc}, + [PROBES_MUL_ADD_LONG] = { + .handler = emulate_rdlo12rdhi16rn0rm8_rwflags_nopc}, + 
[PROBES_MUL_ADD] = {.handler = emulate_rd16rn12rm0rs8_rwflags_nopc}, + [PROBES_BITFIELD] = {.handler = emulate_rd12rm0_noflags_nopc}, + [PROBES_BRANCH] = {.handler = simulate_bbl}, + [PROBES_LDMSTM] = {.decoder = kprobe_decode_ldmstm} +}; diff --git a/arch/arm/probes/kprobes/actions-common.c b/arch/arm/probes/kprobes/actions-common.c new file mode 100644 index 000000000000..bd20a71cd34a --- /dev/null +++ b/arch/arm/probes/kprobes/actions-common.c @@ -0,0 +1,171 @@ +/* + * arch/arm/probes/kprobes/actions-common.c + * + * Copyright (C) 2011 Jon Medhurst . + * + * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is + * Copyright (C) 2006, 2007 Motorola Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include + +#include "core.h" + + +static void __kprobes simulate_ldm1stm1(probes_opcode_t insn, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + int rn = (insn >> 16) & 0xf; + int lbit = insn & (1 << 20); + int wbit = insn & (1 << 21); + int ubit = insn & (1 << 23); + int pbit = insn & (1 << 24); + long *addr = (long *)regs->uregs[rn]; + int reg_bit_vector; + int reg_count; + + reg_count = 0; + reg_bit_vector = insn & 0xffff; + while (reg_bit_vector) { + reg_bit_vector &= (reg_bit_vector - 1); + ++reg_count; + } + + if (!ubit) + addr -= reg_count; + addr += (!pbit == !ubit); + + reg_bit_vector = insn & 0xffff; + while (reg_bit_vector) { + int reg = __ffs(reg_bit_vector); + reg_bit_vector &= (reg_bit_vector - 1); + if (lbit) + regs->uregs[reg] = *addr++; + else + *addr++ = regs->uregs[reg]; + } + + if (wbit) { + if (!ubit) + addr -= reg_count; + addr -= (!pbit == !ubit); + regs->uregs[rn] = (long)addr; + } +} + +static void __kprobes simulate_stm1_pc(probes_opcode_t insn, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + unsigned long addr = regs->ARM_pc - 4; + + regs->ARM_pc = (long)addr + str_pc_offset; + simulate_ldm1stm1(insn, asi, regs); + regs->ARM_pc = (long)addr + 4; +} + +static void __kprobes simulate_ldm1_pc(probes_opcode_t insn, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + simulate_ldm1stm1(insn, asi, regs); + load_write_pc(regs->ARM_pc, regs); +} + +static void __kprobes +emulate_generic_r0_12_noflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + register void *rregs asm("r1") = regs; + register void *rfn asm("lr") = asi->insn_fn; + + __asm__ __volatile__ ( + "stmdb sp!, {%[regs], r11} \n\t" + "ldmia %[regs], {r0-r12} \n\t" +#if __LINUX_ARM_ARCH__ >= 6 + "blx %[fn] \n\t" +#else + "str %[fn], [sp, #-4]! 
\n\t" + "adr lr, 1f \n\t" + "ldr pc, [sp], #4 \n\t" + "1: \n\t" +#endif + "ldr lr, [sp], #4 \n\t" /* lr = regs */ + "stmia lr, {r0-r12} \n\t" + "ldr r11, [sp], #4 \n\t" + : [regs] "=r" (rregs), [fn] "=r" (rfn) + : "0" (rregs), "1" (rfn) + : "r0", "r2", "r3", "r4", "r5", "r6", "r7", + "r8", "r9", "r10", "r12", "memory", "cc" + ); +} + +static void __kprobes +emulate_generic_r2_14_noflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + emulate_generic_r0_12_noflags(insn, asi, + (struct pt_regs *)(regs->uregs+2)); +} + +static void __kprobes +emulate_ldm_r3_15(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + emulate_generic_r0_12_noflags(insn, asi, + (struct pt_regs *)(regs->uregs+3)); + load_write_pc(regs->ARM_pc, regs); +} + +enum probes_insn __kprobes +kprobe_decode_ldmstm(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *h) +{ + probes_insn_handler_t *handler = 0; + unsigned reglist = insn & 0xffff; + int is_ldm = insn & 0x100000; + int rn = (insn >> 16) & 0xf; + + if (rn <= 12 && (reglist & 0xe000) == 0) { + /* Instruction only uses registers in the range R0..R12 */ + handler = emulate_generic_r0_12_noflags; + + } else if (rn >= 2 && (reglist & 0x8003) == 0) { + /* Instruction only uses registers in the range R2..R14 */ + rn -= 2; + reglist >>= 2; + handler = emulate_generic_r2_14_noflags; + + } else if (rn >= 3 && (reglist & 0x0007) == 0) { + /* Instruction only uses registers in the range R3..R15 */ + if (is_ldm && (reglist & 0x8000)) { + rn -= 3; + reglist >>= 3; + handler = emulate_ldm_r3_15; + } + } + + if (handler) { + /* We can emulate the instruction in (possibly) modified form */ + asi->insn[0] = __opcode_to_mem_arm((insn & 0xfff00000) | + (rn << 16) | reglist); + asi->insn_handler = handler; + return INSN_GOOD; + } + + /* Fallback to slower simulation... */ + if (reglist & 0x8000) + handler = is_ldm ? simulate_ldm1_pc : simulate_stm1_pc; + else + handler = simulate_ldm1stm1; + asi->insn_handler = handler; + return INSN_GOOD_NO_SLOT; +} + diff --git a/arch/arm/probes/kprobes/actions-thumb.c b/arch/arm/probes/kprobes/actions-thumb.c new file mode 100644 index 000000000000..6c4e60b62826 --- /dev/null +++ b/arch/arm/probes/kprobes/actions-thumb.c @@ -0,0 +1,666 @@ +/* + * arch/arm/probes/kprobes/actions-thumb.c + * + * Copyright (C) 2011 Jon Medhurst . + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include +#include + +#include "../decode-thumb.h" +#include "core.h" + +/* These emulation encodings are functionally equivalent... */ +#define t32_emulate_rd8rn16rm0ra12_noflags \ + t32_emulate_rdlo12rdhi8rn16rm0_noflags + +/* t32 thumb actions */ + +static void __kprobes +t32_simulate_table_branch(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + unsigned long rnv = (rn == 15) ? 
pc : regs->uregs[rn]; + unsigned long rmv = regs->uregs[rm]; + unsigned int halfwords; + + if (insn & 0x10) /* TBH */ + halfwords = ((u16 *)rnv)[rmv]; + else /* TBB */ + halfwords = ((u8 *)rnv)[rmv]; + + regs->ARM_pc = pc + 2 * halfwords; +} + +static void __kprobes +t32_simulate_mrs(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rd = (insn >> 8) & 0xf; + unsigned long mask = 0xf8ff03df; /* Mask out execution state */ + regs->uregs[rd] = regs->ARM_cpsr & mask; +} + +static void __kprobes +t32_simulate_cond_branch(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc; + + long offset = insn & 0x7ff; /* imm11 */ + offset += (insn & 0x003f0000) >> 5; /* imm6 */ + offset += (insn & 0x00002000) << 4; /* J1 */ + offset += (insn & 0x00000800) << 7; /* J2 */ + offset -= (insn & 0x04000000) >> 7; /* Apply sign bit */ + + regs->ARM_pc = pc + (offset * 2); +} + +static enum probes_insn __kprobes +t32_decode_cond_branch(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + int cc = (insn >> 22) & 0xf; + asi->insn_check_cc = probes_condition_checks[cc]; + asi->insn_handler = t32_simulate_cond_branch; + return INSN_GOOD_NO_SLOT; +} + +static void __kprobes +t32_simulate_branch(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc; + + long offset = insn & 0x7ff; /* imm11 */ + offset += (insn & 0x03ff0000) >> 5; /* imm10 */ + offset += (insn & 0x00002000) << 9; /* J1 */ + offset += (insn & 0x00000800) << 10; /* J2 */ + if (insn & 0x04000000) + offset -= 0x00800000; /* Apply sign bit */ + else + offset ^= 0x00600000; /* Invert J1 and J2 */ + + if (insn & (1 << 14)) { + /* BL or BLX */ + regs->ARM_lr = regs->ARM_pc | 1; + if (!(insn & (1 << 12))) { + /* BLX so switch to ARM mode */ + regs->ARM_cpsr &= ~PSR_T_BIT; + pc &= ~3; + } + } + + regs->ARM_pc = pc + (offset * 2); +} + +static void __kprobes +t32_simulate_ldr_literal(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long addr = regs->ARM_pc & ~3; + int rt = (insn >> 12) & 0xf; + unsigned long rtv; + + long offset = insn & 0xfff; + if (insn & 0x00800000) + addr += offset; + else + addr -= offset; + + if (insn & 0x00400000) { + /* LDR */ + rtv = *(unsigned long *)addr; + if (rt == 15) { + bx_write_pc(rtv, regs); + return; + } + } else if (insn & 0x00200000) { + /* LDRH */ + if (insn & 0x01000000) + rtv = *(s16 *)addr; + else + rtv = *(u16 *)addr; + } else { + /* LDRB */ + if (insn & 0x01000000) + rtv = *(s8 *)addr; + else + rtv = *(u8 *)addr; + } + + regs->uregs[rt] = rtv; +} + +static enum probes_insn __kprobes +t32_decode_ldmstm(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + enum probes_insn ret = kprobe_decode_ldmstm(insn, asi, d); + + /* Fixup modified instruction to have halfwords in correct order...*/ + insn = __mem_to_opcode_arm(asi->insn[0]); + ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(insn >> 16); + ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0xffff); + + return ret; +} + +static void __kprobes +t32_emulate_ldrdstrd(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc & ~3; + int rt1 = (insn >> 12) & 0xf; + int rt2 = (insn >> 8) & 0xf; + int rn = (insn >> 16) & 0xf; + + register unsigned long rt1v asm("r0") = regs->uregs[rt1]; + register unsigned long rt2v asm("r1") = regs->uregs[rt2]; + register 
unsigned long rnv asm("r2") = (rn == 15) ? pc + : regs->uregs[rn]; + + __asm__ __volatile__ ( + "blx %[fn]" + : "=r" (rt1v), "=r" (rt2v), "=r" (rnv) + : "0" (rt1v), "1" (rt2v), "2" (rnv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + if (rn != 15) + regs->uregs[rn] = rnv; /* Writeback base register */ + regs->uregs[rt1] = rt1v; + regs->uregs[rt2] = rt2v; +} + +static void __kprobes +t32_emulate_ldrstr(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rt = (insn >> 12) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rtv asm("r0") = regs->uregs[rt]; + register unsigned long rnv asm("r2") = regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + + __asm__ __volatile__ ( + "blx %[fn]" + : "=r" (rtv), "=r" (rnv) + : "0" (rtv), "1" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rn] = rnv; /* Writeback base register */ + if (rt == 15) /* Can't be true for a STR as they aren't allowed */ + bx_write_pc(rtv, regs); + else + regs->uregs[rt] = rtv; +} + +static void __kprobes +t32_emulate_rd8rn16rm0_rwflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rd = (insn >> 8) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rdv asm("r1") = regs->uregs[rd]; + register unsigned long rnv asm("r2") = regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + unsigned long cpsr = regs->ARM_cpsr; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[cpsr] \n\t" + "blx %[fn] \n\t" + "mrs %[cpsr], cpsr \n\t" + : "=r" (rdv), [cpsr] "=r" (cpsr) + : "0" (rdv), "r" (rnv), "r" (rmv), + "1" (cpsr), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rd] = rdv; + regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); +} + +static void __kprobes +t32_emulate_rd8pc16_noflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc; + int rd = (insn >> 8) & 0xf; + + register unsigned long rdv asm("r1") = regs->uregs[rd]; + register unsigned long rnv asm("r2") = pc & ~3; + + __asm__ __volatile__ ( + "blx %[fn]" + : "=r" (rdv) + : "0" (rdv), "r" (rnv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rd] = rdv; +} + +static void __kprobes +t32_emulate_rd8rn16_noflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rd = (insn >> 8) & 0xf; + int rn = (insn >> 16) & 0xf; + + register unsigned long rdv asm("r1") = regs->uregs[rd]; + register unsigned long rnv asm("r2") = regs->uregs[rn]; + + __asm__ __volatile__ ( + "blx %[fn]" + : "=r" (rdv) + : "0" (rdv), "r" (rnv), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rd] = rdv; +} + +static void __kprobes +t32_emulate_rdlo12rdhi8rn16rm0_noflags(probes_opcode_t insn, + struct arch_probes_insn *asi, + struct pt_regs *regs) +{ + int rdlo = (insn >> 12) & 0xf; + int rdhi = (insn >> 8) & 0xf; + int rn = (insn >> 16) & 0xf; + int rm = insn & 0xf; + + register unsigned long rdlov asm("r0") = regs->uregs[rdlo]; + register unsigned long rdhiv asm("r1") = regs->uregs[rdhi]; + register unsigned long rnv asm("r2") = regs->uregs[rn]; + register unsigned long rmv asm("r3") = regs->uregs[rm]; + + __asm__ __volatile__ ( + "blx %[fn]" + : "=r" (rdlov), "=r" (rdhiv) + : "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv), + [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + regs->uregs[rdlo] = rdlov; + 
regs->uregs[rdhi] = rdhiv; +} +/* t16 thumb actions */ + +static void __kprobes +t16_simulate_bxblx(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 2; + int rm = (insn >> 3) & 0xf; + unsigned long rmv = (rm == 15) ? pc : regs->uregs[rm]; + + if (insn & (1 << 7)) /* BLX ? */ + regs->ARM_lr = regs->ARM_pc | 1; + + bx_write_pc(rmv, regs); +} + +static void __kprobes +t16_simulate_ldr_literal(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long *base = (unsigned long *)((regs->ARM_pc + 2) & ~3); + long index = insn & 0xff; + int rt = (insn >> 8) & 0x7; + regs->uregs[rt] = base[index]; +} + +static void __kprobes +t16_simulate_ldrstr_sp_relative(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long* base = (unsigned long *)regs->ARM_sp; + long index = insn & 0xff; + int rt = (insn >> 8) & 0x7; + if (insn & 0x800) /* LDR */ + regs->uregs[rt] = base[index]; + else /* STR */ + base[index] = regs->uregs[rt]; +} + +static void __kprobes +t16_simulate_reladr(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long base = (insn & 0x800) ? regs->ARM_sp + : ((regs->ARM_pc + 2) & ~3); + long offset = insn & 0xff; + int rt = (insn >> 8) & 0x7; + regs->uregs[rt] = base + offset * 4; +} + +static void __kprobes +t16_simulate_add_sp_imm(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + long imm = insn & 0x7f; + if (insn & 0x80) /* SUB */ + regs->ARM_sp -= imm * 4; + else /* ADD */ + regs->ARM_sp += imm * 4; +} + +static void __kprobes +t16_simulate_cbz(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + int rn = insn & 0x7; + probes_opcode_t nonzero = regs->uregs[rn] ? insn : ~insn; + if (nonzero & 0x800) { + long i = insn & 0x200; + long imm5 = insn & 0xf8; + unsigned long pc = regs->ARM_pc + 2; + regs->ARM_pc = pc + (i >> 3) + (imm5 >> 2); + } +} + +static void __kprobes +t16_simulate_it(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + /* + * The 8 IT state bits are split into two parts in CPSR: + * ITSTATE<1:0> are in CPSR<26:25> + * ITSTATE<7:2> are in CPSR<15:10> + * The new IT state is in the lower byte of insn. 
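+ *
+ * For example (the value is hypothetical, chosen only to show the
+ * bit shuffling done below), if the low byte of insn were 0xab, the
+ * assignments below would OR (0xab & 0xfc) << 8 into CPSR<15:10>
+ * (ITSTATE<7:2>) and (0xab & 0x03) << 25 into CPSR<26:25>
+ * (ITSTATE<1:0>).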
+ */ + unsigned long cpsr = regs->ARM_cpsr; + cpsr &= ~PSR_IT_MASK; + cpsr |= (insn & 0xfc) << 8; + cpsr |= (insn & 0x03) << 25; + regs->ARM_cpsr = cpsr; +} + +static void __kprobes +t16_singlestep_it(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + regs->ARM_pc += 2; + t16_simulate_it(insn, asi, regs); +} + +static enum probes_insn __kprobes +t16_decode_it(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + asi->insn_singlestep = t16_singlestep_it; + return INSN_GOOD_NO_SLOT; +} + +static void __kprobes +t16_simulate_cond_branch(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 2; + long offset = insn & 0x7f; + offset -= insn & 0x80; /* Apply sign bit */ + regs->ARM_pc = pc + (offset * 2); +} + +static enum probes_insn __kprobes +t16_decode_cond_branch(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + int cc = (insn >> 8) & 0xf; + asi->insn_check_cc = probes_condition_checks[cc]; + asi->insn_handler = t16_simulate_cond_branch; + return INSN_GOOD_NO_SLOT; +} + +static void __kprobes +t16_simulate_branch(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 2; + long offset = insn & 0x3ff; + offset -= insn & 0x400; /* Apply sign bit */ + regs->ARM_pc = pc + (offset * 2); +} + +static unsigned long __kprobes +t16_emulate_loregs(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long oldcpsr = regs->ARM_cpsr; + unsigned long newcpsr; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[oldcpsr] \n\t" + "ldmia %[regs], {r0-r7} \n\t" + "blx %[fn] \n\t" + "stmia %[regs], {r0-r7} \n\t" + "mrs %[newcpsr], cpsr \n\t" + : [newcpsr] "=r" (newcpsr) + : [oldcpsr] "r" (oldcpsr), [regs] "r" (regs), + [fn] "r" (asi->insn_fn) + : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", + "lr", "memory", "cc" + ); + + return (oldcpsr & ~APSR_MASK) | (newcpsr & APSR_MASK); +} + +static void __kprobes +t16_emulate_loregs_rwflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + regs->ARM_cpsr = t16_emulate_loregs(insn, asi, regs); +} + +static void __kprobes +t16_emulate_loregs_noitrwflags(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long cpsr = t16_emulate_loregs(insn, asi, regs); + if (!in_it_block(cpsr)) + regs->ARM_cpsr = cpsr; +} + +static void __kprobes +t16_emulate_hiregs(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + unsigned long pc = regs->ARM_pc + 2; + int rdn = (insn & 0x7) | ((insn & 0x80) >> 4); + int rm = (insn >> 3) & 0xf; + + register unsigned long rdnv asm("r1"); + register unsigned long rmv asm("r0"); + unsigned long cpsr = regs->ARM_cpsr; + + rdnv = (rdn == 15) ? pc : regs->uregs[rdn]; + rmv = (rm == 15) ? 
pc : regs->uregs[rm]; + + __asm__ __volatile__ ( + "msr cpsr_fs, %[cpsr] \n\t" + "blx %[fn] \n\t" + "mrs %[cpsr], cpsr \n\t" + : "=r" (rdnv), [cpsr] "=r" (cpsr) + : "0" (rdnv), "r" (rmv), "1" (cpsr), [fn] "r" (asi->insn_fn) + : "lr", "memory", "cc" + ); + + if (rdn == 15) + rdnv &= ~1; + + regs->uregs[rdn] = rdnv; + regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK); +} + +static enum probes_insn __kprobes +t16_decode_hiregs(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + insn &= ~0x00ff; + insn |= 0x001; /* Set Rdn = R1 and Rm = R0 */ + ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(insn); + asi->insn_handler = t16_emulate_hiregs; + return INSN_GOOD; +} + +static void __kprobes +t16_emulate_push(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + __asm__ __volatile__ ( + "ldr r9, [%[regs], #13*4] \n\t" + "ldr r8, [%[regs], #14*4] \n\t" + "ldmia %[regs], {r0-r7} \n\t" + "blx %[fn] \n\t" + "str r9, [%[regs], #13*4] \n\t" + : + : [regs] "r" (regs), [fn] "r" (asi->insn_fn) + : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", + "lr", "memory", "cc" + ); +} + +static enum probes_insn __kprobes +t16_decode_push(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + /* + * To simulate a PUSH we use a Thumb-2 "STMDB R9!, {registers}" + * and call it with R9=SP and LR in the register list represented + * by R8. + */ + /* 1st half STMDB R9!,{} */ + ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(0xe929); + /* 2nd half (register list) */ + ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0x1ff); + asi->insn_handler = t16_emulate_push; + return INSN_GOOD; +} + +static void __kprobes +t16_emulate_pop_nopc(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + __asm__ __volatile__ ( + "ldr r9, [%[regs], #13*4] \n\t" + "ldmia %[regs], {r0-r7} \n\t" + "blx %[fn] \n\t" + "stmia %[regs], {r0-r7} \n\t" + "str r9, [%[regs], #13*4] \n\t" + : + : [regs] "r" (regs), [fn] "r" (asi->insn_fn) + : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9", + "lr", "memory", "cc" + ); +} + +static void __kprobes +t16_emulate_pop_pc(probes_opcode_t insn, + struct arch_probes_insn *asi, struct pt_regs *regs) +{ + register unsigned long pc asm("r8"); + + __asm__ __volatile__ ( + "ldr r9, [%[regs], #13*4] \n\t" + "ldmia %[regs], {r0-r7} \n\t" + "blx %[fn] \n\t" + "stmia %[regs], {r0-r7} \n\t" + "str r9, [%[regs], #13*4] \n\t" + : "=r" (pc) + : [regs] "r" (regs), [fn] "r" (asi->insn_fn) + : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9", + "lr", "memory", "cc" + ); + + bx_write_pc(pc, regs); +} + +static enum probes_insn __kprobes +t16_decode_pop(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + /* + * To simulate a POP we use a Thumb-2 "LDMDB R9!, {registers}" + * and call it with R9=SP and PC in the register list represented + * by R8. + */ + /* 1st half LDMIA R9!,{} */ + ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(0xe8b9); + /* 2nd half (register list) */ + ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0x1ff); + asi->insn_handler = insn & 0x100 ? 
t16_emulate_pop_pc + : t16_emulate_pop_nopc; + return INSN_GOOD; +} + +const union decode_action kprobes_t16_actions[NUM_PROBES_T16_ACTIONS] = { + [PROBES_T16_ADD_SP] = {.handler = t16_simulate_add_sp_imm}, + [PROBES_T16_CBZ] = {.handler = t16_simulate_cbz}, + [PROBES_T16_SIGN_EXTEND] = {.handler = t16_emulate_loregs_rwflags}, + [PROBES_T16_PUSH] = {.decoder = t16_decode_push}, + [PROBES_T16_POP] = {.decoder = t16_decode_pop}, + [PROBES_T16_SEV] = {.handler = probes_emulate_none}, + [PROBES_T16_WFE] = {.handler = probes_simulate_nop}, + [PROBES_T16_IT] = {.decoder = t16_decode_it}, + [PROBES_T16_CMP] = {.handler = t16_emulate_loregs_rwflags}, + [PROBES_T16_ADDSUB] = {.handler = t16_emulate_loregs_noitrwflags}, + [PROBES_T16_LOGICAL] = {.handler = t16_emulate_loregs_noitrwflags}, + [PROBES_T16_LDR_LIT] = {.handler = t16_simulate_ldr_literal}, + [PROBES_T16_BLX] = {.handler = t16_simulate_bxblx}, + [PROBES_T16_HIREGOPS] = {.decoder = t16_decode_hiregs}, + [PROBES_T16_LDRHSTRH] = {.handler = t16_emulate_loregs_rwflags}, + [PROBES_T16_LDRSTR] = {.handler = t16_simulate_ldrstr_sp_relative}, + [PROBES_T16_ADR] = {.handler = t16_simulate_reladr}, + [PROBES_T16_LDMSTM] = {.handler = t16_emulate_loregs_rwflags}, + [PROBES_T16_BRANCH_COND] = {.decoder = t16_decode_cond_branch}, + [PROBES_T16_BRANCH] = {.handler = t16_simulate_branch}, +}; + +const union decode_action kprobes_t32_actions[NUM_PROBES_T32_ACTIONS] = { + [PROBES_T32_LDMSTM] = {.decoder = t32_decode_ldmstm}, + [PROBES_T32_LDRDSTRD] = {.handler = t32_emulate_ldrdstrd}, + [PROBES_T32_TABLE_BRANCH] = {.handler = t32_simulate_table_branch}, + [PROBES_T32_TST] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_MOV] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_ADDSUB] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_LOGICAL] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_CMP] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_ADDWSUBW_PC] = {.handler = t32_emulate_rd8pc16_noflags,}, + [PROBES_T32_ADDWSUBW] = {.handler = t32_emulate_rd8rn16_noflags}, + [PROBES_T32_MOVW] = {.handler = t32_emulate_rd8rn16_noflags}, + [PROBES_T32_SAT] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_BITFIELD] = {.handler = t32_emulate_rd8rn16_noflags}, + [PROBES_T32_SEV] = {.handler = probes_emulate_none}, + [PROBES_T32_WFE] = {.handler = probes_simulate_nop}, + [PROBES_T32_MRS] = {.handler = t32_simulate_mrs}, + [PROBES_T32_BRANCH_COND] = {.decoder = t32_decode_cond_branch}, + [PROBES_T32_BRANCH] = {.handler = t32_simulate_branch}, + [PROBES_T32_PLDI] = {.handler = probes_simulate_nop}, + [PROBES_T32_LDR_LIT] = {.handler = t32_simulate_ldr_literal}, + [PROBES_T32_LDRSTR] = {.handler = t32_emulate_ldrstr}, + [PROBES_T32_SIGN_EXTEND] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_MEDIA] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_REVERSE] = {.handler = t32_emulate_rd8rn16_noflags}, + [PROBES_T32_MUL_ADD] = {.handler = t32_emulate_rd8rn16rm0_rwflags}, + [PROBES_T32_MUL_ADD2] = {.handler = t32_emulate_rd8rn16rm0ra12_noflags}, + [PROBES_T32_MUL_ADD_LONG] = { + .handler = t32_emulate_rdlo12rdhi8rn16rm0_noflags}, +}; diff --git a/arch/arm/probes/kprobes/core.c b/arch/arm/probes/kprobes/core.c new file mode 100644 index 000000000000..701f49d74c35 --- /dev/null +++ b/arch/arm/probes/kprobes/core.c @@ -0,0 +1,628 @@ +/* + * arch/arm/kernel/kprobes.c + * + * Kprobes on ARM + * + * Abhishek Sagar + * Copyright (C) 2006, 2007 Motorola Inc. 
+ * + * Nicolas Pitre + * Copyright (C) 2007 Marvell Ltd. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "../decode-arm.h" +#include "../decode-thumb.h" +#include "core.h" + +#define MIN_STACK_SIZE(addr) \ + min((unsigned long)MAX_STACK_SIZE, \ + (unsigned long)current_thread_info() + THREAD_START_SP - (addr)) + +#define flush_insns(addr, size) \ + flush_icache_range((unsigned long)(addr), \ + (unsigned long)(addr) + \ + (size)) + +/* Used as a marker in ARM_pc to note when we're in a jprobe. */ +#define JPROBE_MAGIC_ADDR 0xffffffff + +DEFINE_PER_CPU(struct kprobe *, current_kprobe) = NULL; +DEFINE_PER_CPU(struct kprobe_ctlblk, kprobe_ctlblk); + + +int __kprobes arch_prepare_kprobe(struct kprobe *p) +{ + kprobe_opcode_t insn; + kprobe_opcode_t tmp_insn[MAX_INSN_SIZE]; + unsigned long addr = (unsigned long)p->addr; + bool thumb; + kprobe_decode_insn_t *decode_insn; + const union decode_action *actions; + int is; + + if (in_exception_text(addr)) + return -EINVAL; + +#ifdef CONFIG_THUMB2_KERNEL + thumb = true; + addr &= ~1; /* Bit 0 would normally be set to indicate Thumb code */ + insn = __mem_to_opcode_thumb16(((u16 *)addr)[0]); + if (is_wide_instruction(insn)) { + u16 inst2 = __mem_to_opcode_thumb16(((u16 *)addr)[1]); + insn = __opcode_thumb32_compose(insn, inst2); + decode_insn = thumb32_probes_decode_insn; + actions = kprobes_t32_actions; + } else { + decode_insn = thumb16_probes_decode_insn; + actions = kprobes_t16_actions; + } +#else /* !CONFIG_THUMB2_KERNEL */ + thumb = false; + if (addr & 0x3) + return -EINVAL; + insn = __mem_to_opcode_arm(*p->addr); + decode_insn = arm_probes_decode_insn; + actions = kprobes_arm_actions; +#endif + + p->opcode = insn; + p->ainsn.insn = tmp_insn; + + switch ((*decode_insn)(insn, &p->ainsn, true, actions)) { + case INSN_REJECTED: /* not supported */ + return -EINVAL; + + case INSN_GOOD: /* instruction uses slot */ + p->ainsn.insn = get_insn_slot(); + if (!p->ainsn.insn) + return -ENOMEM; + for (is = 0; is < MAX_INSN_SIZE; ++is) + p->ainsn.insn[is] = tmp_insn[is]; + flush_insns(p->ainsn.insn, + sizeof(p->ainsn.insn[0]) * MAX_INSN_SIZE); + p->ainsn.insn_fn = (probes_insn_fn_t *) + ((uintptr_t)p->ainsn.insn | thumb); + break; + + case INSN_GOOD_NO_SLOT: /* instruction doesn't need insn slot */ + p->ainsn.insn = NULL; + break; + } + + return 0; +} + +void __kprobes arch_arm_kprobe(struct kprobe *p) +{ + unsigned int brkp; + void *addr; + + if (IS_ENABLED(CONFIG_THUMB2_KERNEL)) { + /* Remove any Thumb flag */ + addr = (void *)((uintptr_t)p->addr & ~1); + + if (is_wide_instruction(p->opcode)) + brkp = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION; + else + brkp = KPROBE_THUMB16_BREAKPOINT_INSTRUCTION; + } else { + kprobe_opcode_t insn = p->opcode; + + addr = p->addr; + brkp = KPROBE_ARM_BREAKPOINT_INSTRUCTION; + + if (insn >= 0xe0000000) + brkp |= 0xe0000000; /* Unconditional instruction */ + else + brkp |= insn & 0xf0000000; /* Copy condition from insn */ + } + + patch_text(addr, brkp); +} + +/* + * The actual disarming is done here on 
each CPU and synchronized using + * stop_machine. This synchronization is necessary on SMP to avoid removing + * a probe between the moment the 'Undefined Instruction' exception is raised + * and the moment the exception handler reads the faulting instruction from + * memory. It is also needed to atomically set the two half-words of a 32-bit + * Thumb breakpoint. + */ +int __kprobes __arch_disarm_kprobe(void *p) +{ + struct kprobe *kp = p; + void *addr = (void *)((uintptr_t)kp->addr & ~1); + + __patch_text(addr, kp->opcode); + + return 0; +} + +void __kprobes arch_disarm_kprobe(struct kprobe *p) +{ + stop_machine(__arch_disarm_kprobe, p, cpu_online_mask); +} + +void __kprobes arch_remove_kprobe(struct kprobe *p) +{ + if (p->ainsn.insn) { + free_insn_slot(p->ainsn.insn, 0); + p->ainsn.insn = NULL; + } +} + +static void __kprobes save_previous_kprobe(struct kprobe_ctlblk *kcb) +{ + kcb->prev_kprobe.kp = kprobe_running(); + kcb->prev_kprobe.status = kcb->kprobe_status; +} + +static void __kprobes restore_previous_kprobe(struct kprobe_ctlblk *kcb) +{ + __this_cpu_write(current_kprobe, kcb->prev_kprobe.kp); + kcb->kprobe_status = kcb->prev_kprobe.status; +} + +static void __kprobes set_current_kprobe(struct kprobe *p) +{ + __this_cpu_write(current_kprobe, p); +} + +static void __kprobes +singlestep_skip(struct kprobe *p, struct pt_regs *regs) +{ +#ifdef CONFIG_THUMB2_KERNEL + regs->ARM_cpsr = it_advance(regs->ARM_cpsr); + if (is_wide_instruction(p->opcode)) + regs->ARM_pc += 4; + else + regs->ARM_pc += 2; +#else + regs->ARM_pc += 4; +#endif +} + +static inline void __kprobes +singlestep(struct kprobe *p, struct pt_regs *regs, struct kprobe_ctlblk *kcb) +{ + p->ainsn.insn_singlestep(p->opcode, &p->ainsn, regs); +} + +/* + * Called with IRQs disabled. IRQs must remain disabled from that point + * all the way until processing this kprobe is complete. The current + * kprobes implementation cannot process more than one nested level of + * kprobe, and that level is reserved for user kprobe handlers, so we can't + * risk encountering a new kprobe in an interrupt handler. + */ +void __kprobes kprobe_handler(struct pt_regs *regs) +{ + struct kprobe *p, *cur; + struct kprobe_ctlblk *kcb; + + kcb = get_kprobe_ctlblk(); + cur = kprobe_running(); + +#ifdef CONFIG_THUMB2_KERNEL + /* + * First look for a probe which was registered using an address with + * bit 0 set, this is the usual situation for pointers to Thumb code. + * If not found, fallback to looking for one with bit 0 clear. + */ + p = get_kprobe((kprobe_opcode_t *)(regs->ARM_pc | 1)); + if (!p) + p = get_kprobe((kprobe_opcode_t *)regs->ARM_pc); + +#else /* ! CONFIG_THUMB2_KERNEL */ + p = get_kprobe((kprobe_opcode_t *)regs->ARM_pc); +#endif + + if (p) { + if (cur) { + /* Kprobe is pending, so we're recursing. */ + switch (kcb->kprobe_status) { + case KPROBE_HIT_ACTIVE: + case KPROBE_HIT_SSDONE: + /* A pre- or post-handler probe got us here. */ + kprobes_inc_nmissed_count(p); + save_previous_kprobe(kcb); + set_current_kprobe(p); + kcb->kprobe_status = KPROBE_REENTER; + singlestep(p, regs, kcb); + restore_previous_kprobe(kcb); + break; + default: + /* impossible cases */ + BUG(); + } + } else if (p->ainsn.insn_check_cc(regs->ARM_cpsr)) { + /* Probe hit and conditional execution check ok. */ + set_current_kprobe(p); + kcb->kprobe_status = KPROBE_HIT_ACTIVE; + + /* + * If we have no pre-handler or it returned 0, we + * continue with normal processing. 
If we have a + * pre-handler and it returned non-zero, it prepped + * for calling the break_handler below on re-entry, + * so get out doing nothing more here. + */ + if (!p->pre_handler || !p->pre_handler(p, regs)) { + kcb->kprobe_status = KPROBE_HIT_SS; + singlestep(p, regs, kcb); + if (p->post_handler) { + kcb->kprobe_status = KPROBE_HIT_SSDONE; + p->post_handler(p, regs, 0); + } + reset_current_kprobe(); + } + } else { + /* + * Probe hit but conditional execution check failed, + * so just skip the instruction and continue as if + * nothing had happened. + */ + singlestep_skip(p, regs); + } + } else if (cur) { + /* We probably hit a jprobe. Call its break handler. */ + if (cur->break_handler && cur->break_handler(cur, regs)) { + kcb->kprobe_status = KPROBE_HIT_SS; + singlestep(cur, regs, kcb); + if (cur->post_handler) { + kcb->kprobe_status = KPROBE_HIT_SSDONE; + cur->post_handler(cur, regs, 0); + } + } + reset_current_kprobe(); + } else { + /* + * The probe was removed and a race is in progress. + * There is nothing we can do about it. Let's restart + * the instruction. By the time we can restart, the + * real instruction will be there. + */ + } +} + +static int __kprobes kprobe_trap_handler(struct pt_regs *regs, unsigned int instr) +{ + unsigned long flags; + local_irq_save(flags); + kprobe_handler(regs); + local_irq_restore(flags); + return 0; +} + +int __kprobes kprobe_fault_handler(struct pt_regs *regs, unsigned int fsr) +{ + struct kprobe *cur = kprobe_running(); + struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); + + switch (kcb->kprobe_status) { + case KPROBE_HIT_SS: + case KPROBE_REENTER: + /* + * We are here because the instruction being single + * stepped caused a page fault. We reset the current + * kprobe and the PC to point back to the probe address + * and allow the page fault handler to continue as a + * normal page fault. + */ + regs->ARM_pc = (long)cur->addr; + if (kcb->kprobe_status == KPROBE_REENTER) { + restore_previous_kprobe(kcb); + } else { + reset_current_kprobe(); + } + break; + + case KPROBE_HIT_ACTIVE: + case KPROBE_HIT_SSDONE: + /* + * We increment the nmissed count for accounting, + * we can also use npre/npostfault count for accounting + * these specific fault cases. + */ + kprobes_inc_nmissed_count(cur); + + /* + * We come here because instructions in the pre/post + * handler caused the page_fault, this could happen + * if handler tries to access user space by + * copy_from_user(), get_user() etc. Let the + * user-specified handler try to fix it. + */ + if (cur->fault_handler && cur->fault_handler(cur, regs, fsr)) + return 1; + break; + + default: + break; + } + + return 0; +} + +int __kprobes kprobe_exceptions_notify(struct notifier_block *self, + unsigned long val, void *data) +{ + /* + * notify_die() is currently never called on ARM, + * so this callback is currently empty. + */ + return NOTIFY_DONE; +} + +/* + * When a retprobed function returns, trampoline_handler() is called, + * calling the kretprobe's handler. We construct a struct pt_regs to + * give a view of registers r0-r11 to the user return-handler. This is + * not a complete pt_regs structure, but that should be plenty sufficient + * for kretprobe handlers which should normally be interested in r0 only + * anyway. 
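+ *
+ * (For instance, since r0 carries the return value under the AAPCS, a
+ * return-handler given this view can read the probed function's return
+ * value simply as regs->ARM_r0.)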
+ */ +void __naked __kprobes kretprobe_trampoline(void) +{ + __asm__ __volatile__ ( + "stmdb sp!, {r0 - r11} \n\t" + "mov r0, sp \n\t" + "bl trampoline_handler \n\t" + "mov lr, r0 \n\t" + "ldmia sp!, {r0 - r11} \n\t" +#ifdef CONFIG_THUMB2_KERNEL + "bx lr \n\t" +#else + "mov pc, lr \n\t" +#endif + : : : "memory"); +} + +/* Called from kretprobe_trampoline */ +static __used __kprobes void *trampoline_handler(struct pt_regs *regs) +{ + struct kretprobe_instance *ri = NULL; + struct hlist_head *head, empty_rp; + struct hlist_node *tmp; + unsigned long flags, orig_ret_address = 0; + unsigned long trampoline_address = (unsigned long)&kretprobe_trampoline; + + INIT_HLIST_HEAD(&empty_rp); + kretprobe_hash_lock(current, &head, &flags); + + /* + * It is possible to have multiple instances associated with a given + * task either because multiple functions in the call path have + * a return probe installed on them, and/or more than one return + * probe was registered for a target function. + * + * We can handle this because: + * - instances are always inserted at the head of the list + * - when multiple return probes are registered for the same + * function, the first instance's ret_addr will point to the + * real return address, and all the rest will point to + * kretprobe_trampoline + */ + hlist_for_each_entry_safe(ri, tmp, head, hlist) { + if (ri->task != current) + /* another task is sharing our hash bucket */ + continue; + + if (ri->rp && ri->rp->handler) { + __this_cpu_write(current_kprobe, &ri->rp->kp); + get_kprobe_ctlblk()->kprobe_status = KPROBE_HIT_ACTIVE; + ri->rp->handler(ri, regs); + __this_cpu_write(current_kprobe, NULL); + } + + orig_ret_address = (unsigned long)ri->ret_addr; + recycle_rp_inst(ri, &empty_rp); + + if (orig_ret_address != trampoline_address) + /* + * This is the real return address. Any other + * instances associated with this task are for + * other calls deeper on the call stack + */ + break; + } + + kretprobe_assert(ri, orig_ret_address, trampoline_address); + kretprobe_hash_unlock(current, &flags); + + hlist_for_each_entry_safe(ri, tmp, &empty_rp, hlist) { + hlist_del(&ri->hlist); + kfree(ri); + } + + return (void *)orig_ret_address; +} + +void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri, + struct pt_regs *regs) +{ + ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr; + + /* Replace the return addr with trampoline addr. */ + regs->ARM_lr = (unsigned long)&kretprobe_trampoline; +} + +int __kprobes setjmp_pre_handler(struct kprobe *p, struct pt_regs *regs) +{ + struct jprobe *jp = container_of(p, struct jprobe, kp); + struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); + long sp_addr = regs->ARM_sp; + long cpsr; + + kcb->jprobe_saved_regs = *regs; + memcpy(kcb->jprobes_stack, (void *)sp_addr, MIN_STACK_SIZE(sp_addr)); + regs->ARM_pc = (long)jp->entry; + + cpsr = regs->ARM_cpsr | PSR_I_BIT; +#ifdef CONFIG_THUMB2_KERNEL + /* Set correct Thumb state in cpsr */ + if (regs->ARM_pc & 1) + cpsr |= PSR_T_BIT; + else + cpsr &= ~PSR_T_BIT; +#endif + regs->ARM_cpsr = cpsr; + + preempt_disable(); + return 1; +} + +void __kprobes jprobe_return(void) +{ + struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); + + __asm__ __volatile__ ( + /* + * Setup an empty pt_regs. Fill SP and PC fields as + * they're needed by longjmp_break_handler. + * + * We allocate some slack between the original SP and start of + * our fabricated regs. To be precise we want to have worst case + * covered which is STMFD with all 16 regs so we allocate 2 * + * sizeof(struct_pt_regs)). 
+ * + * This is to prevent any simulated instruction from writing + * over the regs when they are accessing the stack. + */ +#ifdef CONFIG_THUMB2_KERNEL + "sub r0, %0, %1 \n\t" + "mov sp, r0 \n\t" +#else + "sub sp, %0, %1 \n\t" +#endif + "ldr r0, ="__stringify(JPROBE_MAGIC_ADDR)"\n\t" + "str %0, [sp, %2] \n\t" + "str r0, [sp, %3] \n\t" + "mov r0, sp \n\t" + "bl kprobe_handler \n\t" + + /* + * Return to the context saved by setjmp_pre_handler + * and restored by longjmp_break_handler. + */ +#ifdef CONFIG_THUMB2_KERNEL + "ldr lr, [sp, %2] \n\t" /* lr = saved sp */ + "ldrd r0, r1, [sp, %5] \n\t" /* r0,r1 = saved lr,pc */ + "ldr r2, [sp, %4] \n\t" /* r2 = saved psr */ + "stmdb lr!, {r0, r1, r2} \n\t" /* push saved lr and */ + /* rfe context */ + "ldmia sp, {r0 - r12} \n\t" + "mov sp, lr \n\t" + "ldr lr, [sp], #4 \n\t" + "rfeia sp! \n\t" +#else + "ldr r0, [sp, %4] \n\t" + "msr cpsr_cxsf, r0 \n\t" + "ldmia sp, {r0 - pc} \n\t" +#endif + : + : "r" (kcb->jprobe_saved_regs.ARM_sp), + "I" (sizeof(struct pt_regs) * 2), + "J" (offsetof(struct pt_regs, ARM_sp)), + "J" (offsetof(struct pt_regs, ARM_pc)), + "J" (offsetof(struct pt_regs, ARM_cpsr)), + "J" (offsetof(struct pt_regs, ARM_lr)) + : "memory", "cc"); +} + +int __kprobes longjmp_break_handler(struct kprobe *p, struct pt_regs *regs) +{ + struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); + long stack_addr = kcb->jprobe_saved_regs.ARM_sp; + long orig_sp = regs->ARM_sp; + struct jprobe *jp = container_of(p, struct jprobe, kp); + + if (regs->ARM_pc == JPROBE_MAGIC_ADDR) { + if (orig_sp != stack_addr) { + struct pt_regs *saved_regs = + (struct pt_regs *)kcb->jprobe_saved_regs.ARM_sp; + printk("current sp %lx does not match saved sp %lx\n", + orig_sp, stack_addr); + printk("Saved registers for jprobe %p\n", jp); + show_regs(saved_regs); + printk("Current registers\n"); + show_regs(regs); + BUG(); + } + *regs = kcb->jprobe_saved_regs; + memcpy((void *)stack_addr, kcb->jprobes_stack, + MIN_STACK_SIZE(stack_addr)); + preempt_enable_no_resched(); + return 1; + } + return 0; +} + +int __kprobes arch_trampoline_kprobe(struct kprobe *p) +{ + return 0; +} + +#ifdef CONFIG_THUMB2_KERNEL + +static struct undef_hook kprobes_thumb16_break_hook = { + .instr_mask = 0xffff, + .instr_val = KPROBE_THUMB16_BREAKPOINT_INSTRUCTION, + .cpsr_mask = MODE_MASK, + .cpsr_val = SVC_MODE, + .fn = kprobe_trap_handler, +}; + +static struct undef_hook kprobes_thumb32_break_hook = { + .instr_mask = 0xffffffff, + .instr_val = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION, + .cpsr_mask = MODE_MASK, + .cpsr_val = SVC_MODE, + .fn = kprobe_trap_handler, +}; + +#else /* !CONFIG_THUMB2_KERNEL */ + +static struct undef_hook kprobes_arm_break_hook = { + .instr_mask = 0x0fffffff, + .instr_val = KPROBE_ARM_BREAKPOINT_INSTRUCTION, + .cpsr_mask = MODE_MASK, + .cpsr_val = SVC_MODE, + .fn = kprobe_trap_handler, +}; + +#endif /* !CONFIG_THUMB2_KERNEL */ + +int __init arch_init_kprobes() +{ + arm_probes_decode_init(); +#ifdef CONFIG_THUMB2_KERNEL + register_undef_hook(&kprobes_thumb16_break_hook); + register_undef_hook(&kprobes_thumb32_break_hook); +#else + register_undef_hook(&kprobes_arm_break_hook); +#endif + return 0; +} diff --git a/arch/arm/probes/kprobes/core.h b/arch/arm/probes/kprobes/core.h new file mode 100644 index 000000000000..2e1e5a3d9155 --- /dev/null +++ b/arch/arm/probes/kprobes/core.h @@ -0,0 +1,53 @@ +/* + * arch/arm/kernel/kprobes.h + * + * Copyright (C) 2011 Jon Medhurst . 
+ *
+ * Some contents moved here from arch/arm/include/asm/kprobes.h which is
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ */
+
+#ifndef _ARM_KERNEL_KPROBES_H
+#define _ARM_KERNEL_KPROBES_H
+
+#include <asm/kprobes.h>
+#include "../decode.h"
+
+/*
+ * These undefined instructions must be unique and
+ * reserved solely for kprobes' use.
+ */
+#define KPROBE_ARM_BREAKPOINT_INSTRUCTION 0x07f001f8
+#define KPROBE_THUMB16_BREAKPOINT_INSTRUCTION 0xde18
+#define KPROBE_THUMB32_BREAKPOINT_INSTRUCTION 0xf7f0a018
+
+enum probes_insn __kprobes
+kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *h);
+
+typedef enum probes_insn (kprobe_decode_insn_t)(probes_opcode_t,
+ struct arch_probes_insn *,
+ bool,
+ const union decode_action *);
+
+#ifdef CONFIG_THUMB2_KERNEL
+
+extern const union decode_action kprobes_t32_actions[];
+extern const union decode_action kprobes_t16_actions[];
+
+#else /* !CONFIG_THUMB2_KERNEL */
+
+extern const union decode_action kprobes_arm_actions[];
+
+#endif
+
+#endif /* _ARM_KERNEL_KPROBES_H */
diff --git a/arch/arm/probes/kprobes/test-arm.c b/arch/arm/probes/kprobes/test-arm.c
new file mode 100644
index 000000000000..d9a1255f3043
--- /dev/null
+++ b/arch/arm/probes/kprobes/test-arm.c
@@ -0,0 +1,1346 @@
+/*
+ * arch/arm/kernel/kprobes-test-arm.c
+ *
+ * Copyright (C) 2011 Jon Medhurst .
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */ + +#include +#include +#include +#include + +#include "test-core.h" + + +#define TEST_ISA "32" + +#define TEST_ARM_TO_THUMB_INTERWORK_R(code1, reg, val, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG(reg, val) \ + TEST_ARG_REG(14, 99f) \ + TEST_ARG_END("") \ + "50: nop \n\t" \ + "1: "code1 #reg code2" \n\t" \ + " bx lr \n\t" \ + ".thumb \n\t" \ + "3: adr lr, 2f \n\t" \ + " bx lr \n\t" \ + ".arm \n\t" \ + "2: nop \n\t" \ + TESTCASE_END + +#define TEST_ARM_TO_THUMB_INTERWORK_P(code1, reg, val, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_PTR(reg, val) \ + TEST_ARG_REG(14, 99f) \ + TEST_ARG_MEM(15, 3f+1) \ + TEST_ARG_END("") \ + "50: nop \n\t" \ + "1: "code1 #reg code2" \n\t" \ + " bx lr \n\t" \ + ".thumb \n\t" \ + "3: adr lr, 2f \n\t" \ + " bx lr \n\t" \ + ".arm \n\t" \ + "2: nop \n\t" \ + TESTCASE_END + + +void kprobe_arm_test_cases(void) +{ + kprobe_test_flags = 0; + + TEST_GROUP("Data-processing (register), (register-shifted register), (immediate)") + +#define _DATA_PROCESSING_DNM(op,s,val) \ + TEST_RR( op "eq" s " r0, r",1, VAL1,", r",2, val, "") \ + TEST_RR( op "ne" s " r1, r",1, VAL1,", r",2, val, ", lsl #3") \ + TEST_RR( op "cs" s " r2, r",3, VAL1,", r",2, val, ", lsr #4") \ + TEST_RR( op "cc" s " r3, r",3, VAL1,", r",2, val, ", asr #5") \ + TEST_RR( op "mi" s " r4, r",5, VAL1,", r",2, N(val),", asr #6") \ + TEST_RR( op "pl" s " r5, r",5, VAL1,", r",2, val, ", ror #7") \ + TEST_RR( op "vs" s " r6, r",7, VAL1,", r",2, val, ", rrx") \ + TEST_R( op "vc" s " r6, r",7, VAL1,", pc, lsl #3") \ + TEST_R( op "vc" s " r6, r",7, VAL1,", sp, lsr #4") \ + TEST_R( op "vc" s " r6, pc, r",7, VAL1,", asr #5") \ + TEST_R( op "vc" s " r6, sp, r",7, VAL1,", ror #6") \ + TEST_RRR( op "hi" s " r8, r",9, VAL1,", r",14,val, ", lsl r",0, 3,"")\ + TEST_RRR( op "ls" s " r9, r",9, VAL1,", r",14,val, ", lsr r",7, 4,"")\ + TEST_RRR( op "ge" s " r10, r",11,VAL1,", r",14,val, ", asr r",7, 5,"")\ + TEST_RRR( op "lt" s " r11, r",11,VAL1,", r",14,N(val),", asr r",7, 6,"")\ + TEST_RR( op "gt" s " r12, r13" ", r",14,val, ", ror r",14,7,"")\ + TEST_RR( op "le" s " r14, r",0, val, ", r13" ", lsl r",14,8,"")\ + TEST_R( op "eq" s " r0, r",11,VAL1,", #0xf5") \ + TEST_R( op "ne" s " r11, r",0, VAL1,", #0xf5000000") \ + TEST_R( op s " r7, r",8, VAL2,", #0x000af000") \ + TEST( op s " r4, pc" ", #0x00005a00") + +#define DATA_PROCESSING_DNM(op,val) \ + _DATA_PROCESSING_DNM(op,"",val) \ + _DATA_PROCESSING_DNM(op,"s",val) + +#define DATA_PROCESSING_NM(op,val) \ + TEST_RR( op "ne r",1, VAL1,", r",2, val, "") \ + TEST_RR( op "eq r",1, VAL1,", r",2, val, ", lsl #3") \ + TEST_RR( op "cc r",3, VAL1,", r",2, val, ", lsr #4") \ + TEST_RR( op "cs r",3, VAL1,", r",2, val, ", asr #5") \ + TEST_RR( op "pl r",5, VAL1,", r",2, N(val),", asr #6") \ + TEST_RR( op "mi r",5, VAL1,", r",2, val, ", ror #7") \ + TEST_RR( op "vc r",7, VAL1,", r",2, val, ", rrx") \ + TEST_R ( op "vs r",7, VAL1,", pc, lsl #3") \ + TEST_R ( op "vs r",7, VAL1,", sp, lsr #4") \ + TEST_R( op "vs pc, r",7, VAL1,", asr #5") \ + TEST_R( op "vs sp, r",7, VAL1,", ror #6") \ + TEST_RRR( op "ls r",9, VAL1,", r",14,val, ", lsl r",0, 3,"") \ + TEST_RRR( op "hi r",9, VAL1,", r",14,val, ", lsr r",7, 4,"") \ + TEST_RRR( op "lt r",11,VAL1,", r",14,val, ", asr r",7, 5,"") \ + TEST_RRR( op "ge r",11,VAL1,", r",14,N(val),", asr r",7, 6,"") \ + TEST_RR( op "le r13" ", r",14,val, ", ror r",14,7,"") \ + TEST_RR( op "gt r",0, val, ", r13" ", lsl r",14,8,"") \ + TEST_R( op "eq r",11,VAL1,", #0xf5") \ + TEST_R( op "ne r",0, VAL1,", #0xf5000000") \ + TEST_R( op " 
r",8, VAL2,", #0x000af000") + +#define _DATA_PROCESSING_DM(op,s,val) \ + TEST_R( op "eq" s " r0, r",1, val, "") \ + TEST_R( op "ne" s " r1, r",1, val, ", lsl #3") \ + TEST_R( op "cs" s " r2, r",3, val, ", lsr #4") \ + TEST_R( op "cc" s " r3, r",3, val, ", asr #5") \ + TEST_R( op "mi" s " r4, r",5, N(val),", asr #6") \ + TEST_R( op "pl" s " r5, r",5, val, ", ror #7") \ + TEST_R( op "vs" s " r6, r",10,val, ", rrx") \ + TEST( op "vs" s " r7, pc, lsl #3") \ + TEST( op "vs" s " r7, sp, lsr #4") \ + TEST_RR( op "vc" s " r8, r",7, val, ", lsl r",0, 3,"") \ + TEST_RR( op "hi" s " r9, r",9, val, ", lsr r",7, 4,"") \ + TEST_RR( op "ls" s " r10, r",9, val, ", asr r",7, 5,"") \ + TEST_RR( op "ge" s " r11, r",11,N(val),", asr r",7, 6,"") \ + TEST_RR( op "lt" s " r12, r",11,val, ", ror r",14,7,"") \ + TEST_R( op "gt" s " r14, r13" ", lsl r",14,8,"") \ + TEST( op "eq" s " r0, #0xf5") \ + TEST( op "ne" s " r11, #0xf5000000") \ + TEST( op s " r7, #0x000af000") \ + TEST( op s " r4, #0x00005a00") + +#define DATA_PROCESSING_DM(op,val) \ + _DATA_PROCESSING_DM(op,"",val) \ + _DATA_PROCESSING_DM(op,"s",val) + + DATA_PROCESSING_DNM("and",0xf00f00ff) + DATA_PROCESSING_DNM("eor",0xf00f00ff) + DATA_PROCESSING_DNM("sub",VAL2) + DATA_PROCESSING_DNM("rsb",VAL2) + DATA_PROCESSING_DNM("add",VAL2) + DATA_PROCESSING_DNM("adc",VAL2) + DATA_PROCESSING_DNM("sbc",VAL2) + DATA_PROCESSING_DNM("rsc",VAL2) + DATA_PROCESSING_NM("tst",0xf00f00ff) + DATA_PROCESSING_NM("teq",0xf00f00ff) + DATA_PROCESSING_NM("cmp",VAL2) + DATA_PROCESSING_NM("cmn",VAL2) + DATA_PROCESSING_DNM("orr",0xf00f00ff) + DATA_PROCESSING_DM("mov",VAL2) + DATA_PROCESSING_DNM("bic",0xf00f00ff) + DATA_PROCESSING_DM("mvn",VAL2) + + TEST("mov ip, sp") /* This has special case emulation code */ + + TEST_SUPPORTED("mov pc, #0x1000"); + TEST_SUPPORTED("mov sp, #0x1000"); + TEST_SUPPORTED("cmp pc, #0x1000"); + TEST_SUPPORTED("cmp sp, #0x1000"); + + /* Data-processing with PC and a shift count in a register */ + TEST_UNSUPPORTED(__inst_arm(0xe15c0f1e) " @ cmp r12, r14, asl pc") + TEST_UNSUPPORTED(__inst_arm(0xe1a0cf1e) " @ mov r12, r14, asl pc") + TEST_UNSUPPORTED(__inst_arm(0xe08caf1e) " @ add r10, r12, r14, asl pc") + TEST_UNSUPPORTED(__inst_arm(0xe151021f) " @ cmp r1, pc, lsl r2") + TEST_UNSUPPORTED(__inst_arm(0xe17f0211) " @ cmn pc, r1, lsl r2") + TEST_UNSUPPORTED(__inst_arm(0xe1a0121f) " @ mov r1, pc, lsl r2") + TEST_UNSUPPORTED(__inst_arm(0xe1a0f211) " @ mov pc, r1, lsl r2") + TEST_UNSUPPORTED(__inst_arm(0xe042131f) " @ sub r1, r2, pc, lsl r3") + TEST_UNSUPPORTED(__inst_arm(0xe1cf1312) " @ bic r1, pc, r2, lsl r3") + TEST_UNSUPPORTED(__inst_arm(0xe081f312) " @ add pc, r1, r2, lsl r3") + + /* Data-processing with PC as a target and status registers updated */ + TEST_UNSUPPORTED("movs pc, r1") + TEST_UNSUPPORTED("movs pc, r1, lsl r2") + TEST_UNSUPPORTED("movs pc, #0x10000") + TEST_UNSUPPORTED("adds pc, lr, r1") + TEST_UNSUPPORTED("adds pc, lr, r1, lsl r2") + TEST_UNSUPPORTED("adds pc, lr, #4") + + /* Data-processing with SP as target */ + TEST("add sp, sp, #16") + TEST("sub sp, sp, #8") + TEST("bic sp, sp, #0x20") + TEST("orr sp, sp, #0x20") + TEST_PR( "add sp, r",10,0,", r",11,4,"") + TEST_PRR("add sp, r",10,0,", r",11,4,", asl r",12,1,"") + TEST_P( "mov sp, r",10,0,"") + TEST_PR( "mov sp, r",10,0,", asl r",12,0,"") + + /* Data-processing with PC as target */ + TEST_BF( "add pc, pc, #2f-1b-8") + TEST_BF_R ("add pc, pc, r",14,2f-1f-8,"") + TEST_BF_R ("add pc, r",14,2f-1f-8,", pc") + TEST_BF_R ("mov pc, r",0,2f,"") + TEST_BF_R ("add pc, pc, r",14,(2f-1f-8)*2,", asr #1") + 
TEST_BB( "sub pc, pc, #1b-2b+8") +#if __LINUX_ARM_ARCH__ == 6 && !defined(CONFIG_CPU_V7) + TEST_BB( "sub pc, pc, #1b-2b+8-2") /* UNPREDICTABLE before and after ARMv6 */ +#endif + TEST_BB_R( "sub pc, pc, r",14, 1f-2f+8,"") + TEST_BB_R( "rsb pc, r",14,1f-2f+8,", pc") + TEST_R( "add pc, pc, r",10,-2,", asl #1") +#ifdef CONFIG_THUMB2_KERNEL + TEST_ARM_TO_THUMB_INTERWORK_R("add pc, pc, r",0,3f-1f-8+1,"") + TEST_ARM_TO_THUMB_INTERWORK_R("sub pc, r",0,3f+8+1,", #8") +#endif + TEST_GROUP("Miscellaneous instructions") + + TEST("mrs r0, cpsr") + TEST("mrspl r7, cpsr") + TEST("mrs r14, cpsr") + TEST_UNSUPPORTED(__inst_arm(0xe10ff000) " @ mrs r15, cpsr") + TEST_UNSUPPORTED("mrs r0, spsr") + TEST_UNSUPPORTED("mrs lr, spsr") + + TEST_UNSUPPORTED("msr cpsr, r0") + TEST_UNSUPPORTED("msr cpsr_f, lr") + TEST_UNSUPPORTED("msr spsr, r0") + + TEST_BF_R("bx r",0,2f,"") + TEST_BB_R("bx r",7,2f,"") + TEST_BF_R("bxeq r",14,2f,"") + +#if __LINUX_ARM_ARCH__ >= 5 + TEST_R("clz r0, r",0, 0x0,"") + TEST_R("clzeq r7, r",14,0x1,"") + TEST_R("clz lr, r",7, 0xffffffff,"") + TEST( "clz r4, sp") + TEST_UNSUPPORTED(__inst_arm(0x016fff10) " @ clz pc, r0") + TEST_UNSUPPORTED(__inst_arm(0x016f0f1f) " @ clz r0, pc") + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_UNSUPPORTED("bxj r0") +#endif + + TEST_BF_R("blx r",0,2f,"") + TEST_BB_R("blx r",7,2f,"") + TEST_BF_R("blxeq r",14,2f,"") + TEST_UNSUPPORTED(__inst_arm(0x0120003f) " @ blx pc") + + TEST_RR( "qadd r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "qaddvs lr, r",9, VAL2,", r",8, VAL1,"") + TEST_R( "qadd lr, r",9, VAL2,", r13") + TEST_RR( "qsub r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "qsubvs lr, r",9, VAL2,", r",8, VAL1,"") + TEST_R( "qsub lr, r",9, VAL2,", r13") + TEST_RR( "qdadd r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "qdaddvs lr, r",9, VAL2,", r",8, VAL1,"") + TEST_R( "qdadd lr, r",9, VAL2,", r13") + TEST_RR( "qdsub r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "qdsubvs lr, r",9, VAL2,", r",8, VAL1,"") + TEST_R( "qdsub lr, r",9, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe101f050) " @ qadd pc, r0, r1") + TEST_UNSUPPORTED(__inst_arm(0xe121f050) " @ qsub pc, r0, r1") + TEST_UNSUPPORTED(__inst_arm(0xe141f050) " @ qdadd pc, r0, r1") + TEST_UNSUPPORTED(__inst_arm(0xe161f050) " @ qdsub pc, r0, r1") + TEST_UNSUPPORTED(__inst_arm(0xe16f2050) " @ qdsub r2, r0, pc") + TEST_UNSUPPORTED(__inst_arm(0xe161205f) " @ qdsub r2, pc, r1") + + TEST_UNSUPPORTED("bkpt 0xffff") + TEST_UNSUPPORTED("bkpt 0x0000") + + TEST_UNSUPPORTED(__inst_arm(0xe1600070) " @ smc #0") + + TEST_GROUP("Halfword multiply and multiply-accumulate") + + TEST_RRR( "smlabb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlabbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smlabb lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe10f3281) " @ smlabb pc, r1, r2, r3") + TEST_RRR( "smlatb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlatbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smlatb lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe10f32a1) " @ smlatb pc, r1, r2, r3") + TEST_RRR( "smlabt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlabtge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smlabt lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe10f32c1) " @ smlabt pc, r1, r2, r3") + TEST_RRR( "smlatt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlattge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smlatt lr, r",1, VAL2,", r",2, VAL3,", r13") + 
TEST_UNSUPPORTED(__inst_arm(0xe10f32e1) " @ smlatt pc, r1, r2, r3") + + TEST_RRR( "smlawb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlawbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smlawb lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe12f3281) " @ smlawb pc, r1, r2, r3") + TEST_RRR( "smlawt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlawtge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smlawt lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe12f32c1) " @ smlawt pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe12032cf) " @ smlawt r0, pc, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe1203fc1) " @ smlawt r0, r1, pc, r3") + TEST_UNSUPPORTED(__inst_arm(0xe120f2c1) " @ smlawt r0, r1, r2, pc") + + TEST_RR( "smulwb r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulwbge r7, r",8, VAL3,", r",9, VAL1,"") + TEST_R( "smulwb lr, r",1, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe12f02a1) " @ smulwb pc, r1, r2") + TEST_RR( "smulwt r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulwtge r7, r",8, VAL3,", r",9, VAL1,"") + TEST_R( "smulwt lr, r",1, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe12f02e1) " @ smulwt pc, r1, r2") + + TEST_RRRR( "smlalbb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalbble r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "smlalbb r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe14f1382) " @ smlalbb pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe141f382) " @ smlalbb r1, pc, r2, r3") + TEST_RRRR( "smlaltb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlaltble r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "smlaltb r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe14f13a2) " @ smlaltb pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe141f3a2) " @ smlaltb r1, pc, r2, r3") + TEST_RRRR( "smlalbt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalbtle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "smlalbt r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe14f13c2) " @ smlalbt pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe141f3c2) " @ smlalbt r1, pc, r2, r3") + TEST_RRRR( "smlaltt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalttle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "smlaltt r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe14f13e2) " @ smlalbb pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe140f3e2) " @ smlalbb r0, pc, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe14013ef) " @ smlalbb r0, r1, pc, r3") + TEST_UNSUPPORTED(__inst_arm(0xe1401fe2) " @ smlalbb r0, r1, r2, pc") + + TEST_RR( "smulbb r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulbbge r7, r",8, VAL3,", r",9, VAL1,"") + TEST_R( "smulbb lr, r",1, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe16f0281) " @ smulbb pc, r1, r2") + TEST_RR( "smultb r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smultbge r7, r",8, VAL3,", r",9, VAL1,"") + TEST_R( "smultb lr, r",1, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe16f02a1) " @ smultb pc, r1, r2") + TEST_RR( "smulbt r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulbtge r7, r",8, VAL3,", r",9, VAL1,"") + TEST_R( "smulbt lr, r",1, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe16f02c1) " @ smultb pc, r1, r2") + TEST_RR( "smultt r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( 
"smulttge r7, r",8, VAL3,", r",9, VAL1,"") + TEST_R( "smultt lr, r",1, VAL2,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe16f02e1) " @ smultt pc, r1, r2") + TEST_UNSUPPORTED(__inst_arm(0xe16002ef) " @ smultt r0, pc, r2") + TEST_UNSUPPORTED(__inst_arm(0xe1600fe1) " @ smultt r0, r1, pc") +#endif + + TEST_GROUP("Multiply and multiply-accumulate") + + TEST_RR( "mul r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "mulls r7, r",8, VAL2,", r",9, VAL2,"") + TEST_R( "mul lr, r",4, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe00f0291) " @ mul pc, r1, r2") + TEST_UNSUPPORTED(__inst_arm(0xe000029f) " @ mul r0, pc, r2") + TEST_UNSUPPORTED(__inst_arm(0xe0000f91) " @ mul r0, r1, pc") + TEST_RR( "muls r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "mullss r7, r",8, VAL2,", r",9, VAL2,"") + TEST_R( "muls lr, r",4, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe01f0291) " @ muls pc, r1, r2") + + TEST_RRR( "mla r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "mlahi r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "mla lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe02f3291) " @ mla pc, r1, r2, r3") + TEST_RRR( "mlas r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "mlahis r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "mlas lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe03f3291) " @ mlas pc, r1, r2, r3") + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_RR( "umaal r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "umaalls r7, r8, r",9, VAL2,", r",10, VAL1,"") + TEST_R( "umaal lr, r12, r",11,VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe041f392) " @ umaal pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe04f0392) " @ umaal r0, pc, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0500090) " @ undef") + TEST_UNSUPPORTED(__inst_arm(0xe05fff9f) " @ undef") +#endif + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_RRR( "mls r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "mlshi r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "mls lr, r",1, VAL2,", r",2, VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe06f3291) " @ mls pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe060329f) " @ mls r0, pc, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0603f91) " @ mls r0, r1, pc, r3") + TEST_UNSUPPORTED(__inst_arm(0xe060f291) " @ mls r0, r1, r2, pc") +#endif + + TEST_UNSUPPORTED(__inst_arm(0xe0700090) " @ undef") + TEST_UNSUPPORTED(__inst_arm(0xe07fff9f) " @ undef") + + TEST_RR( "umull r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "umullls r7, r8, r",9, VAL2,", r",10, VAL1,"") + TEST_R( "umull lr, r12, r",11,VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe081f392) " @ umull pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe08f1392) " @ umull r1, pc, r2, r3") + TEST_RR( "umulls r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "umulllss r7, r8, r",9, VAL2,", r",10, VAL1,"") + TEST_R( "umulls lr, r12, r",11,VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe091f392) " @ umulls pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe09f1392) " @ umulls r1, pc, r2, r3") + + TEST_RRRR( "umlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "umlalle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "umlal r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe0af1392) " @ umlal pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0a1f392) " @ umlal r1, pc, r2, r3") + TEST_RRRR( "umlals r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "umlalles r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + 
TEST_RRR( "umlals r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe0bf1392) " @ umlals pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0b1f392) " @ umlals r1, pc, r2, r3") + + TEST_RR( "smull r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "smullls r7, r8, r",9, VAL2,", r",10, VAL1,"") + TEST_R( "smull lr, r12, r",11,VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe0c1f392) " @ smull pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0cf1392) " @ smull r1, pc, r2, r3") + TEST_RR( "smulls r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "smulllss r7, r8, r",9, VAL2,", r",10, VAL1,"") + TEST_R( "smulls lr, r12, r",11,VAL3,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe0d1f392) " @ smulls pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0df1392) " @ smulls r1, pc, r2, r3") + + TEST_RRRR( "smlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "smlal r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe0ef1392) " @ smlal pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0e1f392) " @ smlal r1, pc, r2, r3") + TEST_RRRR( "smlals r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalles r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRR( "smlals r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13") + TEST_UNSUPPORTED(__inst_arm(0xe0ff1392) " @ smlals pc, r1, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0f0f392) " @ smlals r0, pc, r2, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0f0139f) " @ smlals r0, r1, pc, r3") + TEST_UNSUPPORTED(__inst_arm(0xe0f01f92) " @ smlals r0, r1, r2, pc") + + TEST_GROUP("Synchronization primitives") + +#if __LINUX_ARM_ARCH__ < 6 + TEST_RP("swp lr, r",7,VAL2,", [r",8,0,"]") + TEST_R( "swpvs r0, r",1,VAL1,", [sp]") + TEST_RP("swp sp, r",14,VAL2,", [r",12,13*4,"]") +#else + TEST_UNSUPPORTED(__inst_arm(0xe108e097) " @ swp lr, r7, [r8]") + TEST_UNSUPPORTED(__inst_arm(0x610d0091) " @ swpvs r0, r1, [sp]") + TEST_UNSUPPORTED(__inst_arm(0xe10cd09e) " @ swp sp, r14 [r12]") +#endif + TEST_UNSUPPORTED(__inst_arm(0xe102f091) " @ swp pc, r1, [r2]") + TEST_UNSUPPORTED(__inst_arm(0xe102009f) " @ swp r0, pc, [r2]") + TEST_UNSUPPORTED(__inst_arm(0xe10f0091) " @ swp r0, r1, [pc]") +#if __LINUX_ARM_ARCH__ < 6 + TEST_RP("swpb lr, r",7,VAL2,", [r",8,0,"]") + TEST_R( "swpvsb r0, r",1,VAL1,", [sp]") +#else + TEST_UNSUPPORTED(__inst_arm(0xe148e097) " @ swpb lr, r7, [r8]") + TEST_UNSUPPORTED(__inst_arm(0x614d0091) " @ swpvsb r0, r1, [sp]") +#endif + TEST_UNSUPPORTED(__inst_arm(0xe142f091) " @ swpb pc, r1, [r2]") + + TEST_UNSUPPORTED(__inst_arm(0xe1100090)) /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe1200090)) /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe1300090)) /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe1500090)) /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe1600090)) /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe1700090)) /* Unallocated space */ +#if __LINUX_ARM_ARCH__ >= 6 + TEST_UNSUPPORTED("ldrex r2, [sp]") +#endif +#if (__LINUX_ARM_ARCH__ >= 7) || defined(CONFIG_CPU_32v6K) + TEST_UNSUPPORTED("strexd r0, r2, r3, [sp]") + TEST_UNSUPPORTED("ldrexd r2, r3, [sp]") + TEST_UNSUPPORTED("strexb r0, r2, [sp]") + TEST_UNSUPPORTED("ldrexb r2, [sp]") + TEST_UNSUPPORTED("strexh r0, r2, [sp]") + TEST_UNSUPPORTED("ldrexh r2, [sp]") +#endif + TEST_GROUP("Extra load/store instructions") + + TEST_RPR( "strh r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") + TEST_RPR( "streqh r",14,VAL2,", 
[r",13,0, ", r",12, 48,"]") + TEST_RPR( "strh r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") + TEST_RPR( "strneh r",12,VAL2,", [r",11,48,", -r",10,24,"]!") + TEST_RPR( "strh r",2, VAL1,", [r",3, 24,"], r",4, 48,"") + TEST_RPR( "strh r",10,VAL2,", [r",9, 48,"], -r",11,24,"") + TEST_UNSUPPORTED(__inst_arm(0xe1afc0ba) " @ strh r12, [pc, r10]!") + TEST_UNSUPPORTED(__inst_arm(0xe089f0bb) " @ strh pc, [r9], r11") + TEST_UNSUPPORTED(__inst_arm(0xe089a0bf) " @ strh r10, [r9], pc") + + TEST_PR( "ldrh r0, [r",0, 48,", -r",2, 24,"]") + TEST_PR( "ldrcsh r14, [r",13,0, ", r",12, 48,"]") + TEST_PR( "ldrh r1, [r",2, 24,", r",3, 48,"]!") + TEST_PR( "ldrcch r12, [r",11,48,", -r",10,24,"]!") + TEST_PR( "ldrh r2, [r",3, 24,"], r",4, 48,"") + TEST_PR( "ldrh r10, [r",9, 48,"], -r",11,24,"") + TEST_UNSUPPORTED(__inst_arm(0xe1bfc0ba) " @ ldrh r12, [pc, r10]!") + TEST_UNSUPPORTED(__inst_arm(0xe099f0bb) " @ ldrh pc, [r9], r11") + TEST_UNSUPPORTED(__inst_arm(0xe099a0bf) " @ ldrh r10, [r9], pc") + + TEST_RP( "strh r",0, VAL1,", [r",1, 24,", #-2]") + TEST_RP( "strmih r",14,VAL2,", [r",13,0, ", #2]") + TEST_RP( "strh r",1, VAL1,", [r",2, 24,", #4]!") + TEST_RP( "strplh r",12,VAL2,", [r",11,24,", #-4]!") + TEST_RP( "strh r",2, VAL1,", [r",3, 24,"], #48") + TEST_RP( "strh r",10,VAL2,", [r",9, 64,"], #-48") + TEST_UNSUPPORTED(__inst_arm(0xe1efc3b0) " @ strh r12, [pc, #48]!") + TEST_UNSUPPORTED(__inst_arm(0xe0c9f3b0) " @ strh pc, [r9], #48") + + TEST_P( "ldrh r0, [r",0, 24,", #-2]") + TEST_P( "ldrvsh r14, [r",13,0, ", #2]") + TEST_P( "ldrh r1, [r",2, 24,", #4]!") + TEST_P( "ldrvch r12, [r",11,24,", #-4]!") + TEST_P( "ldrh r2, [r",3, 24,"], #48") + TEST_P( "ldrh r10, [r",9, 64,"], #-48") + TEST( "ldrh r0, [pc, #0]") + TEST_UNSUPPORTED(__inst_arm(0xe1ffc3b0) " @ ldrh r12, [pc, #48]!") + TEST_UNSUPPORTED(__inst_arm(0xe0d9f3b0) " @ ldrh pc, [r9], #48") + + TEST_PR( "ldrsb r0, [r",0, 48,", -r",2, 24,"]") + TEST_PR( "ldrhisb r14, [r",13,0,", r",12, 48,"]") + TEST_PR( "ldrsb r1, [r",2, 24,", r",3, 48,"]!") + TEST_PR( "ldrlssb r12, [r",11,48,", -r",10,24,"]!") + TEST_PR( "ldrsb r2, [r",3, 24,"], r",4, 48,"") + TEST_PR( "ldrsb r10, [r",9, 48,"], -r",11,24,"") + TEST_UNSUPPORTED(__inst_arm(0xe1bfc0da) " @ ldrsb r12, [pc, r10]!") + TEST_UNSUPPORTED(__inst_arm(0xe099f0db) " @ ldrsb pc, [r9], r11") + + TEST_P( "ldrsb r0, [r",0, 24,", #-1]") + TEST_P( "ldrgesb r14, [r",13,0, ", #1]") + TEST_P( "ldrsb r1, [r",2, 24,", #4]!") + TEST_P( "ldrltsb r12, [r",11,24,", #-4]!") + TEST_P( "ldrsb r2, [r",3, 24,"], #48") + TEST_P( "ldrsb r10, [r",9, 64,"], #-48") + TEST( "ldrsb r0, [pc, #0]") + TEST_UNSUPPORTED(__inst_arm(0xe1ffc3d0) " @ ldrsb r12, [pc, #48]!") + TEST_UNSUPPORTED(__inst_arm(0xe0d9f3d0) " @ ldrsb pc, [r9], #48") + + TEST_PR( "ldrsh r0, [r",0, 48,", -r",2, 24,"]") + TEST_PR( "ldrgtsh r14, [r",13,0, ", r",12, 48,"]") + TEST_PR( "ldrsh r1, [r",2, 24,", r",3, 48,"]!") + TEST_PR( "ldrlesh r12, [r",11,48,", -r",10,24,"]!") + TEST_PR( "ldrsh r2, [r",3, 24,"], r",4, 48,"") + TEST_PR( "ldrsh r10, [r",9, 48,"], -r",11,24,"") + TEST_UNSUPPORTED(__inst_arm(0xe1bfc0fa) " @ ldrsh r12, [pc, r10]!") + TEST_UNSUPPORTED(__inst_arm(0xe099f0fb) " @ ldrsh pc, [r9], r11") + + TEST_P( "ldrsh r0, [r",0, 24,", #-1]") + TEST_P( "ldreqsh r14, [r",13,0 ,", #1]") + TEST_P( "ldrsh r1, [r",2, 24,", #4]!") + TEST_P( "ldrnesh r12, [r",11,24,", #-4]!") + TEST_P( "ldrsh r2, [r",3, 24,"], #48") + TEST_P( "ldrsh r10, [r",9, 64,"], #-48") + TEST( "ldrsh r0, [pc, #0]") + TEST_UNSUPPORTED(__inst_arm(0xe1ffc3f0) " @ ldrsh r12, [pc, #48]!") + 
TEST_UNSUPPORTED(__inst_arm(0xe0d9f3f0) " @ ldrsh pc, [r9], #48") + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_UNSUPPORTED("strht r1, [r2], r3") + TEST_UNSUPPORTED("ldrht r1, [r2], r3") + TEST_UNSUPPORTED("strht r1, [r2], #48") + TEST_UNSUPPORTED("ldrht r1, [r2], #48") + TEST_UNSUPPORTED("ldrsbt r1, [r2], r3") + TEST_UNSUPPORTED("ldrsbt r1, [r2], #48") + TEST_UNSUPPORTED("ldrsht r1, [r2], r3") + TEST_UNSUPPORTED("ldrsht r1, [r2], #48") +#endif + +#if __LINUX_ARM_ARCH__ >= 5 + TEST_RPR( "strd r",0, VAL1,", [r",1, 48,", -r",2,24,"]") + TEST_RPR( "strccd r",8, VAL2,", [r",13,0, ", r",12,48,"]") + TEST_RPR( "strd r",4, VAL1,", [r",2, 24,", r",3, 48,"]!") + TEST_RPR( "strcsd r",12,VAL2,", [r",11,48,", -r",10,24,"]!") + TEST_RPR( "strd r",2, VAL1,", [r",5, 24,"], r",4,48,"") + TEST_RPR( "strd r",10,VAL2,", [r",9, 48,"], -r",7,24,"") + TEST_UNSUPPORTED(__inst_arm(0xe1afc0fa) " @ strd r12, [pc, r10]!") + + TEST_PR( "ldrd r0, [r",0, 48,", -r",2,24,"]") + TEST_PR( "ldrmid r8, [r",13,0, ", r",12,48,"]") + TEST_PR( "ldrd r4, [r",2, 24,", r",3, 48,"]!") + TEST_PR( "ldrpld r6, [r",11,48,", -r",10,24,"]!") + TEST_PR( "ldrd r2, [r",5, 24,"], r",4,48,"") + TEST_PR( "ldrd r10, [r",9,48,"], -r",7,24,"") + TEST_UNSUPPORTED(__inst_arm(0xe1afc0da) " @ ldrd r12, [pc, r10]!") + TEST_UNSUPPORTED(__inst_arm(0xe089f0db) " @ ldrd pc, [r9], r11") + TEST_UNSUPPORTED(__inst_arm(0xe089e0db) " @ ldrd lr, [r9], r11") + TEST_UNSUPPORTED(__inst_arm(0xe089c0df) " @ ldrd r12, [r9], pc") + + TEST_RP( "strd r",0, VAL1,", [r",1, 24,", #-8]") + TEST_RP( "strvsd r",8, VAL2,", [r",13,0, ", #8]") + TEST_RP( "strd r",4, VAL1,", [r",2, 24,", #16]!") + TEST_RP( "strvcd r",12,VAL2,", [r",11,24,", #-16]!") + TEST_RP( "strd r",2, VAL1,", [r",4, 24,"], #48") + TEST_RP( "strd r",10,VAL2,", [r",9, 64,"], #-48") + TEST_UNSUPPORTED(__inst_arm(0xe1efc3f0) " @ strd r12, [pc, #48]!") + + TEST_P( "ldrd r0, [r",0, 24,", #-8]") + TEST_P( "ldrhid r8, [r",13,0, ", #8]") + TEST_P( "ldrd r4, [r",2, 24,", #16]!") + TEST_P( "ldrlsd r6, [r",11,24,", #-16]!") + TEST_P( "ldrd r2, [r",5, 24,"], #48") + TEST_P( "ldrd r10, [r",9,6,"], #-48") + TEST_UNSUPPORTED(__inst_arm(0xe1efc3d0) " @ ldrd r12, [pc, #48]!") + TEST_UNSUPPORTED(__inst_arm(0xe0c9f3d0) " @ ldrd pc, [r9], #48") + TEST_UNSUPPORTED(__inst_arm(0xe0c9e3d0) " @ ldrd lr, [r9], #48") +#endif + + TEST_GROUP("Miscellaneous") + +#if __LINUX_ARM_ARCH__ >= 7 + TEST("movw r0, #0") + TEST("movw r0, #0xffff") + TEST("movw lr, #0xffff") + TEST_UNSUPPORTED(__inst_arm(0xe300f000) " @ movw pc, #0") + TEST_R("movt r",0, VAL1,", #0") + TEST_R("movt r",0, VAL2,", #0xffff") + TEST_R("movt r",14,VAL1,", #0xffff") + TEST_UNSUPPORTED(__inst_arm(0xe340f000) " @ movt pc, #0") +#endif + + TEST_UNSUPPORTED("msr cpsr, 0x13") + TEST_UNSUPPORTED("msr cpsr_f, 0xf0000000") + TEST_UNSUPPORTED("msr spsr, 0x13") + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_SUPPORTED("yield") + TEST("sev") + TEST("nop") + TEST("wfi") + TEST_SUPPORTED("wfe") + TEST_UNSUPPORTED("dbg #0") +#endif + + TEST_GROUP("Load/store word and unsigned byte") + +#define LOAD_STORE(byte) \ + TEST_RP( "str"byte" r",0, VAL1,", [r",1, 24,", #-2]") \ + TEST_RP( "str"byte" r",14,VAL2,", [r",13,0, ", #2]") \ + TEST_RP( "str"byte" r",1, VAL1,", [r",2, 24,", #4]!") \ + TEST_RP( "str"byte" r",12,VAL2,", [r",11,24,", #-4]!") \ + TEST_RP( "str"byte" r",2, VAL1,", [r",3, 24,"], #48") \ + TEST_RP( "str"byte" r",10,VAL2,", [r",9, 64,"], #-48") \ + TEST_RPR("str"byte" r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") \ + TEST_RPR("str"byte" r",14,VAL2,", [r",13,0, ", r",12, 48,"]") \ + TEST_RPR("str"byte" 
r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") \ + TEST_RPR("str"byte" r",12,VAL2,", [r",11,48,", -r",10,24,"]!") \ + TEST_RPR("str"byte" r",2, VAL1,", [r",3, 24,"], r",4, 48,"") \ + TEST_RPR("str"byte" r",10,VAL2,", [r",9, 48,"], -r",11,24,"") \ + TEST_RPR("str"byte" r",0, VAL1,", [r",1, 24,", r",2, 32,", asl #1]")\ + TEST_RPR("str"byte" r",14,VAL2,", [r",13,0, ", r",12, 32,", lsr #2]")\ + TEST_RPR("str"byte" r",1, VAL1,", [r",2, 24,", r",3, 32,", asr #3]!")\ + TEST_RPR("str"byte" r",12,VAL2,", [r",11,24,", r",10, 4,", ror #31]!")\ + TEST_P( "ldr"byte" r0, [r",0, 24,", #-2]") \ + TEST_P( "ldr"byte" r14, [r",13,0, ", #2]") \ + TEST_P( "ldr"byte" r1, [r",2, 24,", #4]!") \ + TEST_P( "ldr"byte" r12, [r",11,24,", #-4]!") \ + TEST_P( "ldr"byte" r2, [r",3, 24,"], #48") \ + TEST_P( "ldr"byte" r10, [r",9, 64,"], #-48") \ + TEST_PR( "ldr"byte" r0, [r",0, 48,", -r",2, 24,"]") \ + TEST_PR( "ldr"byte" r14, [r",13,0, ", r",12, 48,"]") \ + TEST_PR( "ldr"byte" r1, [r",2, 24,", r",3, 48,"]!") \ + TEST_PR( "ldr"byte" r12, [r",11,48,", -r",10,24,"]!") \ + TEST_PR( "ldr"byte" r2, [r",3, 24,"], r",4, 48,"") \ + TEST_PR( "ldr"byte" r10, [r",9, 48,"], -r",11,24,"") \ + TEST_PR( "ldr"byte" r0, [r",0, 24,", r",2, 32,", asl #1]") \ + TEST_PR( "ldr"byte" r14, [r",13,0, ", r",12, 32,", lsr #2]") \ + TEST_PR( "ldr"byte" r1, [r",2, 24,", r",3, 32,", asr #3]!") \ + TEST_PR( "ldr"byte" r12, [r",11,24,", r",10, 4,", ror #31]!") \ + TEST( "ldr"byte" r0, [pc, #0]") \ + TEST_R( "ldr"byte" r12, [pc, r",14,0,"]") + + LOAD_STORE("") + TEST_P( "str pc, [r",0,0,", #15*4]") + TEST_R( "str pc, [sp, r",2,15*4,"]") + TEST_BF( "ldr pc, [sp, #15*4]") + TEST_BF_R("ldr pc, [sp, r",2,15*4,"]") + + TEST_P( "str sp, [r",0,0,", #13*4]") + TEST_R( "str sp, [sp, r",2,13*4,"]") + TEST_BF( "ldr sp, [sp, #13*4]") + TEST_BF_R("ldr sp, [sp, r",2,13*4,"]") + +#ifdef CONFIG_THUMB2_KERNEL + TEST_ARM_TO_THUMB_INTERWORK_P("ldr pc, [r",0,0,", #15*4]") +#endif + TEST_UNSUPPORTED(__inst_arm(0xe5af6008) " @ str r6, [pc, #8]!") + TEST_UNSUPPORTED(__inst_arm(0xe7af6008) " @ str r6, [pc, r8]!") + TEST_UNSUPPORTED(__inst_arm(0xe5bf6008) " @ ldr r6, [pc, #8]!") + TEST_UNSUPPORTED(__inst_arm(0xe7bf6008) " @ ldr r6, [pc, r8]!") + TEST_UNSUPPORTED(__inst_arm(0xe788600f) " @ str r6, [r8, pc]") + TEST_UNSUPPORTED(__inst_arm(0xe798600f) " @ ldr r6, [r8, pc]") + + LOAD_STORE("b") + TEST_UNSUPPORTED(__inst_arm(0xe5f7f008) " @ ldrb pc, [r7, #8]!") + TEST_UNSUPPORTED(__inst_arm(0xe7f7f008) " @ ldrb pc, [r7, r8]!") + TEST_UNSUPPORTED(__inst_arm(0xe5ef6008) " @ strb r6, [pc, #8]!") + TEST_UNSUPPORTED(__inst_arm(0xe7ef6008) " @ strb r6, [pc, r3]!") + TEST_UNSUPPORTED(__inst_arm(0xe5ff6008) " @ ldrb r6, [pc, #8]!") + TEST_UNSUPPORTED(__inst_arm(0xe7ff6008) " @ ldrb r6, [pc, r3]!") + + TEST_UNSUPPORTED("ldrt r0, [r1], #4") + TEST_UNSUPPORTED("ldrt r1, [r2], r3") + TEST_UNSUPPORTED("strt r2, [r3], #4") + TEST_UNSUPPORTED("strt r3, [r4], r5") + TEST_UNSUPPORTED("ldrbt r4, [r5], #4") + TEST_UNSUPPORTED("ldrbt r5, [r6], r7") + TEST_UNSUPPORTED("strbt r6, [r7], #4") + TEST_UNSUPPORTED("strbt r7, [r8], r9") + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_GROUP("Parallel addition and subtraction, signed") + + TEST_UNSUPPORTED(__inst_arm(0xe6000010) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe60fffff) "") /* Unallocated space */ + + TEST_RR( "sadd16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sadd16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe61cff1a) " @ sadd16 pc, r12, r10") + TEST_RR( "sasx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sasx r14, r",12,HH2,", 
r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe61cff3a) " @ sasx pc, r12, r10") + TEST_RR( "ssax r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "ssax r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe61cff5a) " @ ssax pc, r12, r10") + TEST_RR( "ssub16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "ssub16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe61cff7a) " @ ssub16 pc, r12, r10") + TEST_RR( "sadd8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sadd8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe61cff9a) " @ sadd8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe61000b0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe61fffbf) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe61000d0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe61fffdf) "") /* Unallocated space */ + TEST_RR( "ssub8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "ssub8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe61cfffa) " @ ssub8 pc, r12, r10") + + TEST_RR( "qadd16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "qadd16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe62cff1a) " @ qadd16 pc, r12, r10") + TEST_RR( "qasx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "qasx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe62cff3a) " @ qasx pc, r12, r10") + TEST_RR( "qsax r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "qsax r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe62cff5a) " @ qsax pc, r12, r10") + TEST_RR( "qsub16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "qsub16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe62cff7a) " @ qsub16 pc, r12, r10") + TEST_RR( "qadd8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "qadd8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe62cff9a) " @ qadd8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe62000b0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe62fffbf) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe62000d0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe62fffdf) "") /* Unallocated space */ + TEST_RR( "qsub8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "qsub8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe62cfffa) " @ qsub8 pc, r12, r10") + + TEST_RR( "shadd16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "shadd16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe63cff1a) " @ shadd16 pc, r12, r10") + TEST_RR( "shasx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "shasx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe63cff3a) " @ shasx pc, r12, r10") + TEST_RR( "shsax r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "shsax r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe63cff5a) " @ shsax pc, r12, r10") + TEST_RR( "shsub16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "shsub16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe63cff7a) " @ shsub16 pc, r12, r10") + TEST_RR( "shadd8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "shadd8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe63cff9a) " @ shadd8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe63000b0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe63fffbf) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe63000d0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe63fffdf) "") /* Unallocated space */ + TEST_RR( "shsub8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "shsub8 r14, r",12,HH2,", r",10,HH1,"") + 
TEST_UNSUPPORTED(__inst_arm(0xe63cfffa) " @ shsub8 pc, r12, r10") + + TEST_GROUP("Parallel addition and subtraction, unsigned") + + TEST_UNSUPPORTED(__inst_arm(0xe6400010) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe64fffff) "") /* Unallocated space */ + + TEST_RR( "uadd16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uadd16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe65cff1a) " @ uadd16 pc, r12, r10") + TEST_RR( "uasx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uasx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe65cff3a) " @ uasx pc, r12, r10") + TEST_RR( "usax r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "usax r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe65cff5a) " @ usax pc, r12, r10") + TEST_RR( "usub16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "usub16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe65cff7a) " @ usub16 pc, r12, r10") + TEST_RR( "uadd8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uadd8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe65cff9a) " @ uadd8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe65000b0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe65fffbf) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe65000d0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe65fffdf) "") /* Unallocated space */ + TEST_RR( "usub8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "usub8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe65cfffa) " @ usub8 pc, r12, r10") + + TEST_RR( "uqadd16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uqadd16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe66cff1a) " @ uqadd16 pc, r12, r10") + TEST_RR( "uqasx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uqasx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe66cff3a) " @ uqasx pc, r12, r10") + TEST_RR( "uqsax r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uqsax r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe66cff5a) " @ uqsax pc, r12, r10") + TEST_RR( "uqsub16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uqsub16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe66cff7a) " @ uqsub16 pc, r12, r10") + TEST_RR( "uqadd8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uqadd8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe66cff9a) " @ uqadd8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe66000b0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe66fffbf) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe66000d0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe66fffdf) "") /* Unallocated space */ + TEST_RR( "uqsub8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uqsub8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe66cfffa) " @ uqsub8 pc, r12, r10") + + TEST_RR( "uhadd16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uhadd16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe67cff1a) " @ uhadd16 pc, r12, r10") + TEST_RR( "uhasx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uhasx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe67cff3a) " @ uhasx pc, r12, r10") + TEST_RR( "uhsax r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uhsax r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe67cff5a) " @ uhsax pc, r12, r10") + TEST_RR( "uhsub16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uhsub16 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe67cff7a) " @ uhsub16 pc, r12, r10") + TEST_RR( "uhadd8 r0, r",0, 
HH1,", r",1, HH2,"") + TEST_RR( "uhadd8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe67cff9a) " @ uhadd8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe67000b0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe67fffbf) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe67000d0) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe67fffdf) "") /* Unallocated space */ + TEST_RR( "uhsub8 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uhsub8 r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe67cfffa) " @ uhsub8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe67feffa) " @ uhsub8 r14, pc, r10") + TEST_UNSUPPORTED(__inst_arm(0xe67cefff) " @ uhsub8 r14, r12, pc") +#endif /* __LINUX_ARM_ARCH__ >= 7 */ + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_GROUP("Packing, unpacking, saturation, and reversal") + + TEST_RR( "pkhbt r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "pkhbt r14,r",12, HH1,", r",10,HH2,", lsl #2") + TEST_UNSUPPORTED(__inst_arm(0xe68cf11a) " @ pkhbt pc, r12, r10, lsl #2") + TEST_RR( "pkhtb r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "pkhtb r14,r",12, HH1,", r",10,HH2,", asr #2") + TEST_UNSUPPORTED(__inst_arm(0xe68cf15a) " @ pkhtb pc, r12, r10, asr #2") + TEST_UNSUPPORTED(__inst_arm(0xe68fe15a) " @ pkhtb r14, pc, r10, asr #2") + TEST_UNSUPPORTED(__inst_arm(0xe68ce15f) " @ pkhtb r14, r12, pc, asr #2") + TEST_UNSUPPORTED(__inst_arm(0xe6900010) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe69fffdf) "") /* Unallocated space */ + + TEST_R( "ssat r0, #24, r",0, VAL1,"") + TEST_R( "ssat r14, #24, r",12, VAL2,"") + TEST_R( "ssat r0, #24, r",0, VAL1,", lsl #8") + TEST_R( "ssat r14, #24, r",12, VAL2,", asr #8") + TEST_UNSUPPORTED(__inst_arm(0xe6b7f01c) " @ ssat pc, #24, r12") + + TEST_R( "usat r0, #24, r",0, VAL1,"") + TEST_R( "usat r14, #24, r",12, VAL2,"") + TEST_R( "usat r0, #24, r",0, VAL1,", lsl #8") + TEST_R( "usat r14, #24, r",12, VAL2,", asr #8") + TEST_UNSUPPORTED(__inst_arm(0xe6f7f01c) " @ usat pc, #24, r12") + + TEST_RR( "sxtab16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "sxtb16 r8, r",7, HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe68cf47a) " @ sxtab16 pc,r12, r10, ror #8") + + TEST_RR( "sel r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "sel r14, r",12,VAL1,", r",10, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe68cffba) " @ sel pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe68fefba) " @ sel r14, pc, r10") + TEST_UNSUPPORTED(__inst_arm(0xe68cefbf) " @ sel r14, r12, pc") + + TEST_R( "ssat16 r0, #12, r",0, HH1,"") + TEST_R( "ssat16 r14, #12, r",12, HH2,"") + TEST_UNSUPPORTED(__inst_arm(0xe6abff3c) " @ ssat16 pc, #12, r12") + + TEST_RR( "sxtab r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sxtab r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "sxtb r8, r",7, HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe6acf47a) " @ sxtab pc,r12, r10, ror #8") + + TEST_R( "rev r0, r",0, VAL1,"") + TEST_R( "rev r14, r",12, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe6bfff3c) " @ rev pc, r12") + + TEST_RR( "sxtah r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sxtah r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "sxth r8, r",7, HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe6bcf47a) " @ sxtah pc,r12, r10, ror #8") + + TEST_R( "rev16 r0, r",0, VAL1,"") + TEST_R( "rev16 r14, r",12, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe6bfffbc) " @ rev16 pc, r12") + + TEST_RR( "uxtab16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "uxtb16 r8, r",7, HH1,"") + 
TEST_UNSUPPORTED(__inst_arm(0xe6ccf47a) " @ uxtab16 pc,r12, r10, ror #8") + + TEST_R( "usat16 r0, #12, r",0, HH1,"") + TEST_R( "usat16 r14, #12, r",12, HH2,"") + TEST_UNSUPPORTED(__inst_arm(0xe6ecff3c) " @ usat16 pc, #12, r12") + TEST_UNSUPPORTED(__inst_arm(0xe6ecef3f) " @ usat16 r14, #12, pc") + + TEST_RR( "uxtab r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uxtab r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "uxtb r8, r",7, HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe6ecf47a) " @ uxtab pc,r12, r10, ror #8") + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_R( "rbit r0, r",0, VAL1,"") + TEST_R( "rbit r14, r",12, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe6ffff3c) " @ rbit pc, r12") +#endif + + TEST_RR( "uxtah r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uxtah r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "uxth r8, r",7, HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe6fff077) " @ uxth pc, r7") + TEST_UNSUPPORTED(__inst_arm(0xe6ff807f) " @ uxth r8, pc") + TEST_UNSUPPORTED(__inst_arm(0xe6fcf47a) " @ uxtah pc, r12, r10, ror #8") + TEST_UNSUPPORTED(__inst_arm(0xe6fce47f) " @ uxtah r14, r12, pc, ror #8") + + TEST_R( "revsh r0, r",0, VAL1,"") + TEST_R( "revsh r14, r",12, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe6ffff3c) " @ revsh pc, r12") + TEST_UNSUPPORTED(__inst_arm(0xe6ffef3f) " @ revsh r14, pc") + + TEST_UNSUPPORTED(__inst_arm(0xe6900070) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe69fff7f) "") /* Unallocated space */ + + TEST_UNSUPPORTED(__inst_arm(0xe6d00070) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_arm(0xe6dfff7f) "") /* Unallocated space */ +#endif /* __LINUX_ARM_ARCH__ >= 6 */ + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_GROUP("Signed multiplies") + + TEST_RRR( "smlad r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smlad r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe70f8a1c) " @ smlad pc, r12, r10, r8") + TEST_RRR( "smladx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smladx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe70f8a3c) " @ smladx pc, r12, r10, r8") + + TEST_RR( "smuad r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smuad r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe70ffa1c) " @ smuad pc, r12, r10") + TEST_RR( "smuadx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smuadx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe70ffa3c) " @ smuadx pc, r12, r10") + + TEST_RRR( "smlsd r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smlsd r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe70f8a5c) " @ smlsd pc, r12, r10, r8") + TEST_RRR( "smlsdx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smlsdx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe70f8a7c) " @ smlsdx pc, r12, r10, r8") + + TEST_RR( "smusd r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smusd r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe70ffa5c) " @ smusd pc, r12, r10") + TEST_RR( "smusdx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smusdx r14, r",12,HH2,", r",10,HH1,"") + TEST_UNSUPPORTED(__inst_arm(0xe70ffa7c) " @ smusdx pc, r12, r10") + + TEST_RRRR( "smlald r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) + TEST_RRRR( "smlald r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) + TEST_UNSUPPORTED(__inst_arm(0xe74af819) " @ smlald pc, r10, r9, r8") + TEST_UNSUPPORTED(__inst_arm(0xe74fb819) " @ smlald r11, pc, r9, r8") + TEST_UNSUPPORTED(__inst_arm(0xe74ab81f) " @ smlald r11, r10, pc, r8") + 
TEST_UNSUPPORTED(__inst_arm(0xe74abf19) " @ smlald r11, r10, r9, pc") + + TEST_RRRR( "smlaldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) + TEST_RRRR( "smlaldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) + TEST_UNSUPPORTED(__inst_arm(0xe74af839) " @ smlaldx pc, r10, r9, r8") + TEST_UNSUPPORTED(__inst_arm(0xe74fb839) " @ smlaldx r11, pc, r9, r8") + + TEST_RRR( "smmla r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmla r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe75f8a1c) " @ smmla pc, r12, r10, r8") + TEST_RRR( "smmlar r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmlar r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe75f8a3c) " @ smmlar pc, r12, r10, r8") + + TEST_RR( "smmul r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "smmul r14, r",12,VAL2,", r",10,VAL1,"") + TEST_UNSUPPORTED(__inst_arm(0xe75ffa1c) " @ smmul pc, r12, r10") + TEST_RR( "smmulr r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "smmulr r14, r",12,VAL2,", r",10,VAL1,"") + TEST_UNSUPPORTED(__inst_arm(0xe75ffa3c) " @ smmulr pc, r12, r10") + + TEST_RRR( "smmls r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmls r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe75f8adc) " @ smmls pc, r12, r10, r8") + TEST_RRR( "smmlsr r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmlsr r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_UNSUPPORTED(__inst_arm(0xe75f8afc) " @ smmlsr pc, r12, r10, r8") + TEST_UNSUPPORTED(__inst_arm(0xe75e8aff) " @ smmlsr r14, pc, r10, r8") + TEST_UNSUPPORTED(__inst_arm(0xe75e8ffc) " @ smmlsr r14, r12, pc, r8") + TEST_UNSUPPORTED(__inst_arm(0xe75efafc) " @ smmlsr r14, r12, r10, pc") + + TEST_RR( "usad8 r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "usad8 r14, r",12,VAL2,", r",10,VAL1,"") + TEST_UNSUPPORTED(__inst_arm(0xe75ffa1c) " @ usad8 pc, r12, r10") + TEST_UNSUPPORTED(__inst_arm(0xe75efa1f) " @ usad8 r14, pc, r10") + TEST_UNSUPPORTED(__inst_arm(0xe75eff1c) " @ usad8 r14, r12, pc") + + TEST_RRR( "usada8 r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL3,"") + TEST_RRR( "usada8 r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL3,"") + TEST_UNSUPPORTED(__inst_arm(0xe78f8a1c) " @ usada8 pc, r12, r10, r8") + TEST_UNSUPPORTED(__inst_arm(0xe78e8a1f) " @ usada8 r14, pc, r10, r8") + TEST_UNSUPPORTED(__inst_arm(0xe78e8f1c) " @ usada8 r14, r12, pc, r8") +#endif /* __LINUX_ARM_ARCH__ >= 6 */ + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_GROUP("Bit Field") + + TEST_R( "sbfx r0, r",0 , VAL1,", #0, #31") + TEST_R( "sbfxeq r14, r",12, VAL2,", #8, #16") + TEST_R( "sbfx r4, r",10, VAL1,", #16, #15") + TEST_UNSUPPORTED(__inst_arm(0xe7aff45c) " @ sbfx pc, r12, #8, #16") + + TEST_R( "ubfx r0, r",0 , VAL1,", #0, #31") + TEST_R( "ubfxcs r14, r",12, VAL2,", #8, #16") + TEST_R( "ubfx r4, r",10, VAL1,", #16, #15") + TEST_UNSUPPORTED(__inst_arm(0xe7eff45c) " @ ubfx pc, r12, #8, #16") + TEST_UNSUPPORTED(__inst_arm(0xe7efc45f) " @ ubfx r12, pc, #8, #16") + + TEST_R( "bfc r",0, VAL1,", #4, #20") + TEST_R( "bfcvs r",14,VAL2,", #4, #20") + TEST_R( "bfc r",7, VAL1,", #0, #31") + TEST_R( "bfc r",8, VAL2,", #0, #31") + TEST_UNSUPPORTED(__inst_arm(0xe7def01f) " @ bfc pc, #0, #31"); + + TEST_RR( "bfi r",0, VAL1,", r",0 , VAL2,", #0, #31") + TEST_RR( "bfipl r",12,VAL1,", r",14 , VAL2,", #4, #20") + TEST_UNSUPPORTED(__inst_arm(0xe7d7f21e) " @ bfi pc, r14, #4, #20") + + TEST_UNSUPPORTED(__inst_arm(0x07f000f0) "") /* Permanently UNDEFINED */ + TEST_UNSUPPORTED(__inst_arm(0x07ffffff) "") /* Permanently 
UNDEFINED */ +#endif /* __LINUX_ARM_ARCH__ >= 6 */ + + TEST_GROUP("Branch, branch with link, and block data transfer") + + TEST_P( "stmda r",0, 16*4,", {r0}") + TEST_P( "stmeqda r",4, 16*4,", {r0-r15}") + TEST_P( "stmneda r",8, 16*4,"!, {r8-r15}") + TEST_P( "stmda r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_P( "stmda r",13,0, "!, {pc}") + + TEST_P( "ldmda r",0, 16*4,", {r0}") + TEST_BF_P("ldmcsda r",4, 15*4,", {r0-r15}") + TEST_BF_P("ldmccda r",7, 15*4,"!, {r8-r15}") + TEST_P( "ldmda r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_BF_P("ldmda r",14,15*4,"!, {pc}") + + TEST_P( "stmia r",0, 16*4,", {r0}") + TEST_P( "stmmiia r",4, 16*4,", {r0-r15}") + TEST_P( "stmplia r",8, 16*4,"!, {r8-r15}") + TEST_P( "stmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_P( "stmia r",14,0, "!, {pc}") + + TEST_P( "ldmia r",0, 16*4,", {r0}") + TEST_BF_P("ldmvsia r",4, 0, ", {r0-r15}") + TEST_BF_P("ldmvcia r",7, 8*4, "!, {r8-r15}") + TEST_P( "ldmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_BF_P("ldmia r",14,15*4,"!, {pc}") + + TEST_P( "stmdb r",0, 16*4,", {r0}") + TEST_P( "stmhidb r",4, 16*4,", {r0-r15}") + TEST_P( "stmlsdb r",8, 16*4,"!, {r8-r15}") + TEST_P( "stmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_P( "stmdb r",13,4, "!, {pc}") + + TEST_P( "ldmdb r",0, 16*4,", {r0}") + TEST_BF_P("ldmgedb r",4, 16*4,", {r0-r15}") + TEST_BF_P("ldmltdb r",7, 16*4,"!, {r8-r15}") + TEST_P( "ldmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_BF_P("ldmdb r",14,16*4,"!, {pc}") + + TEST_P( "stmib r",0, 16*4,", {r0}") + TEST_P( "stmgtib r",4, 16*4,", {r0-r15}") + TEST_P( "stmleib r",8, 16*4,"!, {r8-r15}") + TEST_P( "stmib r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_P( "stmib r",13,-4, "!, {pc}") + + TEST_P( "ldmib r",0, 16*4,", {r0}") + TEST_BF_P("ldmeqib r",4, -4,", {r0-r15}") + TEST_BF_P("ldmneib r",7, 7*4,"!, {r8-r15}") + TEST_P( "ldmib r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_BF_P("ldmib r",14,14*4,"!, {pc}") + + TEST_P( "stmdb r",13,16*4,"!, {r3-r12,lr}") + TEST_P( "stmeqdb r",13,16*4,"!, {r3-r12}") + TEST_P( "stmnedb r",2, 16*4,", {r3-r12,lr}") + TEST_P( "stmdb r",13,16*4,"!, {r2-r12,lr}") + TEST_P( "stmdb r",0, 16*4,", {r0-r12}") + TEST_P( "stmdb r",0, 16*4,", {r0-r12,lr}") + + TEST_BF_P("ldmia r",13,5*4, "!, {r3-r12,pc}") + TEST_P( "ldmccia r",13,5*4, "!, {r3-r12}") + TEST_BF_P("ldmcsia r",2, 5*4, "!, {r3-r12,pc}") + TEST_BF_P("ldmia r",13,4*4, "!, {r2-r12,pc}") + TEST_P( "ldmia r",0, 16*4,", {r0-r12}") + TEST_P( "ldmia r",0, 16*4,", {r0-r12,lr}") + +#ifdef CONFIG_THUMB2_KERNEL + TEST_ARM_TO_THUMB_INTERWORK_P("ldmplia r",0,15*4,", {pc}") + TEST_ARM_TO_THUMB_INTERWORK_P("ldmmiia r",13,0,", {r0-r15}") +#endif + TEST_BF("b 2f") + TEST_BF("bl 2f") + TEST_BB("b 2b") + TEST_BB("bl 2b") + + TEST_BF("beq 2f") + TEST_BF("bleq 2f") + TEST_BB("bne 2b") + TEST_BB("blne 2b") + + TEST_BF("bgt 2f") + TEST_BF("blgt 2f") + TEST_BB("blt 2b") + TEST_BB("bllt 2b") + + TEST_GROUP("Supervisor Call, and coprocessor instructions") + + /* + * We can't really test these by executing them, so all + * we can do is check that probes are, or are not allowed. + * At the moment none are allowed... 
+ */ +#define TEST_COPROCESSOR(code) TEST_UNSUPPORTED(code) + +#define COPROCESSOR_INSTRUCTIONS_ST_LD(two,cc) \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #4]") \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #-4]") \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #4]!") \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13, #-4]!") \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13], #4") \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13], #-4") \ + TEST_COPROCESSOR("stc"two" 0, cr0, [r13], {1}") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #4]") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #-4]") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #4]!") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13, #-4]!") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13], #4") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13], #-4") \ + TEST_COPROCESSOR("stc"two"l 0, cr0, [r13], {1}") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #4]") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #-4]") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #4]!") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13, #-4]!") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13], #4") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13], #-4") \ + TEST_COPROCESSOR("ldc"two" 0, cr0, [r13], {1}") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #4]") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #-4]") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #4]!") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13, #-4]!") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13], #4") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13], #-4") \ + TEST_COPROCESSOR("ldc"two"l 0, cr0, [r13], {1}") \ + \ + TEST_COPROCESSOR( "stc"two" 0, cr0, [r15, #4]") \ + TEST_COPROCESSOR( "stc"two" 0, cr0, [r15, #-4]") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##daf0001) " @ stc"two" 0, cr0, [r15, #4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##d2f0001) " @ stc"two" 0, cr0, [r15, #-4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##caf0001) " @ stc"two" 0, cr0, [r15], #4") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c2f0001) " @ stc"two" 0, cr0, [r15], #-4") \ + TEST_COPROCESSOR( "stc"two" 0, cr0, [r15], {1}") \ + TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15, #4]") \ + TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15, #-4]") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##def0001) " @ stc"two"l 0, cr0, [r15, #4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##d6f0001) " @ stc"two"l 0, cr0, [r15, #-4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##cef0001) " @ stc"two"l 0, cr0, [r15], #4") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c6f0001) " @ stc"two"l 0, cr0, [r15], #-4") \ + TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15], {1}") \ + TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15, #4]") \ + TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15, #-4]") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##dbf0001) " @ ldc"two" 0, cr0, [r15, #4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##d3f0001) " @ ldc"two" 0, cr0, [r15, #-4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##cbf0001) " @ ldc"two" 0, cr0, [r15], #4") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c3f0001) " @ ldc"two" 0, cr0, [r15], #-4") \ + TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15], {1}") \ + TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15, #4]") \ + TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15, #-4]") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##dff0001) " @ ldc"two"l 0, cr0, [r15, #4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##d7f0001) " @ ldc"two"l 0, cr0, [r15, #-4]!") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##cff0001) " @ ldc"two"l 0, cr0, [r15], #4") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c7f0001) " @ ldc"two"l 0, cr0, [r15], #-4") \ + TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15], {1}") 
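The string and token pasting in the macro above is what lets one definition cover both the conditional and the unconditional coprocessor forms: the two parameter is spliced into the mnemonic strings (an empty string gives stc/ldc, "2" gives stc2/ldc2), while the cc parameter supplies the condition nibble of the hand-encoded opcodes (e for the conditional variants, f for the unconditional ones used further down). A stand-alone sketch of the same preprocessor technique, with invented MK_* names that are not part of the patch:

#include <stdio.h>

/* String pasting: an empty 'two' yields "stc ...", "2" yields "stc2 ..." */
#define MK_MNEMONIC(two) "stc" two " 0, cr0, [r13, #4]"
/* Token pasting: 'cc' becomes the top (condition) nibble of the opcode */
#define MK_OPCODE(cc)    0x##cc##daf0001u

int main(void)
{
	printf("%s -> 0x%08x\n", MK_MNEMONIC(""),  MK_OPCODE(e)); /* 0xedaf0001 */
	printf("%s -> 0x%08x\n", MK_MNEMONIC("2"), MK_OPCODE(f)); /* 0xfdaf0001 */
	return 0;
}

Compiled as ordinary user-space C this prints "stc 0, cr0, [r13, #4] -> 0xedaf0001" and "stc2 0, cr0, [r13, #4] -> 0xfdaf0001", mirroring the COPROCESSOR_INSTRUCTIONS_ST_LD("",e) and ("2",f) invocations below.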
+ +#define COPROCESSOR_INSTRUCTIONS_MC_MR(two,cc) \ + \ + TEST_COPROCESSOR( "mcrr"two" 0, 15, r0, r14, cr0") \ + TEST_COPROCESSOR( "mcrr"two" 15, 0, r14, r0, cr15") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c4f00f0) " @ mcrr"two" 0, 15, r0, r15, cr0") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c40ff0f) " @ mcrr"two" 15, 0, r15, r0, cr15") \ + TEST_COPROCESSOR( "mrrc"two" 0, 15, r0, r14, cr0") \ + TEST_COPROCESSOR( "mrrc"two" 15, 0, r14, r0, cr15") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c5f00f0) " @ mrrc"two" 0, 15, r0, r15, cr0") \ + TEST_UNSUPPORTED(__inst_arm(0x##cc##c50ff0f) " @ mrrc"two" 15, 0, r15, r0, cr15") \ + TEST_COPROCESSOR( "cdp"two" 15, 15, cr15, cr15, cr15, 7") \ + TEST_COPROCESSOR( "cdp"two" 0, 0, cr0, cr0, cr0, 0") \ + TEST_COPROCESSOR( "mcr"two" 15, 7, r15, cr15, cr15, 7") \ + TEST_COPROCESSOR( "mcr"two" 0, 0, r0, cr0, cr0, 0") \ + TEST_COPROCESSOR( "mrc"two" 15, 7, r15, cr15, cr15, 7") \ + TEST_COPROCESSOR( "mrc"two" 0, 0, r0, cr0, cr0, 0") + + COPROCESSOR_INSTRUCTIONS_ST_LD("",e) +#if __LINUX_ARM_ARCH__ >= 5 + COPROCESSOR_INSTRUCTIONS_MC_MR("",e) +#endif + TEST_UNSUPPORTED("svc 0") + TEST_UNSUPPORTED("svc 0xffffff") + + TEST_UNSUPPORTED("svc 0") + + TEST_GROUP("Unconditional instruction") + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_UNSUPPORTED("srsda sp, 0x13") + TEST_UNSUPPORTED("srsdb sp, 0x13") + TEST_UNSUPPORTED("srsia sp, 0x13") + TEST_UNSUPPORTED("srsib sp, 0x13") + TEST_UNSUPPORTED("srsda sp!, 0x13") + TEST_UNSUPPORTED("srsdb sp!, 0x13") + TEST_UNSUPPORTED("srsia sp!, 0x13") + TEST_UNSUPPORTED("srsib sp!, 0x13") + + TEST_UNSUPPORTED("rfeda sp") + TEST_UNSUPPORTED("rfedb sp") + TEST_UNSUPPORTED("rfeia sp") + TEST_UNSUPPORTED("rfeib sp") + TEST_UNSUPPORTED("rfeda sp!") + TEST_UNSUPPORTED("rfedb sp!") + TEST_UNSUPPORTED("rfeia sp!") + TEST_UNSUPPORTED("rfeib sp!") + TEST_UNSUPPORTED(__inst_arm(0xf81d0a00) " @ rfeda pc") + TEST_UNSUPPORTED(__inst_arm(0xf91d0a00) " @ rfedb pc") + TEST_UNSUPPORTED(__inst_arm(0xf89d0a00) " @ rfeia pc") + TEST_UNSUPPORTED(__inst_arm(0xf99d0a00) " @ rfeib pc") + TEST_UNSUPPORTED(__inst_arm(0xf83d0a00) " @ rfeda pc!") + TEST_UNSUPPORTED(__inst_arm(0xf93d0a00) " @ rfedb pc!") + TEST_UNSUPPORTED(__inst_arm(0xf8bd0a00) " @ rfeia pc!") + TEST_UNSUPPORTED(__inst_arm(0xf9bd0a00) " @ rfeib pc!") +#endif /* __LINUX_ARM_ARCH__ >= 6 */ + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_X( "blx __dummy_thumb_subroutine_even", + ".thumb \n\t" + ".space 4 \n\t" + ".type __dummy_thumb_subroutine_even, %%function \n\t" + "__dummy_thumb_subroutine_even: \n\t" + "mov r0, pc \n\t" + "bx lr \n\t" + ".arm \n\t" + ) + TEST( "blx __dummy_thumb_subroutine_even") + + TEST_X( "blx __dummy_thumb_subroutine_odd", + ".thumb \n\t" + ".space 2 \n\t" + ".type __dummy_thumb_subroutine_odd, %%function \n\t" + "__dummy_thumb_subroutine_odd: \n\t" + "mov r0, pc \n\t" + "bx lr \n\t" + ".arm \n\t" + ) + TEST( "blx __dummy_thumb_subroutine_odd") +#endif /* __LINUX_ARM_ARCH__ >= 6 */ + +#if __LINUX_ARM_ARCH__ >= 5 + COPROCESSOR_INSTRUCTIONS_ST_LD("2",f) +#endif +#if __LINUX_ARM_ARCH__ >= 6 + COPROCESSOR_INSTRUCTIONS_MC_MR("2",f) +#endif + + TEST_GROUP("Miscellaneous instructions, memory hints, and Advanced SIMD instructions") + +#if __LINUX_ARM_ARCH__ >= 6 + TEST_UNSUPPORTED("cps 0x13") + TEST_UNSUPPORTED("cpsie i") + TEST_UNSUPPORTED("cpsid i") + TEST_UNSUPPORTED("cpsie i,0x13") + TEST_UNSUPPORTED("cpsid i,0x13") + TEST_UNSUPPORTED("setend le") + TEST_UNSUPPORTED("setend be") +#endif + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_P("pli [r",0,0b,", #16]") + TEST( "pli [pc, #0]") + TEST_RR("pli [r",12,0b,", 
r",0, 16,"]") + TEST_RR("pli [r",0, 0b,", -r",12,16,", lsl #4]") +#endif + +#if __LINUX_ARM_ARCH__ >= 5 + TEST_P("pld [r",0,32,", #-16]") + TEST( "pld [pc, #0]") + TEST_PR("pld [r",7, 24, ", r",0, 16,"]") + TEST_PR("pld [r",8, 24, ", -r",12,16,", lsl #4]") +#endif + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_SUPPORTED( __inst_arm(0xf590f000) " @ pldw [r0, #0]") + TEST_SUPPORTED( __inst_arm(0xf797f000) " @ pldw [r7, r0]") + TEST_SUPPORTED( __inst_arm(0xf798f18c) " @ pldw [r8, r12, lsl #3]"); +#endif + +#if __LINUX_ARM_ARCH__ >= 7 + TEST_UNSUPPORTED("clrex") + TEST_UNSUPPORTED("dsb") + TEST_UNSUPPORTED("dmb") + TEST_UNSUPPORTED("isb") +#endif + + verbose("\n"); +} + diff --git a/arch/arm/probes/kprobes/test-core.c b/arch/arm/probes/kprobes/test-core.c new file mode 100644 index 000000000000..7ab633d51954 --- /dev/null +++ b/arch/arm/probes/kprobes/test-core.c @@ -0,0 +1,1713 @@ +/* + * arch/arm/kernel/kprobes-test.c + * + * Copyright (C) 2011 Jon Medhurst . + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +/* + * This file contains test code for ARM kprobes. + * + * The top level function run_all_tests() executes tests for all of the + * supported instruction sets: ARM, 16-bit Thumb, and 32-bit Thumb. These tests + * fall into two categories; run_api_tests() checks basic functionality of the + * kprobes API, and run_test_cases() is a comprehensive test for kprobes + * instruction decoding and simulation. + * + * run_test_cases() first checks the kprobes decoding table for self consistency + * (using table_test()) then executes a series of test cases for each of the CPU + * instruction forms. coverage_start() and coverage_end() are used to verify + * that these test cases cover all of the possible combinations of instructions + * described by the kprobes decoding tables. + * + * The individual test cases are in kprobes-test-arm.c and kprobes-test-thumb.c + * which use the macros defined in kprobes-test.h. The rest of this + * documentation will describe the operation of the framework used by these + * test cases. + */ + +/* + * TESTING METHODOLOGY + * ------------------- + * + * The methodology used to test an ARM instruction 'test_insn' is to use + * inline assembler like: + * + * test_before: nop + * test_case: test_insn + * test_after: nop + * + * When the test case is run a kprobe is placed of each nop. The + * post-handler of the test_before probe is used to modify the saved CPU + * register context to that which we require for the test case. The + * pre-handler of the of the test_after probe saves a copy of the CPU + * register context. In this way we can execute test_insn with a specific + * register context and see the results afterwards. + * + * To actually test the kprobes instruction emulation we perform the above + * step a second time but with an additional kprobe on the test_case + * instruction itself. If the emulation is accurate then the results seen + * by the test_after probe will be identical to the first run which didn't + * have a probe on test_case. + * + * Each test case is run several times with a variety of variations in the + * flags value of stored in CPSR, and for Thumb code, different ITState. 
+ * + * For instructions which can modify PC, a second test_after probe is used + * like this: + * + * test_before: nop + * test_case: test_insn + * test_after: nop + * b test_done + * test_after2: nop + * test_done: + * + * The test case is constructed such that test_insn branches to + * test_after2, or, if testing a conditional instruction, it may just + * continue to test_after. The probes inserted at both locations let us + * determine which happened. A similar approach is used for testing + * backwards branches... + * + * b test_before + * b test_done @ helps to cope with off by 1 branches + * test_after2: nop + * b test_done + * test_before: nop + * test_case: test_insn + * test_after: nop + * test_done: + * + * The macros used to generate the assembler instructions described above + * are TEST_INSTRUCTION, TEST_BRANCH_F (branch forwards) and TEST_BRANCH_B + * (branch backwards). In these, the local labels numbered 1, 50, 2 and + * 99 represent: test_before, test_case, test_after2 and test_done. + * + * FRAMEWORK + * --------- + * + * Each test case is wrapped between the pair of macros TESTCASE_START and + * TESTCASE_END. As well as performing the inline assembler boilerplate, + * these call out to the kprobes_test_case_start() and + * kprobes_test_case_end() functions which drive the execution of the test + * case. The specific arguments to use for each test case are stored as + * inline data constructed using the various TEST_ARG_* macros. Putting + * this all together, a simple test case may look like: + * + * TESTCASE_START("Testing mov r0, r7") + * TEST_ARG_REG(7, 0x12345678) // Set r7=0x12345678 + * TEST_ARG_END("") + * TEST_INSTRUCTION("mov r0, r7") + * TESTCASE_END + * + * Note, in practice the single convenience macro TEST_R would be used for this + * instead. + * + * The above would expand to assembler looking something like: + * + * @ TESTCASE_START + * bl __kprobes_test_case_start + * .pushsection .rodata + * 10: + * .ascii "mov r0, r7" @ text title for test case + * .byte 0 + * .popsection + * @ start of inline data... + * .word 10b @ pointer to title in .rodata section + * + * @ TEST_ARG_REG + * .byte ARG_TYPE_REG + * .byte 7 + * .short 0 + * .word 0x12345678 + * + * @ TEST_ARG_END + * .byte ARG_TYPE_END + * .byte TEST_ISA @ flags, including ISA being tested + * .short 50f-0f @ offset of 'test_before' + * .short 2f-0f @ offset of 'test_after2' (if relevant) + * .short 99f-0f @ offset of 'test_done' + * @ start of test case code... + * 0: + * .code TEST_ISA @ switch to ISA being tested + * + * @ TEST_INSTRUCTION + * 50: nop @ location for 'test_before' probe + * 1: mov r0, r7 @ the test case instruction 'test_insn' + * nop @ location for 'test_after' probe + * + * @ TESTCASE_END + * 2: + * 99: bl __kprobes_test_case_end_##TEST_ISA + * .code NORMAL_ISA + * + * When the above is executed the following happens... + * + * __kprobes_test_case_start() is an assembler wrapper which sets up space + * for a stack buffer and calls the C function kprobes_test_case_start(). + * This C function will do some initial processing of the inline data and + * set up some global state. It then inserts the test_before and test_after + * kprobes and returns a value which causes the assembler wrapper to jump + * to the start of the test case code (local label '0'). + * + * When the test case code executes, the test_before probe will be hit and + * test_before_post_handler will call setup_test_context().
This fills the + * stack buffer and CPU registers with a test pattern and then processes + * the test case arguments. In our example there is one TEST_ARG_REG which + * indicates that R7 should be loaded with the value 0x12345678. + * + * When the test_before probe ends, the test case continues and executes + * the "mov r0, r7" instruction. It then hits the test_after probe and the + * pre-handler for this (test_after_pre_handler) will save a copy of the + * CPU register context. This should now have R0 holding the same value as + * R7. + * + * Finally we get to the call to __kprobes_test_case_end_{32,16}. This is + * an assembler wrapper which switches back to the ISA used by the test + * code and calls the C function kprobes_test_case_end(). + * + * For each run through the test case, test_case_run_count is incremented + * by one. For even runs, kprobes_test_case_end() saves a copy of the + * register and stack buffer contents from the test case just run. It then + * inserts a kprobe on the test case instruction 'test_insn' and returns a + * value to cause the test case code to be re-run. + * + * For odd numbered runs, kprobes_test_case_end() compares the register and + * stack buffer contents to those that were saved on the previous even + * numbered run (the one without the kprobe on test_insn). These should be + * the same if the kprobe instruction simulation routine is correct. + * + * The pair of test case runs is repeated with different combinations of + * flag values in CPSR and, for Thumb, different ITState. This is + * controlled by test_context_cpsr(). + * + * BUILDING TEST CASES + * ------------------- + * + * + * As an aid to building test cases, the stack buffer is initialised with + * some special values: + * + * [SP+13*4] Contains SP+120. This can be used to test instructions + * which load a value into SP. + * + * [SP+15*4] When testing branching instructions using TEST_BRANCH_{F,B}, + * this holds the target address of the branch, 'test_after2'. + * This can be used to test instructions which load a PC value + * from memory. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "core.h" +#include "test-core.h" +#include "../decode-arm.h" +#include "../decode-thumb.h" + + +#define BENCHMARKING 1 + + +/* + * Test basic API + */ + +static bool test_regs_ok; +static int test_func_instance; +static int pre_handler_called; +static int post_handler_called; +static int jprobe_func_called; +static int kretprobe_handler_called; +static int tests_failed; + +#define FUNC_ARG1 0x12345678 +#define FUNC_ARG2 0xabcdef + + +#ifndef CONFIG_THUMB2_KERNEL + +long arm_func(long r0, long r1); + +static void __used __naked __arm_kprobes_test_func(void) +{ + __asm__ __volatile__ ( + ".arm \n\t" + ".type arm_func, %%function \n\t" + "arm_func: \n\t" + "adds r0, r0, r1 \n\t" + "bx lr \n\t" + ".code "NORMAL_ISA /* Back to Thumb if necessary */ + : : : "r0", "r1", "cc" + ); +} + +#else /* CONFIG_THUMB2_KERNEL */ + +long thumb16_func(long r0, long r1); +long thumb32even_func(long r0, long r1); +long thumb32odd_func(long r0, long r1); + +static void __used __naked __thumb_kprobes_test_funcs(void) +{ + __asm__ __volatile__ ( + ".type thumb16_func, %%function \n\t" + "thumb16_func: \n\t" + "adds.n r0, r0, r1 \n\t" + "bx lr \n\t" + + ".align \n\t" + ".type thumb32even_func, %%function \n\t" + "thumb32even_func: \n\t" + "adds.w r0, r0, r1 \n\t" + "bx lr \n\t" + + ".align \n\t" + "nop.n \n\t" + ".type thumb32odd_func, %%function \n\t" + "thumb32odd_func: \n\t" + "adds.w r0, r0, r1 \n\t" + "bx lr \n\t" + + : : : "r0", "r1", "cc" + ); +} + +#endif /* CONFIG_THUMB2_KERNEL */ + + +static int call_test_func(long (*func)(long, long), bool check_test_regs) +{ + long ret; + + ++test_func_instance; + test_regs_ok = false; + + ret = (*func)(FUNC_ARG1, FUNC_ARG2); + if (ret != FUNC_ARG1 + FUNC_ARG2) { + pr_err("FAIL: call_test_func: func returned %lx\n", ret); + return false; + } + + if (check_test_regs && !test_regs_ok) { + pr_err("FAIL: test regs not OK\n"); + return false; + } + + return true; +} + +static int __kprobes pre_handler(struct kprobe *p, struct pt_regs *regs) +{ + pre_handler_called = test_func_instance; + if (regs->ARM_r0 == FUNC_ARG1 && regs->ARM_r1 == FUNC_ARG2) + test_regs_ok = true; + return 0; +} + +static void __kprobes post_handler(struct kprobe *p, struct pt_regs *regs, + unsigned long flags) +{ + post_handler_called = test_func_instance; + if (regs->ARM_r0 != FUNC_ARG1 + FUNC_ARG2 || regs->ARM_r1 != FUNC_ARG2) + test_regs_ok = false; +} + +static struct kprobe the_kprobe = { + .addr = 0, + .pre_handler = pre_handler, + .post_handler = post_handler +}; + +static int test_kprobe(long (*func)(long, long)) +{ + int ret; + + the_kprobe.addr = (kprobe_opcode_t *)func; + ret = register_kprobe(&the_kprobe); + if (ret < 0) { + pr_err("FAIL: register_kprobe failed with %d\n", ret); + return ret; + } + + ret = call_test_func(func, true); + + unregister_kprobe(&the_kprobe); + the_kprobe.flags = 0; /* Clear disable flag to allow reuse */ + + if (!ret) + return -EINVAL; + if (pre_handler_called != test_func_instance) { + pr_err("FAIL: kprobe pre_handler not called\n"); + return -EINVAL; + } + if (post_handler_called != test_func_instance) { + pr_err("FAIL: kprobe post_handler not called\n"); + return -EINVAL; + } + if (!call_test_func(func, false)) + return -EINVAL; + if (pre_handler_called == test_func_instance || + post_handler_called == test_func_instance) { + pr_err("FAIL: probe called after unregistering\n"); + return -EINVAL; + } + + return 0; +} + +static void __kprobes jprobe_func(long r0, 
long r1) +{ + jprobe_func_called = test_func_instance; + if (r0 == FUNC_ARG1 && r1 == FUNC_ARG2) + test_regs_ok = true; + jprobe_return(); +} + +static struct jprobe the_jprobe = { + .entry = jprobe_func, +}; + +static int test_jprobe(long (*func)(long, long)) +{ + int ret; + + the_jprobe.kp.addr = (kprobe_opcode_t *)func; + ret = register_jprobe(&the_jprobe); + if (ret < 0) { + pr_err("FAIL: register_jprobe failed with %d\n", ret); + return ret; + } + + ret = call_test_func(func, true); + + unregister_jprobe(&the_jprobe); + the_jprobe.kp.flags = 0; /* Clear disable flag to allow reuse */ + + if (!ret) + return -EINVAL; + if (jprobe_func_called != test_func_instance) { + pr_err("FAIL: jprobe handler function not called\n"); + return -EINVAL; + } + if (!call_test_func(func, false)) + return -EINVAL; + if (jprobe_func_called == test_func_instance) { + pr_err("FAIL: probe called after unregistering\n"); + return -EINVAL; + } + + return 0; +} + +static int __kprobes +kretprobe_handler(struct kretprobe_instance *ri, struct pt_regs *regs) +{ + kretprobe_handler_called = test_func_instance; + if (regs_return_value(regs) == FUNC_ARG1 + FUNC_ARG2) + test_regs_ok = true; + return 0; +} + +static struct kretprobe the_kretprobe = { + .handler = kretprobe_handler, +}; + +static int test_kretprobe(long (*func)(long, long)) +{ + int ret; + + the_kretprobe.kp.addr = (kprobe_opcode_t *)func; + ret = register_kretprobe(&the_kretprobe); + if (ret < 0) { + pr_err("FAIL: register_kretprobe failed with %d\n", ret); + return ret; + } + + ret = call_test_func(func, true); + + unregister_kretprobe(&the_kretprobe); + the_kretprobe.kp.flags = 0; /* Clear disable flag to allow reuse */ + + if (!ret) + return -EINVAL; + if (kretprobe_handler_called != test_func_instance) { + pr_err("FAIL: kretprobe handler not called\n"); + return -EINVAL; + } + if (!call_test_func(func, false)) + return -EINVAL; + if (jprobe_func_called == test_func_instance) { + pr_err("FAIL: kretprobe called after unregistering\n"); + return -EINVAL; + } + + return 0; +} + +static int run_api_tests(long (*func)(long, long)) +{ + int ret; + + pr_info(" kprobe\n"); + ret = test_kprobe(func); + if (ret < 0) + return ret; + + pr_info(" jprobe\n"); + ret = test_jprobe(func); +#if defined(CONFIG_THUMB2_KERNEL) && !defined(MODULE) + if (ret == -EINVAL) { + pr_err("FAIL: Known longtime bug with jprobe on Thumb kernels\n"); + tests_failed = ret; + ret = 0; + } +#endif + if (ret < 0) + return ret; + + pr_info(" kretprobe\n"); + ret = test_kretprobe(func); + if (ret < 0) + return ret; + + return 0; +} + + +/* + * Benchmarking + */ + +#if BENCHMARKING + +static void __naked benchmark_nop(void) +{ + __asm__ __volatile__ ( + "nop \n\t" + "bx lr" + ); +} + +#ifdef CONFIG_THUMB2_KERNEL +#define wide ".w" +#else +#define wide +#endif + +static void __naked benchmark_pushpop1(void) +{ + __asm__ __volatile__ ( + "stmdb"wide" sp!, {r3-r11,lr} \n\t" + "ldmia"wide" sp!, {r3-r11,pc}" + ); +} + +static void __naked benchmark_pushpop2(void) +{ + __asm__ __volatile__ ( + "stmdb"wide" sp!, {r0-r8,lr} \n\t" + "ldmia"wide" sp!, {r0-r8,pc}" + ); +} + +static void __naked benchmark_pushpop3(void) +{ + __asm__ __volatile__ ( + "stmdb"wide" sp!, {r4,lr} \n\t" + "ldmia"wide" sp!, {r4,pc}" + ); +} + +static void __naked benchmark_pushpop4(void) +{ + __asm__ __volatile__ ( + "stmdb"wide" sp!, {r0,lr} \n\t" + "ldmia"wide" sp!, {r0,pc}" + ); +} + + +#ifdef CONFIG_THUMB2_KERNEL + +static void __naked benchmark_pushpop_thumb(void) +{ + __asm__ __volatile__ ( + "push.n {r0-r7,lr} \n\t" + 
"pop.n {r0-r7,pc}" + ); +} + +#endif + +static int __kprobes +benchmark_pre_handler(struct kprobe *p, struct pt_regs *regs) +{ + return 0; +} + +static int benchmark(void(*fn)(void)) +{ + unsigned n, i, t, t0; + + for (n = 1000; ; n *= 2) { + t0 = sched_clock(); + for (i = n; i > 0; --i) + fn(); + t = sched_clock() - t0; + if (t >= 250000000) + break; /* Stop once we took more than 0.25 seconds */ + } + return t / n; /* Time for one iteration in nanoseconds */ +}; + +static int kprobe_benchmark(void(*fn)(void), unsigned offset) +{ + struct kprobe k = { + .addr = (kprobe_opcode_t *)((uintptr_t)fn + offset), + .pre_handler = benchmark_pre_handler, + }; + + int ret = register_kprobe(&k); + if (ret < 0) { + pr_err("FAIL: register_kprobe failed with %d\n", ret); + return ret; + } + + ret = benchmark(fn); + + unregister_kprobe(&k); + return ret; +}; + +struct benchmarks { + void (*fn)(void); + unsigned offset; + const char *title; +}; + +static int run_benchmarks(void) +{ + int ret; + struct benchmarks list[] = { + {&benchmark_nop, 0, "nop"}, + /* + * benchmark_pushpop{1,3} will have the optimised + * instruction emulation, whilst benchmark_pushpop{2,4} will + * be the equivalent unoptimised instructions. + */ + {&benchmark_pushpop1, 0, "stmdb sp!, {r3-r11,lr}"}, + {&benchmark_pushpop1, 4, "ldmia sp!, {r3-r11,pc}"}, + {&benchmark_pushpop2, 0, "stmdb sp!, {r0-r8,lr}"}, + {&benchmark_pushpop2, 4, "ldmia sp!, {r0-r8,pc}"}, + {&benchmark_pushpop3, 0, "stmdb sp!, {r4,lr}"}, + {&benchmark_pushpop3, 4, "ldmia sp!, {r4,pc}"}, + {&benchmark_pushpop4, 0, "stmdb sp!, {r0,lr}"}, + {&benchmark_pushpop4, 4, "ldmia sp!, {r0,pc}"}, +#ifdef CONFIG_THUMB2_KERNEL + {&benchmark_pushpop_thumb, 0, "push.n {r0-r7,lr}"}, + {&benchmark_pushpop_thumb, 2, "pop.n {r0-r7,pc}"}, +#endif + {0} + }; + + struct benchmarks *b; + for (b = list; b->fn; ++b) { + ret = kprobe_benchmark(b->fn, b->offset); + if (ret < 0) + return ret; + pr_info(" %dns for kprobe %s\n", ret, b->title); + } + + pr_info("\n"); + return 0; +} + +#endif /* BENCHMARKING */ + + +/* + * Decoding table self-consistency tests + */ + +static const int decode_struct_sizes[NUM_DECODE_TYPES] = { + [DECODE_TYPE_TABLE] = sizeof(struct decode_table), + [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom), + [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate), + [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate), + [DECODE_TYPE_OR] = sizeof(struct decode_or), + [DECODE_TYPE_REJECT] = sizeof(struct decode_reject) +}; + +static int table_iter(const union decode_item *table, + int (*fn)(const struct decode_header *, void *), + void *args) +{ + const struct decode_header *h = (struct decode_header *)table; + int result; + + for (;;) { + enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; + + if (type == DECODE_TYPE_END) + return 0; + + result = fn(h, args); + if (result) + return result; + + h = (struct decode_header *) + ((uintptr_t)h + decode_struct_sizes[type]); + + } +} + +static int table_test_fail(const struct decode_header *h, const char* message) +{ + + pr_err("FAIL: kprobes test failure \"%s\" (mask %08x, value %08x)\n", + message, h->mask.bits, h->value.bits); + return -EINVAL; +} + +struct table_test_args { + const union decode_item *root_table; + u32 parent_mask; + u32 parent_value; +}; + +static int table_test_fn(const struct decode_header *h, void *args) +{ + struct table_test_args *a = (struct table_test_args *)args; + enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; + + if (h->value.bits & ~h->mask.bits) + return 
table_test_fail(h, "Match value has bits not in mask"); + + if ((h->mask.bits & a->parent_mask) != a->parent_mask) + return table_test_fail(h, "Mask has bits not in parent mask"); + + if ((h->value.bits ^ a->parent_value) & a->parent_mask) + return table_test_fail(h, "Value is inconsistent with parent"); + + if (type == DECODE_TYPE_TABLE) { + struct decode_table *d = (struct decode_table *)h; + struct table_test_args args2 = *a; + args2.parent_mask = h->mask.bits; + args2.parent_value = h->value.bits; + return table_iter(d->table.table, table_test_fn, &args2); + } + + return 0; +} + +static int table_test(const union decode_item *table) +{ + struct table_test_args args = { + .root_table = table, + .parent_mask = 0, + .parent_value = 0 + }; + return table_iter(args.root_table, table_test_fn, &args); +} + + +/* + * Decoding table test coverage analysis + * + * coverage_start() builds a coverage_table which contains a list of + * coverage_entry's to match each entry in the specified kprobes instruction + * decoding table. + * + * When test cases are run, coverage_add() is called to process each case. + * This looks up the corresponding entry in the coverage_table and sets it as + * being matched, as well as clearing the regs flag appropriate for the test. + * + * After all test cases have been run, coverage_end() is called to check that + * all entries in coverage_table have been matched and that all regs flags are + * cleared. I.e. that all possible combinations of instructions described by + * the kprobes decoding tables have had a test case executed for them. + */ + +bool coverage_fail; + +#define MAX_COVERAGE_ENTRIES 256 + +struct coverage_entry { + const struct decode_header *header; + unsigned regs; + unsigned nesting; + char matched; +}; + +struct coverage_table { + struct coverage_entry *base; + unsigned num_entries; + unsigned nesting; +}; + +struct coverage_table coverage; + +#define COVERAGE_ANY_REG (1<<0) +#define COVERAGE_SP (1<<1) +#define COVERAGE_PC (1<<2) +#define COVERAGE_PCWB (1<<3) + +static const char coverage_register_lookup[16] = { + [REG_TYPE_ANY] = COVERAGE_ANY_REG | COVERAGE_SP | COVERAGE_PC, + [REG_TYPE_SAMEAS16] = COVERAGE_ANY_REG, + [REG_TYPE_SP] = COVERAGE_SP, + [REG_TYPE_PC] = COVERAGE_PC, + [REG_TYPE_NOSP] = COVERAGE_ANY_REG | COVERAGE_SP, + [REG_TYPE_NOSPPC] = COVERAGE_ANY_REG | COVERAGE_SP | COVERAGE_PC, + [REG_TYPE_NOPC] = COVERAGE_ANY_REG | COVERAGE_PC, + [REG_TYPE_NOPCWB] = COVERAGE_ANY_REG | COVERAGE_PC | COVERAGE_PCWB, + [REG_TYPE_NOPCX] = COVERAGE_ANY_REG, + [REG_TYPE_NOSPPCX] = COVERAGE_ANY_REG | COVERAGE_SP, +}; + +unsigned coverage_start_registers(const struct decode_header *h) +{ + unsigned regs = 0; + int i; + for (i = 0; i < 20; i += 4) { + int r = (h->type_regs.bits >> (DECODE_TYPE_BITS + i)) & 0xf; + regs |= coverage_register_lookup[r] << i; + } + return regs; +} + +static int coverage_start_fn(const struct decode_header *h, void *args) +{ + struct coverage_table *coverage = (struct coverage_table *)args; + enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; + struct coverage_entry *entry = coverage->base + coverage->num_entries; + + if (coverage->num_entries == MAX_COVERAGE_ENTRIES - 1) { + pr_err("FAIL: Out of space for test coverage data"); + return -ENOMEM; + } + + ++coverage->num_entries; + + entry->header = h; + entry->regs = coverage_start_registers(h); + entry->nesting = coverage->nesting; + entry->matched = false; + + if (type == DECODE_TYPE_TABLE) { + struct decode_table *d = (struct decode_table *)h; + int ret; + 
++coverage->nesting; + ret = table_iter(d->table.table, coverage_start_fn, coverage); + --coverage->nesting; + return ret; + } + + return 0; +} + +static int coverage_start(const union decode_item *table) +{ + coverage.base = kmalloc(MAX_COVERAGE_ENTRIES * + sizeof(struct coverage_entry), GFP_KERNEL); + coverage.num_entries = 0; + coverage.nesting = 0; + return table_iter(table, coverage_start_fn, &coverage); +} + +static void +coverage_add_registers(struct coverage_entry *entry, kprobe_opcode_t insn) +{ + int regs = entry->header->type_regs.bits >> DECODE_TYPE_BITS; + int i; + for (i = 0; i < 20; i += 4) { + enum decode_reg_type reg_type = (regs >> i) & 0xf; + int reg = (insn >> i) & 0xf; + int flag; + + if (!reg_type) + continue; + + if (reg == 13) + flag = COVERAGE_SP; + else if (reg == 15) + flag = COVERAGE_PC; + else + flag = COVERAGE_ANY_REG; + entry->regs &= ~(flag << i); + + switch (reg_type) { + + case REG_TYPE_NONE: + case REG_TYPE_ANY: + case REG_TYPE_SAMEAS16: + break; + + case REG_TYPE_SP: + if (reg != 13) + return; + break; + + case REG_TYPE_PC: + if (reg != 15) + return; + break; + + case REG_TYPE_NOSP: + if (reg == 13) + return; + break; + + case REG_TYPE_NOSPPC: + case REG_TYPE_NOSPPCX: + if (reg == 13 || reg == 15) + return; + break; + + case REG_TYPE_NOPCWB: + if (!is_writeback(insn)) + break; + if (reg == 15) { + entry->regs &= ~(COVERAGE_PCWB << i); + return; + } + break; + + case REG_TYPE_NOPC: + case REG_TYPE_NOPCX: + if (reg == 15) + return; + break; + } + + } +} + +static void coverage_add(kprobe_opcode_t insn) +{ + struct coverage_entry *entry = coverage.base; + struct coverage_entry *end = coverage.base + coverage.num_entries; + bool matched = false; + unsigned nesting = 0; + + for (; entry < end; ++entry) { + const struct decode_header *h = entry->header; + enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK; + + if (entry->nesting > nesting) + continue; /* Skip sub-table we didn't match */ + + if (entry->nesting < nesting) + break; /* End of sub-table we were scanning */ + + if (!matched) { + if ((insn & h->mask.bits) != h->value.bits) + continue; + entry->matched = true; + } + + switch (type) { + + case DECODE_TYPE_TABLE: + ++nesting; + break; + + case DECODE_TYPE_CUSTOM: + case DECODE_TYPE_SIMULATE: + case DECODE_TYPE_EMULATE: + coverage_add_registers(entry, insn); + return; + + case DECODE_TYPE_OR: + matched = true; + break; + + case DECODE_TYPE_REJECT: + default: + return; + } + + } +} + +static void coverage_end(void) +{ + struct coverage_entry *entry = coverage.base; + struct coverage_entry *end = coverage.base + coverage.num_entries; + + for (; entry < end; ++entry) { + u32 mask = entry->header->mask.bits; + u32 value = entry->header->value.bits; + + if (entry->regs) { + pr_err("FAIL: Register test coverage missing for %08x %08x (%05x)\n", + mask, value, entry->regs); + coverage_fail = true; + } + if (!entry->matched) { + pr_err("FAIL: Test coverage entry missing for %08x %08x\n", + mask, value); + coverage_fail = true; + } + } + + kfree(coverage.base); +} + + +/* + * Framework for instruction set test cases + */ + +void __naked __kprobes_test_case_start(void) +{ + __asm__ __volatile__ ( + "stmdb sp!, {r4-r11} \n\t" + "sub sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t" + "bic r0, lr, #1 @ r0 = inline data \n\t" + "mov r1, sp \n\t" + "bl kprobes_test_case_start \n\t" + "bx r0 \n\t" + ); +} + +#ifndef CONFIG_THUMB2_KERNEL + +void __naked __kprobes_test_case_end_32(void) +{ + __asm__ __volatile__ ( + "mov r4, lr \n\t" + "bl kprobes_test_case_end 
\n\t" + "cmp r0, #0 \n\t" + "movne pc, r0 \n\t" + "mov r0, r4 \n\t" + "add sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t" + "ldmia sp!, {r4-r11} \n\t" + "mov pc, r0 \n\t" + ); +} + +#else /* CONFIG_THUMB2_KERNEL */ + +void __naked __kprobes_test_case_end_16(void) +{ + __asm__ __volatile__ ( + "mov r4, lr \n\t" + "bl kprobes_test_case_end \n\t" + "cmp r0, #0 \n\t" + "bxne r0 \n\t" + "mov r0, r4 \n\t" + "add sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t" + "ldmia sp!, {r4-r11} \n\t" + "bx r0 \n\t" + ); +} + +void __naked __kprobes_test_case_end_32(void) +{ + __asm__ __volatile__ ( + ".arm \n\t" + "orr lr, lr, #1 @ will return to Thumb code \n\t" + "ldr pc, 1f \n\t" + "1: \n\t" + ".word __kprobes_test_case_end_16 \n\t" + ); +} + +#endif + + +int kprobe_test_flags; +int kprobe_test_cc_position; + +static int test_try_count; +static int test_pass_count; +static int test_fail_count; + +static struct pt_regs initial_regs; +static struct pt_regs expected_regs; +static struct pt_regs result_regs; + +static u32 expected_memory[TEST_MEMORY_SIZE/sizeof(u32)]; + +static const char *current_title; +static struct test_arg *current_args; +static u32 *current_stack; +static uintptr_t current_branch_target; + +static uintptr_t current_code_start; +static kprobe_opcode_t current_instruction; + + +#define TEST_CASE_PASSED -1 +#define TEST_CASE_FAILED -2 + +static int test_case_run_count; +static bool test_case_is_thumb; +static int test_instance; + +/* + * We ignore the state of the imprecise abort disable flag (CPSR.A) because this + * can change randomly as the kernel doesn't take care to preserve or initialise + * this across context switches. Also, with Security Extentions, the flag may + * not be under control of the kernel; for this reason we ignore the state of + * the FIQ disable flag CPSR.F as well. 
+ */ +#define PSR_IGNORE_BITS (PSR_A_BIT | PSR_F_BIT) + +static unsigned long test_check_cc(int cc, unsigned long cpsr) +{ + int ret = arm_check_condition(cc << 28, cpsr); + + return (ret != ARM_OPCODE_CONDTEST_FAIL); +} + +static int is_last_scenario; +static int probe_should_run; /* 0 = no, 1 = yes, -1 = unknown */ +static int memory_needs_checking; + +static unsigned long test_context_cpsr(int scenario) +{ + unsigned long cpsr; + + probe_should_run = 1; + + /* Default case is that we cycle through 16 combinations of flags */ + cpsr = (scenario & 0xf) << 28; /* N,Z,C,V flags */ + cpsr |= (scenario & 0xf) << 16; /* GE flags */ + cpsr |= (scenario & 0x1) << 27; /* Toggle Q flag */ + + if (!test_case_is_thumb) { + /* Testing ARM code */ + int cc = current_instruction >> 28; + + probe_should_run = test_check_cc(cc, cpsr) != 0; + if (scenario == 15) + is_last_scenario = true; + + } else if (kprobe_test_flags & TEST_FLAG_NO_ITBLOCK) { + /* Testing Thumb code without setting ITSTATE */ + if (kprobe_test_cc_position) { + int cc = (current_instruction >> kprobe_test_cc_position) & 0xf; + probe_should_run = test_check_cc(cc, cpsr) != 0; + } + + if (scenario == 15) + is_last_scenario = true; + + } else if (kprobe_test_flags & TEST_FLAG_FULL_ITBLOCK) { + /* Testing Thumb code with all combinations of ITSTATE */ + unsigned x = (scenario >> 4); + unsigned cond_base = x % 7; /* ITSTATE<7:5> */ + unsigned mask = x / 7 + 2; /* ITSTATE<4:0>, bits reversed */ + + if (mask > 0x1f) { + /* Finish by testing state from instruction 'itt al' */ + cond_base = 7; + mask = 0x4; + if ((scenario & 0xf) == 0xf) + is_last_scenario = true; + } + + cpsr |= cond_base << 13; /* ITSTATE<7:5> */ + cpsr |= (mask & 0x1) << 12; /* ITSTATE<4> */ + cpsr |= (mask & 0x2) << 10; /* ITSTATE<3> */ + cpsr |= (mask & 0x4) << 8; /* ITSTATE<2> */ + cpsr |= (mask & 0x8) << 23; /* ITSTATE<1> */ + cpsr |= (mask & 0x10) << 21; /* ITSTATE<0> */ + + probe_should_run = test_check_cc((cpsr >> 12) & 0xf, cpsr) != 0; + + } else { + /* Testing Thumb code with several combinations of ITSTATE */ + switch (scenario) { + case 16: /* Clear NZCV flags and 'it eq' state (false as Z=0) */ + cpsr = 0x00000800; + probe_should_run = 0; + break; + case 17: /* Set NZCV flags and 'it vc' state (false as V=1) */ + cpsr = 0xf0007800; + probe_should_run = 0; + break; + case 18: /* Clear NZCV flags and 'it ls' state (true as C=0) */ + cpsr = 0x00009800; + break; + case 19: /* Set NZCV flags and 'it cs' state (true as C=1) */ + cpsr = 0xf0002800; + is_last_scenario = true; + break; + } + } + + return cpsr; +} + +static void setup_test_context(struct pt_regs *regs) +{ + int scenario = test_case_run_count>>1; + unsigned long val; + struct test_arg *args; + int i; + + is_last_scenario = false; + memory_needs_checking = false; + + /* Initialise test memory on stack */ + val = (scenario & 1) ? VALM : ~VALM; + for (i = 0; i < TEST_MEMORY_SIZE / sizeof(current_stack[0]); ++i) + current_stack[i] = val + (i << 8); + /* Put target of branch on stack for tests which load PC from memory */ + if (current_branch_target) + current_stack[15] = current_branch_target; + /* Put a value for SP on stack for tests which load SP from memory */ + current_stack[13] = (u32)current_stack + 120; + + /* Initialise register values to their default state */ + val = (scenario & 2) ? 
VALR : ~VALR; + for (i = 0; i < 13; ++i) + regs->uregs[i] = val ^ (i << 8); + regs->ARM_lr = val ^ (14 << 8); + regs->ARM_cpsr &= ~(APSR_MASK | PSR_IT_MASK); + regs->ARM_cpsr |= test_context_cpsr(scenario); + + /* Perform testcase specific register setup */ + args = current_args; + for (; args[0].type != ARG_TYPE_END; ++args) + switch (args[0].type) { + case ARG_TYPE_REG: { + struct test_arg_regptr *arg = + (struct test_arg_regptr *)args; + regs->uregs[arg->reg] = arg->val; + break; + } + case ARG_TYPE_PTR: { + struct test_arg_regptr *arg = + (struct test_arg_regptr *)args; + regs->uregs[arg->reg] = + (unsigned long)current_stack + arg->val; + memory_needs_checking = true; + break; + } + case ARG_TYPE_MEM: { + struct test_arg_mem *arg = (struct test_arg_mem *)args; + current_stack[arg->index] = arg->val; + break; + } + default: + break; + } +} + +struct test_probe { + struct kprobe kprobe; + bool registered; + int hit; +}; + +static void unregister_test_probe(struct test_probe *probe) +{ + if (probe->registered) { + unregister_kprobe(&probe->kprobe); + probe->kprobe.flags = 0; /* Clear disable flag to allow reuse */ + } + probe->registered = false; +} + +static int register_test_probe(struct test_probe *probe) +{ + int ret; + + if (probe->registered) + BUG(); + + ret = register_kprobe(&probe->kprobe); + if (ret >= 0) { + probe->registered = true; + probe->hit = -1; + } + return ret; +} + +static int __kprobes +test_before_pre_handler(struct kprobe *p, struct pt_regs *regs) +{ + container_of(p, struct test_probe, kprobe)->hit = test_instance; + return 0; +} + +static void __kprobes +test_before_post_handler(struct kprobe *p, struct pt_regs *regs, + unsigned long flags) +{ + setup_test_context(regs); + initial_regs = *regs; + initial_regs.ARM_cpsr &= ~PSR_IGNORE_BITS; +} + +static int __kprobes +test_case_pre_handler(struct kprobe *p, struct pt_regs *regs) +{ + container_of(p, struct test_probe, kprobe)->hit = test_instance; + return 0; +} + +static int __kprobes +test_after_pre_handler(struct kprobe *p, struct pt_regs *regs) +{ + if (container_of(p, struct test_probe, kprobe)->hit == test_instance) + return 0; /* Already run for this test instance */ + + result_regs = *regs; + result_regs.ARM_cpsr &= ~PSR_IGNORE_BITS; + + /* Undo any changes done to SP by the test case */ + regs->ARM_sp = (unsigned long)current_stack; + + container_of(p, struct test_probe, kprobe)->hit = test_instance; + return 0; +} + +static struct test_probe test_before_probe = { + .kprobe.pre_handler = test_before_pre_handler, + .kprobe.post_handler = test_before_post_handler, +}; + +static struct test_probe test_case_probe = { + .kprobe.pre_handler = test_case_pre_handler, +}; + +static struct test_probe test_after_probe = { + .kprobe.pre_handler = test_after_pre_handler, +}; + +static struct test_probe test_after2_probe = { + .kprobe.pre_handler = test_after_pre_handler, +}; + +static void test_case_cleanup(void) +{ + unregister_test_probe(&test_before_probe); + unregister_test_probe(&test_case_probe); + unregister_test_probe(&test_after_probe); + unregister_test_probe(&test_after2_probe); +} + +static void print_registers(struct pt_regs *regs) +{ + pr_err("r0 %08lx | r1 %08lx | r2 %08lx | r3 %08lx\n", + regs->ARM_r0, regs->ARM_r1, regs->ARM_r2, regs->ARM_r3); + pr_err("r4 %08lx | r5 %08lx | r6 %08lx | r7 %08lx\n", + regs->ARM_r4, regs->ARM_r5, regs->ARM_r6, regs->ARM_r7); + pr_err("r8 %08lx | r9 %08lx | r10 %08lx | r11 %08lx\n", + regs->ARM_r8, regs->ARM_r9, regs->ARM_r10, regs->ARM_fp); + pr_err("r12 %08lx | sp 
%08lx | lr %08lx | pc %08lx\n", + regs->ARM_ip, regs->ARM_sp, regs->ARM_lr, regs->ARM_pc); + pr_err("cpsr %08lx\n", regs->ARM_cpsr); +} + +static void print_memory(u32 *mem, size_t size) +{ + int i; + for (i = 0; i < size / sizeof(u32); i += 4) + pr_err("%08x %08x %08x %08x\n", mem[i], mem[i+1], + mem[i+2], mem[i+3]); +} + +static size_t expected_memory_size(u32 *sp) +{ + size_t size = sizeof(expected_memory); + int offset = (uintptr_t)sp - (uintptr_t)current_stack; + if (offset > 0) + size -= offset; + return size; +} + +static void test_case_failed(const char *message) +{ + test_case_cleanup(); + + pr_err("FAIL: %s\n", message); + pr_err("FAIL: Test %s\n", current_title); + pr_err("FAIL: Scenario %d\n", test_case_run_count >> 1); +} + +static unsigned long next_instruction(unsigned long pc) +{ +#ifdef CONFIG_THUMB2_KERNEL + if ((pc & 1) && + !is_wide_instruction(__mem_to_opcode_thumb16(*(u16 *)(pc - 1)))) + return pc + 2; + else +#endif + return pc + 4; +} + +static uintptr_t __used kprobes_test_case_start(const char **title, void *stack) +{ + struct test_arg *args; + struct test_arg_end *end_arg; + unsigned long test_code; + + current_title = *title++; + args = (struct test_arg *)title; + current_args = args; + current_stack = stack; + + ++test_try_count; + + while (args->type != ARG_TYPE_END) + ++args; + end_arg = (struct test_arg_end *)args; + + test_code = (unsigned long)(args + 1); /* Code starts after args */ + + test_case_is_thumb = end_arg->flags & ARG_FLAG_THUMB; + if (test_case_is_thumb) + test_code |= 1; + + current_code_start = test_code; + + current_branch_target = 0; + if (end_arg->branch_offset != end_arg->end_offset) + current_branch_target = test_code + end_arg->branch_offset; + + test_code += end_arg->code_offset; + test_before_probe.kprobe.addr = (kprobe_opcode_t *)test_code; + + test_code = next_instruction(test_code); + test_case_probe.kprobe.addr = (kprobe_opcode_t *)test_code; + + if (test_case_is_thumb) { + u16 *p = (u16 *)(test_code & ~1); + current_instruction = __mem_to_opcode_thumb16(p[0]); + if (is_wide_instruction(current_instruction)) { + u16 instr2 = __mem_to_opcode_thumb16(p[1]); + current_instruction = __opcode_thumb32_compose(current_instruction, instr2); + } + } else { + current_instruction = __mem_to_opcode_arm(*(u32 *)test_code); + } + + if (current_title[0] == '.') + verbose("%s\n", current_title); + else + verbose("%s\t@ %0*x\n", current_title, + test_case_is_thumb ? 
4 : 8, + current_instruction); + + test_code = next_instruction(test_code); + test_after_probe.kprobe.addr = (kprobe_opcode_t *)test_code; + + if (kprobe_test_flags & TEST_FLAG_NARROW_INSTR) { + if (!test_case_is_thumb || + is_wide_instruction(current_instruction)) { + test_case_failed("expected 16-bit instruction"); + goto fail; + } + } else { + if (test_case_is_thumb && + !is_wide_instruction(current_instruction)) { + test_case_failed("expected 32-bit instruction"); + goto fail; + } + } + + coverage_add(current_instruction); + + if (end_arg->flags & ARG_FLAG_UNSUPPORTED) { + if (register_test_probe(&test_case_probe) < 0) + goto pass; + test_case_failed("registered probe for unsupported instruction"); + goto fail; + } + + if (end_arg->flags & ARG_FLAG_SUPPORTED) { + if (register_test_probe(&test_case_probe) >= 0) + goto pass; + test_case_failed("couldn't register probe for supported instruction"); + goto fail; + } + + if (register_test_probe(&test_before_probe) < 0) { + test_case_failed("register test_before_probe failed"); + goto fail; + } + if (register_test_probe(&test_after_probe) < 0) { + test_case_failed("register test_after_probe failed"); + goto fail; + } + if (current_branch_target) { + test_after2_probe.kprobe.addr = + (kprobe_opcode_t *)current_branch_target; + if (register_test_probe(&test_after2_probe) < 0) { + test_case_failed("register test_after2_probe failed"); + goto fail; + } + } + + /* Start first run of test case */ + test_case_run_count = 0; + ++test_instance; + return current_code_start; +pass: + test_case_run_count = TEST_CASE_PASSED; + return (uintptr_t)test_after_probe.kprobe.addr; +fail: + test_case_run_count = TEST_CASE_FAILED; + return (uintptr_t)test_after_probe.kprobe.addr; +} + +static bool check_test_results(void) +{ + size_t mem_size = 0; + u32 *mem = 0; + + if (memcmp(&expected_regs, &result_regs, sizeof(expected_regs))) { + test_case_failed("registers differ"); + goto fail; + } + + if (memory_needs_checking) { + mem = (u32 *)result_regs.ARM_sp; + mem_size = expected_memory_size(mem); + if (memcmp(expected_memory, mem, mem_size)) { + test_case_failed("test memory differs"); + goto fail; + } + } + + return true; + +fail: + pr_err("initial_regs:\n"); + print_registers(&initial_regs); + pr_err("expected_regs:\n"); + print_registers(&expected_regs); + pr_err("result_regs:\n"); + print_registers(&result_regs); + + if (mem) { + pr_err("current_stack=%p\n", current_stack); + pr_err("expected_memory:\n"); + print_memory(expected_memory, mem_size); + pr_err("result_memory:\n"); + print_memory(mem, mem_size); + } + + return false; +} + +static uintptr_t __used kprobes_test_case_end(void) +{ + if (test_case_run_count < 0) { + if (test_case_run_count == TEST_CASE_PASSED) + /* kprobes_test_case_start did all the needed testing */ + goto pass; + else + /* kprobes_test_case_start failed */ + goto fail; + } + + if (test_before_probe.hit != test_instance) { + test_case_failed("test_before_handler not run"); + goto fail; + } + + if (test_after_probe.hit != test_instance && + test_after2_probe.hit != test_instance) { + test_case_failed("test_after_handler not run"); + goto fail; + } + + /* + * Even numbered test runs ran without a probe on the test case so + * we can gather reference results. The subsequent odd numbered run + * will have the probe inserted. 
+ */ + if ((test_case_run_count & 1) == 0) { + /* Save results from run without probe */ + u32 *mem = (u32 *)result_regs.ARM_sp; + expected_regs = result_regs; + memcpy(expected_memory, mem, expected_memory_size(mem)); + + /* Insert probe onto test case instruction */ + if (register_test_probe(&test_case_probe) < 0) { + test_case_failed("register test_case_probe failed"); + goto fail; + } + } else { + /* Check probe ran as expected */ + if (probe_should_run == 1) { + if (test_case_probe.hit != test_instance) { + test_case_failed("test_case_handler not run"); + goto fail; + } + } else if (probe_should_run == 0) { + if (test_case_probe.hit == test_instance) { + test_case_failed("test_case_handler ran"); + goto fail; + } + } + + /* Remove probe for any subsequent reference run */ + unregister_test_probe(&test_case_probe); + + if (!check_test_results()) + goto fail; + + if (is_last_scenario) + goto pass; + } + + /* Do next test run */ + ++test_case_run_count; + ++test_instance; + return current_code_start; +fail: + ++test_fail_count; + goto end; +pass: + ++test_pass_count; +end: + test_case_cleanup(); + return 0; +} + + +/* + * Top level test functions + */ + +static int run_test_cases(void (*tests)(void), const union decode_item *table) +{ + int ret; + + pr_info(" Check decoding tables\n"); + ret = table_test(table); + if (ret) + return ret; + + pr_info(" Run test cases\n"); + ret = coverage_start(table); + if (ret) + return ret; + + tests(); + + coverage_end(); + return 0; +} + + +static int __init run_all_tests(void) +{ + int ret = 0; + + pr_info("Beginning kprobe tests...\n"); + +#ifndef CONFIG_THUMB2_KERNEL + + pr_info("Probe ARM code\n"); + ret = run_api_tests(arm_func); + if (ret) + goto out; + + pr_info("ARM instruction simulation\n"); + ret = run_test_cases(kprobe_arm_test_cases, probes_decode_arm_table); + if (ret) + goto out; + +#else /* CONFIG_THUMB2_KERNEL */ + + pr_info("Probe 16-bit Thumb code\n"); + ret = run_api_tests(thumb16_func); + if (ret) + goto out; + + pr_info("Probe 32-bit Thumb code, even halfword\n"); + ret = run_api_tests(thumb32even_func); + if (ret) + goto out; + + pr_info("Probe 32-bit Thumb code, odd halfword\n"); + ret = run_api_tests(thumb32odd_func); + if (ret) + goto out; + + pr_info("16-bit Thumb instruction simulation\n"); + ret = run_test_cases(kprobe_thumb16_test_cases, + probes_decode_thumb16_table); + if (ret) + goto out; + + pr_info("32-bit Thumb instruction simulation\n"); + ret = run_test_cases(kprobe_thumb32_test_cases, + probes_decode_thumb32_table); + if (ret) + goto out; +#endif + + pr_info("Total instruction simulation tests=%d, pass=%d fail=%d\n", + test_try_count, test_pass_count, test_fail_count); + if (test_fail_count) { + ret = -EINVAL; + goto out; + } + +#if BENCHMARKING + pr_info("Benchmarks\n"); + ret = run_benchmarks(); + if (ret) + goto out; +#endif + +#if __LINUX_ARM_ARCH__ >= 7 + /* We are able to run all test cases so coverage should be complete */ + if (coverage_fail) { + pr_err("FAIL: Test coverage checks failed\n"); + ret = -EINVAL; + goto out; + } +#endif + +out: + if (ret == 0) + ret = tests_failed; + if (ret == 0) + pr_info("Finished kprobe tests OK\n"); + else + pr_err("kprobe tests failed\n"); + + return ret; +} + + +/* + * Module setup + */ + +#ifdef MODULE + +static void __exit kprobe_test_exit(void) +{ +} + +module_init(run_all_tests) +module_exit(kprobe_test_exit) +MODULE_LICENSE("GPL"); + +#else /* !MODULE */ + +late_initcall(run_all_tests); + +#endif diff --git a/arch/arm/probes/kprobes/test-core.h 
b/arch/arm/probes/kprobes/test-core.h new file mode 100644 index 000000000000..9991754947bc --- /dev/null +++ b/arch/arm/probes/kprobes/test-core.h @@ -0,0 +1,435 @@ +/* + * arch/arm/probes/kprobes/test-core.h + * + * Copyright (C) 2011 Jon Medhurst . + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#define VERBOSE 0 /* Set to '1' for more logging of test cases */ + +#ifdef CONFIG_THUMB2_KERNEL +#define NORMAL_ISA "16" +#else +#define NORMAL_ISA "32" +#endif + + +/* Flags used in kprobe_test_flags */ +#define TEST_FLAG_NO_ITBLOCK (1<<0) +#define TEST_FLAG_FULL_ITBLOCK (1<<1) +#define TEST_FLAG_NARROW_INSTR (1<<2) + +extern int kprobe_test_flags; +extern int kprobe_test_cc_position; + + +#define TEST_MEMORY_SIZE 256 + + +/* + * Test case structures. + * + * The arguments given to test cases can be one of three types. + * + * ARG_TYPE_REG + * Load a register with the given value. + * + * ARG_TYPE_PTR + * Load a register with a pointer into the stack buffer (SP + given value). + * + * ARG_TYPE_MEM + * Store the given value into the stack buffer at [SP+index]. + * + */ + +#define ARG_TYPE_END 0 +#define ARG_TYPE_REG 1 +#define ARG_TYPE_PTR 2 +#define ARG_TYPE_MEM 3 + +#define ARG_FLAG_UNSUPPORTED 0x01 +#define ARG_FLAG_SUPPORTED 0x02 +#define ARG_FLAG_THUMB 0x10 /* Must be 16 so TEST_ISA can be used */ +#define ARG_FLAG_ARM 0x20 /* Must be 32 so TEST_ISA can be used */ + +struct test_arg { + u8 type; /* ARG_TYPE_x */ + u8 _padding[7]; +}; + +struct test_arg_regptr { + u8 type; /* ARG_TYPE_REG or ARG_TYPE_PTR */ + u8 reg; + u8 _padding[2]; + u32 val; +}; + +struct test_arg_mem { + u8 type; /* ARG_TYPE_MEM */ + u8 index; + u8 _padding[2]; + u32 val; +}; + +struct test_arg_end { + u8 type; /* ARG_TYPE_END */ + u8 flags; /* ARG_FLAG_x */ + u16 code_offset; + u16 branch_offset; + u16 end_offset; +}; + + +/* + * Building blocks for test cases. + * + * Each test case is wrapped between TESTCASE_START and TESTCASE_END. + * + * To specify arguments for a test case the TEST_ARG_{REG,PTR,MEM} macros are + * used followed by a terminating TEST_ARG_END. + * + * After this, the instruction to be tested is defined with TEST_INSTRUCTION. + * Or for branches, TEST_BRANCH_B and TEST_BRANCH_F (branch forwards/backwards). + * + * Some specific test cases may make use of other custom constructs. + */ + +#if VERBOSE +#define verbose(fmt, ...) pr_info(fmt, ##__VA_ARGS__) +#else +#define verbose(fmt, ...) +#endif + +#define TEST_GROUP(title) \ + verbose("\n"); \ + verbose(title"\n"); \ + verbose("---------------------------------------------------------\n"); + +#define TESTCASE_START(title) \ + __asm__ __volatile__ ( \ + "bl __kprobes_test_case_start \n\t" \ + ".pushsection .rodata \n\t" \ + "10: \n\t" \ + /* don't use .asciz here as 'title' may be */ \ + /* multiple strings to be concatenated. 
*/ \ + ".ascii "#title" \n\t" \ + ".byte 0 \n\t" \ + ".popsection \n\t" \ + ".word 10b \n\t" + +#define TEST_ARG_REG(reg, val) \ + ".byte "__stringify(ARG_TYPE_REG)" \n\t" \ + ".byte "#reg" \n\t" \ + ".short 0 \n\t" \ + ".word "#val" \n\t" + +#define TEST_ARG_PTR(reg, val) \ + ".byte "__stringify(ARG_TYPE_PTR)" \n\t" \ + ".byte "#reg" \n\t" \ + ".short 0 \n\t" \ + ".word "#val" \n\t" + +#define TEST_ARG_MEM(index, val) \ + ".byte "__stringify(ARG_TYPE_MEM)" \n\t" \ + ".byte "#index" \n\t" \ + ".short 0 \n\t" \ + ".word "#val" \n\t" + +#define TEST_ARG_END(flags) \ + ".byte "__stringify(ARG_TYPE_END)" \n\t" \ + ".byte "TEST_ISA flags" \n\t" \ + ".short 50f-0f \n\t" \ + ".short 2f-0f \n\t" \ + ".short 99f-0f \n\t" \ + ".code "TEST_ISA" \n\t" \ + "0: \n\t" + +#define TEST_INSTRUCTION(instruction) \ + "50: nop \n\t" \ + "1: "instruction" \n\t" \ + " nop \n\t" + +#define TEST_BRANCH_F(instruction) \ + TEST_INSTRUCTION(instruction) \ + " b 99f \n\t" \ + "2: nop \n\t" + +#define TEST_BRANCH_B(instruction) \ + " b 50f \n\t" \ + " b 99f \n\t" \ + "2: nop \n\t" \ + " b 99f \n\t" \ + TEST_INSTRUCTION(instruction) + +#define TEST_BRANCH_FX(instruction, codex) \ + TEST_INSTRUCTION(instruction) \ + " b 99f \n\t" \ + codex" \n\t" \ + " b 99f \n\t" \ + "2: nop \n\t" + +#define TEST_BRANCH_BX(instruction, codex) \ + " b 50f \n\t" \ + " b 99f \n\t" \ + "2: nop \n\t" \ + " b 99f \n\t" \ + codex" \n\t" \ + TEST_INSTRUCTION(instruction) + +#define TESTCASE_END \ + "2: \n\t" \ + "99: \n\t" \ + " bl __kprobes_test_case_end_"TEST_ISA" \n\t" \ + ".code "NORMAL_ISA" \n\t" \ + : : \ + : "r0", "r1", "r2", "r3", "ip", "lr", "memory", "cc" \ + ); + + +/* + * Macros to define test cases. + * + * Those of the form TEST_{R,P,M}* can be used to define test cases + * which take combinations of the three basic types of arguments. E.g. + * + * TEST_R One register argument + * TEST_RR Two register arguments + * TEST_RPR A register, a pointer, then a register argument + * + * For testing instructions which may branch, there are macros TEST_BF_* + * and TEST_BB_* for branching forwards and backwards. + * + * TEST_SUPPORTED and TEST_UNSUPPORTED don't cause the code to be executed, + * the just verify that a kprobe is or is not allowed on the given instruction. 
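+ *
+ * For example, the test case (taken from the Thumb test cases below)
+ *
+ *	TEST_R( "lsls r7, r",0,VAL1,", #5")
+ *
+ * loads VAL1 into r0 (an ARG_TYPE_REG argument) and then exercises the
+ * instruction "lsls r7, r0, #5"; roughly speaking, it is executed once
+ * without a probe to record reference results and once with a kprobe
+ * inserted, and the register and stack contents of the two runs are
+ * then compared.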
+ */ + +#define TEST(code) \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code) \ + TESTCASE_END + +#define TEST_UNSUPPORTED(code) \ + TESTCASE_START(code) \ + TEST_ARG_END("|"__stringify(ARG_FLAG_UNSUPPORTED)) \ + TEST_INSTRUCTION(code) \ + TESTCASE_END + +#define TEST_SUPPORTED(code) \ + TESTCASE_START(code) \ + TEST_ARG_END("|"__stringify(ARG_FLAG_SUPPORTED)) \ + TEST_INSTRUCTION(code) \ + TESTCASE_END + +#define TEST_R(code1, reg, val, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG(reg, val) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg code2) \ + TESTCASE_END + +#define TEST_RR(code1, reg1, val1, code2, reg2, val2, code3) \ + TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \ + TESTCASE_END + +#define TEST_RRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ + TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_REG(reg3, val3) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TESTCASE_END + +#define TEST_RRRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4, reg4, val4) \ + TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4 #reg4) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_REG(reg3, val3) \ + TEST_ARG_REG(reg4, val4) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4 #reg4) \ + TESTCASE_END + +#define TEST_P(code1, reg1, val1, code2) \ + TESTCASE_START(code1 #reg1 code2) \ + TEST_ARG_PTR(reg1, val1) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2) \ + TESTCASE_END + +#define TEST_PR(code1, reg1, val1, code2, reg2, val2, code3) \ + TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ + TEST_ARG_PTR(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \ + TESTCASE_END + +#define TEST_RP(code1, reg1, val1, code2, reg2, val2, code3) \ + TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_PTR(reg2, val2) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \ + TESTCASE_END + +#define TEST_PRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ + TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TEST_ARG_PTR(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_REG(reg3, val3) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TESTCASE_END + +#define TEST_RPR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ + TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_PTR(reg2, val2) \ + TEST_ARG_REG(reg3, val3) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TESTCASE_END + +#define TEST_RRP(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\ + TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_PTR(reg3, val3) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \ + TESTCASE_END + +#define TEST_BF_P(code1, reg1, val1, code2) \ + TESTCASE_START(code1 #reg1 code2) \ + TEST_ARG_PTR(reg1, val1) \ + TEST_ARG_END("") \ + TEST_BRANCH_F(code1 #reg1 code2) \ + TESTCASE_END + +#define TEST_BF(code) \ + TESTCASE_START(code) \ + 
TEST_ARG_END("") \ + TEST_BRANCH_F(code) \ + TESTCASE_END + +#define TEST_BB(code) \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + TEST_BRANCH_B(code) \ + TESTCASE_END + +#define TEST_BF_R(code1, reg, val, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG(reg, val) \ + TEST_ARG_END("") \ + TEST_BRANCH_F(code1 #reg code2) \ + TESTCASE_END + +#define TEST_BB_R(code1, reg, val, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG(reg, val) \ + TEST_ARG_END("") \ + TEST_BRANCH_B(code1 #reg code2) \ + TESTCASE_END + +#define TEST_BF_RR(code1, reg1, val1, code2, reg2, val2, code3) \ + TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_END("") \ + TEST_BRANCH_F(code1 #reg1 code2 #reg2 code3) \ + TESTCASE_END + +#define TEST_BF_X(code, codex) \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + TEST_BRANCH_FX(code, codex) \ + TESTCASE_END + +#define TEST_BB_X(code, codex) \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + TEST_BRANCH_BX(code, codex) \ + TESTCASE_END + +#define TEST_BF_RX(code1, reg, val, code2, codex) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG(reg, val) \ + TEST_ARG_END("") \ + TEST_BRANCH_FX(code1 #reg code2, codex) \ + TESTCASE_END + +#define TEST_X(code, codex) \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code) \ + " b 99f \n\t" \ + " "codex" \n\t" \ + TESTCASE_END + +#define TEST_RX(code1, reg, val, code2, codex) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG(reg, val) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 __stringify(reg) code2) \ + " b 99f \n\t" \ + " "codex" \n\t" \ + TESTCASE_END + +#define TEST_RRX(code1, reg1, val1, code2, reg2, val2, code3, codex) \ + TESTCASE_START(code1 #reg1 code2 #reg2 code3) \ + TEST_ARG_REG(reg1, val1) \ + TEST_ARG_REG(reg2, val2) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 __stringify(reg1) code2 __stringify(reg2) code3) \ + " b 99f \n\t" \ + " "codex" \n\t" \ + TESTCASE_END + + +/* + * Macros for defining space directives spread over multiple lines. + * These are required so the compiler guesses better the length of inline asm + * code and will spill the literal pool early enough to avoid generating PC + * relative loads with out of range offsets. + */ +#define TWICE(x) x x +#define SPACE_0x8 TWICE(".space 4\n\t") +#define SPACE_0x10 TWICE(SPACE_0x8) +#define SPACE_0x20 TWICE(SPACE_0x10) +#define SPACE_0x40 TWICE(SPACE_0x20) +#define SPACE_0x80 TWICE(SPACE_0x40) +#define SPACE_0x100 TWICE(SPACE_0x80) +#define SPACE_0x200 TWICE(SPACE_0x100) +#define SPACE_0x400 TWICE(SPACE_0x200) +#define SPACE_0x800 TWICE(SPACE_0x400) +#define SPACE_0x1000 TWICE(SPACE_0x800) + + +/* Various values used in test cases... */ +#define N(val) (val ^ 0xffffffff) +#define VAL1 0x12345678 +#define VAL2 N(VAL1) +#define VAL3 0xa5f801 +#define VAL4 N(VAL3) +#define VALM 0x456789ab +#define VALR 0xdeaddead +#define HH1 0x0123fecb +#define HH2 0xa9874567 + + +#ifdef CONFIG_THUMB2_KERNEL +void kprobe_thumb16_test_cases(void); +void kprobe_thumb32_test_cases(void); +#else +void kprobe_arm_test_cases(void); +#endif diff --git a/arch/arm/probes/kprobes/test-thumb.c b/arch/arm/probes/kprobes/test-thumb.c new file mode 100644 index 000000000000..6c6e9a9bb675 --- /dev/null +++ b/arch/arm/probes/kprobes/test-thumb.c @@ -0,0 +1,1188 @@ +/* + * arch/arm/probes/kprobes/test-thumb.c + * + * Copyright (C) 2011 Jon Medhurst . 
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include + +#include "test-core.h" + + +#define TEST_ISA "16" + +#define DONT_TEST_IN_ITBLOCK(tests) \ + kprobe_test_flags |= TEST_FLAG_NO_ITBLOCK; \ + tests \ + kprobe_test_flags &= ~TEST_FLAG_NO_ITBLOCK; + +#define CONDITION_INSTRUCTIONS(cc_pos, tests) \ + kprobe_test_cc_position = cc_pos; \ + DONT_TEST_IN_ITBLOCK(tests) \ + kprobe_test_cc_position = 0; + +#define TEST_ITBLOCK(code) \ + kprobe_test_flags |= TEST_FLAG_FULL_ITBLOCK; \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + "50: nop \n\t" \ + "1: "code" \n\t" \ + " mov r1, #0x11 \n\t" \ + " mov r2, #0x22 \n\t" \ + " mov r3, #0x33 \n\t" \ + "2: nop \n\t" \ + TESTCASE_END \ + kprobe_test_flags &= ~TEST_FLAG_FULL_ITBLOCK; + +#define TEST_THUMB_TO_ARM_INTERWORK_P(code1, reg, val, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_PTR(reg, val) \ + TEST_ARG_REG(14, 99f+1) \ + TEST_ARG_MEM(15, 3f) \ + TEST_ARG_END("") \ + " nop \n\t" /* To align 1f */ \ + "50: nop \n\t" \ + "1: "code1 #reg code2" \n\t" \ + " bx lr \n\t" \ + ".arm \n\t" \ + "3: adr lr, 2f+1 \n\t" \ + " bx lr \n\t" \ + ".thumb \n\t" \ + "2: nop \n\t" \ + TESTCASE_END + + +void kprobe_thumb16_test_cases(void) +{ + kprobe_test_flags = TEST_FLAG_NARROW_INSTR; + + TEST_GROUP("Shift (immediate), add, subtract, move, and compare") + + TEST_R( "lsls r7, r",0,VAL1,", #5") + TEST_R( "lsls r0, r",7,VAL2,", #11") + TEST_R( "lsrs r7, r",0,VAL1,", #5") + TEST_R( "lsrs r0, r",7,VAL2,", #11") + TEST_R( "asrs r7, r",0,VAL1,", #5") + TEST_R( "asrs r0, r",7,VAL2,", #11") + TEST_RR( "adds r2, r",0,VAL1,", r",7,VAL2,"") + TEST_RR( "adds r5, r",7,VAL2,", r",0,VAL2,"") + TEST_RR( "subs r2, r",0,VAL1,", r",7,VAL2,"") + TEST_RR( "subs r5, r",7,VAL2,", r",0,VAL2,"") + TEST_R( "adds r7, r",0,VAL1,", #5") + TEST_R( "adds r0, r",7,VAL2,", #2") + TEST_R( "subs r7, r",0,VAL1,", #5") + TEST_R( "subs r0, r",7,VAL2,", #2") + TEST( "movs.n r0, #0x5f") + TEST( "movs.n r7, #0xa0") + TEST_R( "cmp.n r",0,0x5e, ", #0x5f") + TEST_R( "cmp.n r",5,0x15f,", #0x5f") + TEST_R( "cmp.n r",7,0xa0, ", #0xa0") + TEST_R( "adds.n r",0,VAL1,", #0x5f") + TEST_R( "adds.n r",7,VAL2,", #0xa0") + TEST_R( "subs.n r",0,VAL1,", #0x5f") + TEST_R( "subs.n r",7,VAL2,", #0xa0") + + TEST_GROUP("16-bit Thumb data-processing instructions") + +#define DATA_PROCESSING16(op,val) \ + TEST_RR( op" r",0,VAL1,", r",7,val,"") \ + TEST_RR( op" r",7,VAL2,", r",0,val,"") + + DATA_PROCESSING16("ands",0xf00f00ff) + DATA_PROCESSING16("eors",0xf00f00ff) + DATA_PROCESSING16("lsls",11) + DATA_PROCESSING16("lsrs",11) + DATA_PROCESSING16("asrs",11) + DATA_PROCESSING16("adcs",VAL2) + DATA_PROCESSING16("sbcs",VAL2) + DATA_PROCESSING16("rors",11) + DATA_PROCESSING16("tst",0xf00f00ff) + TEST_R("rsbs r",0,VAL1,", #0") + TEST_R("rsbs r",7,VAL2,", #0") + DATA_PROCESSING16("cmp",0xf00f00ff) + DATA_PROCESSING16("cmn",0xf00f00ff) + DATA_PROCESSING16("orrs",0xf00f00ff) + DATA_PROCESSING16("muls",VAL2) + DATA_PROCESSING16("bics",0xf00f00ff) + DATA_PROCESSING16("mvns",VAL2) + + TEST_GROUP("Special data instructions and branch and exchange") + + TEST_RR( "add r",0, VAL1,", r",7,VAL2,"") + TEST_RR( "add r",3, VAL2,", r",8,VAL3,"") + TEST_RR( "add r",8, VAL3,", r",0,VAL1,"") + TEST_R( "add sp" ", r",8,-8, "") + TEST_R( "add r",14,VAL1,", pc") + TEST_BF_R("add pc" ", r",0,2f-1f-8,"") + TEST_UNSUPPORTED(__inst_thumb16(0x44ff) " @ add pc, pc") + + 
TEST_RR( "cmp r",3,VAL1,", r",8,VAL2,"") + TEST_RR( "cmp r",8,VAL2,", r",0,VAL1,"") + TEST_R( "cmp sp" ", r",8,-8, "") + + TEST_R( "mov r0, r",7,VAL2,"") + TEST_R( "mov r3, r",8,VAL3,"") + TEST_R( "mov r8, r",0,VAL1,"") + TEST_P( "mov sp, r",8,-8, "") + TEST( "mov lr, pc") + TEST_BF_R("mov pc, r",0,2f, "") + + TEST_BF_R("bx r",0, 2f+1,"") + TEST_BF_R("bx r",14,2f+1,"") + TESTCASE_START("bx pc") + TEST_ARG_REG(14, 99f+1) + TEST_ARG_END("") + " nop \n\t" /* To align the bx pc*/ + "50: nop \n\t" + "1: bx pc \n\t" + " bx lr \n\t" + ".arm \n\t" + " adr lr, 2f+1 \n\t" + " bx lr \n\t" + ".thumb \n\t" + "2: nop \n\t" + TESTCASE_END + + TEST_BF_R("blx r",0, 2f+1,"") + TEST_BB_R("blx r",14,2f+1,"") + TEST_UNSUPPORTED(__inst_thumb16(0x47f8) " @ blx pc") + + TEST_GROUP("Load from Literal Pool") + + TEST_X( "ldr r0, 3f", + ".align \n\t" + "3: .word "__stringify(VAL1)) + TEST_X( "ldr r7, 3f", + ".space 128 \n\t" + ".align \n\t" + "3: .word "__stringify(VAL2)) + + TEST_GROUP("16-bit Thumb Load/store instructions") + + TEST_RPR("str r",0, VAL1,", [r",1, 24,", r",2, 48,"]") + TEST_RPR("str r",7, VAL2,", [r",6, 24,", r",5, 48,"]") + TEST_RPR("strh r",0, VAL1,", [r",1, 24,", r",2, 48,"]") + TEST_RPR("strh r",7, VAL2,", [r",6, 24,", r",5, 48,"]") + TEST_RPR("strb r",0, VAL1,", [r",1, 24,", r",2, 48,"]") + TEST_RPR("strb r",7, VAL2,", [r",6, 24,", r",5, 48,"]") + TEST_PR( "ldrsb r0, [r",1, 24,", r",2, 48,"]") + TEST_PR( "ldrsb r7, [r",6, 24,", r",5, 50,"]") + TEST_PR( "ldr r0, [r",1, 24,", r",2, 48,"]") + TEST_PR( "ldr r7, [r",6, 24,", r",5, 48,"]") + TEST_PR( "ldrh r0, [r",1, 24,", r",2, 48,"]") + TEST_PR( "ldrh r7, [r",6, 24,", r",5, 50,"]") + TEST_PR( "ldrb r0, [r",1, 24,", r",2, 48,"]") + TEST_PR( "ldrb r7, [r",6, 24,", r",5, 50,"]") + TEST_PR( "ldrsh r0, [r",1, 24,", r",2, 48,"]") + TEST_PR( "ldrsh r7, [r",6, 24,", r",5, 50,"]") + + TEST_RP("str r",0, VAL1,", [r",1, 24,", #120]") + TEST_RP("str r",7, VAL2,", [r",6, 24,", #120]") + TEST_P( "ldr r0, [r",1, 24,", #120]") + TEST_P( "ldr r7, [r",6, 24,", #120]") + TEST_RP("strb r",0, VAL1,", [r",1, 24,", #30]") + TEST_RP("strb r",7, VAL2,", [r",6, 24,", #30]") + TEST_P( "ldrb r0, [r",1, 24,", #30]") + TEST_P( "ldrb r7, [r",6, 24,", #30]") + TEST_RP("strh r",0, VAL1,", [r",1, 24,", #60]") + TEST_RP("strh r",7, VAL2,", [r",6, 24,", #60]") + TEST_P( "ldrh r0, [r",1, 24,", #60]") + TEST_P( "ldrh r7, [r",6, 24,", #60]") + + TEST_R( "str r",0, VAL1,", [sp, #0]") + TEST_R( "str r",7, VAL2,", [sp, #160]") + TEST( "ldr r0, [sp, #0]") + TEST( "ldr r7, [sp, #160]") + + TEST_RP("str r",0, VAL1,", [r",0, 24,"]") + TEST_P( "ldr r0, [r",0, 24,"]") + + TEST_GROUP("Generate PC-/SP-relative address") + + TEST("add r0, pc, #4") + TEST("add r7, pc, #1020") + TEST("add r0, sp, #4") + TEST("add r7, sp, #1020") + + TEST_GROUP("Miscellaneous 16-bit instructions") + + TEST_UNSUPPORTED( "cpsie i") + TEST_UNSUPPORTED( "cpsid i") + TEST_UNSUPPORTED( "setend le") + TEST_UNSUPPORTED( "setend be") + + TEST("add sp, #"__stringify(TEST_MEMORY_SIZE)) /* Assumes TEST_MEMORY_SIZE < 0x400 */ + TEST("sub sp, #0x7f*4") + +DONT_TEST_IN_ITBLOCK( + TEST_BF_R( "cbnz r",0,0, ", 2f") + TEST_BF_R( "cbz r",2,-1,", 2f") + TEST_BF_RX( "cbnz r",4,1, ", 2f", SPACE_0x20) + TEST_BF_RX( "cbz r",7,0, ", 2f", SPACE_0x40) +) + TEST_R("sxth r0, r",7, HH1,"") + TEST_R("sxth r7, r",0, HH2,"") + TEST_R("sxtb r0, r",7, HH1,"") + TEST_R("sxtb r7, r",0, HH2,"") + TEST_R("uxth r0, r",7, HH1,"") + TEST_R("uxth r7, r",0, HH2,"") + TEST_R("uxtb r0, r",7, HH1,"") + TEST_R("uxtb r7, r",0, HH2,"") + TEST_R("rev r0, r",7, VAL1,"") 
+ TEST_R("rev r7, r",0, VAL2,"") + TEST_R("rev16 r0, r",7, VAL1,"") + TEST_R("rev16 r7, r",0, VAL2,"") + TEST_UNSUPPORTED(__inst_thumb16(0xba80) "") + TEST_UNSUPPORTED(__inst_thumb16(0xbabf) "") + TEST_R("revsh r0, r",7, VAL1,"") + TEST_R("revsh r7, r",0, VAL2,"") + +#define TEST_POPPC(code, offset) \ + TESTCASE_START(code) \ + TEST_ARG_PTR(13, offset) \ + TEST_ARG_END("") \ + TEST_BRANCH_F(code) \ + TESTCASE_END + + TEST("push {r0}") + TEST("push {r7}") + TEST("push {r14}") + TEST("push {r0-r7,r14}") + TEST("push {r0,r2,r4,r6,r14}") + TEST("push {r1,r3,r5,r7}") + TEST("pop {r0}") + TEST("pop {r7}") + TEST("pop {r0,r2,r4,r6}") + TEST_POPPC("pop {pc}",15*4) + TEST_POPPC("pop {r0-r7,pc}",7*4) + TEST_POPPC("pop {r1,r3,r5,r7,pc}",11*4) + TEST_THUMB_TO_ARM_INTERWORK_P("pop {pc} @ ",13,15*4,"") + TEST_THUMB_TO_ARM_INTERWORK_P("pop {r0-r7,pc} @ ",13,7*4,"") + + TEST_UNSUPPORTED("bkpt.n 0") + TEST_UNSUPPORTED("bkpt.n 255") + + TEST_SUPPORTED("yield") + TEST("sev") + TEST("nop") + TEST("wfi") + TEST_SUPPORTED("wfe") + TEST_UNSUPPORTED(__inst_thumb16(0xbf50) "") /* Unassigned hints */ + TEST_UNSUPPORTED(__inst_thumb16(0xbff0) "") /* Unassigned hints */ + +#define TEST_IT(code, code2) \ + TESTCASE_START(code) \ + TEST_ARG_END("") \ + "50: nop \n\t" \ + "1: "code" \n\t" \ + " "code2" \n\t" \ + "2: nop \n\t" \ + TESTCASE_END + +DONT_TEST_IN_ITBLOCK( + TEST_IT("it eq","moveq r0,#0") + TEST_IT("it vc","movvc r0,#0") + TEST_IT("it le","movle r0,#0") + TEST_IT("ite eq","moveq r0,#0\n\t movne r1,#1") + TEST_IT("itet vc","movvc r0,#0\n\t movvs r1,#1\n\t movvc r2,#2") + TEST_IT("itete le","movle r0,#0\n\t movgt r1,#1\n\t movle r2,#2\n\t movgt r3,#3") + TEST_IT("itttt le","movle r0,#0\n\t movle r1,#1\n\t movle r2,#2\n\t movle r3,#3") + TEST_IT("iteee le","movle r0,#0\n\t movgt r1,#1\n\t movgt r2,#2\n\t movgt r3,#3") +) + + TEST_GROUP("Load and store multiple") + + TEST_P("ldmia r",4, 16*4,"!, {r0,r7}") + TEST_P("ldmia r",7, 16*4,"!, {r0-r6}") + TEST_P("stmia r",4, 16*4,"!, {r0,r7}") + TEST_P("stmia r",0, 16*4,"!, {r0-r7}") + + TEST_GROUP("Conditional branch and Supervisor Call instructions") + +CONDITION_INSTRUCTIONS(8, + TEST_BF("beq 2f") + TEST_BB("bne 2b") + TEST_BF("bgt 2f") + TEST_BB("blt 2b") +) + TEST_UNSUPPORTED(__inst_thumb16(0xde00) "") + TEST_UNSUPPORTED(__inst_thumb16(0xdeff) "") + TEST_UNSUPPORTED("svc #0x00") + TEST_UNSUPPORTED("svc #0xff") + + TEST_GROUP("Unconditional branch") + + TEST_BF( "b 2f") + TEST_BB( "b 2b") + TEST_BF_X("b 2f", SPACE_0x400) + TEST_BB_X("b 2b", SPACE_0x400) + + TEST_GROUP("Testing instructions in IT blocks") + + TEST_ITBLOCK("subs.n r0, r0") + + verbose("\n"); +} + + +void kprobe_thumb32_test_cases(void) +{ + kprobe_test_flags = 0; + + TEST_GROUP("Load/store multiple") + + TEST_UNSUPPORTED("rfedb sp") + TEST_UNSUPPORTED("rfeia sp") + TEST_UNSUPPORTED("rfedb sp!") + TEST_UNSUPPORTED("rfeia sp!") + + TEST_P( "stmia r",0, 16*4,", {r0,r8}") + TEST_P( "stmia r",4, 16*4,", {r0-r12,r14}") + TEST_P( "stmia r",7, 16*4,"!, {r8-r12,r14}") + TEST_P( "stmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + + TEST_P( "ldmia r",0, 16*4,", {r0,r8}") + TEST_P( "ldmia r",4, 0, ", {r0-r12,r14}") + TEST_BF_P("ldmia r",5, 8*4, "!, {r6-r12,r15}") + TEST_P( "ldmia r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_BF_P("ldmia r",14,14*4,"!, {r4,pc}") + + TEST_P( "stmdb r",0, 16*4,", {r0,r8}") + TEST_P( "stmdb r",4, 16*4,", {r0-r12,r14}") + TEST_P( "stmdb r",5, 16*4,"!, {r8-r12,r14}") + TEST_P( "stmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + + TEST_P( "ldmdb r",0, 16*4,", {r0,r8}") + TEST_P( "ldmdb 
r",4, 16*4,", {r0-r12,r14}") + TEST_BF_P("ldmdb r",5, 16*4,"!, {r6-r12,r15}") + TEST_P( "ldmdb r",12,16*4,"!, {r1,r3,r5,r7,r8-r11,r14}") + TEST_BF_P("ldmdb r",14,16*4,"!, {r4,pc}") + + TEST_P( "stmdb r",13,16*4,"!, {r3-r12,lr}") + TEST_P( "stmdb r",13,16*4,"!, {r3-r12}") + TEST_P( "stmdb r",2, 16*4,", {r3-r12,lr}") + TEST_P( "stmdb r",13,16*4,"!, {r2-r12,lr}") + TEST_P( "stmdb r",0, 16*4,", {r0-r12}") + TEST_P( "stmdb r",0, 16*4,", {r0-r12,lr}") + + TEST_BF_P("ldmia r",13,5*4, "!, {r3-r12,pc}") + TEST_P( "ldmia r",13,5*4, "!, {r3-r12}") + TEST_BF_P("ldmia r",2, 5*4, "!, {r3-r12,pc}") + TEST_BF_P("ldmia r",13,4*4, "!, {r2-r12,pc}") + TEST_P( "ldmia r",0, 16*4,", {r0-r12}") + TEST_P( "ldmia r",0, 16*4,", {r0-r12,lr}") + + TEST_THUMB_TO_ARM_INTERWORK_P("ldmia r",0,14*4,", {r12,pc}") + TEST_THUMB_TO_ARM_INTERWORK_P("ldmia r",13,2*4,", {r0-r12,pc}") + + TEST_UNSUPPORTED(__inst_thumb32(0xe88f0101) " @ stmia pc, {r0,r8}") + TEST_UNSUPPORTED(__inst_thumb32(0xe92f5f00) " @ stmdb pc!, {r8-r12,r14}") + TEST_UNSUPPORTED(__inst_thumb32(0xe8bdc000) " @ ldmia r13!, {r14,pc}") + TEST_UNSUPPORTED(__inst_thumb32(0xe93ec000) " @ ldmdb r14!, {r14,pc}") + TEST_UNSUPPORTED(__inst_thumb32(0xe8a73f00) " @ stmia r7!, {r8-r12,sp}") + TEST_UNSUPPORTED(__inst_thumb32(0xe8a79f00) " @ stmia r7!, {r8-r12,pc}") + TEST_UNSUPPORTED(__inst_thumb32(0xe93e2010) " @ ldmdb r14!, {r4,sp}") + + TEST_GROUP("Load/store double or exclusive, table branch") + + TEST_P( "ldrd r0, r1, [r",1, 24,", #-16]") + TEST( "ldrd r12, r14, [sp, #16]") + TEST_P( "ldrd r1, r0, [r",7, 24,", #-16]!") + TEST( "ldrd r14, r12, [sp, #16]!") + TEST_P( "ldrd r1, r0, [r",7, 24,"], #16") + TEST( "ldrd r7, r8, [sp], #-16") + + TEST_X( "ldrd r12, r14, 3f", + ".align 3 \n\t" + "3: .word "__stringify(VAL1)" \n\t" + " .word "__stringify(VAL2)) + + TEST_UNSUPPORTED(__inst_thumb32(0xe9ffec04) " @ ldrd r14, r12, [pc, #16]!") + TEST_UNSUPPORTED(__inst_thumb32(0xe8ffec04) " @ ldrd r14, r12, [pc], #16") + TEST_UNSUPPORTED(__inst_thumb32(0xe9d4d800) " @ ldrd sp, r8, [r4]") + TEST_UNSUPPORTED(__inst_thumb32(0xe9d4f800) " @ ldrd pc, r8, [r4]") + TEST_UNSUPPORTED(__inst_thumb32(0xe9d47d00) " @ ldrd r7, sp, [r4]") + TEST_UNSUPPORTED(__inst_thumb32(0xe9d47f00) " @ ldrd r7, pc, [r4]") + + TEST_RRP("strd r",0, VAL1,", r",1, VAL2,", [r",1, 24,", #-16]") + TEST_RR( "strd r",12,VAL2,", r",14,VAL1,", [sp, #16]") + TEST_RRP("strd r",1, VAL1,", r",0, VAL2,", [r",7, 24,", #-16]!") + TEST_RR( "strd r",14,VAL2,", r",12,VAL1,", [sp, #16]!") + TEST_RRP("strd r",1, VAL1,", r",0, VAL2,", [r",7, 24,"], #16") + TEST_RR( "strd r",7, VAL2,", r",8, VAL1,", [sp], #-16") + TEST_UNSUPPORTED(__inst_thumb32(0xe9efec04) " @ strd r14, r12, [pc, #16]!") + TEST_UNSUPPORTED(__inst_thumb32(0xe8efec04) " @ strd r14, r12, [pc], #16") + + TEST_RX("tbb [pc, r",0, (9f-(1f+4)),"]", + "9: \n\t" + ".byte (2f-1b-4)>>1 \n\t" + ".byte (3f-1b-4)>>1 \n\t" + "3: mvn r0, r0 \n\t" + "2: nop \n\t") + + TEST_RX("tbb [pc, r",4, (9f-(1f+4)+1),"]", + "9: \n\t" + ".byte (2f-1b-4)>>1 \n\t" + ".byte (3f-1b-4)>>1 \n\t" + "3: mvn r0, r0 \n\t" + "2: nop \n\t") + + TEST_RRX("tbb [r",1,9f,", r",2,0,"]", + "9: \n\t" + ".byte (2f-1b-4)>>1 \n\t" + ".byte (3f-1b-4)>>1 \n\t" + "3: mvn r0, r0 \n\t" + "2: nop \n\t") + + TEST_RX("tbh [pc, r",7, (9f-(1f+4))>>1,"]", + "9: \n\t" + ".short (2f-1b-4)>>1 \n\t" + ".short (3f-1b-4)>>1 \n\t" + "3: mvn r0, r0 \n\t" + "2: nop \n\t") + + TEST_RX("tbh [pc, r",12, ((9f-(1f+4))>>1)+1,"]", + "9: \n\t" + ".short (2f-1b-4)>>1 \n\t" + ".short (3f-1b-4)>>1 \n\t" + "3: mvn r0, r0 \n\t" + "2: nop \n\t") + + 
TEST_RRX("tbh [r",1,9f, ", r",14,1,"]", + "9: \n\t" + ".short (2f-1b-4)>>1 \n\t" + ".short (3f-1b-4)>>1 \n\t" + "3: mvn r0, r0 \n\t" + "2: nop \n\t") + + TEST_UNSUPPORTED(__inst_thumb32(0xe8d1f01f) " @ tbh [r1, pc]") + TEST_UNSUPPORTED(__inst_thumb32(0xe8d1f01d) " @ tbh [r1, sp]") + TEST_UNSUPPORTED(__inst_thumb32(0xe8ddf012) " @ tbh [sp, r2]") + + TEST_UNSUPPORTED("strexb r0, r1, [r2]") + TEST_UNSUPPORTED("strexh r0, r1, [r2]") + TEST_UNSUPPORTED("strexd r0, r1, [r2]") + TEST_UNSUPPORTED("ldrexb r0, [r1]") + TEST_UNSUPPORTED("ldrexh r0, [r1]") + TEST_UNSUPPORTED("ldrexd r0, [r1]") + + TEST_GROUP("Data-processing (shifted register) and (modified immediate)") + +#define _DATA_PROCESSING32_DNM(op,s,val) \ + TEST_RR(op s".w r0, r",1, VAL1,", r",2, val, "") \ + TEST_RR(op s" r1, r",1, VAL1,", r",2, val, ", lsl #3") \ + TEST_RR(op s" r2, r",3, VAL1,", r",2, val, ", lsr #4") \ + TEST_RR(op s" r3, r",3, VAL1,", r",2, val, ", asr #5") \ + TEST_RR(op s" r4, r",5, VAL1,", r",2, N(val),", asr #6") \ + TEST_RR(op s" r5, r",5, VAL1,", r",2, val, ", ror #7") \ + TEST_RR(op s" r8, r",9, VAL1,", r",10,val, ", rrx") \ + TEST_R( op s" r0, r",11,VAL1,", #0x00010001") \ + TEST_R( op s" r11, r",0, VAL1,", #0xf5000000") \ + TEST_R( op s" r7, r",8, VAL2,", #0x000af000") + +#define DATA_PROCESSING32_DNM(op,val) \ + _DATA_PROCESSING32_DNM(op,"",val) \ + _DATA_PROCESSING32_DNM(op,"s",val) + +#define DATA_PROCESSING32_NM(op,val) \ + TEST_RR(op".w r",1, VAL1,", r",2, val, "") \ + TEST_RR(op" r",1, VAL1,", r",2, val, ", lsl #3") \ + TEST_RR(op" r",3, VAL1,", r",2, val, ", lsr #4") \ + TEST_RR(op" r",3, VAL1,", r",2, val, ", asr #5") \ + TEST_RR(op" r",5, VAL1,", r",2, N(val),", asr #6") \ + TEST_RR(op" r",5, VAL1,", r",2, val, ", ror #7") \ + TEST_RR(op" r",9, VAL1,", r",10,val, ", rrx") \ + TEST_R( op" r",11,VAL1,", #0x00010001") \ + TEST_R( op" r",0, VAL1,", #0xf5000000") \ + TEST_R( op" r",8, VAL2,", #0x000af000") + +#define _DATA_PROCESSING32_DM(op,s,val) \ + TEST_R( op s".w r0, r",14, val, "") \ + TEST_R( op s" r1, r",12, val, ", lsl #3") \ + TEST_R( op s" r2, r",11, val, ", lsr #4") \ + TEST_R( op s" r3, r",10, val, ", asr #5") \ + TEST_R( op s" r4, r",9, N(val),", asr #6") \ + TEST_R( op s" r5, r",8, val, ", ror #7") \ + TEST_R( op s" r8, r",7,val, ", rrx") \ + TEST( op s" r0, #0x00010001") \ + TEST( op s" r11, #0xf5000000") \ + TEST( op s" r7, #0x000af000") \ + TEST( op s" r4, #0x00005a00") + +#define DATA_PROCESSING32_DM(op,val) \ + _DATA_PROCESSING32_DM(op,"",val) \ + _DATA_PROCESSING32_DM(op,"s",val) + + DATA_PROCESSING32_DNM("and",0xf00f00ff) + DATA_PROCESSING32_NM("tst",0xf00f00ff) + DATA_PROCESSING32_DNM("bic",0xf00f00ff) + DATA_PROCESSING32_DNM("orr",0xf00f00ff) + DATA_PROCESSING32_DM("mov",VAL2) + DATA_PROCESSING32_DNM("orn",0xf00f00ff) + DATA_PROCESSING32_DM("mvn",VAL2) + DATA_PROCESSING32_DNM("eor",0xf00f00ff) + DATA_PROCESSING32_NM("teq",0xf00f00ff) + DATA_PROCESSING32_DNM("add",VAL2) + DATA_PROCESSING32_NM("cmn",VAL2) + DATA_PROCESSING32_DNM("adc",VAL2) + DATA_PROCESSING32_DNM("sbc",VAL2) + DATA_PROCESSING32_DNM("sub",VAL2) + DATA_PROCESSING32_NM("cmp",VAL2) + DATA_PROCESSING32_DNM("rsb",VAL2) + + TEST_RR("pkhbt r0, r",0, HH1,", r",1, HH2,"") + TEST_RR("pkhbt r14,r",12, HH1,", r",10,HH2,", lsl #2") + TEST_RR("pkhtb r0, r",0, HH1,", r",1, HH2,"") + TEST_RR("pkhtb r14,r",12, HH1,", r",10,HH2,", asr #2") + + TEST_UNSUPPORTED(__inst_thumb32(0xea170f0d) " @ tst.w r7, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xea170f0f) " @ tst.w r7, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xea1d0f07) " @ tst.w sp, r7") + 
TEST_UNSUPPORTED(__inst_thumb32(0xea1f0f07) " @ tst.w pc, r7") + TEST_UNSUPPORTED(__inst_thumb32(0xf01d1f08) " @ tst sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf01f1f08) " @ tst pc, #0x00080008") + + TEST_UNSUPPORTED(__inst_thumb32(0xea970f0d) " @ teq.w r7, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xea970f0f) " @ teq.w r7, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xea9d0f07) " @ teq.w sp, r7") + TEST_UNSUPPORTED(__inst_thumb32(0xea9f0f07) " @ teq.w pc, r7") + TEST_UNSUPPORTED(__inst_thumb32(0xf09d1f08) " @ tst sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf09f1f08) " @ tst pc, #0x00080008") + + TEST_UNSUPPORTED(__inst_thumb32(0xeb170f0d) " @ cmn.w r7, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xeb170f0f) " @ cmn.w r7, pc") + TEST_P("cmn.w sp, r",7,0,"") + TEST_UNSUPPORTED(__inst_thumb32(0xeb1f0f07) " @ cmn.w pc, r7") + TEST( "cmn sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf11f1f08) " @ cmn pc, #0x00080008") + + TEST_UNSUPPORTED(__inst_thumb32(0xebb70f0d) " @ cmp.w r7, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xebb70f0f) " @ cmp.w r7, pc") + TEST_P("cmp.w sp, r",7,0,"") + TEST_UNSUPPORTED(__inst_thumb32(0xebbf0f07) " @ cmp.w pc, r7") + TEST( "cmp sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf1bf1f08) " @ cmp pc, #0x00080008") + + TEST_UNSUPPORTED(__inst_thumb32(0xea5f070d) " @ movs.w r7, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xea5f070f) " @ movs.w r7, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xea5f0d07) " @ movs.w sp, r7") + TEST_UNSUPPORTED(__inst_thumb32(0xea4f0f07) " @ mov.w pc, r7") + TEST_UNSUPPORTED(__inst_thumb32(0xf04f1d08) " @ mov sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf04f1f08) " @ mov pc, #0x00080008") + + TEST_R("add.w r0, sp, r",1, 4,"") + TEST_R("adds r0, sp, r",1, 4,", asl #3") + TEST_R("add r0, sp, r",1, 4,", asl #4") + TEST_R("add r0, sp, r",1, 16,", ror #1") + TEST_R("add.w sp, sp, r",1, 4,"") + TEST_R("add sp, sp, r",1, 4,", asl #3") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d1d01) " @ add sp, sp, r1, asl #4") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d71) " @ add sp, sp, r1, ror #1") + TEST( "add.w r0, sp, #24") + TEST( "add.w sp, sp, #24") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0f01) " @ add pc, sp, r1") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d000f) " @ add r0, sp, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d000d) " @ add r0, sp, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d0f) " @ add sp, sp, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d0d) " @ add sp, sp, sp") + + TEST_R("sub.w r0, sp, r",1, 4,"") + TEST_R("subs r0, sp, r",1, 4,", asl #3") + TEST_R("sub r0, sp, r",1, 4,", asl #4") + TEST_R("sub r0, sp, r",1, 16,", ror #1") + TEST_R("sub.w sp, sp, r",1, 4,"") + TEST_R("sub sp, sp, r",1, 4,", asl #3") + TEST_UNSUPPORTED(__inst_thumb32(0xebad1d01) " @ sub sp, sp, r1, asl #4") + TEST_UNSUPPORTED(__inst_thumb32(0xebad0d71) " @ sub sp, sp, r1, ror #1") + TEST_UNSUPPORTED(__inst_thumb32(0xebad0f01) " @ sub pc, sp, r1") + TEST( "sub.w r0, sp, #24") + TEST( "sub.w sp, sp, #24") + + TEST_UNSUPPORTED(__inst_thumb32(0xea02010f) " @ and r1, r2, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xea0f0103) " @ and r1, pc, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xea020f03) " @ and pc, r2, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xea02010d) " @ and r1, r2, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xea0d0103) " @ and r1, sp, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xea020d03) " @ and sp, r2, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xf00d1108) " @ and r1, sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf00f1108) " @ and r1, pc, #0x00080008") + 
TEST_UNSUPPORTED(__inst_thumb32(0xf0021d08) " @ and sp, r8, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf0021f08) " @ and pc, r8, #0x00080008") + + TEST_UNSUPPORTED(__inst_thumb32(0xeb02010f) " @ add r1, r2, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xeb0f0103) " @ add r1, pc, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xeb020f03) " @ add pc, r2, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xeb02010d) " @ add r1, r2, sp") + TEST_SUPPORTED( __inst_thumb32(0xeb0d0103) " @ add r1, sp, r3") + TEST_UNSUPPORTED(__inst_thumb32(0xeb020d03) " @ add sp, r2, r3") + TEST_SUPPORTED( __inst_thumb32(0xf10d1108) " @ add r1, sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf10d1f08) " @ add pc, sp, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf10f1108) " @ add r1, pc, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf1021d08) " @ add sp, r8, #0x00080008") + TEST_UNSUPPORTED(__inst_thumb32(0xf1021f08) " @ add pc, r8, #0x00080008") + + TEST_UNSUPPORTED(__inst_thumb32(0xeaa00000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xeaf00000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xeb200000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xeb800000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xebe00000) "") + + TEST_UNSUPPORTED(__inst_thumb32(0xf0a00000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf0c00000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf0f00000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf1200000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf1800000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf1e00000) "") + + TEST_GROUP("Coprocessor instructions") + + TEST_UNSUPPORTED(__inst_thumb32(0xec000000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xeff00000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xfc000000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xfff00000) "") + + TEST_GROUP("Data-processing (plain binary immediate)") + + TEST_R("addw r0, r",1, VAL1,", #0x123") + TEST( "addw r14, sp, #0xf5a") + TEST( "addw sp, sp, #0x20") + TEST( "addw r7, pc, #0x888") + TEST_UNSUPPORTED(__inst_thumb32(0xf20f1f20) " @ addw pc, pc, #0x120") + TEST_UNSUPPORTED(__inst_thumb32(0xf20d1f20) " @ addw pc, sp, #0x120") + TEST_UNSUPPORTED(__inst_thumb32(0xf20f1d20) " @ addw sp, pc, #0x120") + TEST_UNSUPPORTED(__inst_thumb32(0xf2001d20) " @ addw sp, r0, #0x120") + + TEST_R("subw r0, r",1, VAL1,", #0x123") + TEST( "subw r14, sp, #0xf5a") + TEST( "subw sp, sp, #0x20") + TEST( "subw r7, pc, #0x888") + TEST_UNSUPPORTED(__inst_thumb32(0xf2af1f20) " @ subw pc, pc, #0x120") + TEST_UNSUPPORTED(__inst_thumb32(0xf2ad1f20) " @ subw pc, sp, #0x120") + TEST_UNSUPPORTED(__inst_thumb32(0xf2af1d20) " @ subw sp, pc, #0x120") + TEST_UNSUPPORTED(__inst_thumb32(0xf2a01d20) " @ subw sp, r0, #0x120") + + TEST("movw r0, #0") + TEST("movw r0, #0xffff") + TEST("movw lr, #0xffff") + TEST_UNSUPPORTED(__inst_thumb32(0xf2400d00) " @ movw sp, #0") + TEST_UNSUPPORTED(__inst_thumb32(0xf2400f00) " @ movw pc, #0") + + TEST_R("movt r",0, VAL1,", #0") + TEST_R("movt r",0, VAL2,", #0xffff") + TEST_R("movt r",14,VAL1,", #0xffff") + TEST_UNSUPPORTED(__inst_thumb32(0xf2c00d00) " @ movt sp, #0") + TEST_UNSUPPORTED(__inst_thumb32(0xf2c00f00) " @ movt pc, #0") + + TEST_R( "ssat r0, #24, r",0, VAL1,"") + TEST_R( "ssat r14, #24, r",12, VAL2,"") + TEST_R( "ssat r0, #24, r",0, VAL1,", lsl #8") + TEST_R( "ssat r14, #24, r",12, VAL2,", asr #8") + TEST_UNSUPPORTED(__inst_thumb32(0xf30c0d17) " @ ssat sp, #24, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf30c0f17) " @ ssat pc, #24, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf30d0c17) " @ ssat r12, #24, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xf30f0c17) " @ ssat r12, #24, pc") + + 
TEST_R( "usat r0, #24, r",0, VAL1,"") + TEST_R( "usat r14, #24, r",12, VAL2,"") + TEST_R( "usat r0, #24, r",0, VAL1,", lsl #8") + TEST_R( "usat r14, #24, r",12, VAL2,", asr #8") + TEST_UNSUPPORTED(__inst_thumb32(0xf38c0d17) " @ usat sp, #24, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf38c0f17) " @ usat pc, #24, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf38d0c17) " @ usat r12, #24, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xf38f0c17) " @ usat r12, #24, pc") + + TEST_R( "ssat16 r0, #12, r",0, HH1,"") + TEST_R( "ssat16 r14, #12, r",12, HH2,"") + TEST_UNSUPPORTED(__inst_thumb32(0xf32c0d0b) " @ ssat16 sp, #12, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf32c0f0b) " @ ssat16 pc, #12, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf32d0c0b) " @ ssat16 r12, #12, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xf32f0c0b) " @ ssat16 r12, #12, pc") + + TEST_R( "usat16 r0, #12, r",0, HH1,"") + TEST_R( "usat16 r14, #12, r",12, HH2,"") + TEST_UNSUPPORTED(__inst_thumb32(0xf3ac0d0b) " @ usat16 sp, #12, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf3ac0f0b) " @ usat16 pc, #12, r12") + TEST_UNSUPPORTED(__inst_thumb32(0xf3ad0c0b) " @ usat16 r12, #12, sp") + TEST_UNSUPPORTED(__inst_thumb32(0xf3af0c0b) " @ usat16 r12, #12, pc") + + TEST_R( "sbfx r0, r",0 , VAL1,", #0, #31") + TEST_R( "sbfx r14, r",12, VAL2,", #8, #16") + TEST_R( "sbfx r4, r",10, VAL1,", #16, #15") + TEST_UNSUPPORTED(__inst_thumb32(0xf34c2d0f) " @ sbfx sp, r12, #8, #16") + TEST_UNSUPPORTED(__inst_thumb32(0xf34c2f0f) " @ sbfx pc, r12, #8, #16") + TEST_UNSUPPORTED(__inst_thumb32(0xf34d2c0f) " @ sbfx r12, sp, #8, #16") + TEST_UNSUPPORTED(__inst_thumb32(0xf34f2c0f) " @ sbfx r12, pc, #8, #16") + + TEST_R( "ubfx r0, r",0 , VAL1,", #0, #31") + TEST_R( "ubfx r14, r",12, VAL2,", #8, #16") + TEST_R( "ubfx r4, r",10, VAL1,", #16, #15") + TEST_UNSUPPORTED(__inst_thumb32(0xf3cc2d0f) " @ ubfx sp, r12, #8, #16") + TEST_UNSUPPORTED(__inst_thumb32(0xf3cc2f0f) " @ ubfx pc, r12, #8, #16") + TEST_UNSUPPORTED(__inst_thumb32(0xf3cd2c0f) " @ ubfx r12, sp, #8, #16") + TEST_UNSUPPORTED(__inst_thumb32(0xf3cf2c0f) " @ ubfx r12, pc, #8, #16") + + TEST_R( "bfc r",0, VAL1,", #4, #20") + TEST_R( "bfc r",14,VAL2,", #4, #20") + TEST_R( "bfc r",7, VAL1,", #0, #31") + TEST_R( "bfc r",8, VAL2,", #0, #31") + TEST_UNSUPPORTED(__inst_thumb32(0xf36f0d1e) " @ bfc sp, #0, #31") + TEST_UNSUPPORTED(__inst_thumb32(0xf36f0f1e) " @ bfc pc, #0, #31") + + TEST_RR( "bfi r",0, VAL1,", r",0 , VAL2,", #0, #31") + TEST_RR( "bfi r",12,VAL1,", r",14 , VAL2,", #4, #20") + TEST_UNSUPPORTED(__inst_thumb32(0xf36e1d17) " @ bfi sp, r14, #4, #20") + TEST_UNSUPPORTED(__inst_thumb32(0xf36e1f17) " @ bfi pc, r14, #4, #20") + TEST_UNSUPPORTED(__inst_thumb32(0xf36d1e17) " @ bfi r14, sp, #4, #20") + + TEST_GROUP("Branches and miscellaneous control") + +CONDITION_INSTRUCTIONS(22, + TEST_BF("beq.w 2f") + TEST_BB("bne.w 2b") + TEST_BF("bgt.w 2f") + TEST_BB("blt.w 2b") + TEST_BF_X("bpl.w 2f", SPACE_0x1000) +) + + TEST_UNSUPPORTED("msr cpsr, r0") + TEST_UNSUPPORTED("msr cpsr_f, r1") + TEST_UNSUPPORTED("msr spsr, r2") + + TEST_UNSUPPORTED("cpsie.w i") + TEST_UNSUPPORTED("cpsid.w i") + TEST_UNSUPPORTED("cps 0x13") + + TEST_SUPPORTED("yield.w") + TEST("sev.w") + TEST("nop.w") + TEST("wfi.w") + TEST_SUPPORTED("wfe.w") + TEST_UNSUPPORTED("dbg.w #0") + + TEST_UNSUPPORTED("clrex") + TEST_UNSUPPORTED("dsb") + TEST_UNSUPPORTED("dmb") + TEST_UNSUPPORTED("isb") + + TEST_UNSUPPORTED("bxj r0") + + TEST_UNSUPPORTED("subs pc, lr, #4") + + TEST("mrs r0, cpsr") + TEST("mrs r14, cpsr") + TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8d00) " @ mrs sp, spsr") + 
TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8f00) " @ mrs pc, spsr") + TEST_UNSUPPORTED("mrs r0, spsr") + TEST_UNSUPPORTED("mrs lr, spsr") + + TEST_UNSUPPORTED(__inst_thumb32(0xf7f08000) " @ smc #0") + + TEST_UNSUPPORTED(__inst_thumb32(0xf7f0a000) " @ undefeined") + + TEST_BF( "b.w 2f") + TEST_BB( "b.w 2b") + TEST_BF_X("b.w 2f", SPACE_0x1000) + + TEST_BF( "bl.w 2f") + TEST_BB( "bl.w 2b") + TEST_BB_X("bl.w 2b", SPACE_0x1000) + + TEST_X( "blx __dummy_arm_subroutine", + ".arm \n\t" + ".align \n\t" + ".type __dummy_arm_subroutine, %%function \n\t" + "__dummy_arm_subroutine: \n\t" + "mov r0, pc \n\t" + "bx lr \n\t" + ".thumb \n\t" + ) + TEST( "blx __dummy_arm_subroutine") + + TEST_GROUP("Store single data item") + +#define SINGLE_STORE(size) \ + TEST_RP( "str"size" r",0, VAL1,", [r",11,-1024,", #1024]") \ + TEST_RP( "str"size" r",14,VAL2,", [r",1, -1024,", #1080]") \ + TEST_RP( "str"size" r",0, VAL1,", [r",11,256, ", #-120]") \ + TEST_RP( "str"size" r",14,VAL2,", [r",1, 256, ", #-128]") \ + TEST_RP( "str"size" r",0, VAL1,", [r",11,24, "], #120") \ + TEST_RP( "str"size" r",14,VAL2,", [r",1, 24, "], #128") \ + TEST_RP( "str"size" r",0, VAL1,", [r",11,24, "], #-120") \ + TEST_RP( "str"size" r",14,VAL2,", [r",1, 24, "], #-128") \ + TEST_RP( "str"size" r",0, VAL1,", [r",11,24, ", #120]!") \ + TEST_RP( "str"size" r",14,VAL2,", [r",1, 24, ", #128]!") \ + TEST_RP( "str"size" r",0, VAL1,", [r",11,256, ", #-120]!") \ + TEST_RP( "str"size" r",14,VAL2,", [r",1, 256, ", #-128]!") \ + TEST_RPR("str"size".w r",0, VAL1,", [r",1, 0,", r",2, 4,"]") \ + TEST_RPR("str"size" r",14,VAL2,", [r",10,0,", r",11,4,", lsl #1]") \ + TEST_R( "str"size".w r",7, VAL1,", [sp, #24]") \ + TEST_RP( "str"size".w r",0, VAL2,", [r",0,0, "]") \ + TEST_UNSUPPORTED("str"size"t r0, [r1, #4]") + + SINGLE_STORE("b") + SINGLE_STORE("h") + SINGLE_STORE("") + + TEST("str sp, [sp]") + TEST_UNSUPPORTED(__inst_thumb32(0xf8cfe000) " @ str r14, [pc]") + TEST_UNSUPPORTED(__inst_thumb32(0xf8cef000) " @ str pc, [r14]") + + TEST_GROUP("Advanced SIMD element or structure load/store instructions") + + TEST_UNSUPPORTED(__inst_thumb32(0xf9000000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf92fffff) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf9800000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xf9efffff) "") + + TEST_GROUP("Load single data item and memory hints") + +#define SINGLE_LOAD(size) \ + TEST_P( "ldr"size" r0, [r",11,-1024, ", #1024]") \ + TEST_P( "ldr"size" r14, [r",1, -1024,", #1080]") \ + TEST_P( "ldr"size" r0, [r",11,256, ", #-120]") \ + TEST_P( "ldr"size" r14, [r",1, 256, ", #-128]") \ + TEST_P( "ldr"size" r0, [r",11,24, "], #120") \ + TEST_P( "ldr"size" r14, [r",1, 24, "], #128") \ + TEST_P( "ldr"size" r0, [r",11,24, "], #-120") \ + TEST_P( "ldr"size" r14, [r",1,24, "], #-128") \ + TEST_P( "ldr"size" r0, [r",11,24, ", #120]!") \ + TEST_P( "ldr"size" r14, [r",1, 24, ", #128]!") \ + TEST_P( "ldr"size" r0, [r",11,256, ", #-120]!") \ + TEST_P( "ldr"size" r14, [r",1, 256, ", #-128]!") \ + TEST_PR("ldr"size".w r0, [r",1, 0,", r",2, 4,"]") \ + TEST_PR("ldr"size" r14, [r",10,0,", r",11,4,", lsl #1]") \ + TEST_X( "ldr"size".w r0, 3f", \ + ".align 3 \n\t" \ + "3: .word "__stringify(VAL1)) \ + TEST_X( "ldr"size".w r14, 3f", \ + ".align 3 \n\t" \ + "3: .word "__stringify(VAL2)) \ + TEST( "ldr"size".w r7, 3b") \ + TEST( "ldr"size".w r7, [sp, #24]") \ + TEST_P( "ldr"size".w r0, [r",0,0, "]") \ + TEST_UNSUPPORTED("ldr"size"t r0, [r1, #4]") + + SINGLE_LOAD("b") + SINGLE_LOAD("sb") + SINGLE_LOAD("h") + SINGLE_LOAD("sh") + SINGLE_LOAD("") + + TEST_BF_P("ldr pc, [r",14, 
15*4,"]") + TEST_P( "ldr sp, [r",14, 13*4,"]") + TEST_BF_R("ldr pc, [sp, r",14, 15*4,"]") + TEST_R( "ldr sp, [sp, r",14, 13*4,"]") + TEST_THUMB_TO_ARM_INTERWORK_P("ldr pc, [r",0,0,", #15*4]") + TEST_SUPPORTED("ldr sp, 99f") + TEST_SUPPORTED("ldr pc, 99f") + + TEST_UNSUPPORTED(__inst_thumb32(0xf854700d) " @ ldr r7, [r4, sp]") + TEST_UNSUPPORTED(__inst_thumb32(0xf854700f) " @ ldr r7, [r4, pc]") + TEST_UNSUPPORTED(__inst_thumb32(0xf814700d) " @ ldrb r7, [r4, sp]") + TEST_UNSUPPORTED(__inst_thumb32(0xf814700f) " @ ldrb r7, [r4, pc]") + TEST_UNSUPPORTED(__inst_thumb32(0xf89fd004) " @ ldrb sp, 99f") + TEST_UNSUPPORTED(__inst_thumb32(0xf814d008) " @ ldrb sp, [r4, r8]") + TEST_UNSUPPORTED(__inst_thumb32(0xf894d000) " @ ldrb sp, [r4]") + + TEST_UNSUPPORTED(__inst_thumb32(0xf8600000) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xf9ffffff) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xf9500000) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xf95fffff) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xf8000800) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xf97ffaff) "") /* Unallocated space */ + + TEST( "pli [pc, #4]") + TEST( "pli [pc, #-4]") + TEST( "pld [pc, #4]") + TEST( "pld [pc, #-4]") + + TEST_P( "pld [r",0,-1024,", #1024]") + TEST( __inst_thumb32(0xf8b0f400) " @ pldw [r0, #1024]") + TEST_P( "pli [r",4, 0b,", #1024]") + TEST_P( "pld [r",7, 120,", #-120]") + TEST( __inst_thumb32(0xf837fc78) " @ pldw [r7, #-120]") + TEST_P( "pli [r",11,120,", #-120]") + TEST( "pld [sp, #0]") + + TEST_PR("pld [r",7, 24, ", r",0, 16,"]") + TEST_PR("pld [r",8, 24, ", r",12,16,", lsl #3]") + TEST_SUPPORTED(__inst_thumb32(0xf837f000) " @ pldw [r7, r0]") + TEST_SUPPORTED(__inst_thumb32(0xf838f03c) " @ pldw [r8, r12, lsl #3]"); + TEST_RR("pli [r",12,0b,", r",0, 16,"]") + TEST_RR("pli [r",0, 0b,", r",12,16,", lsl #3]") + TEST_R( "pld [sp, r",1, 16,"]") + TEST_UNSUPPORTED(__inst_thumb32(0xf817f00d) " @pld [r7, sp]") + TEST_UNSUPPORTED(__inst_thumb32(0xf817f00f) " @pld [r7, pc]") + + TEST_GROUP("Data-processing (register)") + +#define SHIFTS32(op) \ + TEST_RR(op" r0, r",1, VAL1,", r",2, 3, "") \ + TEST_RR(op" r14, r",12,VAL2,", r",11,10,"") + + SHIFTS32("lsl") + SHIFTS32("lsls") + SHIFTS32("lsr") + SHIFTS32("lsrs") + SHIFTS32("asr") + SHIFTS32("asrs") + SHIFTS32("ror") + SHIFTS32("rors") + + TEST_UNSUPPORTED(__inst_thumb32(0xfa01ff02) " @ lsl pc, r1, r2") + TEST_UNSUPPORTED(__inst_thumb32(0xfa01fd02) " @ lsl sp, r1, r2") + TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff002) " @ lsl r0, pc, r2") + TEST_UNSUPPORTED(__inst_thumb32(0xfa0df002) " @ lsl r0, sp, r2") + TEST_UNSUPPORTED(__inst_thumb32(0xfa01f00f) " @ lsl r0, r1, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xfa01f00d) " @ lsl r0, r1, sp") + + TEST_RR( "sxtah r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sxtah r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "sxth r8, r",7, HH1,"") + + TEST_UNSUPPORTED(__inst_thumb32(0xfa0fff87) " @ sxth pc, r7"); + TEST_UNSUPPORTED(__inst_thumb32(0xfa0ffd87) " @ sxth sp, r7"); + TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff88f) " @ sxth r8, pc"); + TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff88d) " @ sxth r8, sp"); + + TEST_RR( "uxtah r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uxtah r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "uxth r8, r",7, HH1,"") + + TEST_RR( "sxtab16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "sxtb16 r8, r",7, HH1,"") + + TEST_RR( "uxtab16 r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uxtab16 
r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "uxtb16 r8, r",7, HH1,"") + + TEST_RR( "sxtab r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "sxtab r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "sxtb r8, r",7, HH1,"") + + TEST_RR( "uxtab r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "uxtab r14,r",12, HH2,", r",10,HH1,", ror #8") + TEST_R( "uxtb r8, r",7, HH1,"") + + TEST_UNSUPPORTED(__inst_thumb32(0xfa6000f0) "") + TEST_UNSUPPORTED(__inst_thumb32(0xfa7fffff) "") + +#define PARALLEL_ADD_SUB(op) \ + TEST_RR( op"add16 r0, r",0, HH1,", r",1, HH2,"") \ + TEST_RR( op"add16 r14, r",12,HH2,", r",10,HH1,"") \ + TEST_RR( op"asx r0, r",0, HH1,", r",1, HH2,"") \ + TEST_RR( op"asx r14, r",12,HH2,", r",10,HH1,"") \ + TEST_RR( op"sax r0, r",0, HH1,", r",1, HH2,"") \ + TEST_RR( op"sax r14, r",12,HH2,", r",10,HH1,"") \ + TEST_RR( op"sub16 r0, r",0, HH1,", r",1, HH2,"") \ + TEST_RR( op"sub16 r14, r",12,HH2,", r",10,HH1,"") \ + TEST_RR( op"add8 r0, r",0, HH1,", r",1, HH2,"") \ + TEST_RR( op"add8 r14, r",12,HH2,", r",10,HH1,"") \ + TEST_RR( op"sub8 r0, r",0, HH1,", r",1, HH2,"") \ + TEST_RR( op"sub8 r14, r",12,HH2,", r",10,HH1,"") + + TEST_GROUP("Parallel addition and subtraction, signed") + + PARALLEL_ADD_SUB("s") + PARALLEL_ADD_SUB("q") + PARALLEL_ADD_SUB("sh") + + TEST_GROUP("Parallel addition and subtraction, unsigned") + + PARALLEL_ADD_SUB("u") + PARALLEL_ADD_SUB("uq") + PARALLEL_ADD_SUB("uh") + + TEST_GROUP("Miscellaneous operations") + + TEST_RR("qadd r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR("qadd lr, r",9, VAL2,", r",8, VAL1,"") + TEST_RR("qsub r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR("qsub lr, r",9, VAL2,", r",8, VAL1,"") + TEST_RR("qdadd r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR("qdadd lr, r",9, VAL2,", r",8, VAL1,"") + TEST_RR("qdsub r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR("qdsub lr, r",9, VAL2,", r",8, VAL1,"") + + TEST_R("rev.w r0, r",0, VAL1,"") + TEST_R("rev r14, r",12, VAL2,"") + TEST_R("rev16.w r0, r",0, VAL1,"") + TEST_R("rev16 r14, r",12, VAL2,"") + TEST_R("rbit r0, r",0, VAL1,"") + TEST_R("rbit r14, r",12, VAL2,"") + TEST_R("revsh.w r0, r",0, VAL1,"") + TEST_R("revsh r14, r",12, VAL2,"") + + TEST_UNSUPPORTED(__inst_thumb32(0xfa9cff8c) " @ rev pc, r12"); + TEST_UNSUPPORTED(__inst_thumb32(0xfa9cfd8c) " @ rev sp, r12"); + TEST_UNSUPPORTED(__inst_thumb32(0xfa9ffe8f) " @ rev r14, pc"); + TEST_UNSUPPORTED(__inst_thumb32(0xfa9dfe8d) " @ rev r14, sp"); + + TEST_RR("sel r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR("sel r14, r",12,VAL1,", r",10, VAL2,"") + + TEST_R("clz r0, r",0, 0x0,"") + TEST_R("clz r7, r",14,0x1,"") + TEST_R("clz lr, r",7, 0xffffffff,"") + + TEST_UNSUPPORTED(__inst_thumb32(0xfa80f030) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfaffff7f) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfab0f000) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfaffff7f) "") /* Unallocated space */ + + TEST_GROUP("Multiply, multiply accumulate, and absolute difference operations") + + TEST_RR( "mul r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "mul r7, r",8, VAL2,", r",9, VAL2,"") + TEST_UNSUPPORTED(__inst_thumb32(0xfb08ff09) " @ mul pc, r8, r9") + TEST_UNSUPPORTED(__inst_thumb32(0xfb08fd09) " @ mul sp, r8, r9") + TEST_UNSUPPORTED(__inst_thumb32(0xfb0ff709) " @ mul r7, pc, r9") + TEST_UNSUPPORTED(__inst_thumb32(0xfb0df709) " @ mul r7, sp, r9") + TEST_UNSUPPORTED(__inst_thumb32(0xfb08f70f) " @ mul r7, r8, pc") + TEST_UNSUPPORTED(__inst_thumb32(0xfb08f70d) " @ mul r7, r8, sp") + + TEST_RRR( "mla r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + 
TEST_RRR( "mla r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_UNSUPPORTED(__inst_thumb32(0xfb08af09) " @ mla pc, r8, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb08ad09) " @ mla sp, r8, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb0fa709) " @ mla r7, pc, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb0da709) " @ mla r7, sp, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb08a70f) " @ mla r7, r8, pc, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb08a70d) " @ mla r7, r8, sp, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb08d709) " @ mla r7, r8, r9, sp"); + + TEST_RRR( "mls r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "mls r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + + TEST_RRR( "smlabb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlabb r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RRR( "smlatb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlatb r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RRR( "smlabt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlabt r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RRR( "smlatt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlatt r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smulbb r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulbb r7, r",8, VAL3,", r",9, VAL1,"") + TEST_RR( "smultb r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smultb r7, r",8, VAL3,", r",9, VAL1,"") + TEST_RR( "smulbt r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulbt r7, r",8, VAL3,", r",9, VAL1,"") + TEST_RR( "smultt r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smultt r7, r",8, VAL3,", r",9, VAL1,"") + + TEST_RRR( "smlad r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smlad r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_RRR( "smladx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smladx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_RR( "smuad r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smuad r14, r",12,HH2,", r",10,HH1,"") + TEST_RR( "smuadx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smuadx r14, r",12,HH2,", r",10,HH1,"") + + TEST_RRR( "smlawb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlawb r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RRR( "smlawt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"") + TEST_RRR( "smlawt r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"") + TEST_RR( "smulwb r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulwb r7, r",8, VAL3,", r",9, VAL1,"") + TEST_RR( "smulwt r0, r",1, VAL1,", r",2, VAL2,"") + TEST_RR( "smulwt r7, r",8, VAL3,", r",9, VAL1,"") + + TEST_RRR( "smlsd r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smlsd r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_RRR( "smlsdx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"") + TEST_RRR( "smlsdx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"") + TEST_RR( "smusd r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smusd r14, r",12,HH2,", r",10,HH1,"") + TEST_RR( "smusdx r0, r",0, HH1,", r",1, HH2,"") + TEST_RR( "smusdx r14, r",12,HH2,", r",10,HH1,"") + + TEST_RRR( "smmla r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmla r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_RRR( "smmlar r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmlar r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_RR( "smmul r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "smmul r14, r",12,VAL2,", r",10,VAL1,"") + TEST_RR( "smmulr r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "smmulr r14, r",12,VAL2,", r",10,VAL1,"") + + 
TEST_RRR( "smmls r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmls r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + TEST_RRR( "smmlsr r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"") + TEST_RRR( "smmlsr r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"") + + TEST_RRR( "usada8 r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL3,"") + TEST_RRR( "usada8 r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL3,"") + TEST_RR( "usad8 r0, r",0, VAL1,", r",1, VAL2,"") + TEST_RR( "usad8 r14, r",12,VAL2,", r",10,VAL1,"") + + TEST_UNSUPPORTED(__inst_thumb32(0xfb00f010) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfb0fff1f) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfb70f010) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfb7fff1f) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfb700010) "") /* Unallocated space */ + TEST_UNSUPPORTED(__inst_thumb32(0xfb7fff1f) "") /* Unallocated space */ + + TEST_GROUP("Long multiply, long multiply accumulate, and divide") + + TEST_RR( "smull r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "smull r7, r8, r",9, VAL2,", r",10, VAL1,"") + TEST_UNSUPPORTED(__inst_thumb32(0xfb89f80a) " @ smull pc, r8, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb89d80a) " @ smull sp, r8, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb897f0a) " @ smull r7, pc, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb897d0a) " @ smull r7, sp, r9, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb8f780a) " @ smull r7, r8, pc, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb8d780a) " @ smull r7, r8, sp, r10"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb89780f) " @ smull r7, r8, r9, pc"); + TEST_UNSUPPORTED(__inst_thumb32(0xfb89780d) " @ smull r7, r8, r9, sp"); + + TEST_RR( "umull r0, r1, r",2, VAL1,", r",3, VAL2,"") + TEST_RR( "umull r7, r8, r",9, VAL2,", r",10, VAL1,"") + + TEST_RRRR( "smlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlal r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + + TEST_RRRR( "smlalbb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalbb r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRRR( "smlalbt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlalbt r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRRR( "smlaltb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlaltb r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRRR( "smlaltt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "smlaltt r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + + TEST_RRRR( "smlald r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) + TEST_RRRR( "smlald r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) + TEST_RRRR( "smlaldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) + TEST_RRRR( "smlaldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) + + TEST_RRRR( "smlsld r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) + TEST_RRRR( "smlsld r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) + TEST_RRRR( "smlsldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2) + TEST_RRRR( "smlsldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1) + + TEST_RRRR( "umlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "umlal r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + TEST_RRRR( "umaal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4) + TEST_RRRR( "umaal r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3) + + TEST_GROUP("Coprocessor instructions") + + 
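+	/*
+	 * Just check that probes on the boundary encodings of the
+	 * coprocessor space are rejected.
+	 */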
TEST_UNSUPPORTED(__inst_thumb32(0xfc000000) "") + TEST_UNSUPPORTED(__inst_thumb32(0xffffffff) "") + + TEST_GROUP("Testing instructions in IT blocks") + + TEST_ITBLOCK("sub.w r0, r0") + + verbose("\n"); +} + diff --git a/arch/arm/probes/uprobes/Makefile b/arch/arm/probes/uprobes/Makefile new file mode 100644 index 000000000000..e1dc3d0f6d5a --- /dev/null +++ b/arch/arm/probes/uprobes/Makefile @@ -0,0 +1 @@ +obj-$(CONFIG_UPROBES) += core.o actions-arm.o diff --git a/arch/arm/probes/uprobes/actions-arm.c b/arch/arm/probes/uprobes/actions-arm.c new file mode 100644 index 000000000000..1dd4916ba8aa --- /dev/null +++ b/arch/arm/probes/uprobes/actions-arm.c @@ -0,0 +1,234 @@ +/* + * Copyright (C) 2012 Rabin Vincent + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include +#include +#include +#include + +#include "../decode.h" +#include "../decode-arm.h" +#include "core.h" + +static int uprobes_substitute_pc(unsigned long *pinsn, u32 oregs) +{ + probes_opcode_t insn = __mem_to_opcode_arm(*pinsn); + probes_opcode_t temp; + probes_opcode_t mask; + int freereg; + u32 free = 0xffff; + u32 regs; + + for (regs = oregs; regs; regs >>= 4, insn >>= 4) { + if ((regs & 0xf) == REG_TYPE_NONE) + continue; + + free &= ~(1 << (insn & 0xf)); + } + + /* No PC, no problem */ + if (free & (1 << 15)) + return 15; + + if (!free) + return -1; + + /* + * fls instead of ffs ensures that for "ldrd r0, r1, [pc]" we would + * pick LR instead of R1. + */ + freereg = free = fls(free) - 1; + + temp = __mem_to_opcode_arm(*pinsn); + insn = temp; + regs = oregs; + mask = 0xf; + + for (; regs; regs >>= 4, mask <<= 4, free <<= 4, temp >>= 4) { + if ((regs & 0xf) == REG_TYPE_NONE) + continue; + + if ((temp & 0xf) != 15) + continue; + + insn &= ~mask; + insn |= free & mask; + } + + *pinsn = __opcode_to_mem_arm(insn); + return freereg; +} + +static void uprobe_set_pc(struct arch_uprobe *auprobe, + struct arch_uprobe_task *autask, + struct pt_regs *regs) +{ + u32 pcreg = auprobe->pcreg; + + autask->backup = regs->uregs[pcreg]; + regs->uregs[pcreg] = regs->ARM_pc + 8; +} + +static void uprobe_unset_pc(struct arch_uprobe *auprobe, + struct arch_uprobe_task *autask, + struct pt_regs *regs) +{ + /* PC will be taken care of by common code */ + regs->uregs[auprobe->pcreg] = autask->backup; +} + +static void uprobe_aluwrite_pc(struct arch_uprobe *auprobe, + struct arch_uprobe_task *autask, + struct pt_regs *regs) +{ + u32 pcreg = auprobe->pcreg; + + alu_write_pc(regs->uregs[pcreg], regs); + regs->uregs[pcreg] = autask->backup; +} + +static void uprobe_write_pc(struct arch_uprobe *auprobe, + struct arch_uprobe_task *autask, + struct pt_regs *regs) +{ + u32 pcreg = auprobe->pcreg; + + load_write_pc(regs->uregs[pcreg], regs); + regs->uregs[pcreg] = autask->backup; +} + +enum probes_insn +decode_pc_ro(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe, + asi); + struct decode_emulate *decode = (struct decode_emulate *) d; + u32 regs = decode->header.type_regs.bits >> DECODE_TYPE_BITS; + int reg; + + reg = uprobes_substitute_pc(&auprobe->ixol[0], regs); + if (reg == 15) + return INSN_GOOD; + + if (reg == -1) + return INSN_REJECTED; + + auprobe->pcreg = reg; + auprobe->prehandler = uprobe_set_pc; + auprobe->posthandler = uprobe_unset_pc; + + return INSN_GOOD; +} + +enum 
probes_insn +decode_wb_pc(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d, bool alu) +{ + struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe, + asi); + enum probes_insn ret = decode_pc_ro(insn, asi, d); + + if (((insn >> 12) & 0xf) == 15) + auprobe->posthandler = alu ? uprobe_aluwrite_pc + : uprobe_write_pc; + + return ret; +} + +enum probes_insn +decode_rd12rn16rm0rs8_rwflags(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *d) +{ + return decode_wb_pc(insn, asi, d, true); +} + +enum probes_insn +decode_ldr(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d) +{ + return decode_wb_pc(insn, asi, d, false); +} + +enum probes_insn +uprobe_decode_ldmstm(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *d) +{ + struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe, + asi); + unsigned reglist = insn & 0xffff; + int rn = (insn >> 16) & 0xf; + int lbit = insn & (1 << 20); + unsigned used = reglist | (1 << rn); + + if (rn == 15) + return INSN_REJECTED; + + if (!(used & (1 << 15))) + return INSN_GOOD; + + if (used & (1 << 14)) + return INSN_REJECTED; + + /* Use LR instead of PC */ + insn ^= 0xc000; + + auprobe->pcreg = 14; + auprobe->ixol[0] = __opcode_to_mem_arm(insn); + + auprobe->prehandler = uprobe_set_pc; + if (lbit) + auprobe->posthandler = uprobe_write_pc; + else + auprobe->posthandler = uprobe_unset_pc; + + return INSN_GOOD; +} + +const union decode_action uprobes_probes_actions[] = { + [PROBES_EMULATE_NONE] = {.handler = probes_simulate_nop}, + [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop}, + [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop}, + [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop}, + [PROBES_BRANCH_IMM] = {.handler = simulate_blx1}, + [PROBES_MRS] = {.handler = simulate_mrs}, + [PROBES_BRANCH_REG] = {.handler = simulate_blx2bx}, + [PROBES_CLZ] = {.handler = probes_simulate_nop}, + [PROBES_SATURATING_ARITHMETIC] = {.handler = probes_simulate_nop}, + [PROBES_MUL1] = {.handler = probes_simulate_nop}, + [PROBES_MUL2] = {.handler = probes_simulate_nop}, + [PROBES_SWP] = {.handler = probes_simulate_nop}, + [PROBES_LDRSTRD] = {.decoder = decode_pc_ro}, + [PROBES_LOAD_EXTRA] = {.decoder = decode_pc_ro}, + [PROBES_LOAD] = {.decoder = decode_ldr}, + [PROBES_STORE_EXTRA] = {.decoder = decode_pc_ro}, + [PROBES_STORE] = {.decoder = decode_pc_ro}, + [PROBES_MOV_IP_SP] = {.handler = simulate_mov_ipsp}, + [PROBES_DATA_PROCESSING_REG] = { + .decoder = decode_rd12rn16rm0rs8_rwflags}, + [PROBES_DATA_PROCESSING_IMM] = { + .decoder = decode_rd12rn16rm0rs8_rwflags}, + [PROBES_MOV_HALFWORD] = {.handler = probes_simulate_nop}, + [PROBES_SEV] = {.handler = probes_simulate_nop}, + [PROBES_WFE] = {.handler = probes_simulate_nop}, + [PROBES_SATURATE] = {.handler = probes_simulate_nop}, + [PROBES_REV] = {.handler = probes_simulate_nop}, + [PROBES_MMI] = {.handler = probes_simulate_nop}, + [PROBES_PACK] = {.handler = probes_simulate_nop}, + [PROBES_EXTEND] = {.handler = probes_simulate_nop}, + [PROBES_EXTEND_ADD] = {.handler = probes_simulate_nop}, + [PROBES_MUL_ADD_LONG] = {.handler = probes_simulate_nop}, + [PROBES_MUL_ADD] = {.handler = probes_simulate_nop}, + [PROBES_BITFIELD] = {.handler = probes_simulate_nop}, + [PROBES_BRANCH] = {.handler = simulate_bbl}, + [PROBES_LDMSTM] = {.decoder = uprobe_decode_ldmstm} +}; diff --git a/arch/arm/probes/uprobes/core.c b/arch/arm/probes/uprobes/core.c new file mode 100644 index 
000000000000..b2954f6d3abe --- /dev/null +++ b/arch/arm/probes/uprobes/core.c @@ -0,0 +1,230 @@ +/* + * Copyright (C) 2012 Rabin Vincent + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "../decode.h" +#include "../decode-arm.h" +#include "core.h" + +#define UPROBE_TRAP_NR UINT_MAX + +bool is_swbp_insn(uprobe_opcode_t *insn) +{ + return (__mem_to_opcode_arm(*insn) & 0x0fffffff) == + (UPROBE_SWBP_ARM_INSN & 0x0fffffff); +} + +int set_swbp(struct arch_uprobe *auprobe, struct mm_struct *mm, + unsigned long vaddr) +{ + return uprobe_write_opcode(mm, vaddr, + __opcode_to_mem_arm(auprobe->bpinsn)); +} + +bool arch_uprobe_ignore(struct arch_uprobe *auprobe, struct pt_regs *regs) +{ + if (!auprobe->asi.insn_check_cc(regs->ARM_cpsr)) { + regs->ARM_pc += 4; + return true; + } + + return false; +} + +bool arch_uprobe_skip_sstep(struct arch_uprobe *auprobe, struct pt_regs *regs) +{ + probes_opcode_t opcode; + + if (!auprobe->simulate) + return false; + + opcode = __mem_to_opcode_arm(*(unsigned int *) auprobe->insn); + + auprobe->asi.insn_singlestep(opcode, &auprobe->asi, regs); + + return true; +} + +unsigned long +arch_uretprobe_hijack_return_addr(unsigned long trampoline_vaddr, + struct pt_regs *regs) +{ + unsigned long orig_ret_vaddr; + + orig_ret_vaddr = regs->ARM_lr; + /* Replace the return addr with trampoline addr */ + regs->ARM_lr = trampoline_vaddr; + return orig_ret_vaddr; +} + +int arch_uprobe_analyze_insn(struct arch_uprobe *auprobe, struct mm_struct *mm, + unsigned long addr) +{ + unsigned int insn; + unsigned int bpinsn; + enum probes_insn ret; + + /* Thumb not yet support */ + if (addr & 0x3) + return -EINVAL; + + insn = __mem_to_opcode_arm(*(unsigned int *)auprobe->insn); + auprobe->ixol[0] = __opcode_to_mem_arm(insn); + auprobe->ixol[1] = __opcode_to_mem_arm(UPROBE_SS_ARM_INSN); + + ret = arm_probes_decode_insn(insn, &auprobe->asi, false, + uprobes_probes_actions); + switch (ret) { + case INSN_REJECTED: + return -EINVAL; + + case INSN_GOOD_NO_SLOT: + auprobe->simulate = true; + break; + + case INSN_GOOD: + default: + break; + } + + bpinsn = UPROBE_SWBP_ARM_INSN & 0x0fffffff; + if (insn >= 0xe0000000) + bpinsn |= 0xe0000000; /* Unconditional instruction */ + else + bpinsn |= insn & 0xf0000000; /* Copy condition from insn */ + + auprobe->bpinsn = bpinsn; + + return 0; +} + +void arch_uprobe_copy_ixol(struct page *page, unsigned long vaddr, + void *src, unsigned long len) +{ + void *xol_page_kaddr = kmap_atomic(page); + void *dst = xol_page_kaddr + (vaddr & ~PAGE_MASK); + + preempt_disable(); + + /* Initialize the slot */ + memcpy(dst, src, len); + + /* flush caches (dcache/icache) */ + flush_uprobe_xol_access(page, vaddr, dst, len); + + preempt_enable(); + + kunmap_atomic(xol_page_kaddr); +} + + +int arch_uprobe_pre_xol(struct arch_uprobe *auprobe, struct pt_regs *regs) +{ + struct uprobe_task *utask = current->utask; + + if (auprobe->prehandler) + auprobe->prehandler(auprobe, &utask->autask, regs); + + utask->autask.saved_trap_no = current->thread.trap_no; + current->thread.trap_no = UPROBE_TRAP_NR; + regs->ARM_pc = utask->xol_vaddr; + + return 0; +} + +int arch_uprobe_post_xol(struct arch_uprobe *auprobe, struct pt_regs *regs) +{ + struct uprobe_task *utask = current->utask; + + WARN_ON_ONCE(current->thread.trap_no != UPROBE_TRAP_NR); 
+ + current->thread.trap_no = utask->autask.saved_trap_no; + regs->ARM_pc = utask->vaddr + 4; + + if (auprobe->posthandler) + auprobe->posthandler(auprobe, &utask->autask, regs); + + return 0; +} + +bool arch_uprobe_xol_was_trapped(struct task_struct *t) +{ + if (t->thread.trap_no != UPROBE_TRAP_NR) + return true; + + return false; +} + +void arch_uprobe_abort_xol(struct arch_uprobe *auprobe, struct pt_regs *regs) +{ + struct uprobe_task *utask = current->utask; + + current->thread.trap_no = utask->autask.saved_trap_no; + instruction_pointer_set(regs, utask->vaddr); +} + +int arch_uprobe_exception_notify(struct notifier_block *self, + unsigned long val, void *data) +{ + return NOTIFY_DONE; +} + +static int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr) +{ + unsigned long flags; + + local_irq_save(flags); + instr &= 0x0fffffff; + if (instr == (UPROBE_SWBP_ARM_INSN & 0x0fffffff)) + uprobe_pre_sstep_notifier(regs); + else if (instr == (UPROBE_SS_ARM_INSN & 0x0fffffff)) + uprobe_post_sstep_notifier(regs); + local_irq_restore(flags); + + return 0; +} + +unsigned long uprobe_get_swbp_addr(struct pt_regs *regs) +{ + return instruction_pointer(regs); +} + +static struct undef_hook uprobes_arm_break_hook = { + .instr_mask = 0x0fffffff, + .instr_val = (UPROBE_SWBP_ARM_INSN & 0x0fffffff), + .cpsr_mask = MODE_MASK, + .cpsr_val = USR_MODE, + .fn = uprobe_trap_handler, +}; + +static struct undef_hook uprobes_arm_ss_hook = { + .instr_mask = 0x0fffffff, + .instr_val = (UPROBE_SS_ARM_INSN & 0x0fffffff), + .cpsr_mask = MODE_MASK, + .cpsr_val = USR_MODE, + .fn = uprobe_trap_handler, +}; + +static int arch_uprobes_init(void) +{ + register_undef_hook(&uprobes_arm_break_hook); + register_undef_hook(&uprobes_arm_ss_hook); + + return 0; +} +device_initcall(arch_uprobes_init); diff --git a/arch/arm/probes/uprobes/core.h b/arch/arm/probes/uprobes/core.h new file mode 100644 index 000000000000..1d0c12dfbd03 --- /dev/null +++ b/arch/arm/probes/uprobes/core.h @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2012 Rabin Vincent + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#ifndef __ARM_KERNEL_UPROBES_H +#define __ARM_KERNEL_UPROBES_H + +enum probes_insn uprobe_decode_ldmstm(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *d); + +enum probes_insn decode_ldr(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *d); + +enum probes_insn +decode_rd12rn16rm0rs8_rwflags(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *d); + +enum probes_insn +decode_wb_pc(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d, bool alu); + +enum probes_insn +decode_pc_ro(probes_opcode_t insn, struct arch_probes_insn *asi, + const struct decode_header *d); + +extern const union decode_action uprobes_probes_actions[]; + +#endif -- cgit v1.2.3-59-g8ed1b From 832607e79d7423c67ef8a3de8dfa3d25b5b0bf86 Mon Sep 17 00:00:00 2001 From: Jon Medhurst Date: Wed, 7 Jan 2015 11:42:30 +0000 Subject: ARM: probes: Use correct action types for MOVW, SEV and WFI This doesn't correct any bugs when probing these instructions but makes MOVW slightly faster and makes everything more symmetric with the Thumb instruction cases. We can also remove the now redundant PROBES_EMULATE_NONE and PROBES_SIMULATE_NOP actions. 
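To make the change concrete: the decode tables map an instruction pattern to an action index, and each probe flavour supplies a handler array indexed by that action. The standalone sketch below uses invented names and a deliberately trivial handler (it is not the kernel's real decode API); it only illustrates that dispatch pattern, which is presumably why pointing MOVW at its own PROBES_MOV_HALFWORD action, rather than a generic data-processing action, lets it reach a simpler handler and become "slightly faster".

/*
 * Illustrative sketch only -- invented names, not the kernel's tables.
 * A decode-table entry selects an action index; the action array maps
 * that index to a handler.
 */
#include <stdio.h>

typedef unsigned int opcode_t;
typedef void handler_t(opcode_t insn);

enum action { ACTION_MOV_HALFWORD, ACTION_NOP, NUM_ACTIONS };

/* Hypothetical MOVW emulation: just recover the 16-bit immediate. */
static void emulate_mov_halfword(opcode_t insn)
{
	unsigned int imm = ((insn >> 4) & 0xf000u) | (insn & 0x0fffu);

	printf("MOVW would write #0x%x\n", imm);
}

/* Generic "do nothing" simulation, the old catch-all idea. */
static void simulate_nop(opcode_t insn)
{
	(void)insn;
}

static handler_t *const actions[NUM_ACTIONS] = {
	[ACTION_MOV_HALFWORD] = emulate_mov_halfword,
	[ACTION_NOP]          = simulate_nop,
};

int main(void)
{
	opcode_t movw_r0_0x1234 = 0xe3010234;	/* MOVW r0, #0x1234 */

	/* A decode table would map the MOVW bit pattern to this index. */
	actions[ACTION_MOV_HALFWORD](movw_r0_0x1234);
	return 0;
}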
Signed-off-by: Jon Medhurst --- arch/arm/probes/decode-arm.c | 6 +++--- arch/arm/probes/decode-arm.h | 2 -- arch/arm/probes/kprobes/actions-arm.c | 2 -- arch/arm/probes/uprobes/actions-arm.c | 2 -- 4 files changed, 3 insertions(+), 9 deletions(-) (limited to 'arch') diff --git a/arch/arm/probes/decode-arm.c b/arch/arm/probes/decode-arm.c index e39cc75952f2..04114f74a2d2 100644 --- a/arch/arm/probes/decode-arm.c +++ b/arch/arm/probes/decode-arm.c @@ -370,17 +370,17 @@ static const union decode_item arm_cccc_001x_table[] = { /* MOVW cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */ /* MOVT cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */ - DECODE_EMULATEX (0x0fb00000, 0x03000000, PROBES_DATA_PROCESSING_IMM, + DECODE_EMULATEX (0x0fb00000, 0x03000000, PROBES_MOV_HALFWORD, REGS(0, NOPC, 0, 0, 0)), /* YIELD cccc 0011 0010 0000 xxxx xxxx 0000 0001 */ DECODE_OR (0x0fff00ff, 0x03200001), /* SEV cccc 0011 0010 0000 xxxx xxxx 0000 0100 */ - DECODE_EMULATE (0x0fff00ff, 0x03200004, PROBES_EMULATE_NONE), + DECODE_EMULATE (0x0fff00ff, 0x03200004, PROBES_SEV), /* NOP cccc 0011 0010 0000 xxxx xxxx 0000 0000 */ /* WFE cccc 0011 0010 0000 xxxx xxxx 0000 0010 */ /* WFI cccc 0011 0010 0000 xxxx xxxx 0000 0011 */ - DECODE_SIMULATE (0x0fff00fc, 0x03200000, PROBES_SIMULATE_NOP), + DECODE_SIMULATE (0x0fff00fc, 0x03200000, PROBES_WFE), /* DBG cccc 0011 0010 0000 xxxx xxxx ffff xxxx */ /* unallocated hints cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */ /* MSR (immediate) cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx */ diff --git a/arch/arm/probes/decode-arm.h b/arch/arm/probes/decode-arm.h index 9c56b40d6a57..cb0b26331930 100644 --- a/arch/arm/probes/decode-arm.h +++ b/arch/arm/probes/decode-arm.h @@ -18,8 +18,6 @@ #include "decode.h" enum probes_arm_action { - PROBES_EMULATE_NONE, - PROBES_SIMULATE_NOP, PROBES_PRELOAD_IMM, PROBES_PRELOAD_REG, PROBES_BRANCH_IMM, diff --git a/arch/arm/probes/kprobes/actions-arm.c b/arch/arm/probes/kprobes/actions-arm.c index 8797879f7b3a..2206f2d80c76 100644 --- a/arch/arm/probes/kprobes/actions-arm.c +++ b/arch/arm/probes/kprobes/actions-arm.c @@ -302,8 +302,6 @@ emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(probes_opcode_t insn, } const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = { - [PROBES_EMULATE_NONE] = {.handler = probes_emulate_none}, - [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop}, [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop}, [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop}, [PROBES_BRANCH_IMM] = {.handler = simulate_blx1}, diff --git a/arch/arm/probes/uprobes/actions-arm.c b/arch/arm/probes/uprobes/actions-arm.c index 1dd4916ba8aa..76eb44972ebe 100644 --- a/arch/arm/probes/uprobes/actions-arm.c +++ b/arch/arm/probes/uprobes/actions-arm.c @@ -195,8 +195,6 @@ uprobe_decode_ldmstm(probes_opcode_t insn, } const union decode_action uprobes_probes_actions[] = { - [PROBES_EMULATE_NONE] = {.handler = probes_simulate_nop}, - [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop}, [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop}, [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop}, [PROBES_BRANCH_IMM] = {.handler = simulate_blx1}, -- cgit v1.2.3-59-g8ed1b From 83803d97dae1eaf6850a45ef8ee179cc66e147dc Mon Sep 17 00:00:00 2001 From: Wang Nan Date: Mon, 5 Jan 2015 19:29:18 +0800 Subject: ARM: kprobes: introduces checker This patch introduces 'checker' to the decoding phase, and calls checkers during instruction decoding. This allows further decoding for specific instructions. 
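As a rough picture of the mechanism, here is a minimal standalone sketch with invented names (it is not the kernel's probes_check_t/decode API): each action can have a list of checkers, every checker is run against the instruction before the action is accepted, and any checker may veto it.

#include <stdio.h>

typedef unsigned int opcode_t;

enum verdict { INSN_GOOD, INSN_REJECTED };

typedef enum verdict checker_fn(opcode_t insn);

/*
 * Hypothetical checker: veto any instruction whose Rm field (bits 0-3)
 * names r13/sp.  The real checkers added by later patches are driven by
 * decode tables rather than a hard-coded test like this.
 */
static enum verdict reject_sp_as_rm(opcode_t insn)
{
	return (insn & 0xf) == 13 ? INSN_REJECTED : INSN_GOOD;
}

/* NULL-terminated checker list, consulted before an action is accepted. */
static checker_fn *const checkers[] = { reject_sp_as_rm, NULL };

static enum verdict run_checkers(checker_fn *const *list, opcode_t insn)
{
	for (; *list; list++)
		if ((*list)(insn) == INSN_REJECTED)
			return INSN_REJECTED;
	return INSN_GOOD;
}

int main(void)
{
	opcode_t str_r0_r1_sp = 0xe781000d;	/* str r0, [r1, r13] */
	opcode_t str_r0_r1    = 0xe5810000;	/* str r0, [r1]      */

	printf("%s\n", run_checkers(checkers, str_r0_r1_sp) == INSN_REJECTED
			? "rejected" : "good");
	printf("%s\n", run_checkers(checkers, str_r0_r1) == INSN_REJECTED
			? "rejected" : "good");
	return 0;
}

In the patch itself the checker signature is probes_check_t (taking the original opcode, the arch_probes_insn and the decode header), and run_checkers() is invoked from probes_decode_insn() before the matched action's handler or decoder runs.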
This patch introduces a stub call of checkers in kprobe arch_prepare_kprobe() as an example and for further expansion. Signed-off-by: Wang Nan Reviewed-by: Jon Medhurst Reviewed-by: Masami Hiramatsu Signed-off-by: Jon Medhurst --- arch/arm/probes/decode-arm.c | 5 +-- arch/arm/probes/decode-arm.h | 3 +- arch/arm/probes/decode-thumb.c | 10 +++--- arch/arm/probes/decode-thumb.h | 6 ++-- arch/arm/probes/decode.c | 60 +++++++++++++++++++++++++++++---- arch/arm/probes/decode.h | 11 +++++- arch/arm/probes/kprobes/actions-arm.c | 2 ++ arch/arm/probes/kprobes/actions-thumb.c | 3 ++ arch/arm/probes/kprobes/core.c | 6 +++- arch/arm/probes/kprobes/core.h | 7 ++-- arch/arm/probes/uprobes/core.c | 2 +- 11 files changed, 95 insertions(+), 20 deletions(-) (limited to 'arch') diff --git a/arch/arm/probes/decode-arm.c b/arch/arm/probes/decode-arm.c index 04114f74a2d2..f72c33a2dcfb 100644 --- a/arch/arm/probes/decode-arm.c +++ b/arch/arm/probes/decode-arm.c @@ -726,10 +726,11 @@ static void __kprobes arm_singlestep(probes_opcode_t insn, */ enum probes_insn __kprobes arm_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions) + bool emulate, const union decode_action *actions, + const struct decode_checker *checkers[]) { asi->insn_singlestep = arm_singlestep; asi->insn_check_cc = probes_condition_checks[insn>>28]; return probes_decode_insn(insn, asi, probes_decode_arm_table, false, - emulate, actions); + emulate, actions, checkers); } diff --git a/arch/arm/probes/decode-arm.h b/arch/arm/probes/decode-arm.h index cb0b26331930..b3b80f6d414b 100644 --- a/arch/arm/probes/decode-arm.h +++ b/arch/arm/probes/decode-arm.h @@ -68,6 +68,7 @@ extern const union decode_item probes_decode_arm_table[]; enum probes_insn arm_probes_decode_insn(probes_opcode_t, struct arch_probes_insn *, bool emulate, - const union decode_action *actions); + const union decode_action *actions, + const struct decode_checker *checkers[]); #endif diff --git a/arch/arm/probes/decode-thumb.c b/arch/arm/probes/decode-thumb.c index 2f0453a895dc..985e7dd4cac6 100644 --- a/arch/arm/probes/decode-thumb.c +++ b/arch/arm/probes/decode-thumb.c @@ -863,20 +863,22 @@ static void __kprobes thumb32_singlestep(probes_opcode_t opcode, enum probes_insn __kprobes thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions) + bool emulate, const union decode_action *actions, + const struct decode_checker *checkers[]) { asi->insn_singlestep = thumb16_singlestep; asi->insn_check_cc = thumb_check_cc; return probes_decode_insn(insn, asi, probes_decode_thumb16_table, true, - emulate, actions); + emulate, actions, checkers); } enum probes_insn __kprobes thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions) + bool emulate, const union decode_action *actions, + const struct decode_checker *checkers[]) { asi->insn_singlestep = thumb32_singlestep; asi->insn_check_cc = thumb_check_cc; return probes_decode_insn(insn, asi, probes_decode_thumb32_table, true, - emulate, actions); + emulate, actions, checkers); } diff --git a/arch/arm/probes/decode-thumb.h b/arch/arm/probes/decode-thumb.h index 039013c7131d..8457add0a2d8 100644 --- a/arch/arm/probes/decode-thumb.h +++ b/arch/arm/probes/decode-thumb.h @@ -91,9 +91,11 @@ extern const union decode_item probes_decode_thumb16_table[]; enum probes_insn __kprobes thumb16_probes_decode_insn(probes_opcode_t insn, struct 
arch_probes_insn *asi, - bool emulate, const union decode_action *actions); + bool emulate, const union decode_action *actions, + const struct decode_checker *checkers[]); enum probes_insn __kprobes thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, - bool emulate, const union decode_action *actions); + bool emulate, const union decode_action *actions, + const struct decode_checker *checkers[]); #endif diff --git a/arch/arm/probes/decode.c b/arch/arm/probes/decode.c index 3b05d5742359..c7d442018902 100644 --- a/arch/arm/probes/decode.c +++ b/arch/arm/probes/decode.c @@ -342,6 +342,31 @@ static const int decode_struct_sizes[NUM_DECODE_TYPES] = { [DECODE_TYPE_REJECT] = sizeof(struct decode_reject) }; +static int run_checkers(const struct decode_checker *checkers[], + int action, probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + const struct decode_checker **p; + + if (!checkers) + return INSN_GOOD; + + p = checkers; + while (*p != NULL) { + int retval; + probes_check_t *checker_func = (*p)[action].checker; + + retval = INSN_GOOD; + if (checker_func) + retval = checker_func(insn, asi, h); + if (retval == INSN_REJECTED) + return retval; + p++; + } + return INSN_GOOD; +} + /* * probes_decode_insn operates on data tables in order to decode an ARM * architecture instruction onto which a kprobe has been placed. @@ -388,11 +413,17 @@ static const int decode_struct_sizes[NUM_DECODE_TYPES] = { int __kprobes probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, const union decode_item *table, bool thumb, - bool emulate, const union decode_action *actions) + bool emulate, const union decode_action *actions, + const struct decode_checker *checkers[]) { const struct decode_header *h = (struct decode_header *)table; const struct decode_header *next; bool matched = false; + /* + * @insn can be modified by decode_regs. Save its original + * value for checkers. 
+ */ + probes_opcode_t origin_insn = insn; if (emulate) insn = prepare_emulated_insn(insn, asi, thumb); @@ -422,24 +453,41 @@ probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, } case DECODE_TYPE_CUSTOM: { + int err; struct decode_custom *d = (struct decode_custom *)h; - return actions[d->decoder.action].decoder(insn, asi, h); + int action = d->decoder.action; + + err = run_checkers(checkers, action, origin_insn, asi, h); + if (err == INSN_REJECTED) + return INSN_REJECTED; + return actions[action].decoder(insn, asi, h); } case DECODE_TYPE_SIMULATE: { + int err; struct decode_simulate *d = (struct decode_simulate *)h; - asi->insn_handler = actions[d->handler.action].handler; + int action = d->handler.action; + + err = run_checkers(checkers, action, origin_insn, asi, h); + if (err == INSN_REJECTED) + return INSN_REJECTED; + asi->insn_handler = actions[action].handler; return INSN_GOOD_NO_SLOT; } case DECODE_TYPE_EMULATE: { + int err; struct decode_emulate *d = (struct decode_emulate *)h; + int action = d->handler.action; + + err = run_checkers(checkers, action, origin_insn, asi, h); + if (err == INSN_REJECTED) + return INSN_REJECTED; if (!emulate) - return actions[d->handler.action].decoder(insn, - asi, h); + return actions[action].decoder(insn, asi, h); - asi->insn_handler = actions[d->handler.action].handler; + asi->insn_handler = actions[action].handler; set_emulated_insn(insn, asi, thumb); return INSN_GOOD; } diff --git a/arch/arm/probes/decode.h b/arch/arm/probes/decode.h index 1d0b53169080..f9b08ba7fe73 100644 --- a/arch/arm/probes/decode.h +++ b/arch/arm/probes/decode.h @@ -314,6 +314,14 @@ union decode_action { probes_custom_decode_t *decoder; }; +typedef enum probes_insn (probes_check_t)(probes_opcode_t, + struct arch_probes_insn *, + const struct decode_header *); + +struct decode_checker { + probes_check_t *checker; +}; + #define DECODE_END \ {.bits = DECODE_TYPE_END} @@ -402,6 +410,7 @@ probes_insn_handler_t probes_emulate_none; int __kprobes probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, const union decode_item *table, bool thumb, bool emulate, - const union decode_action *actions); + const union decode_action *actions, + const struct decode_checker **checkers); #endif diff --git a/arch/arm/probes/kprobes/actions-arm.c b/arch/arm/probes/kprobes/actions-arm.c index 2206f2d80c76..fbd93a9ada75 100644 --- a/arch/arm/probes/kprobes/actions-arm.c +++ b/arch/arm/probes/kprobes/actions-arm.c @@ -339,3 +339,5 @@ const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = { [PROBES_BRANCH] = {.handler = simulate_bbl}, [PROBES_LDMSTM] = {.decoder = kprobe_decode_ldmstm} }; + +const struct decode_checker *kprobes_arm_checkers[] = {NULL}; diff --git a/arch/arm/probes/kprobes/actions-thumb.c b/arch/arm/probes/kprobes/actions-thumb.c index 6c4e60b62826..2796121fe90e 100644 --- a/arch/arm/probes/kprobes/actions-thumb.c +++ b/arch/arm/probes/kprobes/actions-thumb.c @@ -664,3 +664,6 @@ const union decode_action kprobes_t32_actions[NUM_PROBES_T32_ACTIONS] = { [PROBES_T32_MUL_ADD_LONG] = { .handler = t32_emulate_rdlo12rdhi8rn16rm0_noflags}, }; + +const struct decode_checker *kprobes_t32_checkers[] = {NULL}; +const struct decode_checker *kprobes_t16_checkers[] = {NULL}; diff --git a/arch/arm/probes/kprobes/core.c b/arch/arm/probes/kprobes/core.c index 701f49d74c35..74f3dc3ac212 100644 --- a/arch/arm/probes/kprobes/core.c +++ b/arch/arm/probes/kprobes/core.c @@ -61,6 +61,7 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p) 
kprobe_decode_insn_t *decode_insn; const union decode_action *actions; int is; + const struct decode_checker **checkers; if (in_exception_text(addr)) return -EINVAL; @@ -74,9 +75,11 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p) insn = __opcode_thumb32_compose(insn, inst2); decode_insn = thumb32_probes_decode_insn; actions = kprobes_t32_actions; + checkers = kprobes_t32_checkers; } else { decode_insn = thumb16_probes_decode_insn; actions = kprobes_t16_actions; + checkers = kprobes_t16_checkers; } #else /* !CONFIG_THUMB2_KERNEL */ thumb = false; @@ -85,12 +88,13 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p) insn = __mem_to_opcode_arm(*p->addr); decode_insn = arm_probes_decode_insn; actions = kprobes_arm_actions; + checkers = kprobes_arm_checkers; #endif p->opcode = insn; p->ainsn.insn = tmp_insn; - switch ((*decode_insn)(insn, &p->ainsn, true, actions)) { + switch ((*decode_insn)(insn, &p->ainsn, true, actions, checkers)) { case INSN_REJECTED: /* not supported */ return -EINVAL; diff --git a/arch/arm/probes/kprobes/core.h b/arch/arm/probes/kprobes/core.h index 2e1e5a3d9155..f88c79fe632a 100644 --- a/arch/arm/probes/kprobes/core.h +++ b/arch/arm/probes/kprobes/core.h @@ -37,16 +37,19 @@ kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_probes_insn *asi, typedef enum probes_insn (kprobe_decode_insn_t)(probes_opcode_t, struct arch_probes_insn *, bool, - const union decode_action *); + const union decode_action *, + const struct decode_checker *[*]); #ifdef CONFIG_THUMB2_KERNEL extern const union decode_action kprobes_t32_actions[]; extern const union decode_action kprobes_t16_actions[]; - +extern const struct decode_checker *kprobes_t32_checkers[]; +extern const struct decode_checker *kprobes_t16_checkers[]; #else /* !CONFIG_THUMB2_KERNEL */ extern const union decode_action kprobes_arm_actions[]; +extern const struct decode_checker *kprobes_arm_checkers[]; #endif diff --git a/arch/arm/probes/uprobes/core.c b/arch/arm/probes/uprobes/core.c index b2954f6d3abe..d1329f1ba4e4 100644 --- a/arch/arm/probes/uprobes/core.c +++ b/arch/arm/probes/uprobes/core.c @@ -88,7 +88,7 @@ int arch_uprobe_analyze_insn(struct arch_uprobe *auprobe, struct mm_struct *mm, auprobe->ixol[1] = __opcode_to_mem_arm(UPROBE_SS_ARM_INSN); ret = arm_probes_decode_insn(insn, &auprobe->asi, false, - uprobes_probes_actions); + uprobes_probes_actions, NULL); switch (ret) { case INSN_REJECTED: return -EINVAL; -- cgit v1.2.3-59-g8ed1b From 6624cf651f1a14363d0385f36dc255d304ac7ebb Mon Sep 17 00:00:00 2001 From: Wang Nan Date: Mon, 5 Jan 2015 19:29:21 +0800 Subject: ARM: kprobes: collects stack consumption for store instructions This patch uses the previously introduced checker functionality on store instructions to record their stack consumption information to arch_probes_insn. 
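For a feel of what gets recorded: a push/STMDB-style store consumes four bytes of stack per register in its list, and the checkers added below compute stack_space essentially as hweight32(reglist) * 4, dropping one slot when the P bit is clear. The tiny standalone sketch that follows (invented helper names, not the kernel's checker) just shows that arithmetic.

#include <stdio.h>

/* Count the registers named in a 16-bit register list. */
static int reg_count(unsigned int reglist)
{
	int n = 0;

	for (reglist &= 0xffffu; reglist; reglist &= reglist - 1)
		n++;
	return n;
}

/*
 * Hypothetical helper mirroring the stmdx checker's idea: each stored
 * register costs 4 bytes below SP; with the P bit clear the first store
 * lands at the original SP, so one slot less is needed below it.
 */
static int stmdb_stack_space(unsigned int reglist, int pbit)
{
	return (reg_count(reglist) - (pbit ? 0 : 1)) * 4;
}

int main(void)
{
	/* push {r4-r7, lr}: bits 4-7 and 14 set -> 5 registers. */
	unsigned int reglist = 0x40f0u;

	printf("%d bytes (P=1), %d bytes (P=0)\n",
	       stmdb_stack_space(reglist, 1),
	       stmdb_stack_space(reglist, 0));
	return 0;
}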
Signed-off-by: Wang Nan Reviewed-by: Jon Medhurst Signed-off-by: Jon Medhurst --- arch/arm/include/asm/probes.h | 1 + arch/arm/probes/decode.c | 10 +++ arch/arm/probes/kprobes/Makefile | 6 +- arch/arm/probes/kprobes/actions-arm.c | 3 +- arch/arm/probes/kprobes/actions-thumb.c | 5 +- arch/arm/probes/kprobes/checkers-arm.c | 99 +++++++++++++++++++++++++++ arch/arm/probes/kprobes/checkers-common.c | 101 +++++++++++++++++++++++++++ arch/arm/probes/kprobes/checkers-thumb.c | 110 ++++++++++++++++++++++++++++++ arch/arm/probes/kprobes/checkers.h | 54 +++++++++++++++ 9 files changed, 383 insertions(+), 6 deletions(-) create mode 100644 arch/arm/probes/kprobes/checkers-arm.c create mode 100644 arch/arm/probes/kprobes/checkers-common.c create mode 100644 arch/arm/probes/kprobes/checkers-thumb.c create mode 100644 arch/arm/probes/kprobes/checkers.h (limited to 'arch') diff --git a/arch/arm/include/asm/probes.h b/arch/arm/include/asm/probes.h index 806cfe622a9e..6026deb28794 100644 --- a/arch/arm/include/asm/probes.h +++ b/arch/arm/include/asm/probes.h @@ -38,6 +38,7 @@ struct arch_probes_insn { probes_check_cc *insn_check_cc; probes_insn_singlestep_t *insn_singlestep; probes_insn_fn_t *insn_fn; + int stack_space; }; #endif diff --git a/arch/arm/probes/decode.c b/arch/arm/probes/decode.c index c7d442018902..f9d7c423f2cc 100644 --- a/arch/arm/probes/decode.c +++ b/arch/arm/probes/decode.c @@ -425,6 +425,16 @@ probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, */ probes_opcode_t origin_insn = insn; + /* + * stack_space is initialized to 0 here. Checker functions + * should update is value if they find this is a stack store + * instruction: positive value means bytes of stack usage, + * negitive value means unable to determine stack usage + * statically. For instruction doesn't store to stack, checker + * do nothing with it. 
+ */ + asi->stack_space = 0; + if (emulate) insn = prepare_emulated_insn(insn, asi, thumb); diff --git a/arch/arm/probes/kprobes/Makefile b/arch/arm/probes/kprobes/Makefile index eb38a428ecd6..bc8d504c3d78 100644 --- a/arch/arm/probes/kprobes/Makefile +++ b/arch/arm/probes/kprobes/Makefile @@ -1,11 +1,11 @@ -obj-$(CONFIG_KPROBES) += core.o actions-common.o +obj-$(CONFIG_KPROBES) += core.o actions-common.o checkers-common.o obj-$(CONFIG_ARM_KPROBES_TEST) += test-kprobes.o test-kprobes-objs := test-core.o ifdef CONFIG_THUMB2_KERNEL -obj-$(CONFIG_KPROBES) += actions-thumb.o +obj-$(CONFIG_KPROBES) += actions-thumb.o checkers-thumb.o test-kprobes-objs += test-thumb.o else -obj-$(CONFIG_KPROBES) += actions-arm.o +obj-$(CONFIG_KPROBES) += actions-arm.o checkers-arm.o test-kprobes-objs += test-arm.o endif diff --git a/arch/arm/probes/kprobes/actions-arm.c b/arch/arm/probes/kprobes/actions-arm.c index fbd93a9ada75..06988ef7eeb7 100644 --- a/arch/arm/probes/kprobes/actions-arm.c +++ b/arch/arm/probes/kprobes/actions-arm.c @@ -64,6 +64,7 @@ #include "../decode-arm.h" #include "core.h" +#include "checkers.h" #if __LINUX_ARM_ARCH__ >= 6 #define BLX(reg) "blx "reg" \n\t" @@ -340,4 +341,4 @@ const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = { [PROBES_LDMSTM] = {.decoder = kprobe_decode_ldmstm} }; -const struct decode_checker *kprobes_arm_checkers[] = {NULL}; +const struct decode_checker *kprobes_arm_checkers[] = {arm_stack_checker, NULL}; diff --git a/arch/arm/probes/kprobes/actions-thumb.c b/arch/arm/probes/kprobes/actions-thumb.c index 2796121fe90e..07cfd9bef340 100644 --- a/arch/arm/probes/kprobes/actions-thumb.c +++ b/arch/arm/probes/kprobes/actions-thumb.c @@ -15,6 +15,7 @@ #include "../decode-thumb.h" #include "core.h" +#include "checkers.h" /* These emulation encodings are functionally equivalent... */ #define t32_emulate_rd8rn16rm0ra12_noflags \ @@ -665,5 +666,5 @@ const union decode_action kprobes_t32_actions[NUM_PROBES_T32_ACTIONS] = { .handler = t32_emulate_rdlo12rdhi8rn16rm0_noflags}, }; -const struct decode_checker *kprobes_t32_checkers[] = {NULL}; -const struct decode_checker *kprobes_t16_checkers[] = {NULL}; +const struct decode_checker *kprobes_t32_checkers[] = {t32_stack_checker, NULL}; +const struct decode_checker *kprobes_t16_checkers[] = {t16_stack_checker, NULL}; diff --git a/arch/arm/probes/kprobes/checkers-arm.c b/arch/arm/probes/kprobes/checkers-arm.c new file mode 100644 index 000000000000..f8176631d2a5 --- /dev/null +++ b/arch/arm/probes/kprobes/checkers-arm.c @@ -0,0 +1,99 @@ +/* + * arch/arm/probes/kprobes/checkers-arm.c + * + * Copyright (C) 2014 Huawei Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ + +#include +#include "../decode.h" +#include "../decode-arm.h" +#include "checkers.h" + +static enum probes_insn __kprobes arm_check_stack(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + /* + * PROBES_LDRSTRD, PROBES_LDMSTM, PROBES_STORE, + * PROBES_STORE_EXTRA may get here. Simply mark all normal + * insns as STACK_USE_NONE. 
+ */ + static const union decode_item table[] = { + /* + * 'STR{,D,B,H}, Rt, [Rn, Rm]' should be marked as UNKNOWN + * if Rn or Rm is SP. + * x + * STR (register) cccc 011x x0x0 xxxx xxxx xxxx xxxx xxxx + * STRB (register) cccc 011x x1x0 xxxx xxxx xxxx xxxx xxxx + */ + DECODE_OR (0x0e10000f, 0x0600000d), + DECODE_OR (0x0e1f0000, 0x060d0000), + + /* + * x + * STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx + * STRH (register) cccc 000x x0x0 xxxx xxxx xxxx 1011 xxxx + */ + DECODE_OR (0x0e5000bf, 0x000000bd), + DECODE_CUSTOM (0x0e5f00b0, 0x000d00b0, STACK_USE_UNKNOWN), + + /* + * For PROBES_LDMSTM, only stmdx sp, [...] need to examine + * + * Bit B/A (bit 24) encodes arithmetic operation order. 1 means + * before, 0 means after. + * Bit I/D (bit 23) encodes arithmetic operation. 1 means + * increment, 0 means decrement. + * + * So: + * B I + * / / + * A D | Rn | + * STMDX SP, [...] cccc 100x 00x0 xxxx xxxx xxxx xxxx xxxx + */ + DECODE_CUSTOM (0x0edf0000, 0x080d0000, STACK_USE_STMDX), + + /* P U W | Rn | Rt | imm12 |*/ + /* STR (immediate) cccc 010x x0x0 1101 xxxx xxxx xxxx xxxx */ + /* STRB (immediate) cccc 010x x1x0 1101 xxxx xxxx xxxx xxxx */ + /* P U W | Rn | Rt |imm4| |imm4|*/ + /* STRD (immediate) cccc 000x x1x0 1101 xxxx xxxx 1111 xxxx */ + /* STRH (immediate) cccc 000x x1x0 1101 xxxx xxxx 1011 xxxx */ + /* + * index = (P == '1'); add = (U == '1'). + * Above insns with: + * index == 0 (str{,d,h} rx, [sp], #+/-imm) or + * add == 1 (str{,d,h} rx, [sp, #+]) + * should be STACK_USE_NONE. + * Only str{,b,d,h} rx,[sp,#-n] (P == 1 and U == 0) are + * required to be examined. + */ + /* STR{,B} Rt,[SP,#-n] cccc 0101 0xx0 1101 xxxx xxxx xxxx xxxx */ + DECODE_CUSTOM (0x0f9f0000, 0x050d0000, STACK_USE_FIXED_XXX), + + /* STR{D,H} Rt,[SP,#-n] cccc 0001 01x0 1101 xxxx xxxx 1x11 xxxx */ + DECODE_CUSTOM (0x0fdf00b0, 0x014d00b0, STACK_USE_FIXED_X0X), + + /* fall through */ + DECODE_CUSTOM (0, 0, STACK_USE_NONE), + DECODE_END + }; + + return probes_decode_insn(insn, asi, table, false, false, stack_check_actions, NULL); +} + +const struct decode_checker arm_stack_checker[NUM_PROBES_ARM_ACTIONS] = { + [PROBES_LDRSTRD] = {.checker = arm_check_stack}, + [PROBES_STORE_EXTRA] = {.checker = arm_check_stack}, + [PROBES_STORE] = {.checker = arm_check_stack}, + [PROBES_LDMSTM] = {.checker = arm_check_stack}, +}; diff --git a/arch/arm/probes/kprobes/checkers-common.c b/arch/arm/probes/kprobes/checkers-common.c new file mode 100644 index 000000000000..971119c29474 --- /dev/null +++ b/arch/arm/probes/kprobes/checkers-common.c @@ -0,0 +1,101 @@ +/* + * arch/arm/probes/kprobes/checkers-common.c + * + * Copyright (C) 2014 Huawei Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. 
+ */ + +#include +#include "../decode.h" +#include "../decode-arm.h" +#include "checkers.h" + +enum probes_insn checker_stack_use_none(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + asi->stack_space = 0; + return INSN_GOOD_NO_SLOT; +} + +enum probes_insn checker_stack_use_unknown(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + asi->stack_space = -1; + return INSN_GOOD_NO_SLOT; +} + +#ifdef CONFIG_THUMB2_KERNEL +enum probes_insn checker_stack_use_imm_0xx(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + int imm = insn & 0xff; + asi->stack_space = imm; + return INSN_GOOD_NO_SLOT; +} + +/* + * Different from other insn uses imm8, the real addressing offset of + * STRD in T32 encoding should be imm8 * 4. See ARMARM description. + */ +enum probes_insn checker_stack_use_t32strd(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + int imm = insn & 0xff; + asi->stack_space = imm << 2; + return INSN_GOOD_NO_SLOT; +} +#else +enum probes_insn checker_stack_use_imm_x0x(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + int imm = ((insn & 0xf00) >> 4) + (insn & 0xf); + asi->stack_space = imm; + return INSN_GOOD_NO_SLOT; +} +#endif + +enum probes_insn checker_stack_use_imm_xxx(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + int imm = insn & 0xfff; + asi->stack_space = imm; + return INSN_GOOD_NO_SLOT; +} + +enum probes_insn checker_stack_use_stmdx(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + unsigned int reglist = insn & 0xffff; + int pbit = insn & (1 << 24); + asi->stack_space = (hweight32(reglist) - (!pbit ? 1 : 0)) * 4; + + return INSN_GOOD_NO_SLOT; +} + +const union decode_action stack_check_actions[] = { + [STACK_USE_NONE] = {.decoder = checker_stack_use_none}, + [STACK_USE_UNKNOWN] = {.decoder = checker_stack_use_unknown}, +#ifdef CONFIG_THUMB2_KERNEL + [STACK_USE_FIXED_0XX] = {.decoder = checker_stack_use_imm_0xx}, + [STACK_USE_T32STRD] = {.decoder = checker_stack_use_t32strd}, +#else + [STACK_USE_FIXED_X0X] = {.decoder = checker_stack_use_imm_x0x}, +#endif + [STACK_USE_FIXED_XXX] = {.decoder = checker_stack_use_imm_xxx}, + [STACK_USE_STMDX] = {.decoder = checker_stack_use_stmdx}, +}; diff --git a/arch/arm/probes/kprobes/checkers-thumb.c b/arch/arm/probes/kprobes/checkers-thumb.c new file mode 100644 index 000000000000..d608e3b9017a --- /dev/null +++ b/arch/arm/probes/kprobes/checkers-thumb.c @@ -0,0 +1,110 @@ +/* + * arch/arm/probes/kprobes/checkers-thumb.c + * + * Copyright (C) 2014 Huawei Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ + +#include +#include "../decode.h" +#include "../decode-thumb.h" +#include "checkers.h" + +static enum probes_insn __kprobes t32_check_stack(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + /* + * PROBES_T32_LDMSTM, PROBES_T32_LDRDSTRD and PROBES_T32_LDRSTR + * may get here. Simply mark all normal insns as STACK_USE_NONE. 
+ */ + static const union decode_item table[] = { + + /* + * First, filter out all ldr insns to make our life easier. + * Following load insns may come here: + * LDM, LDRD, LDR. + * In T32 encoding, bit 20 is enough for distinguishing + * load and store. All load insns have this bit set, when + * all store insns have this bit clear. + */ + DECODE_CUSTOM (0x00100000, 0x00100000, STACK_USE_NONE), + + /* + * Mark all 'STR{,B,H}, Rt, [Rn, Rm]' as STACK_USE_UNKNOWN + * if Rn or Rm is SP. T32 doesn't encode STRD. + */ + /* xx | Rn | Rt | | Rm |*/ + /* STR (register) 1111 1000 0100 xxxx xxxx 0000 00xx xxxx */ + /* STRB (register) 1111 1000 0000 xxxx xxxx 0000 00xx xxxx */ + /* STRH (register) 1111 1000 0010 xxxx xxxx 0000 00xx xxxx */ + /* INVALID INSN 1111 1000 0110 xxxx xxxx 0000 00xx xxxx */ + /* By Introducing INVALID INSN, bit 21 and 22 can be ignored. */ + DECODE_OR (0xff9f0fc0, 0xf80d0000), + DECODE_CUSTOM (0xff900fcf, 0xf800000d, STACK_USE_UNKNOWN), + + + /* xx | Rn | Rt | PUW| imm8 |*/ + /* STR (imm 8) 1111 1000 0100 1101 xxxx 110x xxxx xxxx */ + /* STRB (imm 8) 1111 1000 0000 1101 xxxx 110x xxxx xxxx */ + /* STRH (imm 8) 1111 1000 0010 1101 xxxx 110x xxxx xxxx */ + /* INVALID INSN 1111 1000 0110 1101 xxxx 110x xxxx xxxx */ + /* Only consider U == 0 and P == 1: strx rx, [sp, #-] */ + DECODE_CUSTOM (0xff9f0e00, 0xf80d0c00, STACK_USE_FIXED_0XX), + + /* For STR{,B,H} (imm 12), offset is always positive, so ignore them. */ + + /* P U W | Rn | Rt | Rt2| imm8 |*/ + /* STRD (immediate) 1110 1001 01x0 1101 xxxx xxxx xxxx xxxx */ + /* + * Only consider U == 0 and P == 1. + * Also note that STRD in T32 encoding is special: + * imm = ZeroExtend(imm8:'00', 32) + */ + DECODE_CUSTOM (0xffdf0000, 0xe94d0000, STACK_USE_T32STRD), + + /* | Rn | */ + /* STMDB 1110 1001 00x0 1101 xxxx xxxx xxxx xxxx */ + DECODE_CUSTOM (0xffdf0000, 0xe90d0000, STACK_USE_STMDX), + + /* fall through */ + DECODE_CUSTOM (0, 0, STACK_USE_NONE), + DECODE_END + }; + + return probes_decode_insn(insn, asi, table, false, false, stack_check_actions, NULL); +} + +const struct decode_checker t32_stack_checker[NUM_PROBES_T32_ACTIONS] = { + [PROBES_T32_LDMSTM] = {.checker = t32_check_stack}, + [PROBES_T32_LDRDSTRD] = {.checker = t32_check_stack}, + [PROBES_T32_LDRSTR] = {.checker = t32_check_stack}, +}; + +/* + * See following comments. This insn must be 'push'. + */ +static enum probes_insn __kprobes t16_check_stack(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + unsigned int reglist = insn & 0x1ff; + asi->stack_space = hweight32(reglist) * 4; + return INSN_GOOD; +} + +/* + * T16 encoding is simple: only the 'push' insn can need extra stack space. + * Other insns, like str, can only use r0-r7 as Rn. + */ +const struct decode_checker t16_stack_checker[NUM_PROBES_T16_ACTIONS] = { + [PROBES_T16_PUSH] = {.checker = t16_check_stack}, +}; diff --git a/arch/arm/probes/kprobes/checkers.h b/arch/arm/probes/kprobes/checkers.h new file mode 100644 index 000000000000..bddfa0e82389 --- /dev/null +++ b/arch/arm/probes/kprobes/checkers.h @@ -0,0 +1,54 @@ +/* + * arch/arm/probes/kprobes/checkers.h + * + * Copyright (C) 2014 Huawei Inc. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + */ +#ifndef _ARM_KERNEL_PROBES_CHECKERS_H +#define _ARM_KERNEL_PROBES_CHECKERS_H + +#include +#include +#include "../decode.h" + +extern probes_check_t checker_stack_use_none; +extern probes_check_t checker_stack_use_unknown; +#ifdef CONFIG_THUMB2_KERNEL +extern probes_check_t checker_stack_use_imm_0xx; +#else +extern probes_check_t checker_stack_use_imm_x0x; +#endif +extern probes_check_t checker_stack_use_imm_xxx; +extern probes_check_t checker_stack_use_stmdx; + +enum { + STACK_USE_NONE, + STACK_USE_UNKNOWN, +#ifdef CONFIG_THUMB2_KERNEL + STACK_USE_FIXED_0XX, + STACK_USE_T32STRD, +#else + STACK_USE_FIXED_X0X, +#endif + STACK_USE_FIXED_XXX, + STACK_USE_STMDX, + NUM_STACK_USE_TYPES +}; + +extern const union decode_action stack_check_actions[]; + +#ifndef CONFIG_THUMB2_KERNEL +extern const struct decode_checker arm_stack_checker[]; +#else +#endif +extern const struct decode_checker t32_stack_checker[]; +extern const struct decode_checker t16_stack_checker[]; +#endif -- cgit v1.2.3-59-g8ed1b From a0266c214fab21371a499e6ab1c9385cc6589189 Mon Sep 17 00:00:00 2001 From: Wang Nan Date: Mon, 5 Jan 2015 19:29:25 +0800 Subject: ARM: kprobes: disallow probing stack consuming instructions This patch prohibits probing instructions for which the stack requirements are unable to be determined statically. Some test cases are found not work again after the modification, this patch also removes them. Signed-off-by: Wang Nan Reviewed-by: Jon Medhurst Signed-off-by: Jon Medhurst --- arch/arm/include/asm/kprobes.h | 1 - arch/arm/include/asm/probes.h | 12 ++++++++++++ arch/arm/kernel/entry-armv.S | 3 ++- arch/arm/probes/kprobes/core.c | 9 +++++++++ arch/arm/probes/kprobes/test-arm.c | 16 ++++++++++------ 5 files changed, 33 insertions(+), 8 deletions(-) (limited to 'arch') diff --git a/arch/arm/include/asm/kprobes.h b/arch/arm/include/asm/kprobes.h index 49fa0dfaad33..56f9ac68fbd1 100644 --- a/arch/arm/include/asm/kprobes.h +++ b/arch/arm/include/asm/kprobes.h @@ -22,7 +22,6 @@ #define __ARCH_WANT_KPROBES_INSN_SLOT #define MAX_INSN_SIZE 2 -#define MAX_STACK_SIZE 64 /* 32 would probably be OK */ #define flush_insn_slot(p) do { } while (0) #define kretprobe_blacklist_size 0 diff --git a/arch/arm/include/asm/probes.h b/arch/arm/include/asm/probes.h index 6026deb28794..cd9e81588d83 100644 --- a/arch/arm/include/asm/probes.h +++ b/arch/arm/include/asm/probes.h @@ -19,6 +19,8 @@ #ifndef _ASM_PROBES_H #define _ASM_PROBES_H +#ifndef __ASSEMBLY__ + typedef u32 probes_opcode_t; struct arch_probes_insn; @@ -41,4 +43,14 @@ struct arch_probes_insn { int stack_space; }; +#endif /* __ASSEMBLY__ */ + +/* + * We assume one instruction can consume at most 64 bytes stack, which is + * 'push {r0-r15}'. Instructions consume more or unknown stack space like + * 'str r0, [sp, #-80]' and 'str r0, [sp, r1]' should be prohibit to probe. + * Both kprobe and jprobe use this macro. + */ +#define MAX_STACK_SIZE 64 + #endif diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S index 2f5555d307b3..672b21942fff 100644 --- a/arch/arm/kernel/entry-armv.S +++ b/arch/arm/kernel/entry-armv.S @@ -31,6 +31,7 @@ #include "entry-header.S" #include +#include /* * Interrupt handling. @@ -249,7 +250,7 @@ __und_svc: @ If a kprobe is about to simulate a "stmdb sp..." 
instruction, @ it obviously needs free stack space which then will belong to @ the saved context. - svc_entry 64 + svc_entry MAX_STACK_SIZE #else svc_entry #endif diff --git a/arch/arm/probes/kprobes/core.c b/arch/arm/probes/kprobes/core.c index 74f3dc3ac212..3a58db4cc1c6 100644 --- a/arch/arm/probes/kprobes/core.c +++ b/arch/arm/probes/kprobes/core.c @@ -115,6 +115,15 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p) break; } + /* + * Never instrument insn like 'str r0, [sp, +/-r1]'. Also, insn likes + * 'str r0, [sp, #-68]' should also be prohibited. + * See __und_svc. + */ + if ((p->ainsn.stack_space < 0) || + (p->ainsn.stack_space > MAX_STACK_SIZE)) + return -EINVAL; + return 0; } diff --git a/arch/arm/probes/kprobes/test-arm.c b/arch/arm/probes/kprobes/test-arm.c index d9a1255f3043..fdeb300b0fc8 100644 --- a/arch/arm/probes/kprobes/test-arm.c +++ b/arch/arm/probes/kprobes/test-arm.c @@ -476,7 +476,8 @@ void kprobe_arm_test_cases(void) TEST_GROUP("Extra load/store instructions") TEST_RPR( "strh r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") - TEST_RPR( "streqh r",14,VAL2,", [r",13,0, ", r",12, 48,"]") + TEST_RPR( "streqh r",14,VAL2,", [r",11,0, ", r",12, 48,"]") + TEST_UNSUPPORTED( "streqh r14, [r13, r12]") TEST_RPR( "strh r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") TEST_RPR( "strneh r",12,VAL2,", [r",11,48,", -r",10,24,"]!") TEST_RPR( "strh r",2, VAL1,", [r",3, 24,"], r",4, 48,"") @@ -565,7 +566,8 @@ void kprobe_arm_test_cases(void) #if __LINUX_ARM_ARCH__ >= 5 TEST_RPR( "strd r",0, VAL1,", [r",1, 48,", -r",2,24,"]") - TEST_RPR( "strccd r",8, VAL2,", [r",13,0, ", r",12,48,"]") + TEST_RPR( "strccd r",8, VAL2,", [r",11,0, ", r",12,48,"]") + TEST_UNSUPPORTED( "strccd r8, [r13, r12]") TEST_RPR( "strd r",4, VAL1,", [r",2, 24,", r",3, 48,"]!") TEST_RPR( "strcsd r",12,VAL2,", [r",11,48,", -r",10,24,"]!") TEST_RPR( "strd r",2, VAL1,", [r",5, 24,"], r",4,48,"") @@ -638,13 +640,15 @@ void kprobe_arm_test_cases(void) TEST_RP( "str"byte" r",2, VAL1,", [r",3, 24,"], #48") \ TEST_RP( "str"byte" r",10,VAL2,", [r",9, 64,"], #-48") \ TEST_RPR("str"byte" r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") \ - TEST_RPR("str"byte" r",14,VAL2,", [r",13,0, ", r",12, 48,"]") \ + TEST_RPR("str"byte" r",14,VAL2,", [r",11,0, ", r",12, 48,"]") \ + TEST_UNSUPPORTED("str"byte" r14, [r13, r12]") \ TEST_RPR("str"byte" r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") \ TEST_RPR("str"byte" r",12,VAL2,", [r",11,48,", -r",10,24,"]!") \ TEST_RPR("str"byte" r",2, VAL1,", [r",3, 24,"], r",4, 48,"") \ TEST_RPR("str"byte" r",10,VAL2,", [r",9, 48,"], -r",11,24,"") \ TEST_RPR("str"byte" r",0, VAL1,", [r",1, 24,", r",2, 32,", asl #1]")\ - TEST_RPR("str"byte" r",14,VAL2,", [r",13,0, ", r",12, 32,", lsr #2]")\ + TEST_RPR("str"byte" r",14,VAL2,", [r",11,0, ", r",12, 32,", lsr #2]")\ + TEST_UNSUPPORTED("str"byte" r14, [r13, r12, lsr #2]")\ TEST_RPR("str"byte" r",1, VAL1,", [r",2, 24,", r",3, 32,", asr #3]!")\ TEST_RPR("str"byte" r",12,VAL2,", [r",11,24,", r",10, 4,", ror #31]!")\ TEST_P( "ldr"byte" r0, [r",0, 24,", #-2]") \ @@ -668,12 +672,12 @@ void kprobe_arm_test_cases(void) LOAD_STORE("") TEST_P( "str pc, [r",0,0,", #15*4]") - TEST_R( "str pc, [sp, r",2,15*4,"]") + TEST_UNSUPPORTED( "str pc, [sp, r2]") TEST_BF( "ldr pc, [sp, #15*4]") TEST_BF_R("ldr pc, [sp, r",2,15*4,"]") TEST_P( "str sp, [r",0,0,", #13*4]") - TEST_R( "str sp, [sp, r",2,13*4,"]") + TEST_UNSUPPORTED( "str sp, [sp, r2]") TEST_BF( "ldr sp, [sp, #13*4]") TEST_BF_R("ldr sp, [sp, r",2,13*4,"]") -- cgit v1.2.3-59-g8ed1b From 8d257e95a9e643518e72232bf852b054a3d06c95 Mon Sep 17 00:00:00 
2001 From: "Jon Medhurst (Tixy)" Date: Mon, 5 Jan 2015 19:29:29 +0800 Subject: ARM: kprobes: Add test cases for stack consuming instructions These have extra 'checker' functions associated with them so lets make sure those get covered by testing. As they may create uninitialised space on the stack we also update the test code to ensure such space is consistent between test runs. This is done by disabling interrupts in setup_test_context(). Signed-off-by: Jon Medhurst --- arch/arm/probes/kprobes/test-arm.c | 17 +++++++++++++++-- arch/arm/probes/kprobes/test-core.c | 9 +++++++++ arch/arm/probes/kprobes/test-thumb.c | 12 ++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) (limited to 'arch') diff --git a/arch/arm/probes/kprobes/test-arm.c b/arch/arm/probes/kprobes/test-arm.c index fdeb300b0fc8..9b3b1b4a0939 100644 --- a/arch/arm/probes/kprobes/test-arm.c +++ b/arch/arm/probes/kprobes/test-arm.c @@ -12,6 +12,7 @@ #include #include #include +#include #include "test-core.h" @@ -478,6 +479,7 @@ void kprobe_arm_test_cases(void) TEST_RPR( "strh r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") TEST_RPR( "streqh r",14,VAL2,", [r",11,0, ", r",12, 48,"]") TEST_UNSUPPORTED( "streqh r14, [r13, r12]") + TEST_UNSUPPORTED( "streqh r14, [r12, r13]") TEST_RPR( "strh r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") TEST_RPR( "strneh r",12,VAL2,", [r",11,48,", -r",10,24,"]!") TEST_RPR( "strh r",2, VAL1,", [r",3, 24,"], r",4, 48,"") @@ -502,6 +504,9 @@ void kprobe_arm_test_cases(void) TEST_RP( "strplh r",12,VAL2,", [r",11,24,", #-4]!") TEST_RP( "strh r",2, VAL1,", [r",3, 24,"], #48") TEST_RP( "strh r",10,VAL2,", [r",9, 64,"], #-48") + TEST_RP( "strh r",3, VAL1,", [r",13,TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"]!") + TEST_UNSUPPORTED("strh r3, [r13, #-"__stringify(MAX_STACK_SIZE)"-8]!") + TEST_RP( "strh r",4, VAL1,", [r",14,TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"-8]!") TEST_UNSUPPORTED(__inst_arm(0xe1efc3b0) " @ strh r12, [pc, #48]!") TEST_UNSUPPORTED(__inst_arm(0xe0c9f3b0) " @ strh pc, [r9], #48") @@ -568,6 +573,7 @@ void kprobe_arm_test_cases(void) TEST_RPR( "strd r",0, VAL1,", [r",1, 48,", -r",2,24,"]") TEST_RPR( "strccd r",8, VAL2,", [r",11,0, ", r",12,48,"]") TEST_UNSUPPORTED( "strccd r8, [r13, r12]") + TEST_UNSUPPORTED( "strccd r8, [r12, r13]") TEST_RPR( "strd r",4, VAL1,", [r",2, 24,", r",3, 48,"]!") TEST_RPR( "strcsd r",12,VAL2,", [r",11,48,", -r",10,24,"]!") TEST_RPR( "strd r",2, VAL1,", [r",5, 24,"], r",4,48,"") @@ -591,6 +597,9 @@ void kprobe_arm_test_cases(void) TEST_RP( "strvcd r",12,VAL2,", [r",11,24,", #-16]!") TEST_RP( "strd r",2, VAL1,", [r",4, 24,"], #48") TEST_RP( "strd r",10,VAL2,", [r",9, 64,"], #-48") + TEST_RP( "strd r",6, VAL1,", [r",13,TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"]!") + TEST_UNSUPPORTED("strd r6, [r13, #-"__stringify(MAX_STACK_SIZE)"-8]!") + TEST_RP( "strd r",4, VAL1,", [r",12,TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"-8]!") TEST_UNSUPPORTED(__inst_arm(0xe1efc3f0) " @ strd r12, [pc, #48]!") TEST_P( "ldrd r0, [r",0, 24,", #-8]") @@ -639,16 +648,20 @@ void kprobe_arm_test_cases(void) TEST_RP( "str"byte" r",12,VAL2,", [r",11,24,", #-4]!") \ TEST_RP( "str"byte" r",2, VAL1,", [r",3, 24,"], #48") \ TEST_RP( "str"byte" r",10,VAL2,", [r",9, 64,"], #-48") \ + TEST_RP( "str"byte" r",3, VAL1,", [r",13,TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"]!") \ + TEST_UNSUPPORTED("str"byte" r3, [r13, #-"__stringify(MAX_STACK_SIZE)"-8]!") \ + TEST_RP( "str"byte" r",4, VAL1,", [r",10,TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"-8]!") \ 
TEST_RPR("str"byte" r",0, VAL1,", [r",1, 48,", -r",2, 24,"]") \ TEST_RPR("str"byte" r",14,VAL2,", [r",11,0, ", r",12, 48,"]") \ - TEST_UNSUPPORTED("str"byte" r14, [r13, r12]") \ + TEST_UNSUPPORTED("str"byte" r14, [r13, r12]") \ + TEST_UNSUPPORTED("str"byte" r14, [r12, r13]") \ TEST_RPR("str"byte" r",1, VAL1,", [r",2, 24,", r",3, 48,"]!") \ TEST_RPR("str"byte" r",12,VAL2,", [r",11,48,", -r",10,24,"]!") \ TEST_RPR("str"byte" r",2, VAL1,", [r",3, 24,"], r",4, 48,"") \ TEST_RPR("str"byte" r",10,VAL2,", [r",9, 48,"], -r",11,24,"") \ TEST_RPR("str"byte" r",0, VAL1,", [r",1, 24,", r",2, 32,", asl #1]")\ TEST_RPR("str"byte" r",14,VAL2,", [r",11,0, ", r",12, 32,", lsr #2]")\ - TEST_UNSUPPORTED("str"byte" r14, [r13, r12, lsr #2]")\ + TEST_UNSUPPORTED("str"byte" r14, [r13, r12, lsr #2]") \ TEST_RPR("str"byte" r",1, VAL1,", [r",2, 24,", r",3, 32,", asr #3]!")\ TEST_RPR("str"byte" r",12,VAL2,", [r",11,24,", r",10, 4,", ror #31]!")\ TEST_P( "ldr"byte" r0, [r",0, 24,", #-2]") \ diff --git a/arch/arm/probes/kprobes/test-core.c b/arch/arm/probes/kprobes/test-core.c index 7ab633d51954..7c5ddd5a6afd 100644 --- a/arch/arm/probes/kprobes/test-core.c +++ b/arch/arm/probes/kprobes/test-core.c @@ -1196,6 +1196,13 @@ static void setup_test_context(struct pt_regs *regs) regs->uregs[arg->reg] = (unsigned long)current_stack + arg->val; memory_needs_checking = true; + /* + * Test memory at an address below SP is in danger of + * being altered by an interrupt occurring and pushing + * data onto the stack. Disable interrupts to stop this. + */ + if (arg->reg == 13) + regs->ARM_cpsr |= PSR_I_BIT; break; } case ARG_TYPE_MEM: { @@ -1272,6 +1279,8 @@ test_after_pre_handler(struct kprobe *p, struct pt_regs *regs) /* Undo any changes done to SP by the test case */ regs->ARM_sp = (unsigned long)current_stack; + /* Enable interrupts in case setup_test_context disabled them */ + regs->ARM_cpsr &= ~PSR_I_BIT; container_of(p, struct test_probe, kprobe)->hit = test_instance; return 0; diff --git a/arch/arm/probes/kprobes/test-thumb.c b/arch/arm/probes/kprobes/test-thumb.c index 6c6e9a9bb675..e8cf193db1ea 100644 --- a/arch/arm/probes/kprobes/test-thumb.c +++ b/arch/arm/probes/kprobes/test-thumb.c @@ -11,6 +11,7 @@ #include #include #include +#include #include "test-core.h" @@ -416,6 +417,9 @@ void kprobe_thumb32_test_cases(void) TEST_RR( "strd r",14,VAL2,", r",12,VAL1,", [sp, #16]!") TEST_RRP("strd r",1, VAL1,", r",0, VAL2,", [r",7, 24,"], #16") TEST_RR( "strd r",7, VAL2,", r",8, VAL1,", [sp], #-16") + TEST_RRP("strd r",6, VAL1,", r",7, VAL2,", [r",13, TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"]!") + TEST_UNSUPPORTED("strd r6, r7, [r13, #-"__stringify(MAX_STACK_SIZE)"-8]!") + TEST_RRP("strd r",4, VAL1,", r",5, VAL2,", [r",14, TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"-8]!") TEST_UNSUPPORTED(__inst_thumb32(0xe9efec04) " @ strd r14, r12, [pc, #16]!") TEST_UNSUPPORTED(__inst_thumb32(0xe8efec04) " @ strd r14, r12, [pc], #16") @@ -821,14 +825,22 @@ CONDITION_INSTRUCTIONS(22, TEST_RP( "str"size" r",14,VAL2,", [r",1, 256, ", #-128]!") \ TEST_RPR("str"size".w r",0, VAL1,", [r",1, 0,", r",2, 4,"]") \ TEST_RPR("str"size" r",14,VAL2,", [r",10,0,", r",11,4,", lsl #1]") \ + TEST_UNSUPPORTED("str"size" r0, [r13, r1]") \ TEST_R( "str"size".w r",7, VAL1,", [sp, #24]") \ TEST_RP( "str"size".w r",0, VAL2,", [r",0,0, "]") \ + TEST_RP( "str"size" r",6, VAL1,", [r",13, TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"]!") \ + TEST_UNSUPPORTED("str"size" r6, [r13, #-"__stringify(MAX_STACK_SIZE)"-8]!") \ + TEST_RP( "str"size" r",4, 
VAL2,", [r",12, TEST_MEMORY_SIZE,", #-"__stringify(MAX_STACK_SIZE)"-8]!") \ TEST_UNSUPPORTED("str"size"t r0, [r1, #4]") SINGLE_STORE("b") SINGLE_STORE("h") SINGLE_STORE("") + TEST_UNSUPPORTED(__inst_thumb32(0xf801000d) " @ strb r0, [r1, r13]") + TEST_UNSUPPORTED(__inst_thumb32(0xf821000d) " @ strh r0, [r1, r13]") + TEST_UNSUPPORTED(__inst_thumb32(0xf841000d) " @ str r0, [r1, r13]") + TEST("str sp, [sp]") TEST_UNSUPPORTED(__inst_thumb32(0xf8cfe000) " @ str r14, [pc]") TEST_UNSUPPORTED(__inst_thumb32(0xf8cef000) " @ str pc, [r14]") -- cgit v1.2.3-59-g8ed1b From cbf6ab52add20b845f903decc973afbd5463c527 Mon Sep 17 00:00:00 2001 From: Masami Hiramatsu Date: Mon, 5 Jan 2015 19:29:32 +0800 Subject: kprobes: Pass the original kprobe for preparing optimized kprobe Pass the original kprobe for preparing an optimized kprobe arch-dep part, since for some architecture (e.g. ARM32) requires the information in original kprobe. Signed-off-by: Masami Hiramatsu Signed-off-by: Wang Nan Signed-off-by: Jon Medhurst --- arch/x86/kernel/kprobes/opt.c | 3 ++- include/linux/kprobes.h | 3 ++- kernel/kprobes.c | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) (limited to 'arch') diff --git a/arch/x86/kernel/kprobes/opt.c b/arch/x86/kernel/kprobes/opt.c index 7c523bbf3dc8..0dd8d089c315 100644 --- a/arch/x86/kernel/kprobes/opt.c +++ b/arch/x86/kernel/kprobes/opt.c @@ -322,7 +322,8 @@ void arch_remove_optimized_kprobe(struct optimized_kprobe *op) * Target instructions MUST be relocatable (checked inside) * This is called when new aggr(opt)probe is allocated or reused. */ -int arch_prepare_optimized_kprobe(struct optimized_kprobe *op) +int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, + struct kprobe *__unused) { u8 *buf; int ret; diff --git a/include/linux/kprobes.h b/include/linux/kprobes.h index 5297f9fa0ef2..1ab54754a86d 100644 --- a/include/linux/kprobes.h +++ b/include/linux/kprobes.h @@ -308,7 +308,8 @@ struct optimized_kprobe { /* Architecture dependent functions for direct jump optimization */ extern int arch_prepared_optinsn(struct arch_optimized_insn *optinsn); extern int arch_check_optimized_kprobe(struct optimized_kprobe *op); -extern int arch_prepare_optimized_kprobe(struct optimized_kprobe *op); +extern int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, + struct kprobe *orig); extern void arch_remove_optimized_kprobe(struct optimized_kprobe *op); extern void arch_optimize_kprobes(struct list_head *oplist); extern void arch_unoptimize_kprobes(struct list_head *oplist, diff --git a/kernel/kprobes.c b/kernel/kprobes.c index 06f58309fed2..bad4e959f2f7 100644 --- a/kernel/kprobes.c +++ b/kernel/kprobes.c @@ -717,7 +717,7 @@ static void prepare_optimized_kprobe(struct kprobe *p) struct optimized_kprobe *op; op = container_of(p, struct optimized_kprobe, kp); - arch_prepare_optimized_kprobe(op); + arch_prepare_optimized_kprobe(op, p); } /* Allocate new optimized_kprobe and try to prepare optimized instructions */ @@ -731,7 +731,7 @@ static struct kprobe *alloc_aggr_kprobe(struct kprobe *p) INIT_LIST_HEAD(&op->list); op->kp.addr = p->addr; - arch_prepare_optimized_kprobe(op); + arch_prepare_optimized_kprobe(op, p); return &op->kp; } -- cgit v1.2.3-59-g8ed1b From 0dc016dbd820260b8ea74337980735b8c88d4ef2 Mon Sep 17 00:00:00 2001 From: Wang Nan Date: Fri, 9 Jan 2015 14:37:36 +0800 Subject: ARM: kprobes: enable OPTPROBES for ARM 32 This patch introduce kprobeopt for ARM 32. Limitations: - Currently only kernel compiled with ARM ISA is supported. 
- Offset between probe point and optinsn slot must not larger than 32MiB. Masami Hiramatsu suggests replacing 2 words, it will make things complex. Futher patch can make such optimization. Kprobe opt on ARM is relatively simpler than kprobe opt on x86 because ARM instruction is always 4 bytes aligned and 4 bytes long. This patch replace probed instruction by a 'b', branch to trampoline code and then calls optimized_callback(). optimized_callback() calls opt_pre_handler() to execute kprobe handler. It also emulate/simulate replaced instruction. When unregistering kprobe, the deferred manner of unoptimizer may leave branch instruction before optimizer is called. Different from x86_64, which only copy the probed insn after optprobe_template_end and reexecute them, this patch call singlestep to emulate/simulate the insn directly. Futher patch can optimize this behavior. Signed-off-by: Wang Nan Acked-by: Masami Hiramatsu Cc: Will Deacon Reviewed-by: Jon Medhurst (Tixy) Signed-off-by: Jon Medhurst --- arch/arm/Kconfig | 1 + arch/arm/include/asm/insn.h | 29 ++++ arch/arm/include/asm/kprobes.h | 29 ++++ arch/arm/kernel/Makefile | 2 +- arch/arm/kernel/ftrace.c | 3 +- arch/arm/kernel/insn.h | 29 ---- arch/arm/kernel/jump_label.c | 3 +- arch/arm/probes/kprobes/Makefile | 1 + arch/arm/probes/kprobes/core.c | 26 ++- arch/arm/probes/kprobes/core.h | 2 + arch/arm/probes/kprobes/opt-arm.c | 322 ++++++++++++++++++++++++++++++++++++++ 11 files changed, 406 insertions(+), 41 deletions(-) create mode 100644 arch/arm/include/asm/insn.h delete mode 100644 arch/arm/kernel/insn.h create mode 100644 arch/arm/probes/kprobes/opt-arm.c (limited to 'arch') diff --git a/arch/arm/Kconfig b/arch/arm/Kconfig index 97d07ed60a0b..3d5dc2df835b 100644 --- a/arch/arm/Kconfig +++ b/arch/arm/Kconfig @@ -60,6 +60,7 @@ config ARM select HAVE_MEMBLOCK select HAVE_MOD_ARCH_SPECIFIC if ARM_UNWIND select HAVE_OPROFILE if (HAVE_PERF_EVENTS) + select HAVE_OPTPROBES if !THUMB2_KERNEL select HAVE_PERF_EVENTS select HAVE_PERF_REGS select HAVE_PERF_USER_STACK_DUMP diff --git a/arch/arm/include/asm/insn.h b/arch/arm/include/asm/insn.h new file mode 100644 index 000000000000..e96065da4dae --- /dev/null +++ b/arch/arm/include/asm/insn.h @@ -0,0 +1,29 @@ +#ifndef __ASM_ARM_INSN_H +#define __ASM_ARM_INSN_H + +static inline unsigned long +arm_gen_nop(void) +{ +#ifdef CONFIG_THUMB2_KERNEL + return 0xf3af8000; /* nop.w */ +#else + return 0xe1a00000; /* mov r0, r0 */ +#endif +} + +unsigned long +__arm_gen_branch(unsigned long pc, unsigned long addr, bool link); + +static inline unsigned long +arm_gen_branch(unsigned long pc, unsigned long addr) +{ + return __arm_gen_branch(pc, addr, false); +} + +static inline unsigned long +arm_gen_branch_link(unsigned long pc, unsigned long addr) +{ + return __arm_gen_branch(pc, addr, true); +} + +#endif diff --git a/arch/arm/include/asm/kprobes.h b/arch/arm/include/asm/kprobes.h index 56f9ac68fbd1..50ff3bc7928e 100644 --- a/arch/arm/include/asm/kprobes.h +++ b/arch/arm/include/asm/kprobes.h @@ -50,5 +50,34 @@ int kprobe_fault_handler(struct pt_regs *regs, unsigned int fsr); int kprobe_exceptions_notify(struct notifier_block *self, unsigned long val, void *data); +/* optinsn template addresses */ +extern __visible kprobe_opcode_t optprobe_template_entry; +extern __visible kprobe_opcode_t optprobe_template_val; +extern __visible kprobe_opcode_t optprobe_template_call; +extern __visible kprobe_opcode_t optprobe_template_end; +extern __visible kprobe_opcode_t optprobe_template_sub_sp; +extern __visible 
kprobe_opcode_t optprobe_template_add_sp; + +#define MAX_OPTIMIZED_LENGTH 4 +#define MAX_OPTINSN_SIZE \ + ((unsigned long)&optprobe_template_end - \ + (unsigned long)&optprobe_template_entry) +#define RELATIVEJUMP_SIZE 4 + +struct arch_optimized_insn { + /* + * copy of the original instructions. + * Different from x86, ARM kprobe_opcode_t is u32. + */ +#define MAX_COPIED_INSN DIV_ROUND_UP(RELATIVEJUMP_SIZE, sizeof(kprobe_opcode_t)) + kprobe_opcode_t copied_insn[MAX_COPIED_INSN]; + /* detour code buffer */ + kprobe_opcode_t *insn; + /* + * We always copy one instruction on ARM, + * so size will always be 4, and unlike x86, there is no + * need for a size field. + */ +}; #endif /* _ARM_KPROBES_H */ diff --git a/arch/arm/kernel/Makefile b/arch/arm/kernel/Makefile index 9c51a433e025..902397dd1000 100644 --- a/arch/arm/kernel/Makefile +++ b/arch/arm/kernel/Makefile @@ -52,7 +52,7 @@ obj-$(CONFIG_FUNCTION_GRAPH_TRACER) += ftrace.o insn.o obj-$(CONFIG_JUMP_LABEL) += jump_label.o insn.o patch.o obj-$(CONFIG_KEXEC) += machine_kexec.o relocate_kernel.o # Main staffs in KPROBES are in arch/arm/probes/ . -obj-$(CONFIG_KPROBES) += patch.o +obj-$(CONFIG_KPROBES) += patch.o insn.o obj-$(CONFIG_OABI_COMPAT) += sys_oabi-compat.o obj-$(CONFIG_ARM_THUMBEE) += thumbee.o obj-$(CONFIG_KGDB) += kgdb.o patch.o diff --git a/arch/arm/kernel/ftrace.c b/arch/arm/kernel/ftrace.c index b8c75e45a950..709ee1d6d4df 100644 --- a/arch/arm/kernel/ftrace.c +++ b/arch/arm/kernel/ftrace.c @@ -20,8 +20,7 @@ #include #include #include - -#include "insn.h" +#include #ifdef CONFIG_THUMB2_KERNEL #define NOP 0xf85deb04 /* pop.w {lr} */ diff --git a/arch/arm/kernel/insn.h b/arch/arm/kernel/insn.h deleted file mode 100644 index e96065da4dae..000000000000 --- a/arch/arm/kernel/insn.h +++ /dev/null @@ -1,29 +0,0 @@ -#ifndef __ASM_ARM_INSN_H -#define __ASM_ARM_INSN_H - -static inline unsigned long -arm_gen_nop(void) -{ -#ifdef CONFIG_THUMB2_KERNEL - return 0xf3af8000; /* nop.w */ -#else - return 0xe1a00000; /* mov r0, r0 */ -#endif -} - -unsigned long -__arm_gen_branch(unsigned long pc, unsigned long addr, bool link); - -static inline unsigned long -arm_gen_branch(unsigned long pc, unsigned long addr) -{ - return __arm_gen_branch(pc, addr, false); -} - -static inline unsigned long -arm_gen_branch_link(unsigned long pc, unsigned long addr) -{ - return __arm_gen_branch(pc, addr, true); -} - -#endif diff --git a/arch/arm/kernel/jump_label.c b/arch/arm/kernel/jump_label.c index d8da075959bf..e39cbf488cfe 100644 --- a/arch/arm/kernel/jump_label.c +++ b/arch/arm/kernel/jump_label.c @@ -1,8 +1,7 @@ #include #include #include - -#include "insn.h" +#include #ifdef HAVE_JUMP_LABEL diff --git a/arch/arm/probes/kprobes/Makefile b/arch/arm/probes/kprobes/Makefile index bc8d504c3d78..76a36bf102b7 100644 --- a/arch/arm/probes/kprobes/Makefile +++ b/arch/arm/probes/kprobes/Makefile @@ -7,5 +7,6 @@ obj-$(CONFIG_KPROBES) += actions-thumb.o checkers-thumb.o test-kprobes-objs += test-thumb.o else obj-$(CONFIG_KPROBES) += actions-arm.o checkers-arm.o +obj-$(CONFIG_OPTPROBES) += opt-arm.o test-kprobes-objs += test-arm.o endif diff --git a/arch/arm/probes/kprobes/core.c b/arch/arm/probes/kprobes/core.c index 3a58db4cc1c6..a4ec240ee7ba 100644 --- a/arch/arm/probes/kprobes/core.c +++ b/arch/arm/probes/kprobes/core.c @@ -163,19 +163,31 @@ void __kprobes arch_arm_kprobe(struct kprobe *p) * memory. It is also needed to atomically set the two half-words of a 32-bit * Thumb breakpoint. 
*/ -int __kprobes __arch_disarm_kprobe(void *p) -{ - struct kprobe *kp = p; - void *addr = (void *)((uintptr_t)kp->addr & ~1); - - __patch_text(addr, kp->opcode); +struct patch { + void *addr; + unsigned int insn; +}; +static int __kprobes_remove_breakpoint(void *data) +{ + struct patch *p = data; + __patch_text(p->addr, p->insn); return 0; } +void __kprobes kprobes_remove_breakpoint(void *addr, unsigned int insn) +{ + struct patch p = { + .addr = addr, + .insn = insn, + }; + stop_machine(__kprobes_remove_breakpoint, &p, cpu_online_mask); +} + void __kprobes arch_disarm_kprobe(struct kprobe *p) { - stop_machine(__arch_disarm_kprobe, p, cpu_online_mask); + kprobes_remove_breakpoint((void *)((uintptr_t)p->addr & ~1), + p->opcode); } void __kprobes arch_remove_kprobe(struct kprobe *p) diff --git a/arch/arm/probes/kprobes/core.h b/arch/arm/probes/kprobes/core.h index f88c79fe632a..b3036c587a76 100644 --- a/arch/arm/probes/kprobes/core.h +++ b/arch/arm/probes/kprobes/core.h @@ -30,6 +30,8 @@ #define KPROBE_THUMB16_BREAKPOINT_INSTRUCTION 0xde18 #define KPROBE_THUMB32_BREAKPOINT_INSTRUCTION 0xf7f0a018 +extern void kprobes_remove_breakpoint(void *addr, unsigned int insn); + enum probes_insn __kprobes kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_probes_insn *asi, const struct decode_header *h); diff --git a/arch/arm/probes/kprobes/opt-arm.c b/arch/arm/probes/kprobes/opt-arm.c new file mode 100644 index 000000000000..13d5232118df --- /dev/null +++ b/arch/arm/probes/kprobes/opt-arm.c @@ -0,0 +1,322 @@ +/* + * Kernel Probes Jump Optimization (Optprobes) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. + * + * Copyright (C) IBM Corporation, 2002, 2004 + * Copyright (C) Hitachi Ltd., 2012 + * Copyright (C) Huawei Inc., 2014 + */ + +#include +#include +#include +#include +/* for arm_gen_branch */ +#include +/* for patch_text */ +#include + +#include "core.h" + +/* + * NOTE: the first sub and add instruction will be modified according + * to the stack cost of the instruction. + */ +asm ( + ".global optprobe_template_entry\n" + "optprobe_template_entry:\n" + ".global optprobe_template_sub_sp\n" + "optprobe_template_sub_sp:" + " sub sp, sp, #0xff\n" + " stmia sp, {r0 - r14} \n" + ".global optprobe_template_add_sp\n" + "optprobe_template_add_sp:" + " add r3, sp, #0xff\n" + " str r3, [sp, #52]\n" + " mrs r4, cpsr\n" + " str r4, [sp, #64]\n" + " mov r1, sp\n" + " ldr r0, 1f\n" + " ldr r2, 2f\n" + /* + * AEABI requires an 8-bytes alignment stack. If + * SP % 8 != 0 (SP % 4 == 0 should be ensured), + * alloc more bytes here. 
+ */ + " and r4, sp, #4\n" + " sub sp, sp, r4\n" +#if __LINUX_ARM_ARCH__ >= 5 + " blx r2\n" +#else + " mov lr, pc\n" + " mov pc, r2\n" +#endif + " add sp, sp, r4\n" + " ldr r1, [sp, #64]\n" + " tst r1, #"__stringify(PSR_T_BIT)"\n" + " ldrne r2, [sp, #60]\n" + " orrne r2, #1\n" + " strne r2, [sp, #60] @ set bit0 of PC for thumb\n" + " msr cpsr_cxsf, r1\n" + " ldmia sp, {r0 - r15}\n" + ".global optprobe_template_val\n" + "optprobe_template_val:\n" + "1: .long 0\n" + ".global optprobe_template_call\n" + "optprobe_template_call:\n" + "2: .long 0\n" + ".global optprobe_template_end\n" + "optprobe_template_end:\n"); + +#define TMPL_VAL_IDX \ + ((unsigned long *)&optprobe_template_val - (unsigned long *)&optprobe_template_entry) +#define TMPL_CALL_IDX \ + ((unsigned long *)&optprobe_template_call - (unsigned long *)&optprobe_template_entry) +#define TMPL_END_IDX \ + ((unsigned long *)&optprobe_template_end - (unsigned long *)&optprobe_template_entry) +#define TMPL_ADD_SP \ + ((unsigned long *)&optprobe_template_add_sp - (unsigned long *)&optprobe_template_entry) +#define TMPL_SUB_SP \ + ((unsigned long *)&optprobe_template_sub_sp - (unsigned long *)&optprobe_template_entry) + +/* + * ARM can always optimize an instruction when using ARM ISA, except + * instructions like 'str r0, [sp, r1]' which store to stack and unable + * to determine stack space consumption statically. + */ +int arch_prepared_optinsn(struct arch_optimized_insn *optinsn) +{ + return optinsn->insn != NULL; +} + +/* + * In ARM ISA, kprobe opt always replace one instruction (4 bytes + * aligned and 4 bytes long). It is impossible to encounter another + * kprobe in the address range. So always return 0. + */ +int arch_check_optimized_kprobe(struct optimized_kprobe *op) +{ + return 0; +} + +/* Caller must ensure addr & 3 == 0 */ +static int can_optimize(struct kprobe *kp) +{ + if (kp->ainsn.stack_space < 0) + return 0; + /* + * 255 is the biggest imm can be used in 'sub r0, r0, #'. + * Number larger than 255 needs special encoding. + */ + if (kp->ainsn.stack_space > 255 - sizeof(struct pt_regs)) + return 0; + return 1; +} + +/* Free optimized instruction slot */ +static void +__arch_remove_optimized_kprobe(struct optimized_kprobe *op, int dirty) +{ + if (op->optinsn.insn) { + free_optinsn_slot(op->optinsn.insn, dirty); + op->optinsn.insn = NULL; + } +} + +extern void kprobe_handler(struct pt_regs *regs); + +static void +optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs) +{ + unsigned long flags; + struct kprobe *p = &op->kp; + struct kprobe_ctlblk *kcb = get_kprobe_ctlblk(); + + /* Save skipped registers */ + regs->ARM_pc = (unsigned long)op->kp.addr; + regs->ARM_ORIG_r0 = ~0UL; + + local_irq_save(flags); + + if (kprobe_running()) { + kprobes_inc_nmissed_count(&op->kp); + } else { + __this_cpu_write(current_kprobe, &op->kp); + kcb->kprobe_status = KPROBE_HIT_ACTIVE; + opt_pre_handler(&op->kp, regs); + __this_cpu_write(current_kprobe, NULL); + } + + /* In each case, we must singlestep the replaced instruction. 
*/ + op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs); + + local_irq_restore(flags); +} + +int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *orig) +{ + kprobe_opcode_t *code; + unsigned long rel_chk; + unsigned long val; + unsigned long stack_protect = sizeof(struct pt_regs); + + if (!can_optimize(orig)) + return -EILSEQ; + + code = get_optinsn_slot(); + if (!code) + return -ENOMEM; + + /* + * Verify if the address gap is in 32MiB range, because this uses + * a relative jump. + * + * kprobe opt use a 'b' instruction to branch to optinsn.insn. + * According to ARM manual, branch instruction is: + * + * 31 28 27 24 23 0 + * +------+---+---+---+---+----------------+ + * | cond | 1 | 0 | 1 | 0 | imm24 | + * +------+---+---+---+---+----------------+ + * + * imm24 is a signed 24 bits integer. The real branch offset is computed + * by: imm32 = SignExtend(imm24:'00', 32); + * + * So the maximum forward branch should be: + * (0x007fffff << 2) = 0x01fffffc = 0x1fffffc + * The maximum backword branch should be: + * (0xff800000 << 2) = 0xfe000000 = -0x2000000 + * + * We can simply check (rel & 0xfe000003): + * if rel is positive, (rel & 0xfe000000) shoule be 0 + * if rel is negitive, (rel & 0xfe000000) should be 0xfe000000 + * the last '3' is used for alignment checking. + */ + rel_chk = (unsigned long)((long)code - + (long)orig->addr + 8) & 0xfe000003; + + if ((rel_chk != 0) && (rel_chk != 0xfe000000)) { + /* + * Different from x86, we free code buf directly instead of + * calling __arch_remove_optimized_kprobe() because + * we have not fill any field in op. + */ + free_optinsn_slot(code, 0); + return -ERANGE; + } + + /* Copy arch-dep-instance from template. */ + memcpy(code, &optprobe_template_entry, + TMPL_END_IDX * sizeof(kprobe_opcode_t)); + + /* Adjust buffer according to instruction. */ + BUG_ON(orig->ainsn.stack_space < 0); + + stack_protect += orig->ainsn.stack_space; + + /* Should have been filtered by can_optimize(). */ + BUG_ON(stack_protect > 255); + + /* Create a 'sub sp, sp, #' */ + code[TMPL_SUB_SP] = __opcode_to_mem_arm(0xe24dd000 | stack_protect); + /* Create a 'add r3, sp, #' */ + code[TMPL_ADD_SP] = __opcode_to_mem_arm(0xe28d3000 | stack_protect); + + /* Set probe information */ + val = (unsigned long)op; + code[TMPL_VAL_IDX] = val; + + /* Set probe function call */ + val = (unsigned long)optimized_callback; + code[TMPL_CALL_IDX] = val; + + flush_icache_range((unsigned long)code, + (unsigned long)(&code[TMPL_END_IDX])); + + /* Set op->optinsn.insn means prepared. */ + op->optinsn.insn = code; + return 0; +} + +void __kprobes arch_optimize_kprobes(struct list_head *oplist) +{ + struct optimized_kprobe *op, *tmp; + + list_for_each_entry_safe(op, tmp, oplist, list) { + unsigned long insn; + WARN_ON(kprobe_disabled(&op->kp)); + + /* + * Backup instructions which will be replaced + * by jump address + */ + memcpy(op->optinsn.copied_insn, op->kp.addr, + RELATIVEJUMP_SIZE); + + insn = arm_gen_branch((unsigned long)op->kp.addr, + (unsigned long)op->optinsn.insn); + BUG_ON(insn == 0); + + /* + * Make it a conditional branch if replaced insn + * is consitional + */ + insn = (__mem_to_opcode_arm( + op->optinsn.copied_insn[0]) & 0xf0000000) | + (insn & 0x0fffffff); + + /* + * Similar to __arch_disarm_kprobe, operations which + * removing breakpoints must be wrapped by stop_machine + * to avoid racing. 
+ */ + kprobes_remove_breakpoint(op->kp.addr, insn); + + list_del_init(&op->list); + } +} + +void arch_unoptimize_kprobe(struct optimized_kprobe *op) +{ + arch_arm_kprobe(&op->kp); +} + +/* + * Recover original instructions and breakpoints from relative jumps. + * Caller must call with locking kprobe_mutex. + */ +void arch_unoptimize_kprobes(struct list_head *oplist, + struct list_head *done_list) +{ + struct optimized_kprobe *op, *tmp; + + list_for_each_entry_safe(op, tmp, oplist, list) { + arch_unoptimize_kprobe(op); + list_move(&op->list, done_list); + } +} + +int arch_within_optimized_kprobe(struct optimized_kprobe *op, + unsigned long addr) +{ + return ((unsigned long)op->kp.addr <= addr && + (unsigned long)op->kp.addr + RELATIVEJUMP_SIZE > addr); +} + +void arch_remove_optimized_kprobe(struct optimized_kprobe *op) +{ + __arch_remove_optimized_kprobe(op, 1); +} -- cgit v1.2.3-59-g8ed1b From 4cd872d973c7e1ce6a41e36db9d9352152da32d4 Mon Sep 17 00:00:00 2001 From: "Jon Medhurst (Tixy)" Date: Mon, 5 Jan 2015 19:29:40 +0800 Subject: ARM: kprobes: Fix unreliable MRS instruction tests For the instruction 'mrs Rn, cpsr' the resulting value of Rn can vary due to external factors we can't control. So get the test code to mask out these indeterminate bits. Signed-off-by: Jon Medhurst --- arch/arm/probes/kprobes/test-arm.c | 6 +++--- arch/arm/probes/kprobes/test-core.c | 19 ++++++++++--------- arch/arm/probes/kprobes/test-core.h | 33 ++++++++++++++++++++++++++++----- arch/arm/probes/kprobes/test-thumb.c | 4 ++-- 4 files changed, 43 insertions(+), 19 deletions(-) (limited to 'arch') diff --git a/arch/arm/probes/kprobes/test-arm.c b/arch/arm/probes/kprobes/test-arm.c index 9b3b1b4a0939..e72b07e8cd9a 100644 --- a/arch/arm/probes/kprobes/test-arm.c +++ b/arch/arm/probes/kprobes/test-arm.c @@ -204,9 +204,9 @@ void kprobe_arm_test_cases(void) #endif TEST_GROUP("Miscellaneous instructions") - TEST("mrs r0, cpsr") - TEST("mrspl r7, cpsr") - TEST("mrs r14, cpsr") + TEST_RMASKED("mrs r",0,~PSR_IGNORE_BITS,", cpsr") + TEST_RMASKED("mrspl r",7,~PSR_IGNORE_BITS,", cpsr") + TEST_RMASKED("mrs r",14,~PSR_IGNORE_BITS,", cpsr") TEST_UNSUPPORTED(__inst_arm(0xe10ff000) " @ mrs r15, cpsr") TEST_UNSUPPORTED("mrs r0, spsr") TEST_UNSUPPORTED("mrs lr, spsr") diff --git a/arch/arm/probes/kprobes/test-core.c b/arch/arm/probes/kprobes/test-core.c index 7c5ddd5a6afd..e495127d7571 100644 --- a/arch/arm/probes/kprobes/test-core.c +++ b/arch/arm/probes/kprobes/test-core.c @@ -1056,15 +1056,6 @@ static int test_case_run_count; static bool test_case_is_thumb; static int test_instance; -/* - * We ignore the state of the imprecise abort disable flag (CPSR.A) because this - * can change randomly as the kernel doesn't take care to preserve or initialise - * this across context switches. Also, with Security Extentions, the flag may - * not be under control of the kernel; for this reason we ignore the state of - * the FIQ disable flag CPSR.F as well. 
- */ -#define PSR_IGNORE_BITS (PSR_A_BIT | PSR_F_BIT) - static unsigned long test_check_cc(int cc, unsigned long cpsr) { int ret = arm_check_condition(cc << 28, cpsr); @@ -1271,11 +1262,21 @@ test_case_pre_handler(struct kprobe *p, struct pt_regs *regs) static int __kprobes test_after_pre_handler(struct kprobe *p, struct pt_regs *regs) { + struct test_arg *args; + if (container_of(p, struct test_probe, kprobe)->hit == test_instance) return 0; /* Already run for this test instance */ result_regs = *regs; + + /* Mask out results which are indeterminate */ result_regs.ARM_cpsr &= ~PSR_IGNORE_BITS; + for (args = current_args; args[0].type != ARG_TYPE_END; ++args) + if (args[0].type == ARG_TYPE_REG_MASKED) { + struct test_arg_regptr *arg = + (struct test_arg_regptr *)args; + result_regs.uregs[arg->reg] &= arg->val; + } /* Undo any changes done to SP by the test case */ regs->ARM_sp = (unsigned long)current_stack; diff --git a/arch/arm/probes/kprobes/test-core.h b/arch/arm/probes/kprobes/test-core.h index 9991754947bc..94285203e9f7 100644 --- a/arch/arm/probes/kprobes/test-core.h +++ b/arch/arm/probes/kprobes/test-core.h @@ -45,10 +45,11 @@ extern int kprobe_test_cc_position; * */ -#define ARG_TYPE_END 0 -#define ARG_TYPE_REG 1 -#define ARG_TYPE_PTR 2 -#define ARG_TYPE_MEM 3 +#define ARG_TYPE_END 0 +#define ARG_TYPE_REG 1 +#define ARG_TYPE_PTR 2 +#define ARG_TYPE_MEM 3 +#define ARG_TYPE_REG_MASKED 4 #define ARG_FLAG_UNSUPPORTED 0x01 #define ARG_FLAG_SUPPORTED 0x02 @@ -61,7 +62,7 @@ struct test_arg { }; struct test_arg_regptr { - u8 type; /* ARG_TYPE_REG or ARG_TYPE_PTR */ + u8 type; /* ARG_TYPE_REG or ARG_TYPE_PTR or ARG_TYPE_REG_MASKED */ u8 reg; u8 _padding[2]; u32 val; @@ -138,6 +139,12 @@ struct test_arg_end { ".short 0 \n\t" \ ".word "#val" \n\t" +#define TEST_ARG_REG_MASKED(reg, val) \ + ".byte "__stringify(ARG_TYPE_REG_MASKED)" \n\t" \ + ".byte "#reg" \n\t" \ + ".short 0 \n\t" \ + ".word "#val" \n\t" + #define TEST_ARG_END(flags) \ ".byte "__stringify(ARG_TYPE_END)" \n\t" \ ".byte "TEST_ISA flags" \n\t" \ @@ -395,6 +402,22 @@ struct test_arg_end { " "codex" \n\t" \ TESTCASE_END +#define TEST_RMASKED(code1, reg, mask, code2) \ + TESTCASE_START(code1 #reg code2) \ + TEST_ARG_REG_MASKED(reg, mask) \ + TEST_ARG_END("") \ + TEST_INSTRUCTION(code1 #reg code2) \ + TESTCASE_END + +/* + * We ignore the state of the imprecise abort disable flag (CPSR.A) because this + * can change randomly as the kernel doesn't take care to preserve or initialise + * this across context switches. Also, with Security Extensions, the flag may + * not be under control of the kernel; for this reason we ignore the state of + * the FIQ disable flag CPSR.F as well. + */ +#define PSR_IGNORE_BITS (PSR_A_BIT | PSR_F_BIT) + /* * Macros for defining space directives spread over multiple lines. 
diff --git a/arch/arm/probes/kprobes/test-thumb.c b/arch/arm/probes/kprobes/test-thumb.c index e8cf193db1ea..b683b4517458 100644 --- a/arch/arm/probes/kprobes/test-thumb.c +++ b/arch/arm/probes/kprobes/test-thumb.c @@ -778,8 +778,8 @@ CONDITION_INSTRUCTIONS(22, TEST_UNSUPPORTED("subs pc, lr, #4") - TEST("mrs r0, cpsr") - TEST("mrs r14, cpsr") + TEST_RMASKED("mrs r",0,~PSR_IGNORE_BITS,", cpsr") + TEST_RMASKED("mrs r",14,~PSR_IGNORE_BITS,", cpsr") TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8d00) " @ mrs sp, spsr") TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8f00) " @ mrs pc, spsr") TEST_UNSUPPORTED("mrs r0, spsr") -- cgit v1.2.3-59-g8ed1b From 28a1899db30a9325498aef114055506286dc8010 Mon Sep 17 00:00:00 2001 From: Wang Nan Date: Mon, 5 Jan 2015 19:29:44 +0800 Subject: ARM: kprobes: check register usage for probed instruction. This patch utilizes the previously introduced checker to check register usage for probed ARM instruction and saves it in a mask. A further patch will use such information to avoid simulation or emulation. Signed-off-by: Wang Nan Reviewed-by: Jon Medhurst Signed-off-by: Jon Medhurst --- arch/arm/include/asm/probes.h | 1 + arch/arm/probes/decode.c | 7 +++ arch/arm/probes/kprobes/actions-arm.c | 2 +- arch/arm/probes/kprobes/checkers-arm.c | 93 ++++++++++++++++++++++++++++++++++ arch/arm/probes/kprobes/checkers.h | 1 + 5 files changed, 103 insertions(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/include/asm/probes.h b/arch/arm/include/asm/probes.h index cd9e81588d83..b668e60f759c 100644 --- a/arch/arm/include/asm/probes.h +++ b/arch/arm/include/asm/probes.h @@ -41,6 +41,7 @@ struct arch_probes_insn { probes_insn_singlestep_t *insn_singlestep; probes_insn_fn_t *insn_fn; int stack_space; + unsigned long register_usage_flags; }; #endif /* __ASSEMBLY__ */ diff --git a/arch/arm/probes/decode.c b/arch/arm/probes/decode.c index f9d7c423f2cc..880ebe0cdf19 100644 --- a/arch/arm/probes/decode.c +++ b/arch/arm/probes/decode.c @@ -435,6 +435,13 @@ probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi, */ asi->stack_space = 0; + /* + * Similarly to stack_space, register_usage_flags is filled by + * checkers. Its default value is set to ~0, which is 'all + * registers are used', to prevent any potential optimization. 
+ */ + asi->register_usage_flags = ~0UL; + if (emulate) insn = prepare_emulated_insn(insn, asi, thumb); diff --git a/arch/arm/probes/kprobes/actions-arm.c b/arch/arm/probes/kprobes/actions-arm.c index 06988ef7eeb7..b9056d649607 100644 --- a/arch/arm/probes/kprobes/actions-arm.c +++ b/arch/arm/probes/kprobes/actions-arm.c @@ -341,4 +341,4 @@ const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = { [PROBES_LDMSTM] = {.decoder = kprobe_decode_ldmstm} }; -const struct decode_checker *kprobes_arm_checkers[] = {arm_stack_checker, NULL}; +const struct decode_checker *kprobes_arm_checkers[] = {arm_stack_checker, arm_regs_checker, NULL}; diff --git a/arch/arm/probes/kprobes/checkers-arm.c b/arch/arm/probes/kprobes/checkers-arm.c index f8176631d2a5..7b9817333b68 100644 --- a/arch/arm/probes/kprobes/checkers-arm.c +++ b/arch/arm/probes/kprobes/checkers-arm.c @@ -97,3 +97,96 @@ const struct decode_checker arm_stack_checker[NUM_PROBES_ARM_ACTIONS] = { [PROBES_STORE] = {.checker = arm_check_stack}, [PROBES_LDMSTM] = {.checker = arm_check_stack}, }; + +static enum probes_insn __kprobes arm_check_regs_nouse(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + asi->register_usage_flags = 0; + return INSN_GOOD; +} + +static enum probes_insn arm_check_regs_normal(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS; + int i; + + asi->register_usage_flags = 0; + for (i = 0; i < 5; regs >>= 4, insn >>= 4, i++) + if (regs & 0xf) + asi->register_usage_flags |= 1 << (insn & 0xf); + + return INSN_GOOD; +} + + +static enum probes_insn arm_check_regs_ldmstm(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + unsigned int reglist = insn & 0xffff; + unsigned int rn = (insn >> 16) & 0xf; + asi->register_usage_flags = reglist | (1 << rn); + return INSN_GOOD; +} + +static enum probes_insn arm_check_regs_mov_ip_sp(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + /* Instruction is 'mov ip, sp' i.e. 'mov r12, r13' */ + asi->register_usage_flags = (1 << 12) | (1<< 13); + return INSN_GOOD; +} + +/* + * | Rn |Rt/d| | Rm | + * LDRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1101 xxxx + * STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx + * | Rn |Rt/d| |imm4L| + * LDRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1101 xxxx + * STRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1111 xxxx + * + * Such instructions access Rt/d and its next register, so different + * from others, a specific checker is required to handle this extra + * implicit register usage. 
+ */ +static enum probes_insn arm_check_regs_ldrdstrd(probes_opcode_t insn, + struct arch_probes_insn *asi, + const struct decode_header *h) +{ + int rdt = (insn >> 12) & 0xf; + arm_check_regs_normal(insn, asi, h); + asi->register_usage_flags |= 1 << (rdt + 1); + return INSN_GOOD; +} + + +const struct decode_checker arm_regs_checker[NUM_PROBES_ARM_ACTIONS] = { + [PROBES_MRS] = {.checker = arm_check_regs_normal}, + [PROBES_SATURATING_ARITHMETIC] = {.checker = arm_check_regs_normal}, + [PROBES_MUL1] = {.checker = arm_check_regs_normal}, + [PROBES_MUL2] = {.checker = arm_check_regs_normal}, + [PROBES_MUL_ADD_LONG] = {.checker = arm_check_regs_normal}, + [PROBES_MUL_ADD] = {.checker = arm_check_regs_normal}, + [PROBES_LOAD] = {.checker = arm_check_regs_normal}, + [PROBES_LOAD_EXTRA] = {.checker = arm_check_regs_normal}, + [PROBES_STORE] = {.checker = arm_check_regs_normal}, + [PROBES_STORE_EXTRA] = {.checker = arm_check_regs_normal}, + [PROBES_DATA_PROCESSING_REG] = {.checker = arm_check_regs_normal}, + [PROBES_DATA_PROCESSING_IMM] = {.checker = arm_check_regs_normal}, + [PROBES_SEV] = {.checker = arm_check_regs_nouse}, + [PROBES_WFE] = {.checker = arm_check_regs_nouse}, + [PROBES_SATURATE] = {.checker = arm_check_regs_normal}, + [PROBES_REV] = {.checker = arm_check_regs_normal}, + [PROBES_MMI] = {.checker = arm_check_regs_normal}, + [PROBES_PACK] = {.checker = arm_check_regs_normal}, + [PROBES_EXTEND] = {.checker = arm_check_regs_normal}, + [PROBES_EXTEND_ADD] = {.checker = arm_check_regs_normal}, + [PROBES_BITFIELD] = {.checker = arm_check_regs_normal}, + [PROBES_LDMSTM] = {.checker = arm_check_regs_ldmstm}, + [PROBES_MOV_IP_SP] = {.checker = arm_check_regs_mov_ip_sp}, + [PROBES_LDRSTRD] = {.checker = arm_check_regs_ldrdstrd}, +}; diff --git a/arch/arm/probes/kprobes/checkers.h b/arch/arm/probes/kprobes/checkers.h index bddfa0e82389..cf6c9e74d666 100644 --- a/arch/arm/probes/kprobes/checkers.h +++ b/arch/arm/probes/kprobes/checkers.h @@ -47,6 +47,7 @@ extern const union decode_action stack_check_actions[]; #ifndef CONFIG_THUMB2_KERNEL extern const struct decode_checker arm_stack_checker[]; +extern const struct decode_checker arm_regs_checker[]; #else #endif extern const struct decode_checker t32_stack_checker[]; -- cgit v1.2.3-59-g8ed1b From ee3a4020f7c91ffc3c1a680c88652371b9711809 Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Sun, 21 Dec 2014 16:07:43 +0100 Subject: ARM: 8250/1: sa1100: provide OSTIMER0 clock for pxa_timer Pxa_timer clocksource requires OSTIMER0 clock to be provided. Add dummy clock returning proper rate. 
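As an illustration of how this entry is consumed, a user of the clock (the pxa_timer clocksource driver is the intended one) resolves it through clkdev by connection id. A minimal sketch using only the generic clk API, not the driver's actual code:

    #include <linux/clk.h>
    #include <linux/err.h>

    static unsigned long sa1100_ost_rate(void)
    {
        /* matches CLKDEV_INIT(NULL, "OSTIMER0", &clk_36864) below */
        struct clk *clk = clk_get(NULL, "OSTIMER0");
        unsigned long rate;

        if (IS_ERR(clk))
            return 0;
        rate = clk_get_rate(clk);   /* 3686400 */
        clk_put(clk);
        return rate;
    }
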
Signed-off-by: Dmitry Eremin-Solenikov Signed-off-by: Russell King --- arch/arm/mach-sa1100/clock.c | 12 ++++++++++++ 1 file changed, 12 insertions(+) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/clock.c b/arch/arm/mach-sa1100/clock.c index 03c75a811cb0..cbf53bb9c814 100644 --- a/arch/arm/mach-sa1100/clock.c +++ b/arch/arm/mach-sa1100/clock.c @@ -119,6 +119,17 @@ static DEFINE_CLK(gpio27, &clk_gpio27_ops); static DEFINE_CLK(cpu, &clk_cpu_ops); +static unsigned long clk_36864_get_rate(struct clk *clk) +{ + return 3686400; +} + +static struct clkops clk_36864_ops = { + .get_rate = clk_36864_get_rate, +}; + +static DEFINE_CLK(36864, &clk_36864_ops); + static struct clk_lookup sa11xx_clkregs[] = { CLKDEV_INIT("sa1111.0", NULL, &clk_gpio27), CLKDEV_INIT("sa1100-rtc", NULL, NULL), @@ -126,6 +137,7 @@ static struct clk_lookup sa11xx_clkregs[] = { CLKDEV_INIT("sa11x0-pcmcia", NULL, &clk_cpu), /* sa1111 names devices using internal offsets, PCMCIA is at 0x1800 */ CLKDEV_INIT("1800", NULL, &clk_cpu), + CLKDEV_INIT(NULL, "OSTIMER0", &clk_36864), }; static int __init sa11xx_clk_init(void) -- cgit v1.2.3-59-g8ed1b From 7a8ca0a0c480fedf91bdbadf8b90edd5374ce18b Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Sun, 21 Dec 2014 16:08:13 +0100 Subject: ARM: 8252/1: sa1100: use pxa_timer clocksource driver Use pxa_timer clocksource driver. Signed-off-by: Dmitry Eremin-Solenikov Signed-off-by: Russell King --- arch/arm/mach-sa1100/Makefile | 2 +- arch/arm/mach-sa1100/generic.c | 6 ++ arch/arm/mach-sa1100/time.c | 139 ----------------------------------------- 3 files changed, 7 insertions(+), 140 deletions(-) delete mode 100644 arch/arm/mach-sa1100/time.c (limited to 'arch') diff --git a/arch/arm/mach-sa1100/Makefile b/arch/arm/mach-sa1100/Makefile index f1114d11fe13..61ff91e76e0a 100644 --- a/arch/arm/mach-sa1100/Makefile +++ b/arch/arm/mach-sa1100/Makefile @@ -3,7 +3,7 @@ # # Common support -obj-y := clock.o generic.o irq.o time.o #nmi-oopser.o +obj-y := clock.o generic.o irq.o #nmi-oopser.o # Specific board support obj-$(CONFIG_SA1100_ASSABET) += assabet.o diff --git a/arch/arm/mach-sa1100/generic.c b/arch/arm/mach-sa1100/generic.c index d4ea142c4edd..40e0d8619a2d 100644 --- a/arch/arm/mach-sa1100/generic.c +++ b/arch/arm/mach-sa1100/generic.c @@ -33,6 +33,7 @@ #include #include "generic.h" +#include unsigned int reset_status; EXPORT_SYMBOL(reset_status); @@ -369,6 +370,11 @@ void __init sa1100_map_io(void) iotable_init(standard_io_desc, ARRAY_SIZE(standard_io_desc)); } +void __init sa1100_timer_init(void) +{ + pxa_timer_nodt_init(IRQ_OST0, io_p2v(0x90000000), 3686400); +} + /* * Disable the memory bus request/grant signals on the SA1110 to * ensure that we don't receive spurious memory requests. We set diff --git a/arch/arm/mach-sa1100/time.c b/arch/arm/mach-sa1100/time.c deleted file mode 100644 index 1dea6cfafb31..000000000000 --- a/arch/arm/mach-sa1100/time.c +++ /dev/null @@ -1,139 +0,0 @@ -/* - * linux/arch/arm/mach-sa1100/time.c - * - * Copyright (C) 1998 Deborah Wallach. - * Twiddles (C) 1999 Hugo Fiennes - * - * 2000/03/29 (C) Nicolas Pitre - * Rewritten: big cleanup, much simpler, better HZ accuracy. 
- * - */ -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include - -#define SA1100_CLOCK_FREQ 3686400 -#define SA1100_LATCH DIV_ROUND_CLOSEST(SA1100_CLOCK_FREQ, HZ) - -static u64 notrace sa1100_read_sched_clock(void) -{ - return readl_relaxed(OSCR); -} - -#define MIN_OSCR_DELTA 2 - -static irqreturn_t sa1100_ost0_interrupt(int irq, void *dev_id) -{ - struct clock_event_device *c = dev_id; - - /* Disarm the compare/match, signal the event. */ - writel_relaxed(readl_relaxed(OIER) & ~OIER_E0, OIER); - writel_relaxed(OSSR_M0, OSSR); - c->event_handler(c); - - return IRQ_HANDLED; -} - -static int -sa1100_osmr0_set_next_event(unsigned long delta, struct clock_event_device *c) -{ - unsigned long next, oscr; - - writel_relaxed(readl_relaxed(OIER) | OIER_E0, OIER); - next = readl_relaxed(OSCR) + delta; - writel_relaxed(next, OSMR0); - oscr = readl_relaxed(OSCR); - - return (signed)(next - oscr) <= MIN_OSCR_DELTA ? -ETIME : 0; -} - -static void -sa1100_osmr0_set_mode(enum clock_event_mode mode, struct clock_event_device *c) -{ - switch (mode) { - case CLOCK_EVT_MODE_ONESHOT: - case CLOCK_EVT_MODE_UNUSED: - case CLOCK_EVT_MODE_SHUTDOWN: - writel_relaxed(readl_relaxed(OIER) & ~OIER_E0, OIER); - writel_relaxed(OSSR_M0, OSSR); - break; - - case CLOCK_EVT_MODE_RESUME: - case CLOCK_EVT_MODE_PERIODIC: - break; - } -} - -#ifdef CONFIG_PM -unsigned long osmr[4], oier; - -static void sa1100_timer_suspend(struct clock_event_device *cedev) -{ - osmr[0] = readl_relaxed(OSMR0); - osmr[1] = readl_relaxed(OSMR1); - osmr[2] = readl_relaxed(OSMR2); - osmr[3] = readl_relaxed(OSMR3); - oier = readl_relaxed(OIER); -} - -static void sa1100_timer_resume(struct clock_event_device *cedev) -{ - writel_relaxed(0x0f, OSSR); - writel_relaxed(osmr[0], OSMR0); - writel_relaxed(osmr[1], OSMR1); - writel_relaxed(osmr[2], OSMR2); - writel_relaxed(osmr[3], OSMR3); - writel_relaxed(oier, OIER); - - /* - * OSMR0 is the system timer: make sure OSCR is sufficiently behind - */ - writel_relaxed(OSMR0 - SA1100_LATCH, OSCR); -} -#else -#define sa1100_timer_suspend NULL -#define sa1100_timer_resume NULL -#endif - -static struct clock_event_device ckevt_sa1100_osmr0 = { - .name = "osmr0", - .features = CLOCK_EVT_FEAT_ONESHOT, - .rating = 200, - .set_next_event = sa1100_osmr0_set_next_event, - .set_mode = sa1100_osmr0_set_mode, - .suspend = sa1100_timer_suspend, - .resume = sa1100_timer_resume, -}; - -static struct irqaction sa1100_timer_irq = { - .name = "ost0", - .flags = IRQF_TIMER | IRQF_IRQPOLL, - .handler = sa1100_ost0_interrupt, - .dev_id = &ckevt_sa1100_osmr0, -}; - -void __init sa1100_timer_init(void) -{ - writel_relaxed(0, OIER); - writel_relaxed(OSSR_M0 | OSSR_M1 | OSSR_M2 | OSSR_M3, OSSR); - - sched_clock_register(sa1100_read_sched_clock, 32, 3686400); - - ckevt_sa1100_osmr0.cpumask = cpumask_of(0); - - setup_irq(IRQ_OST0, &sa1100_timer_irq); - - clocksource_mmio_init(OSCR, "oscr", SA1100_CLOCK_FREQ, 200, 32, - clocksource_mmio_readl_up); - clockevents_config_and_register(&ckevt_sa1100_osmr0, 3686400, - MIN_OSCR_DELTA * 2, 0x7fffffff); -} -- cgit v1.2.3-59-g8ed1b From bfc9657d752c47d59dc0bab85ebdc19cf60100dd Mon Sep 17 00:00:00 2001 From: Wang Nan Date: Mon, 5 Jan 2015 19:34:47 +0800 Subject: ARM: optprobes: execute instruction during restoring if possible. This patch removes software emulation or simulation for most of probed instructions. 
If the instruction doesn't use PC relative addressing, it will be translated into following instructions in the restore code in code template: ldmia {r0 - r14} // restore all instruction except PC // direct execute the probed instruction b next_insn // branch to next instruction. Signed-off-by: Wang Nan Reviewed-by: Masami Hiramatsu Signed-off-by: Jon Medhurst --- arch/arm/include/asm/kprobes.h | 3 +++ arch/arm/include/asm/probes.h | 1 + arch/arm/probes/kprobes/opt-arm.c | 52 +++++++++++++++++++++++++++++++++++++-- 3 files changed, 54 insertions(+), 2 deletions(-) (limited to 'arch') diff --git a/arch/arm/include/asm/kprobes.h b/arch/arm/include/asm/kprobes.h index 50ff3bc7928e..3ea9be559726 100644 --- a/arch/arm/include/asm/kprobes.h +++ b/arch/arm/include/asm/kprobes.h @@ -57,6 +57,9 @@ extern __visible kprobe_opcode_t optprobe_template_call; extern __visible kprobe_opcode_t optprobe_template_end; extern __visible kprobe_opcode_t optprobe_template_sub_sp; extern __visible kprobe_opcode_t optprobe_template_add_sp; +extern __visible kprobe_opcode_t optprobe_template_restore_begin; +extern __visible kprobe_opcode_t optprobe_template_restore_orig_insn; +extern __visible kprobe_opcode_t optprobe_template_restore_end; #define MAX_OPTIMIZED_LENGTH 4 #define MAX_OPTINSN_SIZE \ diff --git a/arch/arm/include/asm/probes.h b/arch/arm/include/asm/probes.h index b668e60f759c..1e5b9bb92270 100644 --- a/arch/arm/include/asm/probes.h +++ b/arch/arm/include/asm/probes.h @@ -42,6 +42,7 @@ struct arch_probes_insn { probes_insn_fn_t *insn_fn; int stack_space; unsigned long register_usage_flags; + bool kprobe_direct_exec; }; #endif /* __ASSEMBLY__ */ diff --git a/arch/arm/probes/kprobes/opt-arm.c b/arch/arm/probes/kprobes/opt-arm.c index 13d5232118df..bcdecc25461b 100644 --- a/arch/arm/probes/kprobes/opt-arm.c +++ b/arch/arm/probes/kprobes/opt-arm.c @@ -31,6 +31,14 @@ #include "core.h" +/* + * See register_usage_flags. If the probed instruction doesn't use PC, + * we can copy it into template and have it executed directly without + * simulation or emulation. + */ +#define ARM_REG_PC 15 +#define can_kprobe_direct_exec(m) (!test_bit(ARM_REG_PC, &(m))) + /* * NOTE: the first sub and add instruction will be modified according * to the stack cost of the instruction. 
@@ -71,7 +79,15 @@ asm ( " orrne r2, #1\n" " strne r2, [sp, #60] @ set bit0 of PC for thumb\n" " msr cpsr_cxsf, r1\n" + ".global optprobe_template_restore_begin\n" + "optprobe_template_restore_begin:\n" " ldmia sp, {r0 - r15}\n" + ".global optprobe_template_restore_orig_insn\n" + "optprobe_template_restore_orig_insn:\n" + " nop\n" + ".global optprobe_template_restore_end\n" + "optprobe_template_restore_end:\n" + " nop\n" ".global optprobe_template_val\n" "optprobe_template_val:\n" "1: .long 0\n" @@ -91,6 +107,12 @@ asm ( ((unsigned long *)&optprobe_template_add_sp - (unsigned long *)&optprobe_template_entry) #define TMPL_SUB_SP \ ((unsigned long *)&optprobe_template_sub_sp - (unsigned long *)&optprobe_template_entry) +#define TMPL_RESTORE_BEGIN \ + ((unsigned long *)&optprobe_template_restore_begin - (unsigned long *)&optprobe_template_entry) +#define TMPL_RESTORE_ORIGN_INSN \ + ((unsigned long *)&optprobe_template_restore_orig_insn - (unsigned long *)&optprobe_template_entry) +#define TMPL_RESTORE_END \ + ((unsigned long *)&optprobe_template_restore_end - (unsigned long *)&optprobe_template_entry) /* * ARM can always optimize an instruction when using ARM ISA, except @@ -160,8 +182,12 @@ optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs) __this_cpu_write(current_kprobe, NULL); } - /* In each case, we must singlestep the replaced instruction. */ - op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs); + /* + * We singlestep the replaced instruction only when it can't be + * executed directly during restore. + */ + if (!p->ainsn.kprobe_direct_exec) + op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs); local_irq_restore(flags); } @@ -243,6 +269,28 @@ int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *or val = (unsigned long)optimized_callback; code[TMPL_CALL_IDX] = val; + /* If possible, copy insn and have it executed during restore */ + orig->ainsn.kprobe_direct_exec = false; + if (can_kprobe_direct_exec(orig->ainsn.register_usage_flags)) { + kprobe_opcode_t final_branch = arm_gen_branch( + (unsigned long)(&code[TMPL_RESTORE_END]), + (unsigned long)(op->kp.addr) + 4); + if (final_branch != 0) { + /* + * Replace original 'ldmia sp, {r0 - r15}' with + * 'ldmia {r0 - r14}', restore all registers except pc. + */ + code[TMPL_RESTORE_BEGIN] = __opcode_to_mem_arm(0xe89d7fff); + + /* The original probed instruction */ + code[TMPL_RESTORE_ORIGN_INSN] = __opcode_to_mem_arm(orig->opcode); + + /* Jump back to next instruction */ + code[TMPL_RESTORE_END] = __opcode_to_mem_arm(final_branch); + orig->ainsn.kprobe_direct_exec = true; + } + } + flush_icache_range((unsigned long)code, (unsigned long)(&code[TMPL_END_IDX])); -- cgit v1.2.3-59-g8ed1b From 0b7857dbeb256d1a9c0c606ec4b5f417e159040b Mon Sep 17 00:00:00 2001 From: Yalin Wang Date: Fri, 16 Jan 2015 02:45:55 +0100 Subject: ARM: 8287/1: add bitrev.h file to support rbit instruction This patch add bitrev.h file to support rbit instruction, so that we can do bitrev operation by hardware. 
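With HAVE_ARCH_BITREVERSE selected, the generic bitrev32()/bitrev16()/bitrev8() helpers in <linux/bitrev.h> are expected to pick up these single-instruction versions in place of the byte-table lookup, so existing callers need no change. A small illustrative use (values shown are plain bit reversals):

    #include <linux/bitrev.h>

    static void bitrev_example(void)
    {
        u32 r32 = bitrev32(0x12345678);  /* 0x1e6a2c48, one rbit */
        u16 r16 = bitrev16(0x8000);      /* 0x0001 */
        u8  r8  = bitrev8(0x01);         /* 0x80 */

        (void)r32; (void)r16; (void)r8;
    }
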
Signed-off-by: Yalin Wang Signed-off-by: Russell King --- arch/arm/Kconfig | 1 + arch/arm/include/asm/bitrev.h | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 arch/arm/include/asm/bitrev.h (limited to 'arch') diff --git a/arch/arm/Kconfig b/arch/arm/Kconfig index 97d07ed60a0b..c3a986d45125 100644 --- a/arch/arm/Kconfig +++ b/arch/arm/Kconfig @@ -29,6 +29,7 @@ config ARM select HANDLE_DOMAIN_IRQ select HARDIRQS_SW_RESEND select HAVE_ARCH_AUDITSYSCALL if (AEABI && !OABI_COMPAT) + select HAVE_ARCH_BITREVERSE if (CPU_32v7M || CPU_32v7) && !CPU_32v6 select HAVE_ARCH_JUMP_LABEL if !XIP_KERNEL select HAVE_ARCH_KGDB select HAVE_ARCH_SECCOMP_FILTER if (AEABI && !OABI_COMPAT) diff --git a/arch/arm/include/asm/bitrev.h b/arch/arm/include/asm/bitrev.h new file mode 100644 index 000000000000..ec291c350ea3 --- /dev/null +++ b/arch/arm/include/asm/bitrev.h @@ -0,0 +1,20 @@ +#ifndef __ASM_BITREV_H +#define __ASM_BITREV_H + +static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x) +{ + __asm__ ("rbit %0, %1" : "=r" (x) : "r" (x)); + return x; +} + +static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x) +{ + return __arch_bitrev32((u32)x) >> 16; +} + +static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x) +{ + return __arch_bitrev32((u32)x) >> 24; +} + +#endif -- cgit v1.2.3-59-g8ed1b From 944e9df1d4f71f946aa044abc00726346e3c597c Mon Sep 17 00:00:00 2001 From: Marek Szyprowski Date: Thu, 8 Jan 2015 07:48:58 +0100 Subject: ARM: 8257/1: OMAP2+: use common l2cache initialization code This patch implements generic DT L2C initialisation (the one from init_IRQ in arch/arm/kernel/irq.c) for Omap4 and AM43 platforms and kills the SoC specific stuff in arch/arm/mach-omap2/omap4-common.c. Signed-off-by: Marek Szyprowski Tested-by: Nishanth Menon Acked-by: Nishanth Menon Acked-by: Tony Lindgren Signed-off-by: Russell King --- arch/arm/mach-omap2/board-generic.c | 6 ++++++ arch/arm/mach-omap2/common.h | 8 ++++++++ arch/arm/mach-omap2/omap4-common.c | 16 +--------------- 3 files changed, 15 insertions(+), 15 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-omap2/board-generic.c b/arch/arm/mach-omap2/board-generic.c index 608079a1aba6..c5c480b76da5 100644 --- a/arch/arm/mach-omap2/board-generic.c +++ b/arch/arm/mach-omap2/board-generic.c @@ -171,6 +171,9 @@ static const char *const omap4_boards_compat[] __initconst = { }; DT_MACHINE_START(OMAP4_DT, "Generic OMAP4 (Flattened Device Tree)") + .l2c_aux_val = OMAP_L2C_AUX_CTRL, + .l2c_aux_mask = 0xcf9fffff, + .l2c_write_sec = omap4_l2c310_write_sec, .reserve = omap_reserve, .smp = smp_ops(omap4_smp_ops), .map_io = omap4_map_io, @@ -214,6 +217,9 @@ static const char *const am43_boards_compat[] __initconst = { }; DT_MACHINE_START(AM43_DT, "Generic AM43 (Flattened Device Tree)") + .l2c_aux_val = OMAP_L2C_AUX_CTRL, + .l2c_aux_mask = 0xcf9fffff, + .l2c_write_sec = omap4_l2c310_write_sec, .map_io = am33xx_map_io, .init_early = am43xx_init_early, .init_late = am43xx_init_late, diff --git a/arch/arm/mach-omap2/common.h b/arch/arm/mach-omap2/common.h index 377eea849e7b..2610c9f8d29f 100644 --- a/arch/arm/mach-omap2/common.h +++ b/arch/arm/mach-omap2/common.h @@ -35,6 +35,7 @@ #include #include +#include #include "i2c.h" #include "serial.h" @@ -94,11 +95,18 @@ extern void omap3_gptimer_timer_init(void); extern void omap4_local_timer_init(void); #ifdef CONFIG_CACHE_L2X0 int omap_l2_cache_init(void); +#define OMAP_L2C_AUX_CTRL (L2C_AUX_CTRL_SHARED_OVERRIDE | \ + L310_AUX_CTRL_DATA_PREFETCH | \ + 
L310_AUX_CTRL_INSTR_PREFETCH) +void omap4_l2c310_write_sec(unsigned long val, unsigned reg); #else static inline int omap_l2_cache_init(void) { return 0; } + +#define OMAP_L2C_AUX_CTRL 0 +#define omap4_l2c310_write_sec NULL #endif extern void omap5_realtime_timer_init(void); diff --git a/arch/arm/mach-omap2/omap4-common.c b/arch/arm/mach-omap2/omap4-common.c index b7cb44abe49b..fe99ceff2e2d 100644 --- a/arch/arm/mach-omap2/omap4-common.c +++ b/arch/arm/mach-omap2/omap4-common.c @@ -166,7 +166,7 @@ void __iomem *omap4_get_l2cache_base(void) return l2cache_base; } -static void omap4_l2c310_write_sec(unsigned long val, unsigned reg) +void omap4_l2c310_write_sec(unsigned long val, unsigned reg) { unsigned smc_op; @@ -201,24 +201,10 @@ static void omap4_l2c310_write_sec(unsigned long val, unsigned reg) int __init omap_l2_cache_init(void) { - u32 aux_ctrl; - /* Static mapping, never released */ l2cache_base = ioremap(OMAP44XX_L2CACHE_BASE, SZ_4K); if (WARN_ON(!l2cache_base)) return -ENOMEM; - - /* 16-way associativity, parity disabled, way size - 64KB (es2.0 +) */ - aux_ctrl = L2C_AUX_CTRL_SHARED_OVERRIDE | - L310_AUX_CTRL_DATA_PREFETCH | - L310_AUX_CTRL_INSTR_PREFETCH; - - outer_cache.write_sec = omap4_l2c310_write_sec; - if (of_have_populated_dt()) - l2x0_of_init(aux_ctrl, 0xcf9fffff); - else - l2x0_init(l2cache_base, aux_ctrl, 0xcf9fffff); - return 0; } #endif -- cgit v1.2.3-59-g8ed1b From 00218241aa0846e75d31b1dbadb5f8a76be1cc97 Mon Sep 17 00:00:00 2001 From: Marek Szyprowski Date: Thu, 8 Jan 2015 07:49:41 +0100 Subject: ARM: 8258/1: l2c: use l2c_write_sec() for restoring latency and filter regs All four register for latency and filter settings cannot be written in non-secure mode and they should go through l2c_write_sec(). More on this can be found in CoreLink Level 2 Cache Controller L2C-310 Technical Reference Manual, 3.2. Register summary, table 3.1. This have been checked the TRM for r3p3, but it should be uniform for all revisions. 
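For reference, the helper being switched to chooses between the platform's secure write hook and a plain MMIO write; a simplified sketch of the existing cache-l2x0.c helper (not a verbatim copy):

    static void l2c_write_sec(unsigned long val, void __iomem *base, unsigned reg)
    {
        if (outer_cache.write_sec)
            outer_cache.write_sec(val, reg);   /* e.g. routed via an SMC */
        else
            writel_relaxed(val, base + reg);   /* direct non-secure write */
    }
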
Reported-by: Nishanth Menon Suggested-by: Tomasz Figa Signed-off-by: Marek Szyprowski Tested-by: Nishanth Menon Acked-by: Nishanth Menon Acked-by: Tony Lindgren Signed-off-by: Russell King --- arch/arm/mm/cache-l2x0.c | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) (limited to 'arch') diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c index 5e65ca8dea62..b83c401ca50c 100644 --- a/arch/arm/mm/cache-l2x0.c +++ b/arch/arm/mm/cache-l2x0.c @@ -623,14 +623,14 @@ static void l2c310_resume(void) unsigned revision; /* restore pl310 setup */ - writel_relaxed(l2x0_saved_regs.tag_latency, - base + L310_TAG_LATENCY_CTRL); - writel_relaxed(l2x0_saved_regs.data_latency, - base + L310_DATA_LATENCY_CTRL); - writel_relaxed(l2x0_saved_regs.filter_end, - base + L310_ADDR_FILTER_END); - writel_relaxed(l2x0_saved_regs.filter_start, - base + L310_ADDR_FILTER_START); + l2c_write_sec(l2x0_saved_regs.tag_latency, base, + L310_TAG_LATENCY_CTRL); + l2c_write_sec(l2x0_saved_regs.data_latency, base, + L310_DATA_LATENCY_CTRL); + l2c_write_sec(l2x0_saved_regs.filter_end, base, + L310_ADDR_FILTER_END); + l2c_write_sec(l2x0_saved_regs.filter_start, base, + L310_ADDR_FILTER_START); revision = readl_relaxed(base + L2X0_CACHE_ID) & L2X0_CACHE_ID_RTL_MASK; @@ -1135,28 +1135,28 @@ static void __init l2c310_of_parse(const struct device_node *np, of_property_read_u32_array(np, "arm,tag-latency", tag, ARRAY_SIZE(tag)); if (tag[0] && tag[1] && tag[2]) - writel_relaxed( + l2c_write_sec( L310_LATENCY_CTRL_RD(tag[0] - 1) | L310_LATENCY_CTRL_WR(tag[1] - 1) | L310_LATENCY_CTRL_SETUP(tag[2] - 1), - l2x0_base + L310_TAG_LATENCY_CTRL); + l2x0_base, L310_TAG_LATENCY_CTRL); of_property_read_u32_array(np, "arm,data-latency", data, ARRAY_SIZE(data)); if (data[0] && data[1] && data[2]) - writel_relaxed( + l2c_write_sec( L310_LATENCY_CTRL_RD(data[0] - 1) | L310_LATENCY_CTRL_WR(data[1] - 1) | L310_LATENCY_CTRL_SETUP(data[2] - 1), - l2x0_base + L310_DATA_LATENCY_CTRL); + l2x0_base, L310_DATA_LATENCY_CTRL); of_property_read_u32_array(np, "arm,filter-ranges", filter, ARRAY_SIZE(filter)); if (filter[1]) { - writel_relaxed(ALIGN(filter[0] + filter[1], SZ_1M), - l2x0_base + L310_ADDR_FILTER_END); - writel_relaxed((filter[0] & ~(SZ_1M - 1)) | L310_ADDR_FILTER_EN, - l2x0_base + L310_ADDR_FILTER_START); + l2c_write_sec(ALIGN(filter[0] + filter[1], SZ_1M), + l2x0_base, L310_ADDR_FILTER_END); + l2c_write_sec((filter[0] & ~(SZ_1M - 1)) | L310_ADDR_FILTER_EN, + l2x0_base, L310_ADDR_FILTER_START); } ret = l2x0_cache_size_of_parse(np, aux_val, aux_mask, &assoc, SZ_512K); -- cgit v1.2.3-59-g8ed1b From 6b49241ac2525818508ee2baff9a58541c65421c Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:50:29 +0100 Subject: ARM: 8259/1: l2c: Refactor the driver to use commit-like interface Certain implementations of secure hypervisors (namely the one found on Samsung Exynos-based boards) do not provide access to individual L2C registers. This makes the .write_sec()-based interface insufficient and provoking ugly hacks. This patch is first step to make the driver not rely on availability of writes to individual registers. This is achieved by refactoring the driver to use a commit-like operation scheme: all register values are prepared first and stored in an instance of l2x0_regs struct and then a single callback is responsible to flush those values to the hardware. 
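In miniature, the scheme can be modelled as follows (a standalone illustration of the pattern, not kernel code): settings are staged in a plain struct and a single callback flushes the whole set, so a secure-firmware backend only needs to replace that one commit point.

    struct l2c_staged {
        unsigned long aux_ctrl;
        unsigned long tag_latency;
        unsigned long data_latency;
        unsigned long prefetch_ctrl;
    };

    struct l2c_backend {
        /* one commit point: MMIO writes, or one secure monitor call */
        void (*configure)(const struct l2c_staged *regs);
    };

    static void l2c_commit(const struct l2c_backend *be,
                           const struct l2c_staged *regs)
    {
        be->configure(regs);   /* nothing else touches the hardware */
    }
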
[mszyprow: rebased onto 'ARM: l2c: use l2c_write_sec() for restoring latency and filter regs' patch] Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Tested-by: Nishanth Menon Acked-by: Tony Lindgren Signed-off-by: Russell King --- arch/arm/mm/cache-l2x0.c | 212 ++++++++++++++++++++++++++--------------------- 1 file changed, 116 insertions(+), 96 deletions(-) (limited to 'arch') diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c index b83c401ca50c..dde0d54ac41e 100644 --- a/arch/arm/mm/cache-l2x0.c +++ b/arch/arm/mm/cache-l2x0.c @@ -41,12 +41,14 @@ struct l2c_init_data { void (*enable)(void __iomem *, u32, unsigned); void (*fixup)(void __iomem *, u32, struct outer_cache_fns *); void (*save)(void __iomem *); + void (*configure)(void __iomem *); struct outer_cache_fns outer_cache; }; #define CACHE_LINE_SIZE 32 static void __iomem *l2x0_base; +static const struct l2c_init_data *l2x0_data; static DEFINE_RAW_SPINLOCK(l2x0_lock); static u32 l2x0_way_mask; /* Bitmask of active ways */ static u32 l2x0_size; @@ -106,6 +108,14 @@ static inline void l2c_unlock(void __iomem *base, unsigned num) } } +static void l2c_configure(void __iomem *base) +{ + if (l2x0_data->configure) + l2x0_data->configure(base); + + l2c_write_sec(l2x0_saved_regs.aux_ctrl, base, L2X0_AUX_CTRL); +} + /* * Enable the L2 cache controller. This function must only be * called when the cache controller is known to be disabled. @@ -114,7 +124,12 @@ static void l2c_enable(void __iomem *base, u32 aux, unsigned num_lock) { unsigned long flags; - l2c_write_sec(aux, base, L2X0_AUX_CTRL); + /* Do not touch the controller if already enabled. */ + if (readl_relaxed(base + L2X0_CTRL) & L2X0_CTRL_EN) + return; + + l2x0_saved_regs.aux_ctrl = aux; + l2c_configure(base); l2c_unlock(base, num_lock); @@ -208,6 +223,11 @@ static void l2c_save(void __iomem *base) l2x0_saved_regs.aux_ctrl = readl_relaxed(l2x0_base + L2X0_AUX_CTRL); } +static void l2c_resume(void) +{ + l2c_enable(l2x0_base, l2x0_saved_regs.aux_ctrl, l2x0_data->num_lock); +} + /* * L2C-210 specific code. 
* @@ -288,14 +308,6 @@ static void l2c210_sync(void) __l2c210_cache_sync(l2x0_base); } -static void l2c210_resume(void) -{ - void __iomem *base = l2x0_base; - - if (!(readl_relaxed(base + L2X0_CTRL) & L2X0_CTRL_EN)) - l2c_enable(base, l2x0_saved_regs.aux_ctrl, 1); -} - static const struct l2c_init_data l2c210_data __initconst = { .type = "L2C-210", .way_size_0 = SZ_8K, @@ -309,7 +321,7 @@ static const struct l2c_init_data l2c210_data __initconst = { .flush_all = l2c210_flush_all, .disable = l2c_disable, .sync = l2c210_sync, - .resume = l2c210_resume, + .resume = l2c_resume, }, }; @@ -466,7 +478,7 @@ static const struct l2c_init_data l2c220_data = { .flush_all = l2c220_flush_all, .disable = l2c_disable, .sync = l2c220_sync, - .resume = l2c210_resume, + .resume = l2c_resume, }, }; @@ -615,39 +627,29 @@ static void __init l2c310_save(void __iomem *base) L310_POWER_CTRL); } -static void l2c310_resume(void) +static void l2c310_configure(void __iomem *base) { - void __iomem *base = l2x0_base; + unsigned revision; - if (!(readl_relaxed(base + L2X0_CTRL) & L2X0_CTRL_EN)) { - unsigned revision; - - /* restore pl310 setup */ - l2c_write_sec(l2x0_saved_regs.tag_latency, base, - L310_TAG_LATENCY_CTRL); - l2c_write_sec(l2x0_saved_regs.data_latency, base, - L310_DATA_LATENCY_CTRL); - l2c_write_sec(l2x0_saved_regs.filter_end, base, - L310_ADDR_FILTER_END); - l2c_write_sec(l2x0_saved_regs.filter_start, base, - L310_ADDR_FILTER_START); - - revision = readl_relaxed(base + L2X0_CACHE_ID) & - L2X0_CACHE_ID_RTL_MASK; - - if (revision >= L310_CACHE_ID_RTL_R2P0) - l2c_write_sec(l2x0_saved_regs.prefetch_ctrl, base, - L310_PREFETCH_CTRL); - if (revision >= L310_CACHE_ID_RTL_R3P0) - l2c_write_sec(l2x0_saved_regs.pwr_ctrl, base, - L310_POWER_CTRL); - - l2c_enable(base, l2x0_saved_regs.aux_ctrl, 8); - - /* Re-enable full-line-of-zeros for Cortex-A9 */ - if (l2x0_saved_regs.aux_ctrl & L310_AUX_CTRL_FULL_LINE_ZERO) - set_auxcr(get_auxcr() | BIT(3) | BIT(2) | BIT(1)); - } + /* restore pl310 setup */ + l2c_write_sec(l2x0_saved_regs.tag_latency, base, + L310_TAG_LATENCY_CTRL); + l2c_write_sec(l2x0_saved_regs.data_latency, base, + L310_DATA_LATENCY_CTRL); + l2c_write_sec(l2x0_saved_regs.filter_end, base, + L310_ADDR_FILTER_END); + l2c_write_sec(l2x0_saved_regs.filter_start, base, + L310_ADDR_FILTER_START); + + revision = readl_relaxed(base + L2X0_CACHE_ID) & + L2X0_CACHE_ID_RTL_MASK; + + if (revision >= L310_CACHE_ID_RTL_R2P0) + l2c_write_sec(l2x0_saved_regs.prefetch_ctrl, base, + L310_PREFETCH_CTRL); + if (revision >= L310_CACHE_ID_RTL_R3P0) + l2c_write_sec(l2x0_saved_regs.pwr_ctrl, base, + L310_POWER_CTRL); } static int l2c310_cpu_enable_flz(struct notifier_block *nb, unsigned long act, void *data) @@ -699,6 +701,23 @@ static void __init l2c310_enable(void __iomem *base, u32 aux, unsigned num_lock) aux &= ~(L310_AUX_CTRL_FULL_LINE_ZERO | L310_AUX_CTRL_EARLY_BRESP); } + /* r3p0 or later has power control register */ + if (rev >= L310_CACHE_ID_RTL_R3P0) + l2x0_saved_regs.pwr_ctrl = L310_DYNAMIC_CLK_GATING_EN | + L310_STNDBY_MODE_EN; + + /* + * Always enable non-secure access to the lockdown registers - + * we write to them as part of the L2C enable sequence so they + * need to be accessible. + */ + aux |= L310_AUX_CTRL_NS_LOCKDOWN; + + l2c_enable(base, aux, num_lock); + + /* Read back resulting AUX_CTRL value as it could have been altered. 
*/ + aux = readl_relaxed(base + L2X0_AUX_CTRL); + if (aux & (L310_AUX_CTRL_DATA_PREFETCH | L310_AUX_CTRL_INSTR_PREFETCH)) { u32 prefetch = readl_relaxed(base + L310_PREFETCH_CTRL); @@ -712,23 +731,12 @@ static void __init l2c310_enable(void __iomem *base, u32 aux, unsigned num_lock) if (rev >= L310_CACHE_ID_RTL_R3P0) { u32 power_ctrl; - l2c_write_sec(L310_DYNAMIC_CLK_GATING_EN | L310_STNDBY_MODE_EN, - base, L310_POWER_CTRL); power_ctrl = readl_relaxed(base + L310_POWER_CTRL); pr_info("L2C-310 dynamic clock gating %sabled, standby mode %sabled\n", power_ctrl & L310_DYNAMIC_CLK_GATING_EN ? "en" : "dis", power_ctrl & L310_STNDBY_MODE_EN ? "en" : "dis"); } - /* - * Always enable non-secure access to the lockdown registers - - * we write to them as part of the L2C enable sequence so they - * need to be accessible. - */ - aux |= L310_AUX_CTRL_NS_LOCKDOWN; - - l2c_enable(base, aux, num_lock); - if (aux & L310_AUX_CTRL_FULL_LINE_ZERO) { set_auxcr(get_auxcr() | BIT(3) | BIT(2) | BIT(1)); cpu_notifier(l2c310_cpu_enable_flz, 0); @@ -760,11 +768,11 @@ static void __init l2c310_fixup(void __iomem *base, u32 cache_id, if (revision >= L310_CACHE_ID_RTL_R3P0 && revision < L310_CACHE_ID_RTL_R3P2) { - u32 val = readl_relaxed(base + L310_PREFETCH_CTRL); + u32 val = l2x0_saved_regs.prefetch_ctrl; /* I don't think bit23 is required here... but iMX6 does so */ if (val & (BIT(30) | BIT(23))) { val &= ~(BIT(30) | BIT(23)); - l2c_write_sec(val, base, L310_PREFETCH_CTRL); + l2x0_saved_regs.prefetch_ctrl = val; errata[n++] = "752271"; } } @@ -800,6 +808,15 @@ static void l2c310_disable(void) l2c_disable(); } +static void l2c310_resume(void) +{ + l2c_resume(); + + /* Re-enable full-line-of-zeros for Cortex-A9 */ + if (l2x0_saved_regs.aux_ctrl & L310_AUX_CTRL_FULL_LINE_ZERO) + set_auxcr(get_auxcr() | BIT(3) | BIT(2) | BIT(1)); +} + static const struct l2c_init_data l2c310_init_fns __initconst = { .type = "L2C-310", .way_size_0 = SZ_8K, @@ -807,6 +824,7 @@ static const struct l2c_init_data l2c310_init_fns __initconst = { .enable = l2c310_enable, .fixup = l2c310_fixup, .save = l2c310_save, + .configure = l2c310_configure, .outer_cache = { .inv_range = l2c210_inv_range, .clean_range = l2c210_clean_range, @@ -818,13 +836,21 @@ static const struct l2c_init_data l2c310_init_fns __initconst = { }, }; -static void __init __l2c_init(const struct l2c_init_data *data, - u32 aux_val, u32 aux_mask, u32 cache_id) +static int __init __l2c_init(const struct l2c_init_data *data, + u32 aux_val, u32 aux_mask, u32 cache_id) { struct outer_cache_fns fns; unsigned way_size_bits, ways; u32 aux, old_aux; + /* + * Save the pointer globally so that callbacks which do not receive + * context from callers can access the structure. + */ + l2x0_data = kmemdup(data, sizeof(*data), GFP_KERNEL); + if (!l2x0_data) + return -ENOMEM; + /* * Sanity check the aux values. 
aux_mask is the bits we preserve * from reading the hardware register, and aux_val is the bits we @@ -910,6 +936,8 @@ static void __init __l2c_init(const struct l2c_init_data *data, data->type, ways, l2x0_size >> 10); pr_info("%s: CACHE_ID 0x%08x, AUX_CTRL 0x%08x\n", data->type, cache_id, aux); + + return 0; } void __init l2x0_init(void __iomem *base, u32 aux_val, u32 aux_mask) @@ -936,6 +964,10 @@ void __init l2x0_init(void __iomem *base, u32 aux_val, u32 aux_mask) break; } + /* Read back current (default) hardware configuration */ + if (data->save) + data->save(l2x0_base); + __l2c_init(data, aux_val, aux_mask, cache_id); } @@ -1102,7 +1134,7 @@ static const struct l2c_init_data of_l2c210_data __initconst = { .flush_all = l2c210_flush_all, .disable = l2c_disable, .sync = l2c210_sync, - .resume = l2c210_resume, + .resume = l2c_resume, }, }; @@ -1120,7 +1152,7 @@ static const struct l2c_init_data of_l2c220_data __initconst = { .flush_all = l2c220_flush_all, .disable = l2c_disable, .sync = l2c220_sync, - .resume = l2c210_resume, + .resume = l2c_resume, }, }; @@ -1135,28 +1167,26 @@ static void __init l2c310_of_parse(const struct device_node *np, of_property_read_u32_array(np, "arm,tag-latency", tag, ARRAY_SIZE(tag)); if (tag[0] && tag[1] && tag[2]) - l2c_write_sec( + l2x0_saved_regs.tag_latency = L310_LATENCY_CTRL_RD(tag[0] - 1) | L310_LATENCY_CTRL_WR(tag[1] - 1) | - L310_LATENCY_CTRL_SETUP(tag[2] - 1), - l2x0_base, L310_TAG_LATENCY_CTRL); + L310_LATENCY_CTRL_SETUP(tag[2] - 1); of_property_read_u32_array(np, "arm,data-latency", data, ARRAY_SIZE(data)); if (data[0] && data[1] && data[2]) - l2c_write_sec( + l2x0_saved_regs.data_latency = L310_LATENCY_CTRL_RD(data[0] - 1) | L310_LATENCY_CTRL_WR(data[1] - 1) | - L310_LATENCY_CTRL_SETUP(data[2] - 1), - l2x0_base, L310_DATA_LATENCY_CTRL); + L310_LATENCY_CTRL_SETUP(data[2] - 1); of_property_read_u32_array(np, "arm,filter-ranges", filter, ARRAY_SIZE(filter)); if (filter[1]) { - l2c_write_sec(ALIGN(filter[0] + filter[1], SZ_1M), - l2x0_base, L310_ADDR_FILTER_END); - l2c_write_sec((filter[0] & ~(SZ_1M - 1)) | L310_ADDR_FILTER_EN, - l2x0_base, L310_ADDR_FILTER_START); + l2x0_saved_regs.filter_end = + ALIGN(filter[0] + filter[1], SZ_1M); + l2x0_saved_regs.filter_start = (filter[0] & ~(SZ_1M - 1)) + | L310_ADDR_FILTER_EN; } ret = l2x0_cache_size_of_parse(np, aux_val, aux_mask, &assoc, SZ_512K); @@ -1188,6 +1218,7 @@ static const struct l2c_init_data of_l2c310_data __initconst = { .enable = l2c310_enable, .fixup = l2c310_fixup, .save = l2c310_save, + .configure = l2c310_configure, .outer_cache = { .inv_range = l2c210_inv_range, .clean_range = l2c210_clean_range, @@ -1216,6 +1247,7 @@ static const struct l2c_init_data of_l2c310_coherent_data __initconst = { .enable = l2c310_enable, .fixup = l2c310_fixup, .save = l2c310_save, + .configure = l2c310_configure, .outer_cache = { .inv_range = l2c210_inv_range, .clean_range = l2c210_clean_range, @@ -1330,16 +1362,6 @@ static void aurora_save(void __iomem *base) l2x0_saved_regs.aux_ctrl = readl_relaxed(base + L2X0_AUX_CTRL); } -static void aurora_resume(void) -{ - void __iomem *base = l2x0_base; - - if (!(readl(base + L2X0_CTRL) & L2X0_CTRL_EN)) { - writel_relaxed(l2x0_saved_regs.aux_ctrl, base + L2X0_AUX_CTRL); - writel_relaxed(l2x0_saved_regs.ctrl, base + L2X0_CTRL); - } -} - /* * For Aurora cache in no outer mode, enable via the CP15 coprocessor * broadcasting of cache commands to L2. 
@@ -1401,7 +1423,7 @@ static const struct l2c_init_data of_aurora_with_outer_data __initconst = { .flush_all = l2x0_flush_all, .disable = l2x0_disable, .sync = l2x0_cache_sync, - .resume = aurora_resume, + .resume = l2c_resume, }, }; @@ -1414,7 +1436,7 @@ static const struct l2c_init_data of_aurora_no_outer_data __initconst = { .fixup = aurora_fixup, .save = aurora_save, .outer_cache = { - .resume = aurora_resume, + .resume = l2c_resume, }, }; @@ -1562,6 +1584,7 @@ static const struct l2c_init_data of_bcm_l2x0_data __initconst = { .of_parse = l2c310_of_parse, .enable = l2c310_enable, .save = l2c310_save, + .configure = l2c310_configure, .outer_cache = { .inv_range = bcm_inv_range, .clean_range = bcm_clean_range, @@ -1583,18 +1606,12 @@ static void __init tauros3_save(void __iomem *base) readl_relaxed(base + L310_PREFETCH_CTRL); } -static void tauros3_resume(void) +static void tauros3_configure(void __iomem *base) { - void __iomem *base = l2x0_base; - - if (!(readl_relaxed(base + L2X0_CTRL) & L2X0_CTRL_EN)) { - writel_relaxed(l2x0_saved_regs.aux2_ctrl, - base + TAUROS3_AUX2_CTRL); - writel_relaxed(l2x0_saved_regs.prefetch_ctrl, - base + L310_PREFETCH_CTRL); - - l2c_enable(base, l2x0_saved_regs.aux_ctrl, 8); - } + writel_relaxed(l2x0_saved_regs.aux2_ctrl, + base + TAUROS3_AUX2_CTRL); + writel_relaxed(l2x0_saved_regs.prefetch_ctrl, + base + L310_PREFETCH_CTRL); } static const struct l2c_init_data of_tauros3_data __initconst = { @@ -1603,9 +1620,10 @@ static const struct l2c_init_data of_tauros3_data __initconst = { .num_lock = 8, .enable = l2c_enable, .save = tauros3_save, + .configure = tauros3_configure, /* Tauros3 broadcasts L1 cache operations to L2 */ .outer_cache = { - .resume = tauros3_resume, + .resume = l2c_resume, }, }; @@ -1661,6 +1679,10 @@ int __init l2x0_of_init(u32 aux_val, u32 aux_mask) if (!of_property_read_bool(np, "cache-unified")) pr_err("L2C: device tree omits to specify unified cache\n"); + /* Read back current (default) hardware configuration */ + if (data->save) + data->save(l2x0_base); + /* L2 configuration can only be changed if the cache is disabled */ if (!(readl_relaxed(l2x0_base + L2X0_CTRL) & L2X0_CTRL_EN)) if (data->of_parse) @@ -1671,8 +1693,6 @@ int __init l2x0_of_init(u32 aux_val, u32 aux_mask) else cache_id = readl_relaxed(l2x0_base + L2X0_CACHE_ID); - __l2c_init(data, aux_val, aux_mask, cache_id); - - return 0; + return __l2c_init(data, aux_val, aux_mask, cache_id); } #endif -- cgit v1.2.3-59-g8ed1b From c6d1a2d0078a30eb6290428a858c4e790a0e8691 Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:51:07 +0100 Subject: ARM: 8260/1: l2c: Add interface to ask hypervisor to configure L2C Because certain secure hypervisor do not allow writes to individual L2C registers, but rather expect set of parameters to be passed as argument to secure monitor calls, there is a need to provide an interface for the L2C driver to ask the firmware to configure the hardware according to specified parameters. This patch adds such. 
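For illustration only, not part of this patch: a platform whose firmware owns the L2C could hook the new callback roughly as sketched below. The my_fw_l2c_setup() wrapper is hypothetical, and which saved registers a given secure monitor accepts will differ; the declarations assumed here come from asm/outercache.h and asm/hardware/cache-l2x0.h.

#include <linux/init.h>
#include <linux/types.h>
#include <asm/outercache.h>
#include <asm/hardware/cache-l2x0.h>

/* Hypothetical secure-monitor wrapper, not a real kernel API */
extern void my_fw_l2c_setup(u32 tag_lat, u32 data_lat, u32 prefetch, u32 aux);

static void my_l2c_configure(const struct l2x0_regs *regs)
{
	/* Hand the saved L2C register values to the secure monitor */
	my_fw_l2c_setup(regs->tag_latency, regs->data_latency,
			regs->prefetch_ctrl, regs->aux_ctrl);
}

static void __init my_firmware_init(void)
{
	/* Install before l2x0_of_init()/l2x0_init() so the driver uses it */
	outer_cache.configure = my_l2c_configure;
}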
Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Tested-by: Nishanth Menon Acked-by: Nishanth Menon Acked-by: Tony Lindgren Signed-off-by: Russell King --- arch/arm/include/asm/outercache.h | 3 +++ arch/arm/mm/cache-l2x0.c | 6 ++++++ 2 files changed, 9 insertions(+) (limited to 'arch') diff --git a/arch/arm/include/asm/outercache.h b/arch/arm/include/asm/outercache.h index 891a56b35bcf..563b92fc2f41 100644 --- a/arch/arm/include/asm/outercache.h +++ b/arch/arm/include/asm/outercache.h @@ -23,6 +23,8 @@ #include +struct l2x0_regs; + struct outer_cache_fns { void (*inv_range)(unsigned long, unsigned long); void (*clean_range)(unsigned long, unsigned long); @@ -36,6 +38,7 @@ struct outer_cache_fns { /* This is an ARM L2C thing */ void (*write_sec)(unsigned long, unsigned); + void (*configure)(const struct l2x0_regs *); }; extern struct outer_cache_fns outer_cache; diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c index dde0d54ac41e..5288153f28b8 100644 --- a/arch/arm/mm/cache-l2x0.c +++ b/arch/arm/mm/cache-l2x0.c @@ -110,6 +110,11 @@ static inline void l2c_unlock(void __iomem *base, unsigned num) static void l2c_configure(void __iomem *base) { + if (outer_cache.configure) { + outer_cache.configure(&l2x0_saved_regs); + return; + } + if (l2x0_data->configure) l2x0_data->configure(base); @@ -910,6 +915,7 @@ static int __init __l2c_init(const struct l2c_init_data *data, fns = data->outer_cache; fns.write_sec = outer_cache.write_sec; + fns.configure = outer_cache.configure; if (data->fixup) data->fixup(l2x0_base, cache_id, &fns); -- cgit v1.2.3-59-g8ed1b From 0c4c2edcaeee323fcc4f41a862456358eda50b57 Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:51:45 +0100 Subject: ARM: 8261/1: l2c: Get outer cache .write_sec callback from mach_desc only if not NULL Certain platforms (i.e. Exynos) might need to set .write_sec callback from firmware initialization which is happenning in .init_early callback of machine descriptor. However current code will overwrite the pointer with whatever is present in machine descriptor, even though it can be already set earlier. This patch fixes this by making the assignment conditional, depending on whether current .write_sec callback is NULL. Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Tested-by: Nishanth Menon Acked-by: Nishanth Menon Acked-by: Tony Lindgren Signed-off-by: Russell King --- arch/arm/kernel/irq.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/kernel/irq.c b/arch/arm/kernel/irq.c index ad857bada96c..350f188c92d2 100644 --- a/arch/arm/kernel/irq.c +++ b/arch/arm/kernel/irq.c @@ -109,7 +109,8 @@ void __init init_IRQ(void) if (IS_ENABLED(CONFIG_OF) && IS_ENABLED(CONFIG_CACHE_L2X0) && (machine_desc->l2c_aux_mask || machine_desc->l2c_aux_val)) { - outer_cache.write_sec = machine_desc->l2c_write_sec; + if (!outer_cache.write_sec) + outer_cache.write_sec = machine_desc->l2c_write_sec; ret = l2x0_of_init(machine_desc->l2c_aux_val, machine_desc->l2c_aux_mask); if (ret) -- cgit v1.2.3-59-g8ed1b From cf0681ca4cc5c9faa85a2df4c063b926fc09c977 Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:52:38 +0100 Subject: ARM: 8262/1: l2c: Add support for overriding prefetch settings Firmware on certain boards (e.g. ODROID-U3) can leave incorrect L2C prefetch settings configured in registers leading to crashes if L2C is enabled without overriding them. 
This patch introduces bindings to enable prefetch settings to be specified from DT and necessary support in the driver. [mszyprow: rebased onto v3.18-rc1, added error message when prefetch related dt property has been provided without any value] Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Tested-by: Nishanth Menon Acked-by: Nishanth Menon Acked-by: Tony Lindgren Signed-off-by: Russell King --- Documentation/devicetree/bindings/arm/l2cc.txt | 10 +++++ arch/arm/mm/cache-l2x0.c | 54 ++++++++++++++++++++++++++ 2 files changed, 64 insertions(+) (limited to 'arch') diff --git a/Documentation/devicetree/bindings/arm/l2cc.txt b/Documentation/devicetree/bindings/arm/l2cc.txt index 292ef7ca3058..0dbabe9a6b0a 100644 --- a/Documentation/devicetree/bindings/arm/l2cc.txt +++ b/Documentation/devicetree/bindings/arm/l2cc.txt @@ -57,6 +57,16 @@ Optional properties: - cache-id-part: cache id part number to be used if it is not present on hardware - wt-override: If present then L2 is forced to Write through mode +- arm,double-linefill : Override double linefill enable setting. Enable if + non-zero, disable if zero. +- arm,double-linefill-incr : Override double linefill on INCR read. Enable + if non-zero, disable if zero. +- arm,double-linefill-wrap : Override double linefill on WRAP read. Enable + if non-zero, disable if zero. +- arm,prefetch-drop : Override prefetch drop enable setting. Enable if non-zero, + disable if zero. +- arm,prefetch-offset : Override prefetch offset value. Valid values are + 0-7, 15, 23, and 31. Example: diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c index 5288153f28b8..01de13809454 100644 --- a/arch/arm/mm/cache-l2x0.c +++ b/arch/arm/mm/cache-l2x0.c @@ -1169,6 +1169,8 @@ static void __init l2c310_of_parse(const struct device_node *np, u32 tag[3] = { 0, 0, 0 }; u32 filter[2] = { 0, 0 }; u32 assoc; + u32 prefetch; + u32 val; int ret; of_property_read_u32_array(np, "arm,tag-latency", tag, ARRAY_SIZE(tag)); @@ -1214,6 +1216,58 @@ static void __init l2c310_of_parse(const struct device_node *np, assoc); break; } + + prefetch = l2x0_saved_regs.prefetch_ctrl; + + ret = of_property_read_u32(np, "arm,double-linefill", &val); + if (ret == 0) { + if (val) + prefetch |= L310_PREFETCH_CTRL_DBL_LINEFILL; + else + prefetch &= ~L310_PREFETCH_CTRL_DBL_LINEFILL; + } else if (ret != -EINVAL) { + pr_err("L2C-310 OF arm,double-linefill property value is missing\n"); + } + + ret = of_property_read_u32(np, "arm,double-linefill-incr", &val); + if (ret == 0) { + if (val) + prefetch |= L310_PREFETCH_CTRL_DBL_LINEFILL_INCR; + else + prefetch &= ~L310_PREFETCH_CTRL_DBL_LINEFILL_INCR; + } else if (ret != -EINVAL) { + pr_err("L2C-310 OF arm,double-linefill-incr property value is missing\n"); + } + + ret = of_property_read_u32(np, "arm,double-linefill-wrap", &val); + if (ret == 0) { + if (!val) + prefetch |= L310_PREFETCH_CTRL_DBL_LINEFILL_WRAP; + else + prefetch &= ~L310_PREFETCH_CTRL_DBL_LINEFILL_WRAP; + } else if (ret != -EINVAL) { + pr_err("L2C-310 OF arm,double-linefill-wrap property value is missing\n"); + } + + ret = of_property_read_u32(np, "arm,prefetch-drop", &val); + if (ret == 0) { + if (val) + prefetch |= L310_PREFETCH_CTRL_PREFETCH_DROP; + else + prefetch &= ~L310_PREFETCH_CTRL_PREFETCH_DROP; + } else if (ret != -EINVAL) { + pr_err("L2C-310 OF arm,prefetch-drop property value is missing\n"); + } + + ret = of_property_read_u32(np, "arm,prefetch-offset", &val); + if (ret == 0) { + prefetch &= ~L310_PREFETCH_CTRL_OFFSET_MASK; + prefetch |= val & 
L310_PREFETCH_CTRL_OFFSET_MASK; + } else if (ret != -EINVAL) { + pr_err("L2C-310 OF arm,prefetch-offset property value is missing\n"); + } + + l2x0_saved_regs.prefetch_ctrl = prefetch; } static const struct l2c_init_data of_l2c310_data __initconst = { -- cgit v1.2.3-59-g8ed1b From 5445b640f38efa9c571584d44df5673804194397 Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:53:20 +0100 Subject: ARM: 8263/1: EXYNOS: Add .write_sec outer cache callback for L2C-310 Exynos4 SoCs equipped with an L2C-310 cache controller and running under secure firmware require certain registers of aforementioned IP to be accessed only from secure mode. This means that SMC calls are required for certain register writes. To handle this, an implementation of .write_sec and .configure callbacks is provided by this patch. [added comment and reworked unconditional call to SMC_CMD_L2X0INVALL] Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Acked-by: Arnd Bergmann Acked-by: Kukjin Kim Signed-off-by: Russell King --- arch/arm/mach-exynos/firmware.c | 50 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) (limited to 'arch') diff --git a/arch/arm/mach-exynos/firmware.c b/arch/arm/mach-exynos/firmware.c index 766f57d2f029..4791a3cc00f9 100644 --- a/arch/arm/mach-exynos/firmware.c +++ b/arch/arm/mach-exynos/firmware.c @@ -17,6 +17,7 @@ #include #include #include +#include #include #include @@ -136,6 +137,43 @@ static const struct firmware_ops exynos_firmware_ops = { .resume = IS_ENABLED(CONFIG_EXYNOS_CPU_SUSPEND) ? exynos_resume : NULL, }; +static void exynos_l2_write_sec(unsigned long val, unsigned reg) +{ + static int l2cache_enabled; + + switch (reg) { + case L2X0_CTRL: + if (val & L2X0_CTRL_EN) { + /* + * Before the cache can be enabled, due to firmware + * design, SMC_CMD_L2X0INVALL must be called. + */ + if (!l2cache_enabled) { + exynos_smc(SMC_CMD_L2X0INVALL, 0, 0, 0); + l2cache_enabled = 1; + } + } else { + l2cache_enabled = 0; + } + exynos_smc(SMC_CMD_L2X0CTRL, val, 0, 0); + break; + + case L2X0_DEBUG_CTRL: + exynos_smc(SMC_CMD_L2X0DEBUG, val, 0, 0); + break; + + default: + WARN_ONCE(1, "%s: ignoring write to reg 0x%x\n", __func__, reg); + } +} + +static void exynos_l2_configure(const struct l2x0_regs *regs) +{ + exynos_smc(SMC_CMD_L2X0SETUP1, regs->tag_latency, regs->data_latency, + regs->prefetch_ctrl); + exynos_smc(SMC_CMD_L2X0SETUP2, regs->pwr_ctrl, regs->aux_ctrl, 0); +} + void __init exynos_firmware_init(void) { struct device_node *nd; @@ -155,4 +193,16 @@ void __init exynos_firmware_init(void) pr_info("Running under secure firmware.\n"); register_firmware_ops(&exynos_firmware_ops); + + /* + * Exynos 4 SoCs (based on Cortex A9 and equipped with L2C-310), + * running under secure firmware, require certain registers of L2 + * cache controller to be written in secure mode. Here .write_sec + * callback is provided to perform necessary SMC calls. + */ + if (IS_ENABLED(CONFIG_CACHE_L2X0) && + read_cpuid_part() == ARM_CPU_PART_CORTEX_A9) { + outer_cache.write_sec = exynos_l2_write_sec; + outer_cache.configure = exynos_l2_configure; + } } -- cgit v1.2.3-59-g8ed1b From 30ad527a6499a7dc037baea84e8a19be7f3bf509 Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:53:56 +0100 Subject: ARM: 8264/1: EXYNOS: Add support for non-secure L2X0 resume On Exynos SoCs it is necessary to resume operation of L2C early in assembly code, because otherwise certain systems will crash. This patch adds necessary code to non-secure resume handler. 
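For readability, not part of this patch: the assembly added below does roughly the following, sketched here in C using the helpers that appear elsewhere in this series (exynos_smc() and the SMC_CMD_* constants from the Exynos smc.h, struct l2x0_regs from asm/hardware/cache-l2x0.h; the phy_base field name is an assumption). The real code has to stay in assembly because it runs before the normal C environment is restored, with the MMU still off.

/* Illustrative sketch only; never compiled or run like this */
static void exynos_l2c_resume_ns_sketch(void)
{
	struct l2x0_regs *regs = &l2x0_saved_regs;

	if (!regs->phy_base)		/* saved regs not initialised yet */
		return;

	/*
	 * The assembly also reads the controller's L2X0_CTRL register
	 * through the saved physical base and skips everything if the
	 * cache is already enabled.
	 */
	exynos_smc(SMC_CMD_L2X0SETUP1, regs->tag_latency,
		   regs->data_latency, regs->prefetch_ctrl);
	exynos_smc(SMC_CMD_L2X0SETUP2, regs->pwr_ctrl, regs->aux_ctrl, 0);
	exynos_smc(SMC_CMD_L2X0INVALL, 0, 0, 0);
	exynos_smc(SMC_CMD_L2X0CTRL, 1, 0, 0);
}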
[rewrote the code accessing l2x0_saved_regs] Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Acked-by: Arnd Bergmann Acked-by: Kukjin Kim Signed-off-by: Russell King --- arch/arm/mach-exynos/sleep.S | 46 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) (limited to 'arch') diff --git a/arch/arm/mach-exynos/sleep.S b/arch/arm/mach-exynos/sleep.S index e3c373082bbe..31d25834b9c4 100644 --- a/arch/arm/mach-exynos/sleep.S +++ b/arch/arm/mach-exynos/sleep.S @@ -16,6 +16,8 @@ */ #include +#include +#include #include "smc.h" #define CPU_MASK 0xff0ffff0 @@ -74,6 +76,45 @@ ENTRY(exynos_cpu_resume_ns) mov r0, #SMC_CMD_C15RESUME dsb smc #0 +#ifdef CONFIG_CACHE_L2X0 + adr r0, 1f + ldr r2, [r0] + add r0, r2, r0 + + /* Check that the address has been initialised. */ + ldr r1, [r0, #L2X0_R_PHY_BASE] + teq r1, #0 + beq skip_l2x0 + + /* Check if controller has been enabled. */ + ldr r2, [r1, #L2X0_CTRL] + tst r2, #0x1 + bne skip_l2x0 + + ldr r1, [r0, #L2X0_R_TAG_LATENCY] + ldr r2, [r0, #L2X0_R_DATA_LATENCY] + ldr r3, [r0, #L2X0_R_PREFETCH_CTRL] + mov r0, #SMC_CMD_L2X0SETUP1 + smc #0 + + /* Reload saved regs pointer because smc corrupts registers. */ + adr r0, 1f + ldr r2, [r0] + add r0, r2, r0 + + ldr r1, [r0, #L2X0_R_PWR_CTRL] + ldr r2, [r0, #L2X0_R_AUX_CTRL] + mov r0, #SMC_CMD_L2X0SETUP2 + smc #0 + + mov r0, #SMC_CMD_L2X0INVALL + smc #0 + + mov r1, #1 + mov r0, #SMC_CMD_L2X0CTRL + smc #0 +skip_l2x0: +#endif /* CONFIG_CACHE_L2X0 */ skip_cp15: b cpu_resume ENDPROC(exynos_cpu_resume_ns) @@ -83,3 +124,8 @@ cp15_save_diag: .globl cp15_save_power cp15_save_power: .long 0 @ cp15 power control + +#ifdef CONFIG_CACHE_L2X0 + .align +1: .long l2x0_saved_regs - . +#endif /* CONFIG_CACHE_L2X0 */ -- cgit v1.2.3-59-g8ed1b From 56b60b8bce4ae87470da4ed95435433b0ba8795c Mon Sep 17 00:00:00 2001 From: Tomasz Figa Date: Thu, 8 Jan 2015 07:54:34 +0100 Subject: ARM: 8265/1: dts: exynos4: Add nodes for L2 cache controller This patch adds device tree nodes for L2 cache controller present on Exynos4 SoCs. 
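As a usage note, not part of this patch: CPU nodes conventionally point at such a cache node through the standard "next-level-cache" property so that the cache hierarchy is described in the device tree. A generic sketch follows; the node names and unit addresses are illustrative only.

	cpus {
		cpu0: cpu@0 {
			/* existing CPU properties elided */
			next-level-cache = <&l2c>;
		};
	};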
Signed-off-by: Tomasz Figa Signed-off-by: Marek Szyprowski Acked-by: Arnd Bergmann Acked-by: Kukjin Kim Signed-off-by: Russell King --- arch/arm/boot/dts/exynos4210.dtsi | 9 +++++++++ arch/arm/boot/dts/exynos4x12.dtsi | 14 ++++++++++++++ 2 files changed, 23 insertions(+) (limited to 'arch') diff --git a/arch/arm/boot/dts/exynos4210.dtsi b/arch/arm/boot/dts/exynos4210.dtsi index bcc9e63c8070..8e45ea44317e 100644 --- a/arch/arm/boot/dts/exynos4210.dtsi +++ b/arch/arm/boot/dts/exynos4210.dtsi @@ -81,6 +81,15 @@ reg = <0x10023CA0 0x20>; }; + l2c: l2-cache-controller@10502000 { + compatible = "arm,pl310-cache"; + reg = <0x10502000 0x1000>; + cache-unified; + cache-level = <2>; + arm,tag-latency = <2 2 1>; + arm,data-latency = <2 2 1>; + }; + gic: interrupt-controller@10490000 { cpu-offset = <0x8000>; }; diff --git a/arch/arm/boot/dts/exynos4x12.dtsi b/arch/arm/boot/dts/exynos4x12.dtsi index 93b70402e943..8bc97c415c9a 100644 --- a/arch/arm/boot/dts/exynos4x12.dtsi +++ b/arch/arm/boot/dts/exynos4x12.dtsi @@ -54,6 +54,20 @@ reg = <0x10023CA0 0x20>; }; + l2c: l2-cache-controller@10502000 { + compatible = "arm,pl310-cache"; + reg = <0x10502000 0x1000>; + cache-unified; + cache-level = <2>; + arm,tag-latency = <2 2 1>; + arm,data-latency = <3 2 1>; + arm,double-linefill = <1>; + arm,double-linefill-incr = <0>; + arm,double-linefill-wrap = <1>; + arm,prefetch-drop = <1>; + arm,prefetch-offset = <7>; + }; + clock: clock-controller@10030000 { compatible = "samsung,exynos4412-clock"; reg = <0x10030000 0x20000>; -- cgit v1.2.3-59-g8ed1b From c25630381c6e093819d86d9618798db932cc2d90 Mon Sep 17 00:00:00 2001 From: Nicolas Pitre Date: Thu, 15 Jan 2015 23:41:54 +0100 Subject: ARM: 8285/1: remove ARMv3 user access code again This code was restored with commit 080fc66fb5 ("ARM: Bring back ARMv3 IO and user access code") because the RiscPC memory bus does not understand half-word load/stores. However only the IO code needed restoring since the alternative user access code contains no half-word accesses, is already used when CONFIG_PREEMPT is set and runs faster on a StrongARM. 
Signed-off-by: Nicolas Pitre Signed-off-by: Russell King --- arch/arm/lib/Makefile | 15 +- arch/arm/lib/uaccess.S | 564 ------------------------------------------------- 2 files changed, 2 insertions(+), 577 deletions(-) delete mode 100644 arch/arm/lib/uaccess.S (limited to 'arch') diff --git a/arch/arm/lib/Makefile b/arch/arm/lib/Makefile index 0573faab96ad..d8a780799506 100644 --- a/arch/arm/lib/Makefile +++ b/arch/arm/lib/Makefile @@ -15,19 +15,8 @@ lib-y := backtrace.o changebit.o csumipv6.o csumpartial.o \ io-readsb.o io-writesb.o io-readsl.o io-writesl.o \ call_with_stack.o bswapsdi2.o -mmu-y := clear_user.o copy_page.o getuser.o putuser.o - -# the code in uaccess.S is not preemption safe and -# probably faster on ARMv3 only -ifeq ($(CONFIG_PREEMPT),y) - mmu-y += copy_from_user.o copy_to_user.o -else -ifneq ($(CONFIG_CPU_32v3),y) - mmu-y += copy_from_user.o copy_to_user.o -else - mmu-y += uaccess.o -endif -endif +mmu-y := clear_user.o copy_page.o getuser.o putuser.o \ + copy_from_user.o copy_to_user.o # using lib_ here won't override already available weak symbols obj-$(CONFIG_UACCESS_WITH_MEMCPY) += uaccess_with_memcpy.o diff --git a/arch/arm/lib/uaccess.S b/arch/arm/lib/uaccess.S deleted file mode 100644 index e50520904b76..000000000000 --- a/arch/arm/lib/uaccess.S +++ /dev/null @@ -1,564 +0,0 @@ -/* - * linux/arch/arm/lib/uaccess.S - * - * Copyright (C) 1995, 1996,1997,1998 Russell King - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * - * Routines to block copy data to/from user memory - * These are highly optimised both for the 4k page size - * and for various alignments. - */ -#include -#include -#include -#include - - .text - -#define PAGE_SHIFT 12 - -/* Prototype: int __copy_to_user(void *to, const char *from, size_t n) - * Purpose : copy a block to user memory from kernel memory - * Params : to - user memory - * : from - kernel memory - * : n - number of bytes to copy - * Returns : Number of bytes NOT copied. - */ - -.Lc2u_dest_not_aligned: - rsb ip, ip, #4 - cmp ip, #2 - ldrb r3, [r1], #1 -USER( TUSER( strb) r3, [r0], #1) @ May fault - ldrgeb r3, [r1], #1 -USER( TUSER( strgeb) r3, [r0], #1) @ May fault - ldrgtb r3, [r1], #1 -USER( TUSER( strgtb) r3, [r0], #1) @ May fault - sub r2, r2, ip - b .Lc2u_dest_aligned - -ENTRY(__copy_to_user) - stmfd sp!, {r2, r4 - r7, lr} - cmp r2, #4 - blt .Lc2u_not_enough - ands ip, r0, #3 - bne .Lc2u_dest_not_aligned -.Lc2u_dest_aligned: - - ands ip, r1, #3 - bne .Lc2u_src_not_aligned -/* - * Seeing as there has to be at least 8 bytes to copy, we can - * copy one word, and force a user-mode page fault... - */ - -.Lc2u_0fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lc2u_0nowords - ldr r3, [r1], #4 -USER( TUSER( str) r3, [r0], #4) @ May fault - mov ip, r0, lsl #32 - PAGE_SHIFT @ On each page, use a ld/st??t instruction - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lc2u_0fupi -/* - * ip = max no. 
of bytes to copy before needing another "strt" insn - */ - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #32 - blt .Lc2u_0rem8lp - -.Lc2u_0cpy8lp: ldmia r1!, {r3 - r6} - stmia r0!, {r3 - r6} @ Shouldnt fault - ldmia r1!, {r3 - r6} - subs ip, ip, #32 - stmia r0!, {r3 - r6} @ Shouldnt fault - bpl .Lc2u_0cpy8lp - -.Lc2u_0rem8lp: cmn ip, #16 - ldmgeia r1!, {r3 - r6} - stmgeia r0!, {r3 - r6} @ Shouldnt fault - tst ip, #8 - ldmneia r1!, {r3 - r4} - stmneia r0!, {r3 - r4} @ Shouldnt fault - tst ip, #4 - ldrne r3, [r1], #4 - TUSER( strne) r3, [r0], #4 @ Shouldnt fault - ands ip, ip, #3 - beq .Lc2u_0fupi -.Lc2u_0nowords: teq ip, #0 - beq .Lc2u_finished -.Lc2u_nowords: cmp ip, #2 - ldrb r3, [r1], #1 -USER( TUSER( strb) r3, [r0], #1) @ May fault - ldrgeb r3, [r1], #1 -USER( TUSER( strgeb) r3, [r0], #1) @ May fault - ldrgtb r3, [r1], #1 -USER( TUSER( strgtb) r3, [r0], #1) @ May fault - b .Lc2u_finished - -.Lc2u_not_enough: - movs ip, r2 - bne .Lc2u_nowords -.Lc2u_finished: mov r0, #0 - ldmfd sp!, {r2, r4 - r7, pc} - -.Lc2u_src_not_aligned: - bic r1, r1, #3 - ldr r7, [r1], #4 - cmp ip, #2 - bgt .Lc2u_3fupi - beq .Lc2u_2fupi -.Lc2u_1fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lc2u_1nowords - mov r3, r7, lspull #8 - ldr r7, [r1], #4 - orr r3, r3, r7, lspush #24 -USER( TUSER( str) r3, [r0], #4) @ May fault - mov ip, r0, lsl #32 - PAGE_SHIFT - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lc2u_1fupi - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #16 - blt .Lc2u_1rem8lp - -.Lc2u_1cpy8lp: mov r3, r7, lspull #8 - ldmia r1!, {r4 - r7} - subs ip, ip, #16 - orr r3, r3, r4, lspush #24 - mov r4, r4, lspull #8 - orr r4, r4, r5, lspush #24 - mov r5, r5, lspull #8 - orr r5, r5, r6, lspush #24 - mov r6, r6, lspull #8 - orr r6, r6, r7, lspush #24 - stmia r0!, {r3 - r6} @ Shouldnt fault - bpl .Lc2u_1cpy8lp - -.Lc2u_1rem8lp: tst ip, #8 - movne r3, r7, lspull #8 - ldmneia r1!, {r4, r7} - orrne r3, r3, r4, lspush #24 - movne r4, r4, lspull #8 - orrne r4, r4, r7, lspush #24 - stmneia r0!, {r3 - r4} @ Shouldnt fault - tst ip, #4 - movne r3, r7, lspull #8 - ldrne r7, [r1], #4 - orrne r3, r3, r7, lspush #24 - TUSER( strne) r3, [r0], #4 @ Shouldnt fault - ands ip, ip, #3 - beq .Lc2u_1fupi -.Lc2u_1nowords: mov r3, r7, get_byte_1 - teq ip, #0 - beq .Lc2u_finished - cmp ip, #2 -USER( TUSER( strb) r3, [r0], #1) @ May fault - movge r3, r7, get_byte_2 -USER( TUSER( strgeb) r3, [r0], #1) @ May fault - movgt r3, r7, get_byte_3 -USER( TUSER( strgtb) r3, [r0], #1) @ May fault - b .Lc2u_finished - -.Lc2u_2fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lc2u_2nowords - mov r3, r7, lspull #16 - ldr r7, [r1], #4 - orr r3, r3, r7, lspush #16 -USER( TUSER( str) r3, [r0], #4) @ May fault - mov ip, r0, lsl #32 - PAGE_SHIFT - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lc2u_2fupi - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #16 - blt .Lc2u_2rem8lp - -.Lc2u_2cpy8lp: mov r3, r7, lspull #16 - ldmia r1!, {r4 - r7} - subs ip, ip, #16 - orr r3, r3, r4, lspush #16 - mov r4, r4, lspull #16 - orr r4, r4, r5, lspush #16 - mov r5, r5, lspull #16 - orr r5, r5, r6, lspush #16 - mov r6, r6, lspull #16 - orr r6, r6, r7, lspush #16 - stmia r0!, {r3 - r6} @ Shouldnt fault - bpl .Lc2u_2cpy8lp - -.Lc2u_2rem8lp: tst ip, #8 - movne r3, r7, lspull #16 - ldmneia r1!, {r4, r7} - orrne r3, r3, r4, lspush #16 - movne r4, r4, lspull #16 - orrne r4, r4, r7, lspush #16 - stmneia r0!, {r3 - r4} @ Shouldnt fault - tst ip, #4 - movne r3, r7, lspull #16 - ldrne r7, [r1], #4 - orrne r3, r3, r7, lspush #16 - 
TUSER( strne) r3, [r0], #4 @ Shouldnt fault - ands ip, ip, #3 - beq .Lc2u_2fupi -.Lc2u_2nowords: mov r3, r7, get_byte_2 - teq ip, #0 - beq .Lc2u_finished - cmp ip, #2 -USER( TUSER( strb) r3, [r0], #1) @ May fault - movge r3, r7, get_byte_3 -USER( TUSER( strgeb) r3, [r0], #1) @ May fault - ldrgtb r3, [r1], #0 -USER( TUSER( strgtb) r3, [r0], #1) @ May fault - b .Lc2u_finished - -.Lc2u_3fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lc2u_3nowords - mov r3, r7, lspull #24 - ldr r7, [r1], #4 - orr r3, r3, r7, lspush #8 -USER( TUSER( str) r3, [r0], #4) @ May fault - mov ip, r0, lsl #32 - PAGE_SHIFT - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lc2u_3fupi - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #16 - blt .Lc2u_3rem8lp - -.Lc2u_3cpy8lp: mov r3, r7, lspull #24 - ldmia r1!, {r4 - r7} - subs ip, ip, #16 - orr r3, r3, r4, lspush #8 - mov r4, r4, lspull #24 - orr r4, r4, r5, lspush #8 - mov r5, r5, lspull #24 - orr r5, r5, r6, lspush #8 - mov r6, r6, lspull #24 - orr r6, r6, r7, lspush #8 - stmia r0!, {r3 - r6} @ Shouldnt fault - bpl .Lc2u_3cpy8lp - -.Lc2u_3rem8lp: tst ip, #8 - movne r3, r7, lspull #24 - ldmneia r1!, {r4, r7} - orrne r3, r3, r4, lspush #8 - movne r4, r4, lspull #24 - orrne r4, r4, r7, lspush #8 - stmneia r0!, {r3 - r4} @ Shouldnt fault - tst ip, #4 - movne r3, r7, lspull #24 - ldrne r7, [r1], #4 - orrne r3, r3, r7, lspush #8 - TUSER( strne) r3, [r0], #4 @ Shouldnt fault - ands ip, ip, #3 - beq .Lc2u_3fupi -.Lc2u_3nowords: mov r3, r7, get_byte_3 - teq ip, #0 - beq .Lc2u_finished - cmp ip, #2 -USER( TUSER( strb) r3, [r0], #1) @ May fault - ldrgeb r3, [r1], #1 -USER( TUSER( strgeb) r3, [r0], #1) @ May fault - ldrgtb r3, [r1], #0 -USER( TUSER( strgtb) r3, [r0], #1) @ May fault - b .Lc2u_finished -ENDPROC(__copy_to_user) - - .pushsection .fixup,"ax" - .align 0 -9001: ldmfd sp!, {r0, r4 - r7, pc} - .popsection - -/* Prototype: unsigned long __copy_from_user(void *to,const void *from,unsigned long n); - * Purpose : copy a block from user memory to kernel memory - * Params : to - kernel memory - * : from - user memory - * : n - number of bytes to copy - * Returns : Number of bytes NOT copied. - */ -.Lcfu_dest_not_aligned: - rsb ip, ip, #4 - cmp ip, #2 -USER( TUSER( ldrb) r3, [r1], #1) @ May fault - strb r3, [r0], #1 -USER( TUSER( ldrgeb) r3, [r1], #1) @ May fault - strgeb r3, [r0], #1 -USER( TUSER( ldrgtb) r3, [r1], #1) @ May fault - strgtb r3, [r0], #1 - sub r2, r2, ip - b .Lcfu_dest_aligned - -ENTRY(__copy_from_user) - stmfd sp!, {r0, r2, r4 - r7, lr} - cmp r2, #4 - blt .Lcfu_not_enough - ands ip, r0, #3 - bne .Lcfu_dest_not_aligned -.Lcfu_dest_aligned: - ands ip, r1, #3 - bne .Lcfu_src_not_aligned - -/* - * Seeing as there has to be at least 8 bytes to copy, we can - * copy one word, and force a user-mode page fault... - */ - -.Lcfu_0fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lcfu_0nowords -USER( TUSER( ldr) r3, [r1], #4) - str r3, [r0], #4 - mov ip, r1, lsl #32 - PAGE_SHIFT @ On each page, use a ld/st??t instruction - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lcfu_0fupi -/* - * ip = max no. 
of bytes to copy before needing another "strt" insn - */ - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #32 - blt .Lcfu_0rem8lp - -.Lcfu_0cpy8lp: ldmia r1!, {r3 - r6} @ Shouldnt fault - stmia r0!, {r3 - r6} - ldmia r1!, {r3 - r6} @ Shouldnt fault - subs ip, ip, #32 - stmia r0!, {r3 - r6} - bpl .Lcfu_0cpy8lp - -.Lcfu_0rem8lp: cmn ip, #16 - ldmgeia r1!, {r3 - r6} @ Shouldnt fault - stmgeia r0!, {r3 - r6} - tst ip, #8 - ldmneia r1!, {r3 - r4} @ Shouldnt fault - stmneia r0!, {r3 - r4} - tst ip, #4 - TUSER( ldrne) r3, [r1], #4 @ Shouldnt fault - strne r3, [r0], #4 - ands ip, ip, #3 - beq .Lcfu_0fupi -.Lcfu_0nowords: teq ip, #0 - beq .Lcfu_finished -.Lcfu_nowords: cmp ip, #2 -USER( TUSER( ldrb) r3, [r1], #1) @ May fault - strb r3, [r0], #1 -USER( TUSER( ldrgeb) r3, [r1], #1) @ May fault - strgeb r3, [r0], #1 -USER( TUSER( ldrgtb) r3, [r1], #1) @ May fault - strgtb r3, [r0], #1 - b .Lcfu_finished - -.Lcfu_not_enough: - movs ip, r2 - bne .Lcfu_nowords -.Lcfu_finished: mov r0, #0 - add sp, sp, #8 - ldmfd sp!, {r4 - r7, pc} - -.Lcfu_src_not_aligned: - bic r1, r1, #3 -USER( TUSER( ldr) r7, [r1], #4) @ May fault - cmp ip, #2 - bgt .Lcfu_3fupi - beq .Lcfu_2fupi -.Lcfu_1fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lcfu_1nowords - mov r3, r7, lspull #8 -USER( TUSER( ldr) r7, [r1], #4) @ May fault - orr r3, r3, r7, lspush #24 - str r3, [r0], #4 - mov ip, r1, lsl #32 - PAGE_SHIFT - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lcfu_1fupi - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #16 - blt .Lcfu_1rem8lp - -.Lcfu_1cpy8lp: mov r3, r7, lspull #8 - ldmia r1!, {r4 - r7} @ Shouldnt fault - subs ip, ip, #16 - orr r3, r3, r4, lspush #24 - mov r4, r4, lspull #8 - orr r4, r4, r5, lspush #24 - mov r5, r5, lspull #8 - orr r5, r5, r6, lspush #24 - mov r6, r6, lspull #8 - orr r6, r6, r7, lspush #24 - stmia r0!, {r3 - r6} - bpl .Lcfu_1cpy8lp - -.Lcfu_1rem8lp: tst ip, #8 - movne r3, r7, lspull #8 - ldmneia r1!, {r4, r7} @ Shouldnt fault - orrne r3, r3, r4, lspush #24 - movne r4, r4, lspull #8 - orrne r4, r4, r7, lspush #24 - stmneia r0!, {r3 - r4} - tst ip, #4 - movne r3, r7, lspull #8 -USER( TUSER( ldrne) r7, [r1], #4) @ May fault - orrne r3, r3, r7, lspush #24 - strne r3, [r0], #4 - ands ip, ip, #3 - beq .Lcfu_1fupi -.Lcfu_1nowords: mov r3, r7, get_byte_1 - teq ip, #0 - beq .Lcfu_finished - cmp ip, #2 - strb r3, [r0], #1 - movge r3, r7, get_byte_2 - strgeb r3, [r0], #1 - movgt r3, r7, get_byte_3 - strgtb r3, [r0], #1 - b .Lcfu_finished - -.Lcfu_2fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lcfu_2nowords - mov r3, r7, lspull #16 -USER( TUSER( ldr) r7, [r1], #4) @ May fault - orr r3, r3, r7, lspush #16 - str r3, [r0], #4 - mov ip, r1, lsl #32 - PAGE_SHIFT - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lcfu_2fupi - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #16 - blt .Lcfu_2rem8lp - - -.Lcfu_2cpy8lp: mov r3, r7, lspull #16 - ldmia r1!, {r4 - r7} @ Shouldnt fault - subs ip, ip, #16 - orr r3, r3, r4, lspush #16 - mov r4, r4, lspull #16 - orr r4, r4, r5, lspush #16 - mov r5, r5, lspull #16 - orr r5, r5, r6, lspush #16 - mov r6, r6, lspull #16 - orr r6, r6, r7, lspush #16 - stmia r0!, {r3 - r6} - bpl .Lcfu_2cpy8lp - -.Lcfu_2rem8lp: tst ip, #8 - movne r3, r7, lspull #16 - ldmneia r1!, {r4, r7} @ Shouldnt fault - orrne r3, r3, r4, lspush #16 - movne r4, r4, lspull #16 - orrne r4, r4, r7, lspush #16 - stmneia r0!, {r3 - r4} - tst ip, #4 - movne r3, r7, lspull #16 -USER( TUSER( ldrne) r7, [r1], #4) @ May fault - orrne r3, r3, r7, lspush #16 - strne r3, 
[r0], #4 - ands ip, ip, #3 - beq .Lcfu_2fupi -.Lcfu_2nowords: mov r3, r7, get_byte_2 - teq ip, #0 - beq .Lcfu_finished - cmp ip, #2 - strb r3, [r0], #1 - movge r3, r7, get_byte_3 - strgeb r3, [r0], #1 -USER( TUSER( ldrgtb) r3, [r1], #0) @ May fault - strgtb r3, [r0], #1 - b .Lcfu_finished - -.Lcfu_3fupi: subs r2, r2, #4 - addmi ip, r2, #4 - bmi .Lcfu_3nowords - mov r3, r7, lspull #24 -USER( TUSER( ldr) r7, [r1], #4) @ May fault - orr r3, r3, r7, lspush #8 - str r3, [r0], #4 - mov ip, r1, lsl #32 - PAGE_SHIFT - rsb ip, ip, #0 - movs ip, ip, lsr #32 - PAGE_SHIFT - beq .Lcfu_3fupi - cmp r2, ip - movlt ip, r2 - sub r2, r2, ip - subs ip, ip, #16 - blt .Lcfu_3rem8lp - -.Lcfu_3cpy8lp: mov r3, r7, lspull #24 - ldmia r1!, {r4 - r7} @ Shouldnt fault - orr r3, r3, r4, lspush #8 - mov r4, r4, lspull #24 - orr r4, r4, r5, lspush #8 - mov r5, r5, lspull #24 - orr r5, r5, r6, lspush #8 - mov r6, r6, lspull #24 - orr r6, r6, r7, lspush #8 - stmia r0!, {r3 - r6} - subs ip, ip, #16 - bpl .Lcfu_3cpy8lp - -.Lcfu_3rem8lp: tst ip, #8 - movne r3, r7, lspull #24 - ldmneia r1!, {r4, r7} @ Shouldnt fault - orrne r3, r3, r4, lspush #8 - movne r4, r4, lspull #24 - orrne r4, r4, r7, lspush #8 - stmneia r0!, {r3 - r4} - tst ip, #4 - movne r3, r7, lspull #24 -USER( TUSER( ldrne) r7, [r1], #4) @ May fault - orrne r3, r3, r7, lspush #8 - strne r3, [r0], #4 - ands ip, ip, #3 - beq .Lcfu_3fupi -.Lcfu_3nowords: mov r3, r7, get_byte_3 - teq ip, #0 - beq .Lcfu_finished - cmp ip, #2 - strb r3, [r0], #1 -USER( TUSER( ldrgeb) r3, [r1], #1) @ May fault - strgeb r3, [r0], #1 -USER( TUSER( ldrgtb) r3, [r1], #1) @ May fault - strgtb r3, [r0], #1 - b .Lcfu_finished -ENDPROC(__copy_from_user) - - .pushsection .fixup,"ax" - .align 0 - /* - * We took an exception. r0 contains a pointer to - * the byte not copied. - */ -9001: ldr r2, [sp], #4 @ void *to - sub r2, r0, r2 @ bytes copied - ldr r1, [sp], #4 @ unsigned long count - subs r4, r1, r2 @ bytes left to copy - movne r1, r4 - blne __memzero - mov r0, r4 - ldmfd sp!, {r4 - r7, pc} - .popsection - -- cgit v1.2.3-59-g8ed1b From fb892bd0fdcb2e5eac9c105cf68def90396ed8cc Mon Sep 17 00:00:00 2001 From: Jon Medhurst Date: Mon, 19 Jan 2015 15:15:36 +0000 Subject: ARM: kprobes: Eliminate test code's use of BX instruction on ARMv4 CPUs Non-T variants of ARMv4 CPUs don't support the BX instruction so eliminate its use. 
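The change boils down to choosing the return sequence per kernel ISA, as the RET() helper added to test-core.c below does:

/* Same shape as the RET() macro introduced below */
#ifndef CONFIG_THUMB2_KERNEL
#define RET(reg)	"mov pc, "#reg	/* works on every ARM, including non-T ARMv4 */
#else
#define RET(reg)	"bx "#reg	/* required to return to Thumb state */
#endif

It is then used inside the inline assembly as, for example, RET(lr)" \n\t".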
Signed-off-by: Jon Medhurst --- arch/arm/probes/kprobes/test-arm.c | 3 +++ arch/arm/probes/kprobes/test-core.c | 10 +++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) (limited to 'arch') diff --git a/arch/arm/probes/kprobes/test-arm.c b/arch/arm/probes/kprobes/test-arm.c index e72b07e8cd9a..8866aedfdea2 100644 --- a/arch/arm/probes/kprobes/test-arm.c +++ b/arch/arm/probes/kprobes/test-arm.c @@ -215,9 +215,12 @@ void kprobe_arm_test_cases(void) TEST_UNSUPPORTED("msr cpsr_f, lr") TEST_UNSUPPORTED("msr spsr, r0") +#if __LINUX_ARM_ARCH__ >= 5 || \ + (__LINUX_ARM_ARCH__ == 4 && !defined(CONFIG_CPU_32v4)) TEST_BF_R("bx r",0,2f,"") TEST_BB_R("bx r",7,2f,"") TEST_BF_R("bxeq r",14,2f,"") +#endif #if __LINUX_ARM_ARCH__ >= 5 TEST_R("clz r0, r",0, 0x0,"") diff --git a/arch/arm/probes/kprobes/test-core.c b/arch/arm/probes/kprobes/test-core.c index e495127d7571..9775de22e2ff 100644 --- a/arch/arm/probes/kprobes/test-core.c +++ b/arch/arm/probes/kprobes/test-core.c @@ -236,6 +236,8 @@ static int tests_failed; #ifndef CONFIG_THUMB2_KERNEL +#define RET(reg) "mov pc, "#reg + long arm_func(long r0, long r1); static void __used __naked __arm_kprobes_test_func(void) @@ -245,7 +247,7 @@ static void __used __naked __arm_kprobes_test_func(void) ".type arm_func, %%function \n\t" "arm_func: \n\t" "adds r0, r0, r1 \n\t" - "bx lr \n\t" + "mov pc, lr \n\t" ".code "NORMAL_ISA /* Back to Thumb if necessary */ : : : "r0", "r1", "cc" ); @@ -253,6 +255,8 @@ static void __used __naked __arm_kprobes_test_func(void) #else /* CONFIG_THUMB2_KERNEL */ +#define RET(reg) "bx "#reg + long thumb16_func(long r0, long r1); long thumb32even_func(long r0, long r1); long thumb32odd_func(long r0, long r1); @@ -494,7 +498,7 @@ static void __naked benchmark_nop(void) { __asm__ __volatile__ ( "nop \n\t" - "bx lr" + RET(lr)" \n\t" ); } @@ -977,7 +981,7 @@ void __naked __kprobes_test_case_start(void) "bic r0, lr, #1 @ r0 = inline data \n\t" "mov r1, sp \n\t" "bl kprobes_test_case_start \n\t" - "bx r0 \n\t" + RET(r0)" \n\t" ); } -- cgit v1.2.3-59-g8ed1b From ecba152356438fe28494d4d36d6d5755c32bf2ae Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:33:05 +0100 Subject: ARM: 8267/1: cnx3xxx: Remove spurious default for DEBUG_CNS3xxx The default value for DEBUG_CNS3xxx appears twice. This patch removes the one with the wrong sort order. Signed-off-by: Daniel Thompson Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 1 - 1 file changed, 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index 5ddd4906f7a7..c5648cc1466d 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -1269,7 +1269,6 @@ config DEBUG_UART_PHYS DEBUG_S3C2410_UART2) default 0x78000000 if DEBUG_CNS3XXX default 0x7c0003f8 if FOOTBRIDGE - default 0x78000000 if DEBUG_CNS3XXX default 0x80010000 if DEBUG_ASM9260_UART default 0x80070000 if DEBUG_IMX23_UART default 0x80074000 if DEBUG_IMX28_UART -- cgit v1.2.3-59-g8ed1b From 34c64a5d830ac3905065ff1cc52091af15cb2f7e Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:33:16 +0100 Subject: ARM: 8268/1: configs: Enable DEBUG_LL_UART_8250 where needed All defconfigs touched by this patch already enable DEBUG_LL and by default DEBUG_LL_UART_NONE will be selected. This causes no issues today because due to some back compatibility magic we eventually need to remove it is not actually honoured. Nevertheless DEBUG_LL_UART_8250 is the right value for these platforms and should be set in the config files. 
Signed-off-by: Daniel Thompson Signed-off-by: Russell King --- arch/arm/configs/iop32x_defconfig | 1 + arch/arm/configs/iop33x_defconfig | 1 + arch/arm/configs/ixp4xx_defconfig | 1 + arch/arm/configs/lpc32xx_defconfig | 1 + arch/arm/configs/mv78xx0_defconfig | 1 + arch/arm/configs/orion5x_defconfig | 1 + arch/arm/configs/rpc_defconfig | 1 + 7 files changed, 7 insertions(+) (limited to 'arch') diff --git a/arch/arm/configs/iop32x_defconfig b/arch/arm/configs/iop32x_defconfig index 4f2ec3ac138e..c3058da631da 100644 --- a/arch/arm/configs/iop32x_defconfig +++ b/arch/arm/configs/iop32x_defconfig @@ -106,6 +106,7 @@ CONFIG_MAGIC_SYSRQ=y CONFIG_DEBUG_KERNEL=y CONFIG_DEBUG_USER=y CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y CONFIG_KEYS=y CONFIG_KEYS_DEBUG_PROC_KEYS=y CONFIG_CRYPTO_NULL=y diff --git a/arch/arm/configs/iop33x_defconfig b/arch/arm/configs/iop33x_defconfig index aa36128abca2..713faeee8cf4 100644 --- a/arch/arm/configs/iop33x_defconfig +++ b/arch/arm/configs/iop33x_defconfig @@ -87,5 +87,6 @@ CONFIG_DEBUG_KERNEL=y # CONFIG_RCU_CPU_STALL_DETECTOR is not set CONFIG_DEBUG_USER=y CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y # CONFIG_CRYPTO_ANSI_CPRNG is not set # CONFIG_CRC32 is not set diff --git a/arch/arm/configs/ixp4xx_defconfig b/arch/arm/configs/ixp4xx_defconfig index 1af665e847d1..24636cfdf6df 100644 --- a/arch/arm/configs/ixp4xx_defconfig +++ b/arch/arm/configs/ixp4xx_defconfig @@ -202,3 +202,4 @@ CONFIG_MAGIC_SYSRQ=y CONFIG_DEBUG_KERNEL=y CONFIG_DEBUG_ERRORS=y CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y diff --git a/arch/arm/configs/lpc32xx_defconfig b/arch/arm/configs/lpc32xx_defconfig index 9f56ca3985ae..c100b7df5441 100644 --- a/arch/arm/configs/lpc32xx_defconfig +++ b/arch/arm/configs/lpc32xx_defconfig @@ -204,6 +204,7 @@ CONFIG_DEBUG_INFO=y # CONFIG_FTRACE is not set # CONFIG_ARM_UNWIND is not set CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y CONFIG_EARLY_PRINTK=y CONFIG_CRYPTO_ANSI_CPRNG=y # CONFIG_CRYPTO_HW is not set diff --git a/arch/arm/configs/mv78xx0_defconfig b/arch/arm/configs/mv78xx0_defconfig index 0dae1c1f007a..85d10d2e3d66 100644 --- a/arch/arm/configs/mv78xx0_defconfig +++ b/arch/arm/configs/mv78xx0_defconfig @@ -132,6 +132,7 @@ CONFIG_SYSCTL_SYSCALL_CHECK=y CONFIG_DEBUG_USER=y CONFIG_DEBUG_ERRORS=y CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y CONFIG_CRYPTO_CBC=m CONFIG_CRYPTO_ECB=m CONFIG_CRYPTO_PCBC=m diff --git a/arch/arm/configs/orion5x_defconfig b/arch/arm/configs/orion5x_defconfig index 952430d9e2d9..855143fac6bd 100644 --- a/arch/arm/configs/orion5x_defconfig +++ b/arch/arm/configs/orion5x_defconfig @@ -156,6 +156,7 @@ CONFIG_LATENCYTOP=y # CONFIG_FTRACE is not set CONFIG_DEBUG_USER=y CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y CONFIG_CRYPTO_CBC=m CONFIG_CRYPTO_ECB=m CONFIG_CRYPTO_PCBC=m diff --git a/arch/arm/configs/rpc_defconfig b/arch/arm/configs/rpc_defconfig index 00515ef9782d..89631795a915 100644 --- a/arch/arm/configs/rpc_defconfig +++ b/arch/arm/configs/rpc_defconfig @@ -131,3 +131,4 @@ CONFIG_DEBUG_KERNEL=y CONFIG_DEBUG_USER=y CONFIG_DEBUG_ERRORS=y CONFIG_DEBUG_LL=y +CONFIG_DEBUG_LL_UART_8250=y -- cgit v1.2.3-59-g8ed1b From 6f5194553c84c3eb412c0cc59554b85be128f43d Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:33:28 +0100 Subject: ARM: 8269/1: Remove DEBUG_LL_UART_NONE Only a very small handful of platforms support DEBUG_LL_UART_NONE but it lurks in the menus of every single platform config ready to break the build. This is an especial problem for defconfig/oldconfig since it is often selected by default. 
This patch solves the problem by removing this option. Any platforms still depending upon this option must be migrated. Signed-off-by: Daniel Thompson Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 9 --------- 1 file changed, 9 deletions(-) (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index c5648cc1466d..6ee9b0d361ee 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -1031,15 +1031,6 @@ choice This option selects UART0 on VIA/Wondermedia System-on-a-chip devices, including VT8500, WM8505, WM8650 and WM8850. - config DEBUG_LL_UART_NONE - bool "No low-level debugging UART" - depends on !ARCH_MULTIPLATFORM - help - Say Y here if your platform doesn't provide a UART option - above. This relies on your platform choosing the right UART - definition internally in order for low-level debugging to - work. - config DEBUG_ICEDCC bool "Kernel low-level debugging via EmbeddedICE DCC channel" help -- cgit v1.2.3-59-g8ed1b From abbfb21efcd704e8b858e94e327bb014386ba0dd Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:33:36 +0100 Subject: ARM: 8270/1: ks8695: Migrate debug_ll macros to shared directory As part of the migration a couple of uart definitions have been copied from of the platform specific header files. Signed-off-by: Daniel Thompson Cc: Greg Ungerer Cc: Arnd Bergmann Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 8 +++++ arch/arm/include/debug/ks8695.S | 40 +++++++++++++++++++++++++ arch/arm/mach-ks8695/include/mach/debug-macro.S | 36 ---------------------- 3 files changed, 48 insertions(+), 36 deletions(-) create mode 100644 arch/arm/include/debug/ks8695.S delete mode 100644 arch/arm/mach-ks8695/include/mach/debug-macro.S (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index 6ee9b0d361ee..eba36e35bad2 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -397,6 +397,13 @@ choice Say Y here if you want the debug print routines to direct their output to UART1 serial port on KEYSTONE2 devices. + config DEBUG_KS8695_UART + bool "KS8695 Debug UART" + depends on ARCH_KS8695 + help + Say Y here if you want kernel low-level debugging support + on KS8695. + config DEBUG_MESON_UARTAO bool "Kernel low-level debugging via Meson6 UARTAO" depends on ARCH_MESON @@ -1174,6 +1181,7 @@ config DEBUG_LL_INCLUDE DEBUG_IMX6Q_UART || \ DEBUG_IMX6SL_UART || \ DEBUG_IMX6SX_UART + default "debug/ks8695.S" if DEBUG_KS8695_UART default "debug/msm.S" if DEBUG_MSM_UART || DEBUG_QCOM_UARTDM default "debug/omap2plus.S" if DEBUG_OMAP2PLUS_UART default "debug/renesas-scif.S" if DEBUG_R7S72100_SCIF2 diff --git a/arch/arm/include/debug/ks8695.S b/arch/arm/include/debug/ks8695.S new file mode 100644 index 000000000000..961da1f32ab3 --- /dev/null +++ b/arch/arm/include/debug/ks8695.S @@ -0,0 +1,40 @@ +/* + * arch/arm/include/debug/ks8695.S + * + * Copyright (C) 2006 Ben Dooks + * Copyright (C) 2006 Simtec Electronics + * + * KS8695 - Debug macros + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. 
+ */ + +#define KS8695_UART_PA 0x03ffe000 +#define KS8695_UART_VA 0xf00fe000 +#define KS8695_URTH (0x04) +#define KS8695_URLS (0x14) +#define URLS_URTE (1 << 6) +#define URLS_URTHRE (1 << 5) + + .macro addruart, rp, rv, tmp + ldr \rp, =KS8695_UART_PA @ physical base address + ldr \rv, =KS8695_UART_VA @ virtual base address + .endm + + .macro senduart, rd, rx + str \rd, [\rx, #KS8695_URTH] @ Write to Transmit Holding Register + .endm + + .macro busyuart, rd, rx +1001: ldr \rd, [\rx, #KS8695_URLS] @ Read Line Status Register + tst \rd, #URLS_URTE @ Holding & Shift registers empty? + beq 1001b + .endm + + .macro waituart, rd, rx +1001: ldr \rd, [\rx, #KS8695_URLS] @ Read Line Status Register + tst \rd, #URLS_URTHRE @ Holding Register empty? + beq 1001b + .endm diff --git a/arch/arm/mach-ks8695/include/mach/debug-macro.S b/arch/arm/mach-ks8695/include/mach/debug-macro.S deleted file mode 100644 index a79e48981202..000000000000 --- a/arch/arm/mach-ks8695/include/mach/debug-macro.S +++ /dev/null @@ -1,36 +0,0 @@ -/* - * arch/arm/mach-ks8695/include/mach/debug-macro.S - * - * Copyright (C) 2006 Ben Dooks - * Copyright (C) 2006 Simtec Electronics - * - * KS8695 - Debug macros - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - */ - -#include -#include - - .macro addruart, rp, rv, tmp - ldr \rp, =KS8695_UART_PA @ physical base address - ldr \rv, =KS8695_UART_VA @ virtual base address - .endm - - .macro senduart, rd, rx - str \rd, [\rx, #KS8695_URTH] @ Write to Transmit Holding Register - .endm - - .macro busyuart, rd, rx -1001: ldr \rd, [\rx, #KS8695_URLS] @ Read Line Status Register - tst \rd, #URLS_URTE @ Holding & Shift registers empty? - beq 1001b - .endm - - .macro waituart, rd, rx -1001: ldr \rd, [\rx, #KS8695_URLS] @ Read Line Status Register - tst \rd, #URLS_URTHRE @ Holding Register empty? - beq 1001b - .endm -- cgit v1.2.3-59-g8ed1b From c26b999353339c235b9e166739926d0199eeecbf Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:33:50 +0100 Subject: ARM: 8271/1: omap1: Migrate debug_ll macros to use 8250.S The omap1's debug-macro.S is similar to the generic 8250 code. Compared to the 8520 code the omap1 macro automatically determines what UART to use based on breadcrumbs left by the bootloader and automatically copes with the eccentric register layout on OMAP7XX. This patch drops both these features and relies instead on the generic 8250 macros: 1. Dropping support for the bootloader breadcrumbs is identical to the way the migration was handled for OMAP2 (see 808b7e07464d...). 2. Support for OMAP7XX still exists but it must be configured by hand (DEBUG_OMAP7XXUART1/2/3) rather than handled at runtime. Signed-off-by: Daniel Thompson Cc: Arnd Bergmann Cc: linux-omap@vger.kernel.org Tested-by: Aaro Koskinen Acked-by: Tony Lindgren Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 57 +++++++++++++- arch/arm/mach-omap1/include/mach/debug-macro.S | 101 ------------------------- 2 files changed, 56 insertions(+), 102 deletions(-) delete mode 100644 arch/arm/mach-omap1/include/mach/debug-macro.S (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index eba36e35bad2..ec25d746b4dd 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -527,6 +527,30 @@ choice Say Y here if you want kernel low-level debugging support on TI-NSPIRE CX models. 
+ config DEBUG_OMAP1UART1 + bool "Kernel low-level debugging via OMAP1 UART1" + depends on ARCH_OMAP1 + select DEBUG_UART_8250 + help + Say Y here if you want kernel low-level debugging support + on OMAP1 based platforms (except OMAP730) on the UART1. + + config DEBUG_OMAP1UART2 + bool "Kernel low-level debugging via OMAP1 UART2" + depends on ARCH_OMAP1 + select DEBUG_UART_8250 + help + Say Y here if you want kernel low-level debugging support + on OMAP1 based platforms (except OMAP730) on the UART2. + + config DEBUG_OMAP1UART3 + bool "Kernel low-level debugging via OMAP1 UART3" + depends on ARCH_OMAP1 + select DEBUG_UART_8250 + help + Say Y here if you want kernel low-level debugging support + on OMAP1 based platforms (except OMAP730) on the UART3. + config DEBUG_OMAP2UART1 bool "OMAP2/3/4 UART1 (omap2/3 sdp boards and some omap3 boards)" depends on ARCH_OMAP2PLUS @@ -569,6 +593,30 @@ choice depends on ARCH_OMAP2PLUS select DEBUG_OMAP2PLUS_UART + config DEBUG_OMAP7XXUART1 + bool "Kernel low-level debugging via OMAP730 UART1" + depends on ARCH_OMAP730 + select DEBUG_UART_8250 + help + Say Y here if you want kernel low-level debugging support + on OMAP730 based platforms on the UART1. + + config DEBUG_OMAP7XXUART2 + bool "Kernel low-level debugging via OMAP730 UART2" + depends on ARCH_OMAP730 + select DEBUG_UART_8250 + help + Say Y here if you want kernel low-level debugging support + on OMAP730 based platforms on the UART2. + + config DEBUG_OMAP7XXUART3 + bool "Kernel low-level debugging via OMAP730 UART3" + depends on ARCH_OMAP730 + select DEBUG_UART_8250 + help + Say Y here if you want kernel low-level debugging support + on OMAP730 based platforms on the UART3. + config DEBUG_TI81XXUART1 bool "Kernel low-level debugging messages via TI81XX UART1 (ti8148evm)" depends on ARCH_OMAP2PLUS @@ -1308,6 +1356,9 @@ config DEBUG_UART_PHYS default 0xffe40000 if DEBUG_RCAR_GEN1_SCIF0 default 0xffe42000 if DEBUG_RCAR_GEN1_SCIF2 default 0xfff36000 if DEBUG_HIGHBANK_UART + default 0xfffb0000 if DEBUG_OMAP1UART1 || DEBUG_OMAP7XXUART1 + default 0xfffb0800 if DEBUG_OMAP1UART2 || DEBUG_OMAP7XXUART2 + default 0xfffb9800 if DEBUG_OMAP1UART3 || DEBUG_OMAP7XXUART3 default 0xfffe8600 if DEBUG_UART_BCM63XX default 0xfffff700 if ARCH_IOP33X depends on DEBUG_LL_UART_8250 || DEBUG_LL_UART_PL01X || \ @@ -1390,6 +1441,9 @@ config DEBUG_UART_VIRT default 0xfef00000 if ARCH_IXP4XX && !CPU_BIG_ENDIAN default 0xfef00003 if ARCH_IXP4XX && CPU_BIG_ENDIAN default 0xfef36000 if DEBUG_HIGHBANK_UART + default 0xfefb0000 if DEBUG_OMAP1UART1 || DEBUG_OMAP7XXUART1 + default 0xfefb0800 if DEBUG_OMAP1UART2 || DEBUG_OMAP7XXUART2 + default 0xfefb9800 if DEBUG_OMAP1UART3 || DEBUG_OMAP7XXUART3 default 0xfefff700 if ARCH_IOP33X default 0xff003000 if DEBUG_U300_UART default DEBUG_UART_PHYS if !MMU @@ -1401,7 +1455,8 @@ config DEBUG_UART_VIRT config DEBUG_UART_8250_SHIFT int "Register offset shift for the 8250 debug UART" depends on DEBUG_LL_UART_8250 || DEBUG_UART_8250 - default 0 if FOOTBRIDGE || ARCH_IOP32X || DEBUG_BCM_5301X + default 0 if FOOTBRIDGE || ARCH_IOP32X || DEBUG_BCM_5301X || \ + DEBUG_OMAP7XXUART1 || DEBUG_OMAP7XXUART2 || DEBUG_OMAP7XXUART3 default 2 config DEBUG_UART_8250_WORD diff --git a/arch/arm/mach-omap1/include/mach/debug-macro.S b/arch/arm/mach-omap1/include/mach/debug-macro.S deleted file mode 100644 index 5c1a26c9f490..000000000000 --- a/arch/arm/mach-omap1/include/mach/debug-macro.S +++ /dev/null @@ -1,101 +0,0 @@ -/* arch/arm/mach-omap1/include/mach/debug-macro.S - * - * Debugging macro include header - * - * 
Copyright (C) 1994-1999 Russell King - * Moved from linux/arch/arm/kernel/debug.S by Ben Dooks - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * -*/ - -#include - -#include "serial.h" - - .pushsection .data -omap_uart_phys: .word 0x0 -omap_uart_virt: .word 0x0 - .popsection - - /* - * Note that this code won't work if the bootloader passes - * a wrong machine ID number in r1. To debug, just hardcode - * the desired UART phys and virt addresses temporarily into - * the omap_uart_phys and omap_uart_virt above. - */ - .macro addruart, rp, rv, tmp - - /* Use omap_uart_phys/virt if already configured */ -9: adr \rp, 99f @ get effective addr of 99f - ldr \rv, [\rp] @ get absolute addr of 99f - sub \rv, \rv, \rp @ offset between the two - ldr \rp, [\rp, #4] @ abs addr of omap_uart_phys - sub \tmp, \rp, \rv @ make it effective - ldr \rp, [\tmp, #0] @ omap_uart_phys - ldr \rv, [\tmp, #4] @ omap_uart_virt - cmp \rp, #0 @ is port configured? - cmpne \rv, #0 - bne 100f @ already configured - - /* Check the debug UART configuration set in uncompress.h */ - and \rp, pc, #0xff000000 - ldr \rv, =OMAP_UART_INFO_OFS - ldr \rp, [\rp, \rv] - - /* Select the UART to use based on the UART1 scratchpad value */ -10: cmp \rp, #0 @ no port configured? - beq 11f @ if none, try to use UART1 - cmp \rp, #OMAP1UART1 - beq 11f @ configure OMAP1UART1 - cmp \rp, #OMAP1UART2 - beq 12f @ configure OMAP1UART2 - cmp \rp, #OMAP1UART3 - beq 13f @ configure OMAP2UART3 - - /* Configure the UART offset from the phys/virt base */ -11: mov \rp, #0x00fb0000 @ OMAP1UART1 - b 98f -12: mov \rp, #0x00fb0000 @ OMAP1UART1 - orr \rp, \rp, #0x00000800 @ OMAP1UART2 - b 98f -13: mov \rp, #0x00fb0000 @ OMAP1UART1 - orr \rp, \rp, #0x00000800 @ OMAP1UART2 - orr \rp, \rp, #0x00009000 @ OMAP1UART3 - - /* Store both phys and virt address for the uart */ -98: add \rp, \rp, #0xff000000 @ phys base - str \rp, [\tmp, #0] @ omap_uart_phys - sub \rp, \rp, #0xff000000 @ phys base - add \rp, \rp, #0xfe000000 @ virt base - str \rp, [\tmp, #4] @ omap_uart_virt - b 9b - - .align -99: .word . - .word omap_uart_phys - .ltorg - -100: - .endm - - .macro senduart,rd,rx - strb \rd, [\rx] - .endm - - .macro busyuart,rd,rx -1001: ldrb \rd, [\rx, #(UART_LSR << OMAP_PORT_SHIFT)] - and \rd, \rd, #(UART_LSR_TEMT | UART_LSR_THRE) - teq \rd, #(UART_LSR_TEMT | UART_LSR_THRE) - beq 1002f - ldrb \rd, [\rx, #(UART_LSR << OMAP7XX_PORT_SHIFT)] - and \rd, \rd, #(UART_LSR_TEMT | UART_LSR_THRE) - teq \rd, #(UART_LSR_TEMT | UART_LSR_THRE) - bne 1001b -1002: - .endm - - .macro waituart,rd,rx - .endm -- cgit v1.2.3-59-g8ed1b From 4d31e66412955b20081d6966c271a71d4abcefd7 Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:34:03 +0100 Subject: ARM: 8272/1: netx: Migrate DEBUG_LL macros to shared directory As part of the migration we introduce DEBUG_UART_PHYS/DEBUG_UART_VIRT which default to UART1 but allow a user to configure UART2 or UART3. We also introduce symbolic names for the registers and flags. 
Signed-off-by: Daniel Thompson Acked-by: Arnd Bergmann Cc: linux-arm-kernel@lists.infradead.org Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 16 ++++++++++-- arch/arm/include/debug/netx.S | 36 +++++++++++++++++++++++++++ arch/arm/mach-netx/include/mach/debug-macro.S | 36 --------------------------- 3 files changed, 50 insertions(+), 38 deletions(-) create mode 100644 arch/arm/include/debug/netx.S delete mode 100644 arch/arm/mach-netx/include/mach/debug-macro.S (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index ec25d746b4dd..ff43ea38fe1f 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -503,6 +503,13 @@ choice Say Y here if you want kernel low-level debugging support on Vybrid based platforms. + config DEBUG_NETX_UART + bool "Kernel low-level debugging messages via NetX UART" + depends on ARCH_NETX + help + Say Y here if you want kernel low-level debugging support + on Hilscher NetX based platforms. + config DEBUG_NOMADIK_UART bool "Kernel low-level debugging messages via NOMADIK UART" depends on ARCH_NOMADIK @@ -1231,6 +1238,7 @@ config DEBUG_LL_INCLUDE DEBUG_IMX6SX_UART default "debug/ks8695.S" if DEBUG_KS8695_UART default "debug/msm.S" if DEBUG_MSM_UART || DEBUG_QCOM_UARTDM + default "debug/netx.S" if DEBUG_NETX_UART default "debug/omap2plus.S" if DEBUG_OMAP2PLUS_UART default "debug/renesas-scif.S" if DEBUG_R7S72100_SCIF2 default "debug/renesas-scif.S" if DEBUG_RCAR_GEN1_SCIF0 @@ -1276,6 +1284,7 @@ config DEBUG_UART_BCM63XX config DEBUG_UART_PHYS hex "Physical base address of debug UART" + default 0x00100a00 if DEBUG_NETX_UART default 0x01c20000 if DEBUG_DAVINCI_DMx_UART0 default 0x01c28000 if DEBUG_SUNXI_UART0 default 0x01c28400 if DEBUG_SUNXI_UART1 @@ -1364,7 +1373,8 @@ config DEBUG_UART_PHYS depends on DEBUG_LL_UART_8250 || DEBUG_LL_UART_PL01X || \ DEBUG_LL_UART_EFM32 || \ DEBUG_UART_8250 || DEBUG_UART_PL01X || DEBUG_MESON_UARTAO || \ - DEBUG_MSM_UART || DEBUG_QCOM_UARTDM || DEBUG_R7S72100_SCIF2 || \ + DEBUG_MSM_UART || DEBUG_NETX_UART || \ + DEBUG_QCOM_UARTDM || DEBUG_R7S72100_SCIF2 || \ DEBUG_RCAR_GEN1_SCIF0 || DEBUG_RCAR_GEN1_SCIF2 || \ DEBUG_RCAR_GEN2_SCIF0 || DEBUG_RCAR_GEN2_SCIF2 || \ DEBUG_RMOBILE_SCIFA0 || DEBUG_RMOBILE_SCIFA1 || \ @@ -1373,6 +1383,7 @@ config DEBUG_UART_PHYS config DEBUG_UART_VIRT hex "Virtual base address of debug UART" + default 0xe0000a00 if DEBUG_NETX_UART default 0xe0010fe0 if ARCH_RPC default 0xe1000000 if DEBUG_MSM_UART default 0xf0000be0 if ARCH_EBSA110 @@ -1449,7 +1460,8 @@ config DEBUG_UART_VIRT default DEBUG_UART_PHYS if !MMU depends on DEBUG_LL_UART_8250 || DEBUG_LL_UART_PL01X || \ DEBUG_UART_8250 || DEBUG_UART_PL01X || DEBUG_MESON_UARTAO || \ - DEBUG_MSM_UART || DEBUG_QCOM_UARTDM || DEBUG_S3C24XX_UART || \ + DEBUG_MSM_UART || DEBUG_NETX_UART || \ + DEBUG_QCOM_UARTDM || DEBUG_S3C24XX_UART || \ DEBUG_UART_BCM63XX || DEBUG_ASM9260_UART config DEBUG_UART_8250_SHIFT diff --git a/arch/arm/include/debug/netx.S b/arch/arm/include/debug/netx.S new file mode 100644 index 000000000000..81e1b2af70f7 --- /dev/null +++ b/arch/arm/include/debug/netx.S @@ -0,0 +1,36 @@ +/* + * Debugging macro include header + * + * Copyright (C) 1994-1999 Russell King + * Moved from linux/arch/arm/kernel/debug.S by Ben Dooks + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. 
+ * +*/ + +#define UART_DATA 0 +#define UART_FLAG 0x18 +#define UART_FLAG_BUSY (1 << 3) + + .macro addruart, rp, rv, tmp + ldr \rp, =CONFIG_DEBUG_UART_PHYS + ldr \rv, =CONFIG_DEBUG_UART_VIRT + .endm + + .macro senduart,rd,rx + str \rd, [\rx, #UART_DATA] + .endm + + .macro busyuart,rd,rx +1002: ldr \rd, [\rx, #UART_FLAG] + tst \rd, #UART_FLAG_BUSY + bne 1002b + .endm + + .macro waituart,rd,rx +1001: ldr \rd, [\rx, #UART_FLAG] + tst \rd, #UART_FLAG_BUSY + bne 1001b + .endm diff --git a/arch/arm/mach-netx/include/mach/debug-macro.S b/arch/arm/mach-netx/include/mach/debug-macro.S deleted file mode 100644 index 247781e096e2..000000000000 --- a/arch/arm/mach-netx/include/mach/debug-macro.S +++ /dev/null @@ -1,36 +0,0 @@ -/* arch/arm/mach-netx/include/mach/debug-macro.S - * - * Debugging macro include header - * - * Copyright (C) 1994-1999 Russell King - * Moved from linux/arch/arm/kernel/debug.S by Ben Dooks - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License version 2 as - * published by the Free Software Foundation. - * -*/ - -#include "hardware.h" - - .macro addruart, rp, rv, tmp - mov \rp, #0x00000a00 - orr \rv, \rp, #io_p2v(0x00100000) @ virtual - orr \rp, \rp, #0x00100000 @ physical - .endm - - .macro senduart,rd,rx - str \rd, [\rx, #0] - .endm - - .macro busyuart,rd,rx -1002: ldr \rd, [\rx, #0x18] - tst \rd, #(1 << 3) - bne 1002b - .endm - - .macro waituart,rd,rx -1001: ldr \rd, [\rx, #0x18] - tst \rd, #(1 << 3) - bne 1001b - .endm -- cgit v1.2.3-59-g8ed1b From d02fde7fc06a6ff88a9b4495712e31d54be01a4b Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:34:12 +0100 Subject: ARM: 8273/1: Seperate DEBUG_UART_PHYS from DEBUG_LL on EP93XX On EP93XX uncompress.h uses CONFIG_DEBUG_UART_PHYS instead of a hard coded serial port. This means the build breaks when DEBUG_LL (and DEBUG_LL_UART_PL01X) is not enabled. This is fixed by adding a new dependency. Signed-off-by: Daniel Thompson Acked-by: Arnd Bergmann Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index ff43ea38fe1f..e250505b9ae1 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -1370,7 +1370,8 @@ config DEBUG_UART_PHYS default 0xfffb9800 if DEBUG_OMAP1UART3 || DEBUG_OMAP7XXUART3 default 0xfffe8600 if DEBUG_UART_BCM63XX default 0xfffff700 if ARCH_IOP33X - depends on DEBUG_LL_UART_8250 || DEBUG_LL_UART_PL01X || \ + depends on ARCH_EP93XX || \ + DEBUG_LL_UART_8250 || DEBUG_LL_UART_PL01X || \ DEBUG_LL_UART_EFM32 || \ DEBUG_UART_8250 || DEBUG_UART_PL01X || DEBUG_MESON_UARTAO || \ DEBUG_MSM_UART || DEBUG_NETX_UART || \ -- cgit v1.2.3-59-g8ed1b From a61cbf51f0a63e40ebda56f1961cf5570dc067b3 Mon Sep 17 00:00:00 2001 From: Daniel Thompson Date: Fri, 9 Jan 2015 18:34:22 +0100 Subject: ARM: 8274/1: Fix DEBUG_LL for multi-platform kernels (without PL01X) When building a multi_v7_defconfig kernel it is not possible to configure DEBUG_LL to use any serial device except a ARM Primecell PL01X, or more accurately and worse, it is possible to configure a different serial device but KConfig does not honour this request. In fact this also overrides the user selection for some of the single platform kernels, for example I don't think DEBUG_LL can be targeted at ICE or semihosted supervisor for ARCH_VERSATILE. 
This happens because DEBUG_UART_PL01X is automatically enabled by some architectures and this means user decisions made regarding the DEBUG_LL backend will be overridden. Problem is fixed by removing the automatic enabling of this option. Signed-off-by: Daniel Thompson Acked-by: Arnd Bergmann Signed-off-by: Russell King --- arch/arm/Kconfig.debug | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) (limited to 'arch') diff --git a/arch/arm/Kconfig.debug b/arch/arm/Kconfig.debug index e250505b9ae1..a324ecdfeb21 100644 --- a/arch/arm/Kconfig.debug +++ b/arch/arm/Kconfig.debug @@ -1263,12 +1263,7 @@ config DEBUG_LL_INCLUDE # Compatibility options for PL01x config DEBUG_UART_PL01X - def_bool ARCH_EP93XX || \ - ARCH_INTEGRATOR || \ - ARCH_SPEAR3XX || \ - ARCH_SPEAR6XX || \ - ARCH_SPEAR13XX || \ - ARCH_VERSATILE + bool # Compatibility options for 8250 config DEBUG_UART_8250 -- cgit v1.2.3-59-g8ed1b From 35997a231021d603ddc73e1abfbe4f76d63f23b2 Mon Sep 17 00:00:00 2001 From: Brian Norris Date: Sat, 13 Dec 2014 04:06:22 +0100 Subject: ARM: 8248/1: pm: remove outdated comment As of commit abda1bd5f4e04054ce083c298fcd68a743e9df03 __cpu_suspend() takes only 2 arguments, and those arguments are passed by the platform code. This comment thus makes no sense, as cpu_suspend() is not actually hiding any arguments. Signed-off-by: Brian Norris Signed-off-by: Russell King --- arch/arm/kernel/suspend.c | 4 ---- 1 file changed, 4 deletions(-) (limited to 'arch') diff --git a/arch/arm/kernel/suspend.c b/arch/arm/kernel/suspend.c index 2835d35234ca..9a2f882a0a2d 100644 --- a/arch/arm/kernel/suspend.c +++ b/arch/arm/kernel/suspend.c @@ -14,10 +14,6 @@ extern int __cpu_suspend(unsigned long, int (*)(unsigned long), u32 cpuid); extern void cpu_resume_mmu(void); #ifdef CONFIG_MMU -/* - * Hide the first two arguments to __cpu_suspend - these are an implementation - * detail which platform code shouldn't have to know about. - */ int cpu_suspend(unsigned long arg, int (*fn)(unsigned long)) { struct mm_struct *mm = current->active_mm; -- cgit v1.2.3-59-g8ed1b From 99a468d779f6851a31053e6a33bf91391034010b Mon Sep 17 00:00:00 2001 From: "George G. Davis" Date: Fri, 16 Jan 2015 11:21:05 +0100 Subject: ARM: 8286/1: mm: Fix dma_contiguous_reserve comment DMA contiguous allocations have been able to use highmem since commit "95b0e65 ARM: mm: don't limit default CMA region only to low memory" but a comment still notes the earlier "low memory" limitation. Update the comment to remove the low memory limitation and fix the s/contigouos/contiguous/ typo while we're at it. Signed-off-by: George G. Davis Acked-by: Marek Szyprowski Signed-off-by: Russell King --- arch/arm/mm/init.c | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) (limited to 'arch') diff --git a/arch/arm/mm/init.c b/arch/arm/mm/init.c index 98ad9c79ea0e..d538dc73fcb0 100644 --- a/arch/arm/mm/init.c +++ b/arch/arm/mm/init.c @@ -319,10 +319,7 @@ void __init arm_memblock_init(const struct machine_desc *mdesc) early_init_fdt_scan_reserved_mem(); - /* - * reserve memory for DMA contigouos allocations, - * must come from DMA area inside low memory - */ + /* reserve memory for DMA contiguous allocations */ dma_contiguous_reserve(arm_dma_limit); arm_memblock_steal_permitted = false; -- cgit v1.2.3-59-g8ed1b From 7d57909bf69f213b4a9f278d78271e7c9a05f62f Mon Sep 17 00:00:00 2001 From: Masahiro Yamada Date: Tue, 20 Jan 2015 03:44:26 +0100 Subject: ARM: 8290/1: decompressor: fix a wrong comment This comment does not correspond to the actual code. 
When zImage is loaded at a lower *OR* higher address of the destination of Image, it won't overwrite itself. Signed-off-by: Masahiro Yamada Acked-by: Nicolas Pitre Signed-off-by: Russell King --- arch/arm/boot/compressed/head.S | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S index 68be9017593d..9dff61479dff 100644 --- a/arch/arm/boot/compressed/head.S +++ b/arch/arm/boot/compressed/head.S @@ -178,7 +178,7 @@ not_angel: /* * Set up a page table only if it won't overwrite ourself. - * That means r4 < pc && r4 - 16k page directory > &_end. + * That means r4 < pc || r4 - 16k page directory > &_end. * Given that r4 > &_end is most unfrequent, we add a rough * additional 1MB of room for a possible appended DTB. */ -- cgit v1.2.3-59-g8ed1b From 7a06192834414f02465cb38f4ceb88a6f02392f6 Mon Sep 17 00:00:00 2001 From: Masahiro Yamada Date: Tue, 20 Jan 2015 03:49:35 +0100 Subject: ARM: 8291/1: replace magic number with PAGE_SHIFT macro in fixup_pv code This line converts PHYS_OFFSET into PHYS_PFN_OFFSET. It is better to use PAGE_SHIFT rather than the magic number 12. Signed-off-by: Masahiro Yamada Acked-by: Nicolas Pitre Signed-off-by: Russell King --- arch/arm/kernel/head.S | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/kernel/head.S b/arch/arm/kernel/head.S index 664eee8c4a26..ecb7be8b0854 100644 --- a/arch/arm/kernel/head.S +++ b/arch/arm/kernel/head.S @@ -586,7 +586,7 @@ __fixup_pv_table: add r5, r5, r3 @ adjust table end address add r6, r6, r3 @ adjust __pv_phys_pfn_offset address add r7, r7, r3 @ adjust __pv_offset address - mov r0, r8, lsr #12 @ convert to PFN + mov r0, r8, lsr #PAGE_SHIFT @ convert to PFN str r0, [r6] @ save computed PHYS_OFFSET to __pv_phys_pfn_offset strcc ip, [r7, #HIGH_OFFSET] @ save to __pv_offset high bits mov r6, r3, lsr #24 @ constant for add/sub instructions -- cgit v1.2.3-59-g8ed1b From 83508093f448e929bf55d07dd08246d22b03d753 Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 02:29:16 +0100 Subject: ARM: 8278/1: sa1100: split irq handling for low GPIOs Low GPIO pins use an interrupt in SC interrupts space. However it's possible to handle them as if all the GPIO interrupts are instead tied to single GPIO handler, which later decodes GEDR register and chain-calls next IRQ handler. So split first 11 interrupts into system part (IRQ_GPIO0_SC - IRQ_GPIO10_SC) which work exactly like the rest of system controller interrupts and real GPIO interrupts (IRQ_GPIO0..IRQ_GPIO10). A single handler sa1100_gpio_handler then decodes and calls next handler. Signed-off-by: Dmitry Eremin-Solenikov Tested-by: Linus Walleij Signed-off-by: Russell King --- arch/arm/mach-sa1100/include/mach/irqs.h | 73 +++++++++++++++------------ arch/arm/mach-sa1100/irq.c | 87 ++++++++++++++++++-------------- drivers/gpio/gpio-sa1100.c | 2 +- 3 files changed, 93 insertions(+), 69 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/include/mach/irqs.h b/arch/arm/mach-sa1100/include/mach/irqs.h index de0983494c7e..734e30e406a3 100644 --- a/arch/arm/mach-sa1100/include/mach/irqs.h +++ b/arch/arm/mach-sa1100/include/mach/irqs.h @@ -8,17 +8,17 @@ * 2001/11/14 RMK Cleaned up and standardised a lot of the IRQs. 
*/ -#define IRQ_GPIO0 1 -#define IRQ_GPIO1 2 -#define IRQ_GPIO2 3 -#define IRQ_GPIO3 4 -#define IRQ_GPIO4 5 -#define IRQ_GPIO5 6 -#define IRQ_GPIO6 7 -#define IRQ_GPIO7 8 -#define IRQ_GPIO8 9 -#define IRQ_GPIO9 10 -#define IRQ_GPIO10 11 +#define IRQ_GPIO0_SC 1 +#define IRQ_GPIO1_SC 2 +#define IRQ_GPIO2_SC 3 +#define IRQ_GPIO3_SC 4 +#define IRQ_GPIO4_SC 5 +#define IRQ_GPIO5_SC 6 +#define IRQ_GPIO6_SC 7 +#define IRQ_GPIO7_SC 8 +#define IRQ_GPIO8_SC 9 +#define IRQ_GPIO9_SC 10 +#define IRQ_GPIO10_SC 11 #define IRQ_GPIO11_27 12 #define IRQ_LCD 13 /* LCD controller */ #define IRQ_Ser0UDC 14 /* Ser. port 0 UDC */ @@ -41,32 +41,43 @@ #define IRQ_RTC1Hz 31 /* RTC 1 Hz clock */ #define IRQ_RTCAlrm 32 /* RTC Alarm */ -#define IRQ_GPIO11 33 -#define IRQ_GPIO12 34 -#define IRQ_GPIO13 35 -#define IRQ_GPIO14 36 -#define IRQ_GPIO15 37 -#define IRQ_GPIO16 38 -#define IRQ_GPIO17 39 -#define IRQ_GPIO18 40 -#define IRQ_GPIO19 41 -#define IRQ_GPIO20 42 -#define IRQ_GPIO21 43 -#define IRQ_GPIO22 44 -#define IRQ_GPIO23 45 -#define IRQ_GPIO24 46 -#define IRQ_GPIO25 47 -#define IRQ_GPIO26 48 -#define IRQ_GPIO27 49 +#define IRQ_GPIO0 33 +#define IRQ_GPIO1 34 +#define IRQ_GPIO2 35 +#define IRQ_GPIO3 36 +#define IRQ_GPIO4 37 +#define IRQ_GPIO5 38 +#define IRQ_GPIO6 39 +#define IRQ_GPIO7 40 +#define IRQ_GPIO8 41 +#define IRQ_GPIO9 42 +#define IRQ_GPIO10 43 +#define IRQ_GPIO11 44 +#define IRQ_GPIO12 45 +#define IRQ_GPIO13 46 +#define IRQ_GPIO14 47 +#define IRQ_GPIO15 48 +#define IRQ_GPIO16 49 +#define IRQ_GPIO17 50 +#define IRQ_GPIO18 51 +#define IRQ_GPIO19 52 +#define IRQ_GPIO20 53 +#define IRQ_GPIO21 54 +#define IRQ_GPIO22 55 +#define IRQ_GPIO23 56 +#define IRQ_GPIO24 57 +#define IRQ_GPIO25 58 +#define IRQ_GPIO26 59 +#define IRQ_GPIO27 60 /* * The next 16 interrupts are for board specific purposes. Since * the kernel can only run on one machine at a time, we can re-use * these. If you need more, increase IRQ_BOARD_END, but keep it - * within sensible limits. IRQs 49 to 64 are available. + * within sensible limits. IRQs 61 to 76 are available. */ -#define IRQ_BOARD_START 50 -#define IRQ_BOARD_END 66 +#define IRQ_BOARD_START 61 +#define IRQ_BOARD_END 77 /* * Figure out the MAX IRQ number. 
diff --git a/arch/arm/mach-sa1100/irq.c b/arch/arm/mach-sa1100/irq.c index 63e2901db416..2dc6a2a9c60c 100644 --- a/arch/arm/mach-sa1100/irq.c +++ b/arch/arm/mach-sa1100/irq.c @@ -87,7 +87,7 @@ static struct irq_domain *sa1100_normal_irqdomain; */ static int GPIO_IRQ_rising_edge; static int GPIO_IRQ_falling_edge; -static int GPIO_IRQ_mask = (1 << 11) - 1; +static int GPIO_IRQ_mask; static int sa1100_gpio_type(struct irq_data *d, unsigned int type) { @@ -124,6 +124,26 @@ static void sa1100_gpio_ack(struct irq_data *d) GEDR = BIT(d->hwirq); } +static void sa1100_gpio_mask(struct irq_data *d) +{ + unsigned int mask = BIT(d->hwirq); + + GPIO_IRQ_mask &= ~mask; + + GRER &= ~mask; + GFER &= ~mask; +} + +static void sa1100_gpio_unmask(struct irq_data *d) +{ + unsigned int mask = BIT(d->hwirq); + + GPIO_IRQ_mask |= mask; + + GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; + GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; +} + static int sa1100_gpio_wake(struct irq_data *d, unsigned int on) { if (on) @@ -139,8 +159,8 @@ static int sa1100_gpio_wake(struct irq_data *d, unsigned int on) static struct irq_chip sa1100_low_gpio_chip = { .name = "GPIO-l", .irq_ack = sa1100_gpio_ack, - .irq_mask = sa1100_mask_irq, - .irq_unmask = sa1100_unmask_irq, + .irq_mask = sa1100_gpio_mask, + .irq_unmask = sa1100_gpio_unmask, .irq_set_type = sa1100_gpio_type, .irq_set_wake = sa1100_gpio_wake, }; @@ -163,16 +183,16 @@ static struct irq_domain_ops sa1100_low_gpio_irqdomain_ops = { static struct irq_domain *sa1100_low_gpio_irqdomain; /* - * IRQ11 (GPIO11 through 27) handler. We enter here with the + * IRQ 0-11 (GPIO) handler. We enter here with the * irq_controller_lock held, and IRQs disabled. Decode the IRQ * and call the handler. */ static void -sa1100_high_gpio_handler(unsigned int irq, struct irq_desc *desc) +sa1100_gpio_handler(unsigned int irq, struct irq_desc *desc) { unsigned int mask; - mask = GEDR & 0xfffff800; + mask = GEDR; do { /* * clear down all currently active IRQ sources. @@ -180,8 +200,7 @@ sa1100_high_gpio_handler(unsigned int irq, struct irq_desc *desc) */ GEDR = mask; - irq = IRQ_GPIO11; - mask >>= 11; + irq = IRQ_GPIO0; do { if (mask & 1) generic_handle_irq(irq); @@ -189,7 +208,7 @@ sa1100_high_gpio_handler(unsigned int irq, struct irq_desc *desc) irq++; } while (mask); - mask = GEDR & 0xfffff800; + mask = GEDR; } while (mask); } @@ -198,31 +217,11 @@ sa1100_high_gpio_handler(unsigned int irq, struct irq_desc *desc) * In addition, the IRQs are all collected up into one bit in the * interrupt controller registers. 
*/ -static void sa1100_high_gpio_mask(struct irq_data *d) -{ - unsigned int mask = BIT(d->hwirq); - - GPIO_IRQ_mask &= ~mask; - - GRER &= ~mask; - GFER &= ~mask; -} - -static void sa1100_high_gpio_unmask(struct irq_data *d) -{ - unsigned int mask = BIT(d->hwirq); - - GPIO_IRQ_mask |= mask; - - GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; - GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; -} - static struct irq_chip sa1100_high_gpio_chip = { .name = "GPIO-h", .irq_ack = sa1100_gpio_ack, - .irq_mask = sa1100_high_gpio_mask, - .irq_unmask = sa1100_high_gpio_unmask, + .irq_mask = sa1100_gpio_mask, + .irq_unmask = sa1100_gpio_unmask, .irq_set_type = sa1100_gpio_type, .irq_set_wake = sa1100_gpio_wake, }; @@ -325,7 +324,7 @@ sa1100_handle_irq(struct pt_regs *regs) if (mask == 0) break; - handle_IRQ(ffs(mask) - 1 + IRQ_GPIO0, regs); + handle_IRQ(ffs(mask) - 1 + IRQ_GPIO0_SC, regs); } while (1); } @@ -350,22 +349,36 @@ void __init sa1100_init_irq(void) */ ICCR = 1; + sa1100_normal_irqdomain = irq_domain_add_legacy(NULL, + 32, IRQ_GPIO0_SC, 0, + &sa1100_normal_irqdomain_ops, NULL); + sa1100_low_gpio_irqdomain = irq_domain_add_legacy(NULL, 11, IRQ_GPIO0, 0, &sa1100_low_gpio_irqdomain_ops, NULL); - sa1100_normal_irqdomain = irq_domain_add_legacy(NULL, - 21, IRQ_GPIO11_27, 11, - &sa1100_normal_irqdomain_ops, NULL); - sa1100_high_gpio_irqdomain = irq_domain_add_legacy(NULL, 17, IRQ_GPIO11, 11, &sa1100_high_gpio_irqdomain_ops, NULL); + /* + * Install handlers for GPIO 0-10 edge detect interrupts + */ + irq_set_chained_handler(IRQ_GPIO0_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO1_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO2_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO3_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO4_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO5_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO6_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO7_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO8_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO9_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO10_SC, sa1100_gpio_handler); /* * Install handler for GPIO 11-27 edge detect interrupts */ - irq_set_chained_handler(IRQ_GPIO11_27, sa1100_high_gpio_handler); + irq_set_chained_handler(IRQ_GPIO11_27, sa1100_gpio_handler); set_handle_irq(sa1100_handle_irq); diff --git a/drivers/gpio/gpio-sa1100.c b/drivers/gpio/gpio-sa1100.c index a90be34e4d5c..5b5d3c7bb84e 100644 --- a/drivers/gpio/gpio-sa1100.c +++ b/drivers/gpio/gpio-sa1100.c @@ -50,7 +50,7 @@ static int sa1100_direction_output(struct gpio_chip *chip, unsigned offset, int static int sa1100_to_irq(struct gpio_chip *chip, unsigned offset) { - return offset < 11 ? (IRQ_GPIO0 + offset) : (IRQ_GPIO11 - 11 + offset); + return IRQ_GPIO0 + offset; } static struct gpio_chip sa1100_gpio_chip = { -- cgit v1.2.3-59-g8ed1b From 590f2661069823cb14c6ca695a71b85e591d12a6 Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 02:30:58 +0100 Subject: ARM: 8279/1: sa1100: merge both GPIO irqdomains Now there is no difference between low and high GPIO irqdomains. Merge them into single irqdomain handling all GPIOs. 
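To make the effect of the merge concrete (a sketch, not part of the patch): a single 28-entry domain starting at hwirq 0 maps GPIO n linearly onto Linux IRQ IRQ_GPIO0 + n, i.e. IRQs 33..60 for GPIOs 0..27 with the renumbered mach/irqs.h, which is what lets the gpio driver's to_irq callback from the previous patch stay a one-liner:

/* illustration only: linear hwirq -> Linux IRQ mapping after the merge */
static int example_sa1100_to_irq(struct gpio_chip *chip, unsigned offset)
{
	return IRQ_GPIO0 + offset;	/* GPIO 0..27 -> IRQ 33..60 */
}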
Signed-off-by: Dmitry Eremin-Solenikov Tested-by: Linus Walleij Signed-off-by: Russell King --- arch/arm/mach-sa1100/irq.c | 57 +++++++++------------------------------------- 1 file changed, 11 insertions(+), 46 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/irq.c b/arch/arm/mach-sa1100/irq.c index 2dc6a2a9c60c..5589b23ed043 100644 --- a/arch/arm/mach-sa1100/irq.c +++ b/arch/arm/mach-sa1100/irq.c @@ -154,10 +154,10 @@ static int sa1100_gpio_wake(struct irq_data *d, unsigned int on) } /* - * This is for IRQs from 0 to 10. + * This is for GPIO IRQs */ -static struct irq_chip sa1100_low_gpio_chip = { - .name = "GPIO-l", +static struct irq_chip sa1100_gpio_chip = { + .name = "GPIO", .irq_ack = sa1100_gpio_ack, .irq_mask = sa1100_gpio_mask, .irq_unmask = sa1100_gpio_unmask, @@ -165,22 +165,22 @@ static struct irq_chip sa1100_low_gpio_chip = { .irq_set_wake = sa1100_gpio_wake, }; -static int sa1100_low_gpio_irqdomain_map(struct irq_domain *d, +static int sa1100_gpio_irqdomain_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hwirq) { - irq_set_chip_and_handler(irq, &sa1100_low_gpio_chip, + irq_set_chip_and_handler(irq, &sa1100_gpio_chip, handle_edge_irq); set_irq_flags(irq, IRQF_VALID | IRQF_PROBE); return 0; } -static struct irq_domain_ops sa1100_low_gpio_irqdomain_ops = { - .map = sa1100_low_gpio_irqdomain_map, +static struct irq_domain_ops sa1100_gpio_irqdomain_ops = { + .map = sa1100_gpio_irqdomain_map, .xlate = irq_domain_xlate_onetwocell, }; -static struct irq_domain *sa1100_low_gpio_irqdomain; +static struct irq_domain *sa1100_gpio_irqdomain; /* * IRQ 0-11 (GPIO) handler. We enter here with the @@ -212,37 +212,6 @@ sa1100_gpio_handler(unsigned int irq, struct irq_desc *desc) } while (mask); } -/* - * Like GPIO0 to 10, GPIO11-27 IRQs need to be handled specially. - * In addition, the IRQs are all collected up into one bit in the - * interrupt controller registers. 
- */ -static struct irq_chip sa1100_high_gpio_chip = { - .name = "GPIO-h", - .irq_ack = sa1100_gpio_ack, - .irq_mask = sa1100_gpio_mask, - .irq_unmask = sa1100_gpio_unmask, - .irq_set_type = sa1100_gpio_type, - .irq_set_wake = sa1100_gpio_wake, -}; - -static int sa1100_high_gpio_irqdomain_map(struct irq_domain *d, - unsigned int irq, irq_hw_number_t hwirq) -{ - irq_set_chip_and_handler(irq, &sa1100_high_gpio_chip, - handle_edge_irq); - set_irq_flags(irq, IRQF_VALID | IRQF_PROBE); - - return 0; -} - -static struct irq_domain_ops sa1100_high_gpio_irqdomain_ops = { - .map = sa1100_high_gpio_irqdomain_map, - .xlate = irq_domain_xlate_onetwocell, -}; - -static struct irq_domain *sa1100_high_gpio_irqdomain; - static struct resource irq_resource = DEFINE_RES_MEM_NAMED(0x90050000, SZ_64K, "irqs"); @@ -353,13 +322,9 @@ void __init sa1100_init_irq(void) 32, IRQ_GPIO0_SC, 0, &sa1100_normal_irqdomain_ops, NULL); - sa1100_low_gpio_irqdomain = irq_domain_add_legacy(NULL, - 11, IRQ_GPIO0, 0, - &sa1100_low_gpio_irqdomain_ops, NULL); - - sa1100_high_gpio_irqdomain = irq_domain_add_legacy(NULL, - 17, IRQ_GPIO11, 11, - &sa1100_high_gpio_irqdomain_ops, NULL); + sa1100_gpio_irqdomain = irq_domain_add_legacy(NULL, + 28, IRQ_GPIO0, 0, + &sa1100_gpio_irqdomain_ops, NULL); /* * Install handlers for GPIO 0-10 edge detect interrupts -- cgit v1.2.3-59-g8ed1b From a82be3f0f1a4a03ec43750b12cb48871ec2a2e28 Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 02:31:48 +0100 Subject: ARM: 8280/1: sa1100: switch to irq_domain_add_simple() As now both SC and GPIO irq domains start from 0 hwirq and do not contain holes, switch to using irq_domain_add_simple() instead of irq_domain_add_legacy(). Signed-off-by: Dmitry Eremin-Solenikov Tested-by: Linus Walleij Signed-off-by: Russell King --- arch/arm/mach-sa1100/irq.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/irq.c b/arch/arm/mach-sa1100/irq.c index 5589b23ed043..a9dfe8e16d95 100644 --- a/arch/arm/mach-sa1100/irq.c +++ b/arch/arm/mach-sa1100/irq.c @@ -318,12 +318,12 @@ void __init sa1100_init_irq(void) */ ICCR = 1; - sa1100_normal_irqdomain = irq_domain_add_legacy(NULL, - 32, IRQ_GPIO0_SC, 0, + sa1100_normal_irqdomain = irq_domain_add_simple(NULL, + 32, IRQ_GPIO0_SC, &sa1100_normal_irqdomain_ops, NULL); - sa1100_gpio_irqdomain = irq_domain_add_legacy(NULL, - 28, IRQ_GPIO0, 0, + sa1100_gpio_irqdomain = irq_domain_add_simple(NULL, + 28, IRQ_GPIO0, &sa1100_gpio_irqdomain_ops, NULL); /* -- cgit v1.2.3-59-g8ed1b From a0ea298d325616b58760c5182705df76912e0d73 Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 02:32:26 +0100 Subject: ARM: 8281/1: sa1100: move GPIO-related IRQ code to gpio driver As a part of driver consolidation, move GPIO-related IRQ code to drivers/gpio/gpio-sa1100.c. The code does not use GPIOLIB_IRQCHIP (yet), because sa1100 does not have a device for gpios, which is a requirement for GPIOLIB_IRQCHIP. This will be the next step. 
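For orientation, the GPIOLIB_IRQCHIP conversion referred to as the next step could look roughly like the sketch below once a platform device provides the gpio_chip with a parent. It reuses the names added in this patch's diff, is not part of the series, and omits error handling:

/* rough sketch of a later GPIOLIB_IRQCHIP setup; assumptions as noted above */
static void __init example_sa1100_gpio_irqchip_setup(void)
{
	int irq;

	/* let gpiolib create the irqdomain covering hwirqs 0..27 */
	gpiochip_irqchip_add(&sa1100_gpio_chip, &sa1100_gpio_irq_chip,
			     IRQ_GPIO0, handle_edge_irq, IRQ_TYPE_NONE);

	/* chain every system-controller parent: GPIO 0..10 plus GPIO 11-27 */
	for (irq = IRQ_GPIO0_SC; irq <= IRQ_GPIO10_SC; irq++)
		gpiochip_set_chained_irqchip(&sa1100_gpio_chip,
					     &sa1100_gpio_irq_chip, irq,
					     sa1100_gpio_handler);
	gpiochip_set_chained_irqchip(&sa1100_gpio_chip,
				     &sa1100_gpio_irq_chip, IRQ_GPIO11_27,
				     sa1100_gpio_handler);
}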
Signed-off-by: Dmitry Eremin-Solenikov Tested-by: Linus Walleij Signed-off-by: Russell King --- arch/arm/mach-sa1100/irq.c | 174 --------------------------------------- drivers/gpio/gpio-sa1100.c | 197 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 197 insertions(+), 174 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/irq.c b/arch/arm/mach-sa1100/irq.c index a9dfe8e16d95..a7d116af4a4f 100644 --- a/arch/arm/mach-sa1100/irq.c +++ b/arch/arm/mach-sa1100/irq.c @@ -80,138 +80,6 @@ static struct irq_domain_ops sa1100_normal_irqdomain_ops = { static struct irq_domain *sa1100_normal_irqdomain; -/* - * SA1100 GPIO edge detection for IRQs: - * IRQs are generated on Falling-Edge, Rising-Edge, or both. - * Use this instead of directly setting GRER/GFER. - */ -static int GPIO_IRQ_rising_edge; -static int GPIO_IRQ_falling_edge; -static int GPIO_IRQ_mask; - -static int sa1100_gpio_type(struct irq_data *d, unsigned int type) -{ - unsigned int mask; - - mask = BIT(d->hwirq); - - if (type == IRQ_TYPE_PROBE) { - if ((GPIO_IRQ_rising_edge | GPIO_IRQ_falling_edge) & mask) - return 0; - type = IRQ_TYPE_EDGE_RISING | IRQ_TYPE_EDGE_FALLING; - } - - if (type & IRQ_TYPE_EDGE_RISING) { - GPIO_IRQ_rising_edge |= mask; - } else - GPIO_IRQ_rising_edge &= ~mask; - if (type & IRQ_TYPE_EDGE_FALLING) { - GPIO_IRQ_falling_edge |= mask; - } else - GPIO_IRQ_falling_edge &= ~mask; - - GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; - GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; - - return 0; -} - -/* - * GPIO IRQs must be acknowledged. - */ -static void sa1100_gpio_ack(struct irq_data *d) -{ - GEDR = BIT(d->hwirq); -} - -static void sa1100_gpio_mask(struct irq_data *d) -{ - unsigned int mask = BIT(d->hwirq); - - GPIO_IRQ_mask &= ~mask; - - GRER &= ~mask; - GFER &= ~mask; -} - -static void sa1100_gpio_unmask(struct irq_data *d) -{ - unsigned int mask = BIT(d->hwirq); - - GPIO_IRQ_mask |= mask; - - GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; - GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; -} - -static int sa1100_gpio_wake(struct irq_data *d, unsigned int on) -{ - if (on) - PWER |= BIT(d->hwirq); - else - PWER &= ~BIT(d->hwirq); - return 0; -} - -/* - * This is for GPIO IRQs - */ -static struct irq_chip sa1100_gpio_chip = { - .name = "GPIO", - .irq_ack = sa1100_gpio_ack, - .irq_mask = sa1100_gpio_mask, - .irq_unmask = sa1100_gpio_unmask, - .irq_set_type = sa1100_gpio_type, - .irq_set_wake = sa1100_gpio_wake, -}; - -static int sa1100_gpio_irqdomain_map(struct irq_domain *d, - unsigned int irq, irq_hw_number_t hwirq) -{ - irq_set_chip_and_handler(irq, &sa1100_gpio_chip, - handle_edge_irq); - set_irq_flags(irq, IRQF_VALID | IRQF_PROBE); - - return 0; -} - -static struct irq_domain_ops sa1100_gpio_irqdomain_ops = { - .map = sa1100_gpio_irqdomain_map, - .xlate = irq_domain_xlate_onetwocell, -}; - -static struct irq_domain *sa1100_gpio_irqdomain; - -/* - * IRQ 0-11 (GPIO) handler. We enter here with the - * irq_controller_lock held, and IRQs disabled. Decode the IRQ - * and call the handler. - */ -static void -sa1100_gpio_handler(unsigned int irq, struct irq_desc *desc) -{ - unsigned int mask; - - mask = GEDR; - do { - /* - * clear down all currently active IRQ sources. - * We will be processing them all. 
- */ - GEDR = mask; - - irq = IRQ_GPIO0; - do { - if (mask & 1) - generic_handle_irq(irq); - mask >>= 1; - irq++; - } while (mask); - - mask = GEDR; - } while (mask); -} - static struct resource irq_resource = DEFINE_RES_MEM_NAMED(0x90050000, SZ_64K, "irqs"); @@ -238,17 +106,6 @@ static int sa1100irq_suspend(void) IC_GPIO6|IC_GPIO5|IC_GPIO4|IC_GPIO3|IC_GPIO2| IC_GPIO1|IC_GPIO0); - /* - * Set the appropriate edges for wakeup. - */ - GRER = PWER & GPIO_IRQ_rising_edge; - GFER = PWER & GPIO_IRQ_falling_edge; - - /* - * Clear any pending GPIO interrupts. - */ - GEDR = GEDR; - return 0; } @@ -260,9 +117,6 @@ static void sa1100irq_resume(void) ICCR = st->iccr; ICLR = st->iclr; - GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; - GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; - ICMR = st->icmr; } } @@ -307,11 +161,6 @@ void __init sa1100_init_irq(void) /* all IRQs are IRQ, not FIQ */ ICLR = 0; - /* clear all GPIO edge detects */ - GFER = 0; - GRER = 0; - GEDR = -1; - /* * Whatever the doc says, this has to be set for the wait-on-irq * instruction to work... on a SA1100 rev 9 at least. @@ -322,29 +171,6 @@ void __init sa1100_init_irq(void) 32, IRQ_GPIO0_SC, &sa1100_normal_irqdomain_ops, NULL); - sa1100_gpio_irqdomain = irq_domain_add_simple(NULL, - 28, IRQ_GPIO0, - &sa1100_gpio_irqdomain_ops, NULL); - - /* - * Install handlers for GPIO 0-10 edge detect interrupts - */ - irq_set_chained_handler(IRQ_GPIO0_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO1_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO2_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO3_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO4_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO5_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO6_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO7_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO8_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO9_SC, sa1100_gpio_handler); - irq_set_chained_handler(IRQ_GPIO10_SC, sa1100_gpio_handler); - /* - * Install handler for GPIO 11-27 edge detect interrupts - */ - irq_set_chained_handler(IRQ_GPIO11_27, sa1100_gpio_handler); - set_handle_irq(sa1100_handle_irq); sa1100_init_gpio(); diff --git a/drivers/gpio/gpio-sa1100.c b/drivers/gpio/gpio-sa1100.c index 5b5d3c7bb84e..bec397a60204 100644 --- a/drivers/gpio/gpio-sa1100.c +++ b/drivers/gpio/gpio-sa1100.c @@ -11,6 +11,7 @@ #include #include #include +#include #include #include @@ -64,7 +65,203 @@ static struct gpio_chip sa1100_gpio_chip = { .ngpio = GPIO_MAX + 1, }; +/* + * SA1100 GPIO edge detection for IRQs: + * IRQs are generated on Falling-Edge, Rising-Edge, or both. + * Use this instead of directly setting GRER/GFER. + */ +static int GPIO_IRQ_rising_edge; +static int GPIO_IRQ_falling_edge; +static int GPIO_IRQ_mask; + +static int sa1100_gpio_type(struct irq_data *d, unsigned int type) +{ + unsigned int mask; + + mask = BIT(d->hwirq); + + if (type == IRQ_TYPE_PROBE) { + if ((GPIO_IRQ_rising_edge | GPIO_IRQ_falling_edge) & mask) + return 0; + type = IRQ_TYPE_EDGE_RISING | IRQ_TYPE_EDGE_FALLING; + } + + if (type & IRQ_TYPE_EDGE_RISING) + GPIO_IRQ_rising_edge |= mask; + else + GPIO_IRQ_rising_edge &= ~mask; + if (type & IRQ_TYPE_EDGE_FALLING) + GPIO_IRQ_falling_edge |= mask; + else + GPIO_IRQ_falling_edge &= ~mask; + + GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; + GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; + + return 0; +} + +/* + * GPIO IRQs must be acknowledged. 
+ */ +static void sa1100_gpio_ack(struct irq_data *d) +{ + GEDR = BIT(d->hwirq); +} + +static void sa1100_gpio_mask(struct irq_data *d) +{ + unsigned int mask = BIT(d->hwirq); + + GPIO_IRQ_mask &= ~mask; + + GRER &= ~mask; + GFER &= ~mask; +} + +static void sa1100_gpio_unmask(struct irq_data *d) +{ + unsigned int mask = BIT(d->hwirq); + + GPIO_IRQ_mask |= mask; + + GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; + GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; +} + +static int sa1100_gpio_wake(struct irq_data *d, unsigned int on) +{ + if (on) + PWER |= BIT(d->hwirq); + else + PWER &= ~BIT(d->hwirq); + return 0; +} + +/* + * This is for GPIO IRQs + */ +static struct irq_chip sa1100_gpio_irq_chip = { + .name = "GPIO", + .irq_ack = sa1100_gpio_ack, + .irq_mask = sa1100_gpio_mask, + .irq_unmask = sa1100_gpio_unmask, + .irq_set_type = sa1100_gpio_type, + .irq_set_wake = sa1100_gpio_wake, +}; + +static int sa1100_gpio_irqdomain_map(struct irq_domain *d, + unsigned int irq, irq_hw_number_t hwirq) +{ + irq_set_chip_and_handler(irq, &sa1100_gpio_irq_chip, + handle_edge_irq); + set_irq_flags(irq, IRQF_VALID | IRQF_PROBE); + + return 0; +} + +static struct irq_domain_ops sa1100_gpio_irqdomain_ops = { + .map = sa1100_gpio_irqdomain_map, + .xlate = irq_domain_xlate_onetwocell, +}; + +static struct irq_domain *sa1100_gpio_irqdomain; + +/* + * IRQ 0-11 (GPIO) handler. We enter here with the + * irq_controller_lock held, and IRQs disabled. Decode the IRQ + * and call the handler. + */ +static void +sa1100_gpio_handler(unsigned int irq, struct irq_desc *desc) +{ + unsigned int mask; + + mask = GEDR; + do { + /* + * clear down all currently active IRQ sources. + * We will be processing them all. + */ + GEDR = mask; + + irq = IRQ_GPIO0; + do { + if (mask & 1) + generic_handle_irq(irq); + mask >>= 1; + irq++; + } while (mask); + + mask = GEDR; + } while (mask); +} + +static int sa1100_gpio_suspend(void) +{ + /* + * Set the appropriate edges for wakeup. + */ + GRER = PWER & GPIO_IRQ_rising_edge; + GFER = PWER & GPIO_IRQ_falling_edge; + + /* + * Clear any pending GPIO interrupts. 
+ */ + GEDR = GEDR; + + return 0; +} + +static void sa1100_gpio_resume(void) +{ + GRER = GPIO_IRQ_rising_edge & GPIO_IRQ_mask; + GFER = GPIO_IRQ_falling_edge & GPIO_IRQ_mask; +} + +static struct syscore_ops sa1100_gpio_syscore_ops = { + .suspend = sa1100_gpio_suspend, + .resume = sa1100_gpio_resume, +}; + +static int __init sa1100_gpio_init_devicefs(void) +{ + register_syscore_ops(&sa1100_gpio_syscore_ops); + return 0; +} + +device_initcall(sa1100_gpio_init_devicefs); + void __init sa1100_init_gpio(void) { + /* clear all GPIO edge detects */ + GFER = 0; + GRER = 0; + GEDR = -1; + gpiochip_add(&sa1100_gpio_chip); + + sa1100_gpio_irqdomain = irq_domain_add_simple(NULL, + 28, IRQ_GPIO0, + &sa1100_gpio_irqdomain_ops, NULL); + + /* + * Install handlers for GPIO 0-10 edge detect interrupts + */ + irq_set_chained_handler(IRQ_GPIO0_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO1_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO2_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO3_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO4_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO5_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO6_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO7_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO8_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO9_SC, sa1100_gpio_handler); + irq_set_chained_handler(IRQ_GPIO10_SC, sa1100_gpio_handler); + /* + * Install handler for GPIO 11-27 edge detect interrupts + */ + irq_set_chained_handler(IRQ_GPIO11_27, sa1100_gpio_handler); + } -- cgit v1.2.3-59-g8ed1b From 364e386917c8cdfadc5abd5a9b2c5cbac1c2133e Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 02:33:23 +0100 Subject: ARM: 8282/1: sa1100: use handle_domain_irq Use handle_domain_irq instead of handle_IRQ to automatically map hardware irq number to virq. Signed-off-by: Dmitry Eremin-Solenikov Tested-by: Linus Walleij Signed-off-by: Russell King --- arch/arm/mach-sa1100/irq.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/irq.c b/arch/arm/mach-sa1100/irq.c index a7d116af4a4f..65aebfa66fe5 100644 --- a/arch/arm/mach-sa1100/irq.c +++ b/arch/arm/mach-sa1100/irq.c @@ -147,7 +147,8 @@ sa1100_handle_irq(struct pt_regs *regs) if (mask == 0) break; - handle_IRQ(ffs(mask) - 1 + IRQ_GPIO0_SC, regs); + handle_domain_irq(sa1100_normal_irqdomain, + ffs(mask) - 1, regs); } while (1); } -- cgit v1.2.3-59-g8ed1b From 1ff990c01829adc24b3f88c07ec66a7c258a93e7 Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 03:04:41 +0100 Subject: ARM: 8283/1: sa1100: collie: clear PWER register on machine init Let kernel drivers to control wakeup sources instead of hardcoding them in the collie.c board file. 
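To see what letting drivers control wakeup sources means in practice: a driver owning one of these pins now arms the wakeup itself, and enable_irq_wake() ends up in the GPIO irq_chip's .irq_set_wake hook (sa1100_gpio_wake), which sets the matching PWER bit. A minimal sketch, with wake_gpio standing in for a real board GPIO:

#include <linux/gpio.h>
#include <linux/interrupt.h>

/* sketch only: arm a GPIO as a wakeup source from a driver */
static int example_arm_gpio_wakeup(unsigned int wake_gpio)
{
	int irq = gpio_to_irq(wake_gpio);

	if (irq < 0)
		return irq;

	return enable_irq_wake(irq);	/* -> .irq_set_wake -> PWER bit */
}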
Signed-off-by: Dmitry Eremin-Solenikov Signed-off-by: Russell King --- arch/arm/mach-sa1100/collie.c | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/collie.c b/arch/arm/mach-sa1100/collie.c index b90c7d828391..bd5155d04519 100644 --- a/arch/arm/mach-sa1100/collie.c +++ b/arch/arm/mach-sa1100/collie.c @@ -371,8 +371,7 @@ static void __init collie_init(void) PPC_LDD6 | PPC_LDD7 | PPC_L_PCLK | PPC_L_LCLK | PPC_L_FCLK | PPC_L_BIAS | PPC_TXD1 | PPC_TXD2 | PPC_TXD3 | PPC_TXD4 | PPC_SCLK | PPC_SFRM; - PWER = _COLLIE_GPIO_AC_IN | _COLLIE_GPIO_CO | _COLLIE_GPIO_ON_KEY | - _COLLIE_GPIO_WAKEUP | _COLLIE_GPIO_nREMOCON_INT | PWER_RTC; + PWER = 0; PGSR = _COLLIE_GPIO_nREMOCON_ON; -- cgit v1.2.3-59-g8ed1b From e461894dc2ce7778ccde1c3483c9b15a85a7fc5f Mon Sep 17 00:00:00 2001 From: Dmitry Eremin-Solenikov Date: Thu, 15 Jan 2015 03:06:22 +0100 Subject: ARM: 8284/1: sa1100: clear RCSR_SMR on resume StrongARM core uses RCSR SMR bit to tell to bootloader that it was reset by entering the sleep mode. After we have resumed, there is little point in having that bit enabled. Moreover, if this bit is set before reboot, the bootloader can become confused. Thus clear the SMR bit on resume just before clearing the scratchpad (resume address) register. Cc: stable@vger.kernel.org Signed-off-by: Dmitry Eremin-Solenikov Signed-off-by: Russell King --- arch/arm/mach-sa1100/pm.c | 1 + 1 file changed, 1 insertion(+) (limited to 'arch') diff --git a/arch/arm/mach-sa1100/pm.c b/arch/arm/mach-sa1100/pm.c index 6645d1e31f14..34853d5dfda2 100644 --- a/arch/arm/mach-sa1100/pm.c +++ b/arch/arm/mach-sa1100/pm.c @@ -81,6 +81,7 @@ static int sa11x0_pm_enter(suspend_state_t state) /* * Ensure not to come back here if it wasn't intended */ + RCSR = RCSR_SMR; PSPR = 0; /* -- cgit v1.2.3-59-g8ed1b From 20e783e39e55c2615fb61d1b3d139ee9edcf6772 Mon Sep 17 00:00:00 2001 From: Arnd Bergmann Date: Wed, 28 Jan 2015 17:54:38 +0100 Subject: ARM: 8296/1: cache-l2x0: clean up aurora cache handling The aurora cache controller is the only remaining user of a couple of functions in this file and are completely unused when that is disabled, leading to build warnings: arch/arm/mm/cache-l2x0.c:167:13: warning: 'l2x0_cache_sync' defined but not used [-Wunused-function] arch/arm/mm/cache-l2x0.c:184:13: warning: 'l2x0_flush_all' defined but not used [-Wunused-function] arch/arm/mm/cache-l2x0.c:194:13: warning: 'l2x0_disable' defined but not used [-Wunused-function] With the knowledge that the code is now aurora-specific, we can simplify it noticeably: - The pl310 errata workarounds are not needed on aurora and can be removed - As confirmed by Thomas Petazzoni from the data sheet, the cache_wait() macro is never needed. - No need to hold the lock across atomic cache sync - We can load the l2x0_base into a local variable across operations There should be no functional change in this patch, but readability and the generated object code improves, along with avoiding the warnings. 
(on Armada 370 RD and Armada XP GP, boot tested, plus a little bit of DMA traffic by reading data from a SD card) Acked-by: Thomas Petazzoni Tested-by: Thomas Petazzoni Signed-off-by: Arnd Bergmann Signed-off-by: Russell King --- arch/arm/mm/cache-l2x0.c | 111 ++++++++++++++++------------------------------- 1 file changed, 38 insertions(+), 73 deletions(-) (limited to 'arch') diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c index 01de13809454..404c598da27d 100644 --- a/arch/arm/mm/cache-l2x0.c +++ b/arch/arm/mm/cache-l2x0.c @@ -156,73 +156,6 @@ static void l2c_disable(void) dsb(st); } -#ifdef CONFIG_CACHE_PL310 -static inline void cache_wait(void __iomem *reg, unsigned long mask) -{ - /* cache operations by line are atomic on PL310 */ -} -#else -#define cache_wait l2c_wait_mask -#endif - -static inline void cache_sync(void) -{ - void __iomem *base = l2x0_base; - - writel_relaxed(0, base + sync_reg_offset); - cache_wait(base + L2X0_CACHE_SYNC, 1); -} - -#if defined(CONFIG_PL310_ERRATA_588369) || defined(CONFIG_PL310_ERRATA_727915) -static inline void debug_writel(unsigned long val) -{ - l2c_set_debug(l2x0_base, val); -} -#else -/* Optimised out for non-errata case */ -static inline void debug_writel(unsigned long val) -{ -} -#endif - -static void l2x0_cache_sync(void) -{ - unsigned long flags; - - raw_spin_lock_irqsave(&l2x0_lock, flags); - cache_sync(); - raw_spin_unlock_irqrestore(&l2x0_lock, flags); -} - -static void __l2x0_flush_all(void) -{ - debug_writel(0x03); - __l2c_op_way(l2x0_base + L2X0_CLEAN_INV_WAY); - cache_sync(); - debug_writel(0x00); -} - -static void l2x0_flush_all(void) -{ - unsigned long flags; - - /* clean all ways */ - raw_spin_lock_irqsave(&l2x0_lock, flags); - __l2x0_flush_all(); - raw_spin_unlock_irqrestore(&l2x0_lock, flags); -} - -static void l2x0_disable(void) -{ - unsigned long flags; - - raw_spin_lock_irqsave(&l2x0_lock, flags); - __l2x0_flush_all(); - l2c_write_sec(0, l2x0_base, L2X0_CTRL); - dsb(st); - raw_spin_unlock_irqrestore(&l2x0_lock, flags); -} - static void l2c_save(void __iomem *base) { l2x0_saved_regs.aux_ctrl = readl_relaxed(l2x0_base + L2X0_AUX_CTRL); @@ -1349,14 +1282,15 @@ static unsigned long calc_range_end(unsigned long start, unsigned long end) static void aurora_pa_range(unsigned long start, unsigned long end, unsigned long offset) { + void __iomem *base = l2x0_base; unsigned long flags; raw_spin_lock_irqsave(&l2x0_lock, flags); - writel_relaxed(start, l2x0_base + AURORA_RANGE_BASE_ADDR_REG); - writel_relaxed(end, l2x0_base + offset); + writel_relaxed(start, base + AURORA_RANGE_BASE_ADDR_REG); + writel_relaxed(end, base + offset); raw_spin_unlock_irqrestore(&l2x0_lock, flags); - cache_sync(); + writel_relaxed(0, base + AURORA_SYNC_REG); } static void aurora_inv_range(unsigned long start, unsigned long end) @@ -1416,6 +1350,37 @@ static void aurora_flush_range(unsigned long start, unsigned long end) } } +static void aurora_flush_all(void) +{ + void __iomem *base = l2x0_base; + unsigned long flags; + + /* clean all ways */ + raw_spin_lock_irqsave(&l2x0_lock, flags); + __l2c_op_way(base + L2X0_CLEAN_INV_WAY); + raw_spin_unlock_irqrestore(&l2x0_lock, flags); + + writel_relaxed(0, base + AURORA_SYNC_REG); +} + +static void aurora_cache_sync(void) +{ + writel_relaxed(0, l2x0_base + AURORA_SYNC_REG); +} + +static void aurora_disable(void) +{ + void __iomem *base = l2x0_base; + unsigned long flags; + + raw_spin_lock_irqsave(&l2x0_lock, flags); + __l2c_op_way(base + L2X0_CLEAN_INV_WAY); + writel_relaxed(0, base + AURORA_SYNC_REG); + 
l2c_write_sec(0, base, L2X0_CTRL); + dsb(st); + raw_spin_unlock_irqrestore(&l2x0_lock, flags); +} + static void aurora_save(void __iomem *base) { l2x0_saved_regs.ctrl = readl_relaxed(base + L2X0_CTRL); @@ -1480,9 +1445,9 @@ static const struct l2c_init_data of_aurora_with_outer_data __initconst = { .inv_range = aurora_inv_range, .clean_range = aurora_clean_range, .flush_range = aurora_flush_range, - .flush_all = l2x0_flush_all, - .disable = l2x0_disable, - .sync = l2x0_cache_sync, + .flush_all = aurora_flush_all, + .disable = aurora_disable, + .sync = aurora_cache_sync, .resume = l2c_resume, }, }; -- cgit v1.2.3-59-g8ed1b From 1d88967900b87f94435581dad4ae319686c6ce10 Mon Sep 17 00:00:00 2001 From: Arnd Bergmann Date: Wed, 28 Jan 2015 17:55:31 +0100 Subject: ARM: 8297/1: cache-l2x0: optimize aurora range operations The aurora_inv_range(), aurora_clean_range() and aurora_flush_range() functions are highly redundant, both in source and in object code, and they are harder to understand than necessary. By moving the range loop into the aurora_pa_range() function, they become trivial wrappers, and the object code start looking like what one would expect for an optimal implementation. Further optimization may be possible by using the per-CPU "virtual" registers to avoid the spinlocks in most cases. (on Armada 370 RD and Armada XP GP, boot tested, plus a little bit of DMA traffic by reading data from a SD card) Reviewed-by: Thomas Petazzoni Tested-by: Thomas Petazzoni Signed-off-by: Arnd Bergmann Signed-off-by: Russell King --- arch/arm/mm/cache-l2x0.c | 68 ++++++++++++++++-------------------------------- 1 file changed, 22 insertions(+), 46 deletions(-) (limited to 'arch') diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c index 404c598da27d..5ea2d6d417f7 100644 --- a/arch/arm/mm/cache-l2x0.c +++ b/arch/arm/mm/cache-l2x0.c @@ -1256,7 +1256,7 @@ static const struct l2c_init_data of_l2c310_coherent_data __initconst = { * noninclusive, while the hardware cache range operations use * inclusive start and end addresses. */ -static unsigned long calc_range_end(unsigned long start, unsigned long end) +static unsigned long aurora_range_end(unsigned long start, unsigned long end) { /* * Limit the number of cache lines processed at once, @@ -1275,26 +1275,13 @@ static unsigned long calc_range_end(unsigned long start, unsigned long end) return end; } -/* - * Make sure 'start' and 'end' reference the same page, as L2 is PIPT - * and range operations only do a TLB lookup on the start address. - */ static void aurora_pa_range(unsigned long start, unsigned long end, - unsigned long offset) + unsigned long offset) { void __iomem *base = l2x0_base; + unsigned long range_end; unsigned long flags; - raw_spin_lock_irqsave(&l2x0_lock, flags); - writel_relaxed(start, base + AURORA_RANGE_BASE_ADDR_REG); - writel_relaxed(end, base + offset); - raw_spin_unlock_irqrestore(&l2x0_lock, flags); - - writel_relaxed(0, base + AURORA_SYNC_REG); -} - -static void aurora_inv_range(unsigned long start, unsigned long end) -{ /* * round start and end adresses up to cache line size */ @@ -1302,15 +1289,24 @@ static void aurora_inv_range(unsigned long start, unsigned long end) end = ALIGN(end, CACHE_LINE_SIZE); /* - * Invalidate all full cache lines between 'start' and 'end'. 
+ * perform operation on all full cache lines between 'start' and 'end' */ while (start < end) { - unsigned long range_end = calc_range_end(start, end); - aurora_pa_range(start, range_end - CACHE_LINE_SIZE, - AURORA_INVAL_RANGE_REG); + range_end = aurora_range_end(start, end); + + raw_spin_lock_irqsave(&l2x0_lock, flags); + writel_relaxed(start, base + AURORA_RANGE_BASE_ADDR_REG); + writel_relaxed(range_end - CACHE_LINE_SIZE, base + offset); + raw_spin_unlock_irqrestore(&l2x0_lock, flags); + + writel_relaxed(0, base + AURORA_SYNC_REG); start = range_end; } } +static void aurora_inv_range(unsigned long start, unsigned long end) +{ + aurora_pa_range(start, end, AURORA_INVAL_RANGE_REG); +} static void aurora_clean_range(unsigned long start, unsigned long end) { @@ -1318,36 +1314,16 @@ static void aurora_clean_range(unsigned long start, unsigned long end) * If L2 is forced to WT, the L2 will always be clean and we * don't need to do anything here. */ - if (!l2_wt_override) { - start &= ~(CACHE_LINE_SIZE - 1); - end = ALIGN(end, CACHE_LINE_SIZE); - while (start != end) { - unsigned long range_end = calc_range_end(start, end); - aurora_pa_range(start, range_end - CACHE_LINE_SIZE, - AURORA_CLEAN_RANGE_REG); - start = range_end; - } - } + if (!l2_wt_override) + aurora_pa_range(start, end, AURORA_CLEAN_RANGE_REG); } static void aurora_flush_range(unsigned long start, unsigned long end) { - start &= ~(CACHE_LINE_SIZE - 1); - end = ALIGN(end, CACHE_LINE_SIZE); - while (start != end) { - unsigned long range_end = calc_range_end(start, end); - /* - * If L2 is forced to WT, the L2 will always be clean and we - * just need to invalidate. - */ - if (l2_wt_override) - aurora_pa_range(start, range_end - CACHE_LINE_SIZE, - AURORA_INVAL_RANGE_REG); - else - aurora_pa_range(start, range_end - CACHE_LINE_SIZE, - AURORA_FLUSH_RANGE_REG); - start = range_end; - } + if (l2_wt_override) + aurora_pa_range(start, end, AURORA_INVAL_RANGE_REG); + else + aurora_pa_range(start, end, AURORA_FLUSH_RANGE_REG); } static void aurora_flush_all(void) -- cgit v1.2.3-59-g8ed1b From 4e1c0664de11e4b5861957ab4ddff2aeeffd042f Mon Sep 17 00:00:00 2001 From: Jon Medhurst Date: Tue, 10 Feb 2015 15:03:22 +0800 Subject: ARM: kprobes: Fix compilation error caused by superfluous '*' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit There is a superfluous '*' in the definition of kprobe_decode_insn_t which on older versions of GCC (4.2.4) causes the compilation error: In file included from arch/arm/probes/kprobes/core.c:37: arch/arm/probes/kprobes/core.h:43: error: '[*]' not allowed in other than a declaration Fix this by removing the unneeded character. 
Reported-by: Janusz Użycki Signed-off-by: Jon Medhurst --- arch/arm/probes/kprobes/core.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/probes/kprobes/core.h b/arch/arm/probes/kprobes/core.h index b3036c587a76..ec5d1f20a085 100644 --- a/arch/arm/probes/kprobes/core.h +++ b/arch/arm/probes/kprobes/core.h @@ -40,7 +40,7 @@ typedef enum probes_insn (kprobe_decode_insn_t)(probes_opcode_t, struct arch_probes_insn *, bool, const union decode_action *, - const struct decode_checker *[*]); + const struct decode_checker *[]); #ifdef CONFIG_THUMB2_KERNEL -- cgit v1.2.3-59-g8ed1b From ada63d40747128d922d69f73a37b65e1e5403cdf Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 29 Jan 2015 18:09:13 +0100 Subject: ARM: 8300/1: teach __asmeq that r11 == fp and r12 == ip The __asmeq macro is used inside inline asm statements to ensure that register asm variables that explicitly specify a register are mapped correctly onto those registers when used in inline asm input and output constraints. However, the string based matching fails to take into account that 'fp' is often referred to as 'r11' and 'ip' is often referred to as 'r12', (e.g., by clang), causing false negatives. Fix this by making __asmeq consider the ("fp","r11"), ("r11","fp"), ("ip","r12") and ("r12","ip") cases specifically. Reviewed-by: Alex Elder Acked-by: Nicolas Pitre Signed-off-by: Ard Biesheuvel Signed-off-by: Russell King --- arch/arm/include/asm/compiler.h | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) (limited to 'arch') diff --git a/arch/arm/include/asm/compiler.h b/arch/arm/include/asm/compiler.h index 8155db2f7fa1..29fe85e59439 100644 --- a/arch/arm/include/asm/compiler.h +++ b/arch/arm/include/asm/compiler.h @@ -8,8 +8,21 @@ * This string is meant to be concatenated with the inline asm string and * will cause compilation to stop on mismatch. * (for details, see gcc PR 15089) + * For compatibility with clang, we have to specifically take the equivalence + * of 'r11' <-> 'fp' and 'r12' <-> 'ip' into account as well. */ -#define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t" +#define __asmeq(x, y) \ + ".ifnc " x "," y "; " \ + ".ifnc " x y ",fpr11; " \ + ".ifnc " x y ",r11fp; " \ + ".ifnc " x y ",ipr12; " \ + ".ifnc " x y ",r12ip; " \ + ".err; " \ + ".endif; " \ + ".endif; " \ + ".endif; " \ + ".endif; " \ + ".endif\n\t" #endif /* __ASM_ARM_COMPILER_H */ -- cgit v1.2.3-59-g8ed1b From bafe5865834c0cc0d0f937deecc275ccdd588ce8 Mon Sep 17 00:00:00 2001 From: Stephen Boyd Date: Sat, 31 Jan 2015 00:25:30 +0100 Subject: ARM: 8302/1: Add a secondary_startup that assumes ARM mode Some platforms always enter the kernel in ARM mode even if the kernel is compiled for THUMB2. Add a small wrapper on top of secondary_startup() that switches into THUMB2 mode. Signed-off-by: Stephen Boyd Acked-by: Catalin Marinas Signed-off-by: Russell King --- arch/arm/kernel/head.S | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'arch') diff --git a/arch/arm/kernel/head.S b/arch/arm/kernel/head.S index ecb7be8b0854..01963273c07a 100644 --- a/arch/arm/kernel/head.S +++ b/arch/arm/kernel/head.S @@ -346,6 +346,12 @@ __turn_mmu_on_loc: #if defined(CONFIG_SMP) .text +ENTRY(secondary_startup_arm) + .arm + THUMB( adr r9, BSYM(1f) ) @ Kernel is entered in ARM. + THUMB( bx r9 ) @ If this is a Thumb-2 kernel, + THUMB( .thumb ) @ switch to Thumb now. + THUMB(1: ) ENTRY(secondary_startup) /* * Common entry point for secondary CPUs. 
@@ -385,6 +391,7 @@ ENTRY(secondary_startup) THUMB( add r12, r10, #PROCINFO_INITFUNC ) THUMB( ret r12 ) ENDPROC(secondary_startup) +ENDPROC(secondary_startup_arm) /* * r6 = &secondary_data -- cgit v1.2.3-59-g8ed1b From 8684014d71e259f62f7dc24a20324e232806b2ef Mon Sep 17 00:00:00 2001 From: Stephen Boyd Date: Sat, 31 Jan 2015 00:25:31 +0100 Subject: ARM: 8301/1: qcom: Use secondary_startup_arm() On qcom platforms we always enter the kernel in ARM mode, regardless of the kernel being compiled for THUMB mode. Use secondary_startup_arm() to properly switch the mode to what the kernel expects if required. Signed-off-by: Stephen Boyd Acked-by: Catalin Marinas Signed-off-by: Russell King --- arch/arm/mach-qcom/platsmp.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'arch') diff --git a/arch/arm/mach-qcom/platsmp.c b/arch/arm/mach-qcom/platsmp.c index d6908569ecaf..09cffed4c0a4 100644 --- a/arch/arm/mach-qcom/platsmp.c +++ b/arch/arm/mach-qcom/platsmp.c @@ -44,7 +44,7 @@ #define APCS_SAW2_VCTL 0x14 #define APCS_SAW2_2_VCTL 0x1c -extern void secondary_startup(void); +extern void secondary_startup_arm(void); static DEFINE_SPINLOCK(boot_lock); @@ -337,7 +337,7 @@ static void __init qcom_smp_prepare_cpus(unsigned int max_cpus) flags |= cold_boot_flags[map]; } - if (scm_set_boot_addr(virt_to_phys(secondary_startup), flags)) { + if (scm_set_boot_addr(virt_to_phys(secondary_startup_arm), flags)) { for_each_present_cpu(cpu) { if (cpu == smp_processor_id()) continue; -- cgit v1.2.3-59-g8ed1b
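As a closing illustration of the pattern these two patches establish: a platform whose boot agent releases secondary CPUs in ARM state publishes the ARM-mode entry point rather than secondary_startup(). The sketch below is not taken from any real machine; EXAMPLE_BOOT_ADDR is a placeholder register address used purely for illustration.

#include <linux/init.h>
#include <linux/io.h>
#include <linux/sizes.h>
#include <asm/memory.h>

#define EXAMPLE_BOOT_ADDR	0x10020000	/* placeholder, not a real SoC register */

extern void secondary_startup_arm(void);

static void __init example_set_secondary_entry(void)
{
	void __iomem *reg = ioremap(EXAMPLE_BOOT_ADDR, SZ_4K);

	if (!reg)
		return;

	/* hand the ARM-mode entry point to the CPU release mechanism */
	writel_relaxed(virt_to_phys(secondary_startup_arm), reg);
	iounmap(reg);
}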