kprobes-common.c 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562
  1. /*
  2. * arch/arm/kernel/kprobes-common.c
  3. *
  4. * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
  5. *
  6. * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is
  7. * Copyright (C) 2006, 2007 Motorola Inc.
  8. *
  9. * This program is free software; you can redistribute it and/or modify
  10. * it under the terms of the GNU General Public License version 2 as
  11. * published by the Free Software Foundation.
  12. */
  13. #include <linux/kernel.h>
  14. #include <linux/kprobes.h>
  15. #include "kprobes.h"
#ifndef find_str_pc_offset

/*
 * For STR and STM instructions, an ARM core may choose to use either
 * a +8 or a +12 displacement from the current instruction's address.
 * Whichever value is chosen for a given core, it must be the same for
 * both instructions and may not change. This function measures it.
 */

/* Displacement (8 or 12) this core uses when storing PC; set once at init. */
int str_pc_offset;

void __init find_str_pc_offset(void)
{
	int addr, scratch, ret;

	__asm__ (
		/* ret = address of the "str" instruction below (pc here is that address + 4) */
		"sub %[ret], pc, #4 \n\t"
		/* store pc to memory; the core writes either str-address+8 or +12 */
		"str pc, %[addr] \n\t"
		/* read back the stored value ... */
		"ldr %[scr], %[addr] \n\t"
		/* ... and subtract the str instruction's address to get the offset */
		"sub %[ret], %[scr], %[ret] \n\t"
		: [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));

	str_pc_offset = ret;
}

#endif /* !find_str_pc_offset */
#ifndef test_load_write_pc_interworking

/*
 * True when a load which writes PC also interworks (i.e. can switch
 * instruction set). Set once at init from the CPU architecture version:
 * ARMv5T and later interwork on loads to PC.
 */
bool load_write_pc_interworks;

void __init test_load_write_pc_interworking(void)
{
	int arch = cpu_architecture();

	/* Refuse to guess if the architecture version cannot be determined. */
	BUG_ON(arch == CPU_ARCH_UNKNOWN);

	load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T;
}

#endif /* !test_load_write_pc_interworking */
/*
 * One-time initialisation for the ARM kprobes instruction decoder:
 * probe the CPU-specific behaviours the simulation handlers depend on.
 */
void __init arm_kprobe_decode_init(void)
{
	find_str_pc_offset();
	test_load_write_pc_interworking();
}
/*
 * Condition-code checkers. Each returns a nonzero value if the ARM
 * condition denoted by its suffix passes for the given CPSR, and zero
 * otherwise. Callers must treat the result as a boolean only; the exact
 * nonzero value is not meaningful.
 *
 * The shift tricks below exploit the relative positions of the PSR flag
 * bits (N, Z, C, V occupy adjacent top bits): shifting the CPSR aligns one
 * flag with another so two flags can be combined in a single operation,
 * as noted in the per-line comments.
 */

static unsigned long __kprobes __check_eq(unsigned long cpsr)
{
	return cpsr & PSR_Z_BIT;	/* EQ: Z set */
}

static unsigned long __kprobes __check_ne(unsigned long cpsr)
{
	return (~cpsr) & PSR_Z_BIT;	/* NE: Z clear */
}

static unsigned long __kprobes __check_cs(unsigned long cpsr)
{
	return cpsr & PSR_C_BIT;	/* CS/HS: C set */
}

static unsigned long __kprobes __check_cc(unsigned long cpsr)
{
	return (~cpsr) & PSR_C_BIT;	/* CC/LO: C clear */
}

static unsigned long __kprobes __check_mi(unsigned long cpsr)
{
	return cpsr & PSR_N_BIT;	/* MI: N set */
}

static unsigned long __kprobes __check_pl(unsigned long cpsr)
{
	return (~cpsr) & PSR_N_BIT;	/* PL: N clear */
}

static unsigned long __kprobes __check_vs(unsigned long cpsr)
{
	return cpsr & PSR_V_BIT;	/* VS: V set */
}

static unsigned long __kprobes __check_vc(unsigned long cpsr)
{
	return (~cpsr) & PSR_V_BIT;	/* VC: V clear */
}

static unsigned long __kprobes __check_hi(unsigned long cpsr)
{
	/* HI: C set and Z clear */
	cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
	return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_ls(unsigned long cpsr)
{
	/* LS: C clear or Z set */
	cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
	return (~cpsr) & PSR_C_BIT;
}

static unsigned long __kprobes __check_ge(unsigned long cpsr)
{
	/* GE: N == V */
	cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_lt(unsigned long cpsr)
{
	/* LT: N != V */
	cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	return cpsr & PSR_N_BIT;
}

static unsigned long __kprobes __check_gt(unsigned long cpsr)
{
	/* GT: Z clear and N == V */
	unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	temp |= (cpsr << 1);                     /* PSR_N_BIT |= PSR_Z_BIT */
	return (~temp) & PSR_N_BIT;
}

static unsigned long __kprobes __check_le(unsigned long cpsr)
{
	/* LE: Z set or N != V */
	unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	temp |= (cpsr << 1);                     /* PSR_N_BIT |= PSR_Z_BIT */
	return temp & PSR_N_BIT;
}

static unsigned long __kprobes __check_al(unsigned long cpsr)
{
	/* AL: always passes; cpsr deliberately unused */
	return true;
}

/*
 * Dispatch table indexed by the 4-bit condition field of an instruction.
 * Index 14 is AL ("always"); index 15 (the NV/unconditional encoding) is
 * also mapped to __check_al so such instructions are treated as passing.
 */
kprobe_check_cc * const kprobe_condition_checks[16] = {
	&__check_eq, &__check_ne, &__check_cs, &__check_cc,
	&__check_mi, &__check_pl, &__check_vs, &__check_vc,
	&__check_hi, &__check_ls, &__check_ge, &__check_lt,
	&__check_gt, &__check_le, &__check_al, &__check_al
};
/* Simulation handler for probed instructions which have no effect (NOPs). */
void __kprobes kprobe_simulate_nop(struct kprobe *p, struct pt_regs *regs)
{
}
/*
 * Emulation handler for instructions which need no register fix-up:
 * just execute the copy of the instruction in the slot (insn_fn), which
 * returns straight back here via the return placed after it by
 * prepare_emulated_insn().
 */
void __kprobes kprobe_emulate_none(struct kprobe *p, struct pt_regs *regs)
{
	p->ainsn.insn_fn();
}
  131. static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
  132. {
  133. kprobe_opcode_t insn = p->opcode;
  134. int rn = (insn >> 16) & 0xf;
  135. int lbit = insn & (1 << 20);
  136. int wbit = insn & (1 << 21);
  137. int ubit = insn & (1 << 23);
  138. int pbit = insn & (1 << 24);
  139. long *addr = (long *)regs->uregs[rn];
  140. int reg_bit_vector;
  141. int reg_count;
  142. reg_count = 0;
  143. reg_bit_vector = insn & 0xffff;
  144. while (reg_bit_vector) {
  145. reg_bit_vector &= (reg_bit_vector - 1);
  146. ++reg_count;
  147. }
  148. if (!ubit)
  149. addr -= reg_count;
  150. addr += (!pbit == !ubit);
  151. reg_bit_vector = insn & 0xffff;
  152. while (reg_bit_vector) {
  153. int reg = __ffs(reg_bit_vector);
  154. reg_bit_vector &= (reg_bit_vector - 1);
  155. if (lbit)
  156. regs->uregs[reg] = *addr++;
  157. else
  158. *addr++ = regs->uregs[reg];
  159. }
  160. if (wbit) {
  161. if (!ubit)
  162. addr -= reg_count;
  163. addr -= (!pbit == !ubit);
  164. regs->uregs[rn] = (long)addr;
  165. }
  166. }
/*
 * Simulate an STM whose register list includes PC. The architecture lets
 * a core store either instruction-address+8 or +12 for PC, so temporarily
 * set regs->ARM_pc to the value this core would store (measured into
 * str_pc_offset at init), do the transfer, then step PC past the probed
 * 4-byte ARM instruction.
 */
static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
{
	regs->ARM_pc = (long)p->addr + str_pc_offset;
	simulate_ldm1stm1(p, regs);
	regs->ARM_pc = (long)p->addr + 4;
}
/*
 * Simulate an LDM whose register list includes PC: perform the transfer,
 * then branch to the loaded PC value via load_write_pc(), which honours
 * interworking on cores where a load to PC can switch instruction set.
 */
static void __kprobes simulate_ldm1_pc(struct kprobe *p, struct pt_regs *regs)
{
	simulate_ldm1stm1(p, regs);
	load_write_pc(regs->ARM_pc, regs);
}
/*
 * Emulate an instruction which only uses registers in the range r0-r12 and
 * neither reads nor writes the PSR flags: load r0-r12 from regs, call the
 * copy of the instruction in the slot, then store r0-r12 back into regs.
 */
static void __kprobes
emulate_generic_r0_12_noflags(struct kprobe *p, struct pt_regs *regs)
{
	/*
	 * Pin operands to specific registers for the asm below. r1 (rregs)
	 * is clobbered by the ldmia, so the regs pointer is saved on the
	 * stack first; lr (rfn) holds the slot address for the call.
	 */
	register void *rregs asm("r1") = regs;
	register void *rfn asm("lr") = p->ainsn.insn_fn;

	__asm__ __volatile__ (
		/* Save the regs pointer and r11 (not in the clobber list). */
		"stmdb sp!, {%[regs], r11} \n\t"
		/* Load r0-r12 from the saved register state. */
		"ldmia %[regs], {r0-r12} \n\t"
#if __LINUX_ARM_ARCH__ >= 6
		/* Call the instruction slot; it returns via the bx lr after it. */
		"blx %[fn] \n\t"
#else
		/* Pre-ARMv6 has no blx: build the call with an explicit lr. */
		"str %[fn], [sp, #-4]! \n\t"
		"adr lr, 1f \n\t"
		"ldr pc, [sp], #4 \n\t"
		"1: \n\t"
#endif
		/* Pop the saved regs pointer and write back the results. */
		"ldr lr, [sp], #4 \n\t" /* lr = regs */
		"stmia lr, {r0-r12} \n\t"
		/* Restore r11. */
		"ldr r11, [sp], #4 \n\t"
		: [regs] "=r" (rregs), [fn] "=r" (rfn)
		: "0" (rregs), "1" (rfn)
		: "r0", "r2", "r3", "r4", "r5", "r6", "r7",
		  "r8", "r9", "r10", "r12", "memory", "cc"
		);
}
/*
 * Emulate an instruction using only registers r2-r14. The slot copy has
 * had its register numbers reduced by 2 (see kprobe_decode_ldmstm), so we
 * reuse the r0-r12 emulator with the register state offset by two words:
 * the slot's r0 then reads/writes the real r2, and so on up to r14.
 */
static void __kprobes
emulate_generic_r2_14_noflags(struct kprobe *p, struct pt_regs *regs)
{
	emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+2));
}
/*
 * Emulate an LDM using only registers r3-r15 (register numbers in the slot
 * copy reduced by 3, state offset by three words). As the real r15 (PC)
 * is loaded, finish by branching to it via load_write_pc() to honour
 * interworking.
 */
static void __kprobes
emulate_ldm_r3_15(struct kprobe *p, struct pt_regs *regs)
{
	emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+3));
	load_write_pc(regs->ARM_pc, regs);
}
  214. enum kprobe_insn __kprobes
  215. kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
  216. {
  217. kprobe_insn_handler_t *handler = 0;
  218. unsigned reglist = insn & 0xffff;
  219. int is_ldm = insn & 0x100000;
  220. int rn = (insn >> 16) & 0xf;
  221. if (rn <= 12 && (reglist & 0xe000) == 0) {
  222. /* Instruction only uses registers in the range R0..R12 */
  223. handler = emulate_generic_r0_12_noflags;
  224. } else if (rn >= 2 && (reglist & 0x8003) == 0) {
  225. /* Instruction only uses registers in the range R2..R14 */
  226. rn -= 2;
  227. reglist >>= 2;
  228. handler = emulate_generic_r2_14_noflags;
  229. } else if (rn >= 3 && (reglist & 0x0007) == 0) {
  230. /* Instruction only uses registers in the range R3..R15 */
  231. if (is_ldm && (reglist & 0x8000)) {
  232. rn -= 3;
  233. reglist >>= 3;
  234. handler = emulate_ldm_r3_15;
  235. }
  236. }
  237. if (handler) {
  238. /* We can emulate the instruction in (possibly) modified form */
  239. asi->insn[0] = (insn & 0xfff00000) | (rn << 16) | reglist;
  240. asi->insn_handler = handler;
  241. return INSN_GOOD;
  242. }
  243. /* Fallback to slower simulation... */
  244. if (reglist & 0x8000)
  245. handler = is_ldm ? simulate_ldm1_pc : simulate_stm1_pc;
  246. else
  247. handler = simulate_ldm1stm1;
  248. asi->insn_handler = handler;
  249. return INSN_GOOD_NO_SLOT;
  250. }
/*
 * Prepare an instruction slot to receive an instruction for emulating.
 * This is done by placing a subroutine return after the location where the
 * instruction will be placed. We also modify ARM instructions to be
 * unconditional as the condition code will already be checked before any
 * emulation handler is called.
 */
static kprobe_opcode_t __kprobes
prepare_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
								bool thumb)
{
#ifdef CONFIG_THUMB2_KERNEL
	if (thumb) {
		u16 *thumb_insn = (u16 *)asi->insn;
		/* Two returns: one lands after a 16-bit insn, one after 32-bit */
		thumb_insn[1] = 0x4770; /* Thumb bx lr */
		thumb_insn[2] = 0x4770; /* Thumb bx lr */
		/* Thumb instructions keep their condition handling; no rewrite */
		return insn;
	}
	asi->insn[1] = 0xe12fff1e; /* ARM bx lr */
#else
	asi->insn[1] = 0xe1a0f00e; /* mov pc, lr */
#endif
	/*
	 * Make an ARM instruction unconditional: any condition field below
	 * AL (0xe) is forced to AL. Encodings >= 0xe0000000 (AL and the
	 * 0xf "unconditional" space) are left untouched.
	 */
	if (insn < 0xe0000000)
		insn = (insn | 0xe0000000) & ~0x10000000;
	return insn;
}
/*
 * Write a (probably modified) instruction into the slot previously prepared by
 * prepare_emulated_insn
 */
static void __kprobes
set_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
								bool thumb)
{
#ifdef CONFIG_THUMB2_KERNEL
	if (thumb) {
		u16 *ip = (u16 *)asi->insn;
		/* A wide (32-bit) Thumb instruction takes two halfwords,
		 * most significant half first. */
		if (is_wide_instruction(insn))
			*ip++ = insn >> 16;
		*ip++ = insn;
		return;
	}
#endif
	/* ARM: a single 32-bit word in the first slot entry */
	asi->insn[0] = insn;
}
/*
 * When we modify the register numbers encoded in an instruction to be
 * emulated, the new values come from this define. For ARM and 32-bit Thumb
 * instructions this gives...
 *
 *	bit position	  16  12   8   4   0
 *	---------------+---+---+---+---+---+
 *	register	 r2  r0  r1  --  r3
 */
#define INSN_NEW_BITS		0x00020103

/* Each nibble has same value as that at INSN_NEW_BITS bit 16 */
#define INSN_SAMEAS16_BITS	0x22222222

/*
 * Validate and modify each of the registers encoded in an instruction.
 *
 * Each nibble in regs contains a value from enum decode_reg_type. For each
 * non-zero value, the corresponding nibble in pinsn is validated and modified
 * according to the type.
 *
 * Returns true and writes the rewritten instruction back through pinsn on
 * success; returns false (leaving *pinsn untouched) if any register fails
 * validation.
 *
 * The checks below compare a whole 32-bit constant (e.g. 0xdddddddd, every
 * nibble 13/SP) masked down to the nibble of interest, so one pattern works
 * for any register position.
 */
static bool __kprobes decode_regs(kprobe_opcode_t* pinsn, u32 regs)
{
	kprobe_opcode_t insn = *pinsn;
	kprobe_opcode_t mask = 0xf; /* Start at least significant nibble */

	for (; regs != 0; regs >>= 4, mask <<= 4) {

		kprobe_opcode_t new_bits = INSN_NEW_BITS;

		switch (regs & 0xf) {

		case REG_TYPE_NONE:
			/* Nibble not a register, skip to next */
			continue;

		case REG_TYPE_ANY:
			/* Any register is allowed */
			break;

		case REG_TYPE_SAMEAS16:
			/* Replace register with same as at bit position 16 */
			new_bits = INSN_SAMEAS16_BITS;
			break;

		case REG_TYPE_SP:
			/* Only allow SP (R13) */
			if ((insn ^ 0xdddddddd) & mask)
				goto reject;
			break;

		case REG_TYPE_PC:
			/* Only allow PC (R15) */
			if ((insn ^ 0xffffffff) & mask)
				goto reject;
			break;

		case REG_TYPE_NOSP:
			/* Reject SP (R13) */
			if (((insn ^ 0xdddddddd) & mask) == 0)
				goto reject;
			break;

		case REG_TYPE_NOSPPC:
		case REG_TYPE_NOSPPCX:
			/* Reject SP and PC (R13 and R15) */
			/* 13 and 15 differ only in bit 1, which 0xd masks out */
			if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0)
				goto reject;
			break;

		case REG_TYPE_NOPCWB:
			if (!is_writeback(insn))
				break; /* No writeback, so any register is OK */
			/* fall through... */
		case REG_TYPE_NOPC:
		case REG_TYPE_NOPCX:
			/* Reject PC (R15) */
			if (((insn ^ 0xffffffff) & mask) == 0)
				goto reject;
			break;
		}

		/* Replace value of nibble with new register number... */
		insn &= ~mask;
		insn |= new_bits & mask;
	}

	*pinsn = insn;
	return true;

reject:
	return false;
}
/*
 * Size in bytes of each decode table entry type; used by
 * kprobe_decode_insn() to step from one table entry to the next.
 */
static const int decode_struct_sizes[NUM_DECODE_TYPES] = {
	[DECODE_TYPE_TABLE]	= sizeof(struct decode_table),
	[DECODE_TYPE_CUSTOM]	= sizeof(struct decode_custom),
	[DECODE_TYPE_SIMULATE]	= sizeof(struct decode_simulate),
	[DECODE_TYPE_EMULATE]	= sizeof(struct decode_emulate),
	[DECODE_TYPE_OR]	= sizeof(struct decode_or),
	[DECODE_TYPE_REJECT]	= sizeof(struct decode_reject)
};
/*
 * kprobe_decode_insn operates on data tables in order to decode an ARM
 * architecture instruction onto which a kprobe has been placed.
 *
 * These instruction decoding tables are a concatenation of entries each
 * of which consist of one of the following structs:
 *
 *	decode_table
 *	decode_custom
 *	decode_simulate
 *	decode_emulate
 *	decode_or
 *	decode_reject
 *
 * Each of these starts with a struct decode_header which has the following
 * fields:
 *
 *	type_regs
 *	mask
 *	value
 *
 * The least significant DECODE_TYPE_BITS of type_regs contains a value
 * from enum decode_type, this indicates which of the decode_* structs
 * the entry contains. The value DECODE_TYPE_END indicates the end of the
 * table.
 *
 * When the table is parsed, each entry is checked in turn to see if it
 * matches the instruction to be decoded using the test:
 *
 *	(insn & mask) == value
 *
 * If no match is found before the end of the table is reached then decoding
 * fails with INSN_REJECTED.
 *
 * When a match is found, decode_regs() is called to validate and modify each
 * of the registers encoded in the instruction; the data it uses to do this
 * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding
 * to fail with INSN_REJECTED.
 *
 * Once the instruction has passed the above tests, further processing
 * depends on the type of the table entry's decode struct.
 *
 */
int __kprobes
kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
				const union decode_item *table, bool thumb)
{
	const struct decode_header *h = (struct decode_header *)table;
	const struct decode_header *next;
	bool matched = false;

	/* Set up the slot return and force ARM instructions unconditional */
	insn = prepare_emulated_insn(insn, asi, thumb);

	for (;; h = next) {
		enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK;
		u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS;

		if (type == DECODE_TYPE_END)
			return INSN_REJECTED;

		/* By default, continue with the entry following this one */
		next = (struct decode_header *)
				((uintptr_t)h + decode_struct_sizes[type]);

		/*
		 * 'matched' is set by a preceding DECODE_TYPE_OR entry which
		 * matched; it makes this entry apply unconditionally.
		 */
		if (!matched && (insn & h->mask.bits) != h->value.bits)
			continue;

		if (!decode_regs(&insn, regs))
			return INSN_REJECTED;

		switch (type) {

		case DECODE_TYPE_TABLE: {
			/* Descend into a nested decoding table */
			struct decode_table *d = (struct decode_table *)h;
			next = (struct decode_header *)d->table.table;
			break;
		}

		case DECODE_TYPE_CUSTOM: {
			/* Hand off to an instruction-specific decoder */
			struct decode_custom *d = (struct decode_custom *)h;
			return (*d->decoder.decoder)(insn, asi);
		}

		case DECODE_TYPE_SIMULATE: {
			/* Instruction will be interpreted in C; no slot needed */
			struct decode_simulate *d = (struct decode_simulate *)h;
			asi->insn_handler = d->handler.handler;
			return INSN_GOOD_NO_SLOT;
		}

		case DECODE_TYPE_EMULATE: {
			/* Place the (modified) instruction in the slot */
			struct decode_emulate *d = (struct decode_emulate *)h;
			asi->insn_handler = d->handler.handler;
			set_emulated_insn(insn, asi, thumb);
			return INSN_GOOD;
		}

		case DECODE_TYPE_OR:
			matched = true;
			break;

		case DECODE_TYPE_REJECT:
		default:
			return INSN_REJECTED;
		}
	}
}