/*
 * arch/arm/kernel/kprobes-decode.c
 *
 * Copyright (C) 2006, 2007 Motorola Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 */
/*
 * We do not have hardware single-stepping on ARM.  This
 * effort is further complicated by the ARM not having a
 * "next PC" register.  Instructions that change the PC
 * can't be safely single-stepped in an MP environment, so
 * we have a lot of work to do:
 *
 * In the prepare phase:
 *   *) If it is an instruction that does anything
 *      with the CPU mode, we reject it for a kprobe.
 *      (This is out of laziness rather than need.  The
 *      instructions could be simulated.)
 *
 *   *) Otherwise, decode the instruction, rewriting its
 *      registers to take fixed, ordered registers and
 *      setting a handler for it to run the instruction.
 *
 * In the execution phase, by an instruction's handler:
 *
 *   *) If the PC is written to by the instruction, the
 *      instruction must be fully simulated in software.
 *      If it is a conditional instruction, the handler
 *      will use insn[0] to copy its condition code to
 *      set r0 to 1 and insn[1] to "mov pc, lr" to return.
 *
 *   *) Otherwise, a modified form of the instruction is
 *      directly executed.  Its handler calls the
 *      instruction in insn[0].  In insn[1] is a
 *      "mov pc, lr" to return.
 *
 *      Before calling, load up the reordered registers
 *      from the original instruction's registers.  If one
 *      of the original input registers is the PC, compute
 *      and adjust the appropriate input register.
 *
 *      After the call completes, copy the output registers back to
 *      the original instruction's registers.
 *
 * We don't use a real breakpoint instruction since that
 * would have the kernel go from SVC mode to SVC mode,
 * losing the link register.  Instead we use an
 * undefined instruction.  To simplify processing, the
 * undefined instruction used for kprobes must be reserved
 * exclusively for kprobes use.
 *
 * TODO: ifdef out some instruction decoding based on architecture.
 */
#include <linux/kernel.h>
#include <linux/kprobes.h>

#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))

#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)

#define PSR_fs  (PSR_f|PSR_s)

#define KPROBE_RETURN_INSTRUCTION  0xe1a0f00e  /* mov pc, lr */
#define SET_R0_TRUE_INSTRUCTION    0xe3a00001  /* mov r0, #1 */

#define truecc_insn(insn)  (((insn) & 0xf0000000) | \
                            (SET_R0_TRUE_INSTRUCTION & 0x0fffffff))
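
/*
 * Worked example (added for illustration; not part of the original file):
 * for the instruction 0xeafffffe ("b ." - branch to self) the 24-bit
 * immediate is 0xfffffe.  Shifting left by 2 gives 0x3fffff8, and sign
 * extending from bit 25 yields -8, which is the right displacement since
 * a branch target is computed relative to PC = instruction address + 8.
 * Likewise, truecc_insn(0x1afffffe) ("bne .") produces 0x13a00001
 * ("movne r0, #1"), so executing that slot returns 1 only when the
 * original instruction's condition passes.
 */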
typedef long (insn_0arg_fn_t)(void);
typedef long (insn_1arg_fn_t)(long);
typedef long (insn_2arg_fn_t)(long, long);
typedef long (insn_3arg_fn_t)(long, long, long);
typedef long (insn_4arg_fn_t)(long, long, long, long);
typedef long long (insn_llret_0arg_fn_t)(void);
typedef long long (insn_llret_3arg_fn_t)(long, long, long);
typedef long long (insn_llret_4arg_fn_t)(long, long, long, long);

union reg_pair {
        long long       dr;
#ifdef __LITTLE_ENDIAN
        struct { long   r0, r1; };
#else
        struct { long   r1, r0; };
#endif
};
/*
 * For STR and STM instructions, an ARM core may choose to use either
 * a +8 or a +12 displacement from the current instruction's address.
 * Whichever value is chosen for a given core, it must be the same for
 * both instructions and may not change.  This function measures it.
 */
static int str_pc_offset;

static void __init find_str_pc_offset(void)
{
        int addr, scratch, ret;

        __asm__ (
                "sub    %[ret], pc, #4          \n\t"
                "str    pc, %[addr]             \n\t"
                "ldr    %[scr], %[addr]         \n\t"
                "sub    %[ret], %[scr], %[ret]  \n\t"
                : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));

        str_pc_offset = ret;
}
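
/*
 * Walk-through (added for clarity; not part of the original file): the
 * "sub" above computes ret = (address of the sub) + 8 - 4, i.e. the
 * address of the following "str".  The "str pc" then stores (address of
 * the str) + offset, where offset is this core's +8 or +12 choice.  The
 * final subtraction leaves exactly that offset in ret, so str_pc_offset
 * ends up as 8 or 12 depending on the CPU.
 */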
/*
 * The insnslot_?arg_r[w]flags() functions below are to keep the
 * msr -> *fn -> mrs instruction sequences indivisible so that
 * the state of the CPSR flags isn't inadvertently modified
 * just before or just after the call.
 */
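
/*
 * Note (added for clarity; not part of the original file): the
 * "mov lr, pc; mov pc, %[fn]" pair used below is a pre-ARMv5-compatible
 * call.  Because reading the PC yields the current instruction's address
 * + 8, lr ends up pointing just past "mov pc, %[fn]", so the
 * "mov pc, lr" planted in insn[1] returns control there once the
 * instruction slot has executed.
 */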
static inline long __kprobes
insnslot_0arg_rflags(long cpsr, insn_0arg_fn_t *fn)
{
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret)
                : [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long long __kprobes
insnslot_llret_0arg_rflags(long cpsr, insn_llret_0arg_fn_t *fn)
{
        register long ret0 asm("r0");
        register long ret1 asm("r1");
        union reg_pair fnr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret0), "=r" (ret1)
                : [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        fnr.r0 = ret0;
        fnr.r1 = ret1;
        return fnr.dr;
}

static inline long __kprobes
insnslot_1arg_rflags(long r0, long cpsr, insn_1arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret)
                : "0" (rr0), [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long __kprobes
insnslot_2arg_rflags(long r0, long r1, long cpsr, insn_2arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret)
                : "0" (rr0), "r" (rr1),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long __kprobes
insnslot_3arg_rflags(long r0, long r1, long r2, long cpsr, insn_3arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret)
                : "0" (rr0), "r" (rr1), "r" (rr2),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long long __kprobes
insnslot_llret_3arg_rflags(long r0, long r1, long r2, long cpsr,
                           insn_llret_3arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long ret0 asm("r0");
        register long ret1 asm("r1");
        union reg_pair fnr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret0), "=r" (ret1)
                : "0" (rr0), "r" (rr1), "r" (rr2),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        fnr.r0 = ret0;
        fnr.r1 = ret1;
        return fnr.dr;
}

static inline long __kprobes
insnslot_4arg_rflags(long r0, long r1, long r2, long r3, long cpsr,
                     insn_4arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long rr3 asm("r3") = r3;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[cpsr]        \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                : "=r" (ret)
                : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}
static inline long __kprobes
insnslot_1arg_rwflags(long r0, long *cpsr, insn_1arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[oldcpsr]     \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                "mrs    %[newcpsr], cpsr        \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long __kprobes
insnslot_2arg_rwflags(long r0, long r1, long *cpsr, insn_2arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[oldcpsr]     \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                "mrs    %[newcpsr], cpsr        \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long __kprobes
insnslot_3arg_rwflags(long r0, long r1, long r2, long *cpsr,
                      insn_3arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[oldcpsr]     \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                "mrs    %[newcpsr], cpsr        \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), "r" (rr2),
                  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long __kprobes
insnslot_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
                      insn_4arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long rr3 asm("r3") = r3;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[oldcpsr]     \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                "mrs    %[newcpsr], cpsr        \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
                  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long long __kprobes
insnslot_llret_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
                            insn_llret_4arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long rr3 asm("r3") = r3;
        register long ret0 asm("r0");
        register long ret1 asm("r1");
        long oldcpsr = *cpsr;
        long newcpsr;
        union reg_pair fnr;

        __asm__ __volatile__ (
                "msr    cpsr_fs, %[oldcpsr]     \n\t"
                "mov    lr, pc                  \n\t"
                "mov    pc, %[fn]               \n\t"
                "mrs    %[newcpsr], cpsr        \n\t"
                : "=r" (ret0), "=r" (ret1), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
                  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        fnr.r0 = ret0;
        fnr.r1 = ret1;
        return fnr.dr;
}
/*
 * To avoid the complications of mimicking single-stepping on a
 * processor without a Next-PC register or a single-step mode, and to
 * avoid having to deal with the side-effects of boosting, we
 * simulate or emulate (almost) all ARM instructions.
 *
 * "Simulation" is where the instruction's behavior is duplicated in
 * C code.  "Emulation" is where the original instruction is rewritten
 * and executed, often by altering its registers.
 *
 * By having all behavior of the kprobe'd instruction completed before
 * returning from the kprobe_handler(), all locks (scheduler and
 * interrupt) can safely be released.  There is no need for secondary
 * breakpoints, no race with MP or preemptible kernels, and no need to
 * clean up resource counts at a later time, which would impact overall
 * system performance.  By rewriting the instruction, only the minimum
 * set of registers needs to be loaded and saved back, which helps
 * performance.
 *
 * Calling the insnslot_*_rwflags version of a function doesn't hurt
 * anything even when the CPSR flags aren't updated by the
 * instruction.  It's just a little slower in return for saving
 * a little space by not having a duplicate function that doesn't
 * update the flags.  (The same optimization can be said for
 * instructions that do or don't perform register writeback.)
 * Also, instructions can either read the flags, only write the
 * flags, or read and write the flags.  To save combinations
 * rather than for sheer performance, flag functions just assume
 * read and write of the flags.
 */
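
/*
 * Concrete examples (added for clarity; not part of the original file):
 * a branch such as "bl <target>" writes the PC, so it is *simulated* -
 * simulate_bbl() computes the link register and the new PC entirely in
 * C.  An ALU instruction such as "add r5, r6, r7" does not touch the
 * PC, so it is *emulated* - the decoder rewrites it as "add r0, r0, r1"
 * in the instruction slot, and emulate_alu_rflags() runs that copy with
 * the task's r6/r7 values loaded into r0/r1, then writes the r0 result
 * back to the task's r5.
 */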
static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long iaddr = (long)p->addr;
        int disp = branch_displacement(insn);

        if (!insnslot_1arg_rflags(0, regs->ARM_cpsr, i_fn))
                return;

        if (insn & (1 << 24))
                regs->ARM_lr = iaddr + 4;
        regs->ARM_pc = iaddr + 8 + disp;
}

static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
{
        kprobe_opcode_t insn = p->opcode;
        long iaddr = (long)p->addr;
        int disp = branch_displacement(insn);

        regs->ARM_lr = iaddr + 4;
        regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
        regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rm = insn & 0xf;
        long rmv = regs->uregs[rm];

        if (!insnslot_1arg_rflags(0, regs->ARM_cpsr, i_fn))
                return;

        if (insn & (1 << 5))
                regs->ARM_lr = (long)p->addr + 4;
        regs->ARM_pc = rmv & ~0x1;
        regs->ARM_cpsr &= ~PSR_T_BIT;
        if (rmv & 0x1)
                regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rn = (insn >> 16) & 0xf;
        int lbit = insn & (1 << 20);
        int wbit = insn & (1 << 21);
        int ubit = insn & (1 << 23);
        int pbit = insn & (1 << 24);
        long *addr = (long *)regs->uregs[rn];
        int reg_bit_vector;
        int reg_count;

        if (!insnslot_1arg_rflags(0, regs->ARM_cpsr, i_fn))
                return;

        reg_count = 0;
        reg_bit_vector = insn & 0xffff;
        while (reg_bit_vector) {
                reg_bit_vector &= (reg_bit_vector - 1);
                ++reg_count;
        }

        if (!ubit)
                addr -= reg_count;
        addr += (!pbit == !ubit);

        reg_bit_vector = insn & 0xffff;
        while (reg_bit_vector) {
                int reg = __ffs(reg_bit_vector);
                reg_bit_vector &= (reg_bit_vector - 1);
                if (lbit)
                        regs->uregs[reg] = *addr++;
                else
                        *addr++ = regs->uregs[reg];
        }

        if (wbit) {
                if (!ubit)
                        addr -= reg_count;
                addr -= (!pbit == !ubit);
                regs->uregs[rn] = (long)addr;
        }
}
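
/*
 * Walk-through (added for clarity; not part of the original file): the
 * first while loop above is the classic "clear lowest set bit" trick,
 * so reg_count ends up as the number of registers named in the 16-bit
 * register list.  For an "ldmdb r4, {r0, r2, r9}" (P set, U clear), for
 * example, reg_count is 3, addr is moved back 3 words, and
 * (!pbit == !ubit) is false, so the transfer starts 3 words below the
 * address in r4, matching the decrement-before addressing mode.
 */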
static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];

        if (!insnslot_1arg_rflags(0, regs->ARM_cpsr, i_fn))
                return;

        regs->ARM_pc = (long)p->addr + str_pc_offset;
        simulate_ldm1stm1(p, regs);
        regs->ARM_pc = (long)p->addr + 4;
}

static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
{
        regs->uregs[12] = regs->uregs[13];
}

static void __kprobes emulate_ldcstc(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rn = (insn >> 16) & 0xf;
        long rnv = regs->uregs[rn];

        /* Save Rn in case of writeback. */
        regs->uregs[rn] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_ldrd(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;    /* rm may be invalid, don't care. */

        /* Not following the C calling convention here, so need asm(). */
        __asm__ __volatile__ (
                "ldr    r0, %[rn]       \n\t"
                "ldr    r1, %[rm]       \n\t"
                "msr    cpsr_fs, %[cpsr]\n\t"
                "mov    lr, pc          \n\t"
                "mov    pc, %[i_fn]     \n\t"
                "str    r0, %[rn]       \n\t"   /* in case of writeback */
                "str    r2, %[rd0]      \n\t"
                "str    r3, %[rd1]      \n\t"
                : [rn]  "+m" (regs->uregs[rn]),
                  [rd0] "=m" (regs->uregs[rd]),
                  [rd1] "=m" (regs->uregs[rd+1])
                : [rm]   "m" (regs->uregs[rm]),
                  [cpsr] "r" (regs->ARM_cpsr),
                  [i_fn] "r" (i_fn)
                : "r0", "r1", "r2", "r3", "lr", "cc"
        );
}

static void __kprobes emulate_strd(struct kprobe *p, struct pt_regs *regs)
{
        insn_4arg_fn_t *i_fn = (insn_4arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rmv = regs->uregs[rm];     /* rm/rmv may be invalid, don't care. */

        regs->uregs[rn] = insnslot_4arg_rflags(rnv, rmv, regs->uregs[rd],
                                               regs->uregs[rd+1],
                                               regs->ARM_cpsr, i_fn);
}
static void __kprobes emulate_ldr(struct kprobe *p, struct pt_regs *regs)
{
        insn_llret_3arg_fn_t *i_fn = (insn_llret_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        union reg_pair fnr;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rdv;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long cpsr = regs->ARM_cpsr;

        fnr.dr = insnslot_llret_3arg_rflags(rnv, 0, rmv, cpsr, i_fn);
        if (rn != 15)
                regs->uregs[rn] = fnr.r0;  /* Save Rn in case of writeback. */
        rdv = fnr.r1;

        if (rd == 15) {
#if __LINUX_ARM_ARCH__ >= 5
                cpsr &= ~PSR_T_BIT;
                if (rdv & 0x1)
                        cpsr |= PSR_T_BIT;
                regs->ARM_cpsr = cpsr;
                rdv &= ~0x1;
#else
                rdv &= ~0x2;
#endif
        }
        regs->uregs[rd] = rdv;
}

static void __kprobes emulate_str(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long iaddr = (long)p->addr;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rdv = (rd == 15) ? iaddr + str_pc_offset : regs->uregs[rd];
        long rnv = (rn == 15) ? iaddr + 8 : regs->uregs[rn];
        long rmv = regs->uregs[rm];     /* rm/rmv may be invalid, don't care. */
        long rnv_wb;

        rnv_wb = insnslot_3arg_rflags(rnv, rdv, rmv, regs->ARM_cpsr, i_fn);
        if (rn != 15)
                regs->uregs[rn] = rnv_wb;  /* Save Rn in case of writeback. */
}

static void __kprobes emulate_mrrc(struct kprobe *p, struct pt_regs *regs)
{
        insn_llret_0arg_fn_t *i_fn = (insn_llret_0arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        union reg_pair fnr;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;

        fnr.dr = insnslot_llret_0arg_rflags(regs->ARM_cpsr, i_fn);
        regs->uregs[rn] = fnr.r0;
        regs->uregs[rd] = fnr.r1;
}

static void __kprobes emulate_mcrr(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        long rnv = regs->uregs[rn];
        long rdv = regs->uregs[rd];

        insnslot_2arg_rflags(rnv, rdv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_sat(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rm = insn & 0xf;
        long rmv = regs->uregs[rm];

        /* Writes Q flag */
        regs->uregs[rd] = insnslot_1arg_rwflags(rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_sel(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rmv = regs->uregs[rm];

        /* Reads GE bits */
        regs->uregs[rd] = insnslot_2arg_rflags(rnv, rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_none(struct kprobe *p, struct pt_regs *regs)
{
        insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];

        insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
}
static void __kprobes emulate_rd12(struct kprobe *p, struct pt_regs *regs)
{
        insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;

        regs->uregs[rd] = insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_ird12(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int ird = (insn >> 12) & 0xf;

        insnslot_1arg_rflags(regs->uregs[ird], regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rn16(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rn = (insn >> 16) & 0xf;
        long rnv = regs->uregs[rn];

        insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rd12rm0(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rm = insn & 0xf;
        long rmv = regs->uregs[rm];

        regs->uregs[rd] = insnslot_1arg_rflags(rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd12rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rmv = regs->uregs[rm];

        regs->uregs[rd] =
                insnslot_2arg_rwflags(rnv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rn12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 16) & 0xf;
        int rn = (insn >> 12) & 0xf;
        int rs = (insn >> 8) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rsv = regs->uregs[rs];
        long rmv = regs->uregs[rm];

        regs->uregs[rd] =
                insnslot_3arg_rwflags(rnv, rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 16) & 0xf;
        int rs = (insn >> 8) & 0xf;
        int rm = insn & 0xf;
        long rsv = regs->uregs[rs];
        long rmv = regs->uregs[rm];

        regs->uregs[rd] =
                insnslot_2arg_rwflags(rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rdhi16rdlo12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_llret_4arg_fn_t *i_fn = (insn_llret_4arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        union reg_pair fnr;
        int rdhi = (insn >> 16) & 0xf;
        int rdlo = (insn >> 12) & 0xf;
        int rs = (insn >> 8) & 0xf;
        int rm = insn & 0xf;
        long rsv = regs->uregs[rs];
        long rmv = regs->uregs[rm];

        fnr.dr = insnslot_llret_4arg_rwflags(regs->uregs[rdhi],
                                             regs->uregs[rdlo], rsv, rmv,
                                             &regs->ARM_cpsr, i_fn);
        regs->uregs[rdhi] = fnr.r0;
        regs->uregs[rdlo] = fnr.r1;
}
static void __kprobes
emulate_alu_imm_rflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

        regs->uregs[rd] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_imm_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

        regs->uregs[rd] = insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;    /* rn/rnv/rs/rsv may be */
        int rs = (insn >> 8) & 0xf;     /* invalid, don't care. */
        int rm = insn & 0xf;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rsv = regs->uregs[rs];

        regs->uregs[rd] =
                insnslot_3arg_rflags(rnv, rmv, rsv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;    /* rn/rnv/rs/rsv may be */
        int rs = (insn >> 8) & 0xf;     /* invalid, don't care. */
        int rm = insn & 0xf;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rsv = regs->uregs[rs];

        regs->uregs[rd] =
                insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}
static enum kprobe_insn __kprobes
prep_emulate_ldr_str(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        int ibit = (insn & (1 << 26)) ? 25 : 22;

        insn &= 0xfff00fff;
        insn |= 0x00001000;     /* Rn = r0, Rd = r1 */
        if (insn & (1 << ibit)) {
                insn &= ~0xf;
                insn |= 2;      /* Rm = r2 */
        }
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_ldr : emulate_str;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rm0(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        insn &= 0xffff0ff0;     /* Rd = r0, Rm = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12rm0;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        insn &= 0xffff0fff;     /* Rd = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rn16rm0_wflags(kprobe_opcode_t insn,
                                struct arch_specific_insn *asi)
{
        insn &= 0xfff00ff0;     /* Rd = r0, Rn = r0 */
        insn |= 0x00000001;     /* Rm = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12rn16rm0_rwflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rs8rm0_wflags(kprobe_opcode_t insn,
                               struct arch_specific_insn *asi)
{
        insn &= 0xfff0f0f0;     /* Rd = r0, Rs = r0 */
        insn |= 0x00000001;     /* Rm = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd16rs8rm0_rwflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rn12rs8rm0_wflags(kprobe_opcode_t insn,
                                   struct arch_specific_insn *asi)
{
        insn &= 0xfff000f0;     /* Rd = r0, Rn = r0 */
        insn |= 0x00000102;     /* Rs = r1, Rm = r2 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd16rn12rs8rm0_rwflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rdhi16rdlo12rs8rm0_wflags(kprobe_opcode_t insn,
                                       struct arch_specific_insn *asi)
{
        insn &= 0xfff000f0;     /* RdHi = r0, RdLo = r1 */
        insn |= 0x00001203;     /* Rs = r2, Rm = r3 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rdhi16rdlo12rs8rm0_rwflags;
        return INSN_GOOD;
}
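
/*
 * Register-renaming example (added for clarity; not part of the
 * original file): prep_emulate_ldr_str() turns an instruction such as
 * "ldr r8, [r7, r3]" into "ldr r1, [r0, r2]" before it is copied into
 * the slot.  emulate_ldr() then loads the probed task's r7 and r3 into
 * r0 and r2, runs the rewritten copy, stores the loaded value from r1
 * back into the task's r8, and writes r0 back to r7 in case the
 * addressing mode performed writeback.
 */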
/*
 * For the instruction masking and comparisons in all the "space_*"
 * functions below, do _not_ rearrange the order of tests unless
 * you're very, very sure of what you are doing.  For the sake of
 * efficiency, the masks for some tests sometimes assume that other
 * tests have already been done, so that each pattern can be kept as
 * broad as possible and the total number of tests is reduced.
 */
static enum kprobe_insn __kprobes
space_1111(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* CPS mmod == 1 : 1111 0001 0000 xx10 xxxx xxxx xx0x xxxx */
        /* RFE           : 1111 100x x0x1 xxxx xxxx 1010 xxxx xxxx */
        /* SRS           : 1111 100x x1x0 1101 xxxx 0101 xxxx xxxx */
        if ((insn & 0xfff30020) == 0xf1020000 ||
            (insn & 0xfe500f00) == 0xf8100a00 ||
            (insn & 0xfe5f0f00) == 0xf84d0500)
                return INSN_REJECTED;

        /* PLD : 1111 01x1 x101 xxxx xxxx xxxx xxxx xxxx : */
        if ((insn & 0xfd700000) == 0xf4500000) {
                insn &= 0xfff0ffff;     /* Rn = r0 */
                asi->insn[0] = insn;
                asi->insn_handler = emulate_rn16;
                return INSN_GOOD;
        }

        /* BLX(1) : 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx : */
        if ((insn & 0xfe000000) == 0xfa000000) {
                asi->insn_handler = simulate_blx1;
                return INSN_GOOD_NO_SLOT;
        }

        /* SETEND : 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */
        /* CDP2   : 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
        if ((insn & 0xffff00f0) == 0xf1010000 ||
            (insn & 0xff000010) == 0xfe000000) {
                asi->insn[0] = insn;
                asi->insn_handler = emulate_none;
                return INSN_GOOD;
        }

        /* MCRR2 : 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
        /* MRRC2 : 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
        if ((insn & 0xffe00000) == 0xfc400000) {
                insn &= 0xfff00fff;     /* Rn = r0 */
                insn |= 0x00001000;     /* Rd = r1 */
                asi->insn[0] = insn;
                asi->insn_handler =
                        (insn & (1 << 20)) ? emulate_mrrc : emulate_mcrr;
                return INSN_GOOD;
        }

        /* LDC2 : 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
        /* STC2 : 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
        if ((insn & 0xfe000000) == 0xfc000000) {
                insn &= 0xfff0ffff;     /* Rn = r0 */
                asi->insn[0] = insn;
                asi->insn_handler = emulate_ldcstc;
                return INSN_GOOD;
        }

        /* MCR2 : 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
        /* MRC2 : 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
        insn &= 0xffff0fff;     /* Rd = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_rd12 : emulate_ird12;
        return INSN_GOOD;
}
static enum kprobe_insn __kprobes
space_cccc_000x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* cccc 0001 0xx0 xxxx xxxx xxxx xxxx xxx0 xxxx */
        if ((insn & 0x0f900010) == 0x01000000) {

                /* BXJ : cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
                /* MSR : cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
                if ((insn & 0x0ff000f0) == 0x01200020 ||
                    (insn & 0x0fb000f0) == 0x01200000)
                        return INSN_REJECTED;

                /* MRS : cccc 0001 0x00 xxxx xxxx xxxx 0000 xxxx */
                if ((insn & 0x0fb00010) == 0x01000000)
                        return prep_emulate_rd12(insn, asi);

                /* SMLALxy : cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
                if ((insn & 0x0ff00090) == 0x01400080)
                        return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
                                                                      asi);

                /* SMULWy : cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
                /* SMULxy : cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
                if ((insn & 0x0ff000b0) == 0x012000a0 ||
                    (insn & 0x0ff00090) == 0x01600080)
                        return prep_emulate_rd16rs8rm0_wflags(insn, asi);

                /* SMLAxy : cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx : Q */
                /* SMLAWy : cccc 0001 0010 xxxx xxxx xxxx 0x00 xxxx : Q */
                return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
        }
        /* cccc 0001 0xx0 xxxx xxxx xxxx 0xx1 xxxx */
        else if ((insn & 0x0f900090) == 0x01000010) {

                /* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
                if ((insn & 0xfff000f0) == 0xe1200070)
                        return INSN_REJECTED;

                /* BLX(2) : cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
                /* BX     : cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
                if ((insn & 0x0ff000d0) == 0x01200010) {
                        asi->insn[0] = truecc_insn(insn);
                        asi->insn_handler = simulate_blx2bx;
                        return INSN_GOOD;
                }

                /* CLZ : cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
                if ((insn & 0x0ff000f0) == 0x01600010)
                        return prep_emulate_rd12rm0(insn, asi);

                /* QADD  : cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx :Q */
                /* QSUB  : cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx :Q */
                /* QDADD : cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx :Q */
                /* QDSUB : cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx :Q */
                return prep_emulate_rd12rn16rm0_wflags(insn, asi);
        }
        /* cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx */
        else if ((insn & 0x0f000090) == 0x00000090) {

                /* MUL    : cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx :   */
                /* MULS   : cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx :cc */
                /* MLA    : cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx :   */
                /* MLAS   : cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx :cc */
                /* UMAAL  : cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx :   */
                /* UMULL  : cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx :   */
                /* UMULLS : cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx :cc */
                /* UMLAL  : cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx :   */
                /* UMLALS : cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx :cc */
                /* SMULL  : cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx :   */
                /* SMULLS : cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx :cc */
                /* SMLAL  : cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx :   */
                /* SMLALS : cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx :cc */
                if ((insn & 0x0fe000f0) == 0x00000090) {
                        return prep_emulate_rd16rs8rm0_wflags(insn, asi);
                } else if ((insn & 0x0fe000f0) == 0x00200090) {
                        return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
                } else {
                        return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
                                                                      asi);
                }
        }
        /* cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx */
        else if ((insn & 0x0e000090) == 0x00000090) {

                /* SWP   : cccc 0001 0000 xxxx xxxx xxxx 1001 xxxx */
                /* SWPB  : cccc 0001 0100 xxxx xxxx xxxx 1001 xxxx */
                /* LDRD  : cccc 000x xxx0 xxxx xxxx xxxx 1101 xxxx */
                /* STRD  : cccc 000x xxx0 xxxx xxxx xxxx 1111 xxxx */
                /* STREX : cccc 0001 1000 xxxx xxxx xxxx 1001 xxxx */
                /* LDREX : cccc 0001 1001 xxxx xxxx xxxx 1001 xxxx */
                /* LDRH  : cccc 000x xxx1 xxxx xxxx xxxx 1011 xxxx */
                /* STRH  : cccc 000x xxx0 xxxx xxxx xxxx 1011 xxxx */
                /* LDRSB : cccc 000x xxx1 xxxx xxxx xxxx 1101 xxxx */
                /* LDRSH : cccc 000x xxx1 xxxx xxxx xxxx 1111 xxxx */
                if ((insn & 0x0fb000f0) == 0x01000090) {
                        /* SWP/SWPB */
                        return prep_emulate_rd12rn16rm0_wflags(insn, asi);
                } else if ((insn & 0x0e1000d0) == 0x000000d0) {
                        /* STRD/LDRD */
                        insn &= 0xfff00fff;
                        insn |= 0x00002000;     /* Rn = r0, Rd = r2 */
                        if (insn & (1 << 22)) {
                                /* I bit */
                                insn &= ~0xf;
                                insn |= 1;      /* Rm = r1 */
                        }
                        asi->insn[0] = insn;
                        asi->insn_handler =
                                (insn & (1 << 5)) ? emulate_strd : emulate_ldrd;
                        return INSN_GOOD;
                }

                return prep_emulate_ldr_str(insn, asi);
        }

        /* cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx */

        /*
         * ALU op with S bit and Rd == 15 :
         *      cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx
         */
        if ((insn & 0x0e10f000) == 0x0010f000)
                return INSN_REJECTED;

        /*
         * "mov ip, sp" is the most common kprobe'd instruction by far.
         * Check and optimize for it explicitly.
         */
        if (insn == 0xe1a0c00d) {
                asi->insn_handler = simulate_mov_ipsp;
                return INSN_GOOD_NO_SLOT;
        }

        /*
         * Data processing: Immediate-shift / Register-shift
         * ALU op : cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx
         * CPY    : cccc 0001 1010 xxxx xxxx 0000 0000 xxxx
         * MOV    : cccc 0001 101x xxxx xxxx xxxx xxxx xxxx
         * *S (bit 20) updates condition codes
         * ADC/SBC/RSC reads the C flag
         */
        insn &= 0xfff00ff0;     /* Rn = r0, Rd = r0 */
        insn |= 0x00000001;     /* Rm = r1 */
        if (insn & 0x010) {
                insn &= 0xfffff0ff;     /* register shift */
                insn |= 0x00000200;     /* Rs = r2 */
        }
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ?  /* S-bit */
                        emulate_alu_rwflags : emulate_alu_rflags;
        return INSN_GOOD;
}
static enum kprobe_insn __kprobes
space_cccc_001x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /*
         * MSR   : cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx
         * Undef : cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx
         * ALU op with S bit and Rd == 15 :
         *         cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx
         */
        if ((insn & 0x0fb00000) == 0x03200000 ||        /* MSR */
            (insn & 0x0ff00000) == 0x03400000 ||        /* Undef */
            (insn & 0x0e10f000) == 0x0210f000)          /* ALU s-bit, R15 */
                return INSN_REJECTED;

        /*
         * Data processing: 32-bit Immediate
         * ALU op : cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
         * MOV    : cccc 0011 101x xxxx xxxx xxxx xxxx xxxx
         * *S (bit 20) updates condition codes
         * ADC/SBC/RSC reads the C flag
         */
        insn &= 0xffff0fff;     /* Rd = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ?  /* S-bit */
                        emulate_alu_imm_rwflags : emulate_alu_imm_rflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_0110__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* SEL : cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx GE: !!! */
        if ((insn & 0x0ff000f0) == 0x068000b0) {
                insn &= 0xfff00ff0;     /* Rd = r0, Rn = r0 */
                insn |= 0x00000001;     /* Rm = r1 */
                asi->insn[0] = insn;
                asi->insn_handler = emulate_sel;
                return INSN_GOOD;
        }

        /* SSAT   : cccc 0110 101x xxxx xxxx xxxx xx01 xxxx :Q */
        /* USAT   : cccc 0110 111x xxxx xxxx xxxx xx01 xxxx :Q */
        /* SSAT16 : cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx :Q */
        /* USAT16 : cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx :Q */
        if ((insn & 0x0fa00030) == 0x06a00010 ||
            (insn & 0x0fb000f0) == 0x06a00030) {
                insn &= 0xffff0ff0;     /* Rd = r0, Rm = r0 */
                asi->insn[0] = insn;
                asi->insn_handler = emulate_sat;
                return INSN_GOOD;
        }

        /* REV   : cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
        /* REV16 : cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
        /* REVSH : cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
        if ((insn & 0x0ff00070) == 0x06b00030 ||
            (insn & 0x0ff000f0) == 0x06f000b0)
                return prep_emulate_rd12rm0(insn, asi);

        /* SADD16    : cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx :GE */
        /* SADDSUBX  : cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx :GE */
        /* SSUBADDX  : cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx :GE */
        /* SSUB16    : cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx :GE */
        /* SADD8     : cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx :GE */
        /* SSUB8     : cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx :GE */
        /* QADD16    : cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx :   */
        /* QADDSUBX  : cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx :   */
        /* QSUBADDX  : cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx :   */
        /* QSUB16    : cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx :   */
        /* QADD8     : cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx :   */
        /* QSUB8     : cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx :   */
        /* SHADD16   : cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx :   */
        /* SHADDSUBX : cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx :   */
        /* SHSUBADDX : cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx :   */
        /* SHSUB16   : cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx :   */
        /* SHADD8    : cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx :   */
        /* SHSUB8    : cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx :   */
        /* UADD16    : cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx :GE */
        /* UADDSUBX  : cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx :GE */
        /* USUBADDX  : cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx :GE */
        /* USUB16    : cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx :GE */
        /* UADD8     : cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx :GE */
        /* USUB8     : cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx :GE */
        /* UQADD16   : cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx :   */
        /* UQADDSUBX : cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx :   */
        /* UQSUBADDX : cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx :   */
        /* UQSUB16   : cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx :   */
        /* UQADD8    : cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx :   */
        /* UQSUB8    : cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx :   */
        /* UHADD16   : cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx :   */
        /* UHADDSUBX : cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx :   */
        /* UHSUBADDX : cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx :   */
        /* UHSUB16   : cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx :   */
        /* UHADD8    : cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx :   */
        /* UHSUB8    : cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx :   */
        /* PKHBT     : cccc 0110 1000 xxxx xxxx xxxx x001 xxxx :   */
        /* PKHTB     : cccc 0110 1000 xxxx xxxx xxxx x101 xxxx :   */
        /* SXTAB16   : cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx :   */
        /* SXTB      : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx :   */
        /* SXTAB     : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx :   */
        /* SXTAH     : cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx :   */
        /* UXTAB16   : cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx :   */
        /* UXTAB     : cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx :   */
        /* UXTAH     : cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx :   */
        return prep_emulate_rd12rn16rm0_wflags(insn, asi);
}
static enum kprobe_insn __kprobes
space_cccc_0111__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* Undef : cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
        if ((insn & 0x0ff000f0) == 0x03f000f0)
                return INSN_REJECTED;

        /* USADA8 : cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */
        /* USAD8  : cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */
        if ((insn & 0x0ff000f0) == 0x07800010)
                return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

        /* SMLALD : cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
        /* SMLSLD : cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
        if ((insn & 0x0ff00090) == 0x07400010)
                return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn, asi);

        /* SMLAD : cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx :Q */
        /* SMLSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :Q */
        /* SMMLA : cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx :  */
        /* SMMLS : cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx :  */
        if ((insn & 0x0ff00090) == 0x07000010 ||
            (insn & 0x0ff000d0) == 0x07500010 ||
            (insn & 0x0ff000d0) == 0x075000d0)
                return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

        /* SMUSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :  */
        /* SMUAD : cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx :Q */
        /* SMMUL : cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx :  */
        return prep_emulate_rd16rs8rm0_wflags(insn, asi);
}

static enum kprobe_insn __kprobes
space_cccc_01xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDR   : cccc 01xx x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* LDRB  : cccc 01xx x1x1 xxxx xxxx xxxx xxxx xxxx */
        /* LDRBT : cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
        /* LDRT  : cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
        /* STR   : cccc 01xx x0x0 xxxx xxxx xxxx xxxx xxxx */
        /* STRB  : cccc 01xx x1x0 xxxx xxxx xxxx xxxx xxxx */
        /* STRBT : cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
        /* STRT  : cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
        return prep_emulate_ldr_str(insn, asi);
}

static enum kprobe_insn __kprobes
space_cccc_100x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDM(2) : cccc 100x x101 xxxx 0xxx xxxx xxxx xxxx */
        /* LDM(3) : cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
        if ((insn & 0x0e708000) == 0x85000000 ||
            (insn & 0x0e508000) == 0x85010000)
                return INSN_REJECTED;

        /* LDM(1) : cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* STM(1) : cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
        asi->insn[0] = truecc_insn(insn);
        asi->insn_handler = ((insn & 0x108000) == 0x008000) ?  /* STM & R15 */
                                simulate_stm1_pc : simulate_ldm1stm1;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_101x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* B  : cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
        /* BL : cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
        asi->insn[0] = truecc_insn(insn);
        asi->insn_handler = simulate_bbl;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_1100_010x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* MCRR : cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
        /* MRRC : cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
        insn &= 0xfff00fff;
        insn |= 0x00001000;     /* Rn = r0, Rd = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_mrrc : emulate_mcrr;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_110x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDC : cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
        /* STC : cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
        insn &= 0xfff0ffff;     /* Rn = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_ldcstc;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_111x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
        /* SWI  : cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
        if ((insn & 0xfff000f0) == 0xe1200070 ||
            (insn & 0x0f000000) == 0x0f000000)
                return INSN_REJECTED;

        /* CDP : cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
        if ((insn & 0x0f000010) == 0x0e000000) {
                asi->insn[0] = insn;
                asi->insn_handler = emulate_none;
                return INSN_GOOD;
        }

        /* MCR : cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
        /* MRC : cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
        insn &= 0xffff0fff;     /* Rd = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_rd12 : emulate_ird12;
        return INSN_GOOD;
}
/* Return:
 *   INSN_REJECTED     If instruction is one not allowed to kprobe,
 *   INSN_GOOD         If instruction is supported and uses instruction slot,
 *   INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
 *
 * For instructions we don't want to kprobe (INSN_REJECTED return result):
 * These are generally ones that modify the processor state, making
 * them "hard" to simulate, such as those that switch processor modes or
 * make accesses in alternate modes.  Any of these could be simulated
 * if the work was put into it, but the return on that effort is low,
 * considering they should also be very rare.
 */
enum kprobe_insn __kprobes
arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        asi->insn[1] = KPROBE_RETURN_INSTRUCTION;

        if ((insn & 0xf0000000) == 0xf0000000) {
                return space_1111(insn, asi);
        } else if ((insn & 0x0e000000) == 0x00000000) {
                return space_cccc_000x(insn, asi);
        } else if ((insn & 0x0e000000) == 0x02000000) {
                return space_cccc_001x(insn, asi);
        } else if ((insn & 0x0f000010) == 0x06000010) {
                return space_cccc_0110__1(insn, asi);
        } else if ((insn & 0x0f000010) == 0x07000010) {
                return space_cccc_0111__1(insn, asi);
        } else if ((insn & 0x0c000000) == 0x04000000) {
                return space_cccc_01xx(insn, asi);
        } else if ((insn & 0x0e000000) == 0x08000000) {
                return space_cccc_100x(insn, asi);
        } else if ((insn & 0x0e000000) == 0x0a000000) {
                return space_cccc_101x(insn, asi);
        } else if ((insn & 0x0fe00000) == 0x0c400000) {
                return space_cccc_1100_010x(insn, asi);
        } else if ((insn & 0x0e000000) == 0x0c000000) {
                return space_cccc_110x(insn, asi);
        }

        return space_cccc_111x(insn, asi);
}

void __init arm_kprobe_decode_init(void)
{
        find_str_pc_offset();
}
/*
 * All ARM instructions listed below.
 *
 * Instructions and their general purpose registers are given.
 * If a particular register may not use R15, it is prefixed with a "!".
 * If a register is marked with a "*", the value returned by reading R15
 * is implementation defined.
 *
 * ADC/ADD/AND/BIC/CMN/CMP/EOR/MOV/MVN/ORR/RSB/RSC/SBC/SUB/TEQ
 *     TST: Rd, Rn, Rm, !Rs
 * BLX(2): !Rm
 * BX: Rm (R15 legal, but discouraged)
 * BXJ: !Rm
 * CLZ: !Rd, !Rm
 * CPY: Rd, Rm
 * LDC/2,STC/2 immediate offset & unindex: Rn
 * LDC/2,STC/2 immediate pre/post-indexed: !Rn
 * LDM(1/3): !Rn, register_list
 * LDM(2): !Rn, !register_list
 * LDR,STR,PLD immediate offset: Rd, Rn
 * LDR,STR,PLD register offset: Rd, Rn, !Rm
 * LDR,STR,PLD scaled register offset: Rd, !Rn, !Rm
 * LDR,STR immediate pre/post-indexed: Rd, !Rn
 * LDR,STR register pre/post-indexed: Rd, !Rn, !Rm
 * LDR,STR scaled register pre/post-indexed: Rd, !Rn, !Rm
 * LDRB,STRB immediate offset: !Rd, Rn
 * LDRB,STRB register offset: !Rd, Rn, !Rm
 * LDRB,STRB scaled register offset: !Rd, !Rn, !Rm
 * LDRB,STRB immediate pre/post-indexed: !Rd, !Rn
 * LDRB,STRB register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRB,STRB scaled register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRT,LDRBT,STRBT immediate pre/post-indexed: !Rd, !Rn
 * LDRT,LDRBT,STRBT register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRT,LDRBT,STRBT scaled register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRH/SH/SB/D,STRH/SH/SB/D immediate offset: !Rd, Rn
 * LDRH/SH/SB/D,STRH/SH/SB/D register offset: !Rd, Rn, !Rm
 * LDRH/SH/SB/D,STRH/SH/SB/D immediate pre/post-indexed: !Rd, !Rn
 * LDRH/SH/SB/D,STRH/SH/SB/D register pre/post-indexed: !Rd, !Rn, !Rm
 * LDREX: !Rd, !Rn
 * MCR/2: !Rd
 * MCRR/2,MRRC/2: !Rd, !Rn
 * MLA: !Rd, !Rn, !Rm, !Rs
 * MOV: Rd
 * MRC/2: !Rd (if Rd==15, only changes cond codes, not the register)
 * MRS,MSR: !Rd
 * MUL: !Rd, !Rm, !Rs
 * PKH{BT,TB}: !Rd, !Rn, !Rm
 * QDADD,[U]QADD/16/8/SUBX: !Rd, !Rm, !Rn
 * QDSUB,[U]QSUB/16/8/ADDX: !Rd, !Rm, !Rn
 * REV/16/SH: !Rd, !Rm
 * RFE: !Rn
 * {S,U}[H]ADD{16,8,SUBX},{S,U}[H]SUB{16,8,ADDX}: !Rd, !Rn, !Rm
 * SEL: !Rd, !Rn, !Rm
 * SMLA<x><y>,SMLA{D,W<y>},SMLSD,SMML{A,S}: !Rd, !Rn, !Rm, !Rs
 * SMLAL<x><y>,SMLA{D,LD},SMLSLD,SMMULL,SMULW<y>: !RdHi, !RdLo, !Rm, !Rs
 * SMMUL,SMUAD,SMUL<x><y>,SMUSD: !Rd, !Rm, !Rs
 * SSAT/16: !Rd, !Rm
 * STM(1/2): !Rn, register_list* (R15 in reg list not recommended)
 * STRT immediate pre/post-indexed: Rd*, !Rn
 * STRT register pre/post-indexed: Rd*, !Rn, !Rm
 * STRT scaled register pre/post-indexed: Rd*, !Rn, !Rm
 * STREX: !Rd, !Rn, !Rm
 * SWP/B: !Rd, !Rn, !Rm
 * {S,U}XTA{B,B16,H}: !Rd, !Rn, !Rm
 * {S,U}XT{B,B16,H}: !Rd, !Rm
 * UM{AA,LA,UL}L: !RdHi, !RdLo, !Rm, !Rs
 * USA{D8,A8,T,T16}: !Rd, !Rm, !Rs
 *
 * May transfer control by writing R15 (possible mode changes or alternate
 * mode accesses marked by "*"):
 * ALU op (* with s-bit), B, BL, BKPT, BLX(1/2), BX, BXJ, CPS*, CPY,
 * LDM(1), LDM(2/3)*, LDR, MOV, RFE*, SWI*
 *
 * Instructions that do not take general registers, nor transfer
 * control: CDP/2, SETEND, SRS*
 */