
/*
 * arch/arm/kernel/kprobes-decode.c
 *
 * Copyright (C) 2006, 2007 Motorola Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 */
/*
 * We do not have hardware single-stepping on ARM.  This effort is
 * further complicated by the ARM not having a "next PC" register.
 * Instructions that change the PC can't be safely single-stepped in
 * an MP environment, so we have a lot of work to do:
 *
 * In the prepare phase:
 *   *) If it is an instruction that does anything
 *      with the CPU mode, we reject it for a kprobe.
 *      (This is out of laziness rather than need.  The
 *      instructions could be simulated.)
 *
 *   *) Otherwise, decode the instruction, rewriting its
 *      registers to take fixed, ordered registers and
 *      setting a handler for it to run the instruction.
 *
 * In the execution phase by an instruction's handler:
 *
 *   *) If the PC is written to by the instruction, the
 *      instruction must be fully simulated in software.
 *
 *   *) Otherwise, a modified form of the instruction is
 *      directly executed.  Its handler calls the
 *      instruction in insn[0].  In insn[1] is a
 *      "mov pc, lr" to return.
 *
 *      Before calling, load up the reordered registers
 *      from the original instruction's registers.  If one
 *      of the original input registers is the PC, compute
 *      and adjust the appropriate input register.
 *
 *      After the call completes, copy the output registers back to
 *      the original instruction's original registers.
 *
 * We don't use a real breakpoint instruction since that
 * would have us in the kernel go from SVC mode to SVC
 * mode, losing the link register.  Instead we use an
 * undefined instruction.  To simplify processing, the
 * undefined instruction used for kprobes must be reserved
 * exclusively for kprobes use.
 *
 * TODO: ifdef out some instruction decoding based on architecture.
 */
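/*
 * As a worked example of the scheme above (illustration only): a probed
 * "add r5, r7, r9" gets its slot copy rewritten to use fixed registers,
 *
 *      insn[0] = "add r0, r0, r1"
 *      insn[1] = "mov pc, lr"      (KPROBE_RETURN_INSTRUCTION)
 *
 * and its handler loads r0/r1 from the probed context's r7/r9, calls
 * insn[0], then copies r0 back into the context's r5.
 */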
#include <linux/kernel.h>
#include <linux/kprobes.h>

#include "kprobes.h"

#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))

#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)

#define is_r15(insn, bitpos) (((insn) & (0xf << bitpos)) == (0xf << bitpos))

#define PSR_fs (PSR_f|PSR_s)

#define KPROBE_RETURN_INSTRUCTION 0xe1a0f00e /* mov pc, lr */
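/*
 * Worked example of the displacement macros (for illustration): with
 * insn = 0xeafffffe ("b ." -- branch to self), (insn & 0xffffff) << 2
 * is 0x03fffff8 and bit 25 is set, so sign_extend() yields -8;
 * simulate_bbl() below then computes pc = addr + 8 + (-8) = addr.
 */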
typedef long (insn_0arg_fn_t)(void);
typedef long (insn_1arg_fn_t)(long);
typedef long (insn_2arg_fn_t)(long, long);
typedef long (insn_3arg_fn_t)(long, long, long);
typedef long (insn_4arg_fn_t)(long, long, long, long);
typedef long long (insn_llret_0arg_fn_t)(void);
typedef long long (insn_llret_3arg_fn_t)(long, long, long);
typedef long long (insn_llret_4arg_fn_t)(long, long, long, long);

union reg_pair {
        long long dr;
#ifdef __LITTLE_ENDIAN
        struct { long r0, r1; };
#else
        struct { long r1, r0; };
#endif
};

/*
 * For STR and STM instructions, an ARM core may choose to use either
 * a +8 or a +12 displacement from the current instruction's address.
 * Whichever value is chosen for a given core, it must be the same for
 * both instructions and may not change.  This function measures it.
 */
static int str_pc_offset;

static void __init find_str_pc_offset(void)
{
        int addr, scratch, ret;

        __asm__ (
                "sub %[ret], pc, #4 \n\t"
                "str pc, %[addr] \n\t"
                "ldr %[scr], %[addr] \n\t"
                "sub %[ret], %[scr], %[ret] \n\t"
                : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));

        str_pc_offset = ret;
}
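/*
 * In the asm above, "sub %[ret], pc, #4" reads pc as the address of
 * that sub plus 8, so ret ends up holding the address of the str
 * instruction itself; the str then stores pc, and the final subtract
 * leaves this core's store offset (+8 or +12) in ret.
 */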
/*
 * The insnslot_?arg_r[w]flags() functions below are to keep the
 * msr -> *fn -> mrs instruction sequences indivisible so that
 * the state of the CPSR flags isn't inadvertently modified
 * just before or just after the call.
 */
static inline long __kprobes
insnslot_0arg_rflags(long cpsr, insn_0arg_fn_t *fn)
{
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret)
                : [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long long __kprobes
insnslot_llret_0arg_rflags(long cpsr, insn_llret_0arg_fn_t *fn)
{
        register long ret0 asm("r0");
        register long ret1 asm("r1");
        union reg_pair fnr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret0), "=r" (ret1)
                : [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        fnr.r0 = ret0;
        fnr.r1 = ret1;
        return fnr.dr;
}

static inline long __kprobes
insnslot_1arg_rflags(long r0, long cpsr, insn_1arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret)
                : "0" (rr0), [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long __kprobes
insnslot_2arg_rflags(long r0, long r1, long cpsr, insn_2arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret)
                : "0" (rr0), "r" (rr1),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long __kprobes
insnslot_3arg_rflags(long r0, long r1, long r2, long cpsr, insn_3arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret)
                : "0" (rr0), "r" (rr1), "r" (rr2),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long long __kprobes
insnslot_llret_3arg_rflags(long r0, long r1, long r2, long cpsr,
                           insn_llret_3arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long ret0 asm("r0");
        register long ret1 asm("r1");
        union reg_pair fnr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret0), "=r" (ret1)
                : "0" (rr0), "r" (rr1), "r" (rr2),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        fnr.r0 = ret0;
        fnr.r1 = ret1;
        return fnr.dr;
}

static inline long __kprobes
insnslot_4arg_rflags(long r0, long r1, long r2, long r3, long cpsr,
                     insn_4arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long rr3 asm("r3") = r3;
        register long ret asm("r0");

        __asm__ __volatile__ (
                "msr cpsr_fs, %[cpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                : "=r" (ret)
                : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
                  [cpsr] "r" (cpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        return ret;
}

static inline long __kprobes
insnslot_1arg_rwflags(long r0, long *cpsr, insn_1arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[oldcpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                "mrs %[newcpsr], cpsr \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long __kprobes
insnslot_2arg_rwflags(long r0, long r1, long *cpsr, insn_2arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[oldcpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                "mrs %[newcpsr], cpsr \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long __kprobes
insnslot_3arg_rwflags(long r0, long r1, long r2, long *cpsr,
                      insn_3arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[oldcpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                "mrs %[newcpsr], cpsr \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), "r" (rr2),
                  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long __kprobes
insnslot_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
                      insn_4arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long rr3 asm("r3") = r3;
        register long ret asm("r0");
        long oldcpsr = *cpsr;
        long newcpsr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[oldcpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                "mrs %[newcpsr], cpsr \n\t"
                : "=r" (ret), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
                  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        return ret;
}

static inline long long __kprobes
insnslot_llret_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
                            insn_llret_4arg_fn_t *fn)
{
        register long rr0 asm("r0") = r0;
        register long rr1 asm("r1") = r1;
        register long rr2 asm("r2") = r2;
        register long rr3 asm("r3") = r3;
        register long ret0 asm("r0");
        register long ret1 asm("r1");
        long oldcpsr = *cpsr;
        long newcpsr;
        union reg_pair fnr;

        __asm__ __volatile__ (
                "msr cpsr_fs, %[oldcpsr] \n\t"
                "mov lr, pc \n\t"
                "mov pc, %[fn] \n\t"
                "mrs %[newcpsr], cpsr \n\t"
                : "=r" (ret0), "=r" (ret1), [newcpsr] "=r" (newcpsr)
                : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
                  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
                : "lr", "cc"
        );
        *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
        fnr.r0 = ret0;
        fnr.r1 = ret1;
        return fnr.dr;
}
/*
 * To avoid the complications of mimicking single-stepping on a
 * processor without a Next-PC or a single-step mode, and to
 * avoid having to deal with the side-effects of boosting, we
 * simulate or emulate (almost) all ARM instructions.
 *
 * "Simulation" is where the instruction's behavior is duplicated in
 * C code.  "Emulation" is where the original instruction is rewritten
 * and executed, often by altering its registers.
 *
 * By having all behavior of the kprobe'd instruction completed before
 * returning from the kprobe_handler(), all locks (scheduler and
 * interrupt) can safely be released.  There is no need for secondary
 * breakpoints, no race with MP or preemptable kernels, and no need to
 * clean up resource counts at a later time, which would impact overall
 * system performance.  By rewriting the instruction, only the minimum
 * registers need to be loaded and saved back, optimizing performance.
 *
 * Calling the insnslot_*_rwflags version of a function doesn't hurt
 * anything even when the CPSR flags aren't updated by the
 * instruction.  It's just a little slower in return for saving
 * a little space by not having a duplicate function that doesn't
 * update the flags.  (The same optimization can be said for
 * instructions that do or don't perform register writeback.)
 * Also, instructions can either read the flags, only write the
 * flags, or read and write the flags.  To save combinations
 * rather than for sheer performance, flag functions just assume
 * read and write of flags.
 */
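/*
 * Concrete example of the emulation path (for illustration): a probed
 * "adds r5, r7, r9, lsl r11" has its slot copy rewritten by
 * space_cccc_000x() below to "adds r0, r0, r1, lsl r2" with handler
 * emulate_alu_rwflags(), which loads r0/r1/r2 from the probed context's
 * r7/r9/r11, runs the slot via insnslot_3arg_rwflags() under the saved
 * CPSR, then stores r0 back to r5 and folds the resulting N/Z/C/V flags
 * back into the saved CPSR.
 */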
static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
{
        kprobe_opcode_t insn = p->opcode;
        long iaddr = (long)p->addr;
        int disp = branch_displacement(insn);

        if (insn & (1 << 24))
                regs->ARM_lr = iaddr + 4;

        regs->ARM_pc = iaddr + 8 + disp;
}

static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
{
        kprobe_opcode_t insn = p->opcode;
        long iaddr = (long)p->addr;
        int disp = branch_displacement(insn);

        regs->ARM_lr = iaddr + 4;
        regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
        regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
{
        kprobe_opcode_t insn = p->opcode;
        int rm = insn & 0xf;
        long rmv = regs->uregs[rm];

        if (insn & (1 << 5))
                regs->ARM_lr = (long)p->addr + 4;

        regs->ARM_pc = rmv & ~0x1;
        regs->ARM_cpsr &= ~PSR_T_BIT;
        if (rmv & 0x1)
                regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_mrs(struct kprobe *p, struct pt_regs *regs)
{
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        unsigned long mask = 0xf8ff03df; /* Mask out execution state */
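        /* i.e. the J, IT[1:0], IT[7:2] and T execution state bits are cleared. */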
        regs->uregs[rd] = regs->ARM_cpsr & mask;
}

static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
{
        kprobe_opcode_t insn = p->opcode;
        int rn = (insn >> 16) & 0xf;
        int lbit = insn & (1 << 20);
        int wbit = insn & (1 << 21);
        int ubit = insn & (1 << 23);
        int pbit = insn & (1 << 24);
        long *addr = (long *)regs->uregs[rn];
        int reg_bit_vector;
        int reg_count;

        reg_count = 0;
        reg_bit_vector = insn & 0xffff;
        while (reg_bit_vector) {
                reg_bit_vector &= (reg_bit_vector - 1);
                ++reg_count;
        }

        if (!ubit)
                addr -= reg_count;
        addr += (!pbit == !ubit);

        reg_bit_vector = insn & 0xffff;
        while (reg_bit_vector) {
                int reg = __ffs(reg_bit_vector);
                reg_bit_vector &= (reg_bit_vector - 1);
                if (lbit)
                        regs->uregs[reg] = *addr++;
                else
                        *addr++ = regs->uregs[reg];
        }

        if (wbit) {
                if (!ubit)
                        addr -= reg_count;
                addr -= (!pbit == !ubit);
                regs->uregs[rn] = (long)addr;
        }
}
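/*
 * The start-address arithmetic above covers all four addressing modes.
 * For example, "ldmdb r4, {r0-r3}" (P=1, U=0, four registers): the
 * !ubit case subtracts four words and (!pbit == !ubit) is false, so
 * the first word is read from r4 - 16, as LDMDB requires.
 */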
static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
{
        regs->ARM_pc = (long)p->addr + str_pc_offset;
        simulate_ldm1stm1(p, regs);
        regs->ARM_pc = (long)p->addr + 4;
}

static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
{
        regs->uregs[12] = regs->uregs[13];
}

static void __kprobes emulate_ldrd(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf; /* rm may be invalid, don't care. */
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];

        /* Not following the C calling convention here, so need asm(). */
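        /*
         * Register use in the asm below: r0/r1 carry the Rn and Rm values
         * into the slot (which was rewritten with Rn = r0, Rd = r2, Rm = r1),
         * and on return r0 holds the possibly written-back base address
         * while r2/r3 hold the two loaded words.
         */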
        __asm__ __volatile__ (
                "ldr r0, %[rn] \n\t"
                "ldr r1, %[rm] \n\t"
                "msr cpsr_fs, %[cpsr]\n\t"
                "mov lr, pc \n\t"
                "mov pc, %[i_fn] \n\t"
                "str r0, %[rn] \n\t" /* in case of writeback */
                "str r2, %[rd0] \n\t"
                "str r3, %[rd1] \n\t"
                : [rn] "+m" (rnv),
                  [rd0] "=m" (regs->uregs[rd]),
                  [rd1] "=m" (regs->uregs[rd+1])
                : [rm] "m" (rmv),
                  [cpsr] "r" (regs->ARM_cpsr),
                  [i_fn] "r" (i_fn)
                : "r0", "r1", "r2", "r3", "lr", "cc"
        );
        if (is_writeback(insn))
                regs->uregs[rn] = rnv;
}

static void __kprobes emulate_strd(struct kprobe *p, struct pt_regs *regs)
{
        insn_4arg_fn_t *i_fn = (insn_4arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        /* rm/rmv may be invalid, don't care. */
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rnv_wb;

        rnv_wb = insnslot_4arg_rflags(rnv, rmv, regs->uregs[rd],
                                      regs->uregs[rd+1],
                                      regs->ARM_cpsr, i_fn);
        if (is_writeback(insn))
                regs->uregs[rn] = rnv_wb;
}

static void __kprobes emulate_ldr(struct kprobe *p, struct pt_regs *regs)
{
        insn_llret_3arg_fn_t *i_fn = (insn_llret_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        union reg_pair fnr;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rdv;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long cpsr = regs->ARM_cpsr;

        fnr.dr = insnslot_llret_3arg_rflags(rnv, 0, rmv, cpsr, i_fn);
        if (rn != 15)
                regs->uregs[rn] = fnr.r0; /* Save Rn in case of writeback. */
        rdv = fnr.r1;

        if (rd == 15) {
#if __LINUX_ARM_ARCH__ >= 5
                cpsr &= ~PSR_T_BIT;
                if (rdv & 0x1)
                        cpsr |= PSR_T_BIT;
                regs->ARM_cpsr = cpsr;
                rdv &= ~0x1;
#else
                rdv &= ~0x2;
#endif
        }
        regs->uregs[rd] = rdv;
}

static void __kprobes emulate_str(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long iaddr = (long)p->addr;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rdv = (rd == 15) ? iaddr + str_pc_offset : regs->uregs[rd];
        long rnv = (rn == 15) ? iaddr + 8 : regs->uregs[rn];
        long rmv = regs->uregs[rm]; /* rm/rmv may be invalid, don't care. */
        long rnv_wb;

        rnv_wb = insnslot_3arg_rflags(rnv, rdv, rmv, regs->ARM_cpsr, i_fn);
        if (rn != 15)
                regs->uregs[rn] = rnv_wb; /* Save Rn in case of writeback. */
}

static void __kprobes emulate_sat(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rm = insn & 0xf;
        long rmv = regs->uregs[rm];

        /* Writes Q flag */
        regs->uregs[rd] = insnslot_1arg_rwflags(rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_sel(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rmv = regs->uregs[rm];

        /* Reads GE bits */
        regs->uregs[rd] = insnslot_2arg_rflags(rnv, rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_none(struct kprobe *p, struct pt_regs *regs)
{
        insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];

        insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_nop(struct kprobe *p, struct pt_regs *regs)
{
}
static void __kprobes
emulate_rd12_modify(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        long rdv = regs->uregs[rd];

        regs->uregs[rd] = insnslot_1arg_rflags(rdv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd12rn0_modify(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = insn & 0xf;
        long rdv = regs->uregs[rd];
        long rnv = regs->uregs[rn];

        regs->uregs[rd] = insnslot_2arg_rflags(rdv, rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rd12rm0(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rm = insn & 0xf;
        long rmv = regs->uregs[rm];

        regs->uregs[rd] = insnslot_1arg_rflags(rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd12rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rmv = regs->uregs[rm];

        regs->uregs[rd] =
                insnslot_2arg_rwflags(rnv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rn12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 16) & 0xf;
        int rn = (insn >> 12) & 0xf;
        int rs = (insn >> 8) & 0xf;
        int rm = insn & 0xf;
        long rnv = regs->uregs[rn];
        long rsv = regs->uregs[rs];
        long rmv = regs->uregs[rm];

        regs->uregs[rd] =
                insnslot_3arg_rwflags(rnv, rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 16) & 0xf;
        int rs = (insn >> 8) & 0xf;
        int rm = insn & 0xf;
        long rsv = regs->uregs[rs];
        long rmv = regs->uregs[rm];

        regs->uregs[rd] =
                insnslot_2arg_rwflags(rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rdhi16rdlo12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_llret_4arg_fn_t *i_fn = (insn_llret_4arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        union reg_pair fnr;
        int rdhi = (insn >> 16) & 0xf;
        int rdlo = (insn >> 12) & 0xf;
        int rs = (insn >> 8) & 0xf;
        int rm = insn & 0xf;
        long rsv = regs->uregs[rs];
        long rmv = regs->uregs[rm];

        fnr.dr = insnslot_llret_4arg_rwflags(regs->uregs[rdhi],
                                             regs->uregs[rdlo], rsv, rmv,
                                             &regs->ARM_cpsr, i_fn);
        regs->uregs[rdhi] = fnr.r0;
        regs->uregs[rdlo] = fnr.r1;
}

static void __kprobes
emulate_alu_imm_rflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

        regs->uregs[rd] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_imm_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;
        long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

        regs->uregs[rd] = insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_tests_imm(struct kprobe *p, struct pt_regs *regs)
{
        insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        int rn = (insn >> 16) & 0xf;
        long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

        insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;    /* rn/rnv/rs/rsv may be */
        int rs = (insn >> 8) & 0xf;     /* invalid, don't care. */
        int rm = insn & 0xf;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rsv = regs->uregs[rs];

        regs->uregs[rd] =
                insnslot_3arg_rflags(rnv, rmv, rsv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rwflags(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rd = (insn >> 12) & 0xf;
        int rn = (insn >> 16) & 0xf;    /* rn/rnv/rs/rsv may be */
        int rs = (insn >> 8) & 0xf;     /* invalid, don't care. */
        int rm = insn & 0xf;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rsv = regs->uregs[rs];

        regs->uregs[rd] =
                insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_tests(struct kprobe *p, struct pt_regs *regs)
{
        insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
        kprobe_opcode_t insn = p->opcode;
        long ppc = (long)p->addr + 8;
        int rn = (insn >> 16) & 0xf;
        int rs = (insn >> 8) & 0xf;     /* rs/rsv may be invalid, don't care. */
        int rm = insn & 0xf;
        long rnv = (rn == 15) ? ppc : regs->uregs[rn];
        long rmv = (rm == 15) ? ppc : regs->uregs[rm];
        long rsv = regs->uregs[rs];

        insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}

static enum kprobe_insn __kprobes
prep_emulate_ldr_str(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        int not_imm = (insn & (1 << 26)) ? (insn & (1 << 25))
                                         : (~insn & (1 << 22));

        if (is_writeback(insn) && is_r15(insn, 16))
                return INSN_REJECTED;   /* Writeback to PC */

        insn &= 0xfff00fff;
        insn |= 0x00001000;     /* Rn = r0, Rd = r1 */
        if (not_imm) {
                insn &= ~0xf;
                insn |= 2;      /* Rm = r2 */
        }
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_ldr : emulate_str;
        return INSN_GOOD;
}
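/*
 * For instance, a probed "ldr r3, [r5, r7]!" (0xe7b53007) comes out of
 * the rewriting above as "ldr r1, [r0, r2]!" (0xe7b01002) in insn[0];
 * emulate_ldr() then feeds the probed context's r5 and r7 in through
 * r0/r2 and afterwards copies r1/r0 back to the context's r3/r5.
 */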
static enum kprobe_insn __kprobes
prep_emulate_rd12_modify(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        if (is_r15(insn, 12))
                return INSN_REJECTED;   /* Rd is PC */

        insn &= 0xffff0fff;     /* Rd = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12_modify;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rn0_modify(kprobe_opcode_t insn,
                            struct arch_specific_insn *asi)
{
        if (is_r15(insn, 12))
                return INSN_REJECTED;   /* Rd is PC */

        insn &= 0xffff0ff0;     /* Rd = r0 */
        insn |= 0x00000001;     /* Rn = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12rn0_modify;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rm0(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        if (is_r15(insn, 12))
                return INSN_REJECTED;   /* Rd is PC */

        insn &= 0xffff0ff0;     /* Rd = r0, Rm = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12rm0;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rn16rm0_wflags(kprobe_opcode_t insn,
                                struct arch_specific_insn *asi)
{
        if (is_r15(insn, 12))
                return INSN_REJECTED;   /* Rd is PC */

        insn &= 0xfff00ff0;     /* Rd = r0, Rn = r0 */
        insn |= 0x00000001;     /* Rm = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd12rn16rm0_rwflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rs8rm0_wflags(kprobe_opcode_t insn,
                               struct arch_specific_insn *asi)
{
        if (is_r15(insn, 16))
                return INSN_REJECTED;   /* Rd is PC */

        insn &= 0xfff0f0f0;     /* Rd = r0, Rs = r0 */
        insn |= 0x00000001;     /* Rm = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd16rs8rm0_rwflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rn12rs8rm0_wflags(kprobe_opcode_t insn,
                                   struct arch_specific_insn *asi)
{
        if (is_r15(insn, 16))
                return INSN_REJECTED;   /* Rd is PC */

        insn &= 0xfff000f0;     /* Rd = r0, Rn = r0 */
        insn |= 0x00000102;     /* Rs = r1, Rm = r2 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rd16rn12rs8rm0_rwflags;
        return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rdhi16rdlo12rs8rm0_wflags(kprobe_opcode_t insn,
                                       struct arch_specific_insn *asi)
{
        if (is_r15(insn, 16) || is_r15(insn, 12))
                return INSN_REJECTED;   /* RdHi or RdLo is PC */

        insn &= 0xfff000f0;     /* RdHi = r0, RdLo = r1 */
        insn |= 0x00001203;     /* Rs = r2, Rm = r3 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_rdhi16rdlo12rs8rm0_rwflags;
        return INSN_GOOD;
}
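/*
 * For example, "umull r2, r3, r4, r5" (0xe0832594) is rewritten by the
 * function above to "umull r1, r0, r3, r2" (0xe0801293): RdHi, RdLo,
 * Rs and Rm become r0, r1, r2 and r3, matching what
 * emulate_rdhi16rdlo12rs8rm0_rwflags() loads and stores.
 */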
/*
 * For the instruction masking and comparisons in all the "space_*"
 * functions below, do _not_ rearrange the order of tests unless
 * you're very, very sure of what you are doing.  For the sake of
 * efficiency, the masks for some tests assume that other tests
 * have already been done before them, so that the pattern matched
 * by each test can be as broad as possible, reducing the total
 * number of tests needed.
 */
static enum kprobe_insn __kprobes
space_1111(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* memory hint : 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx : */
        /* PLDI : 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx : */
        /* PLDW : 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx : */
        /* PLD : 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx : */
        if ((insn & 0xfe300000) == 0xf4100000) {
                asi->insn_handler = emulate_nop;
                return INSN_GOOD_NO_SLOT;
        }

        /* BLX(1) : 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx : */
        if ((insn & 0xfe000000) == 0xfa000000) {
                asi->insn_handler = simulate_blx1;
                return INSN_GOOD_NO_SLOT;
        }

        /* CPS : 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */
        /* SETEND: 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */

        /* SRS : 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
        /* RFE : 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */

        /* Coprocessor instructions... */
        /* MCRR2 : 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
        /* MRRC2 : 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
        /* LDC2 : 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
        /* STC2 : 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
        /* CDP2 : 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
        /* MCR2 : 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
        /* MRC2 : 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */

        return INSN_REJECTED;
}
static enum kprobe_insn __kprobes
space_cccc_000x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* cccc 0001 0xx0 xxxx xxxx xxxx xxx0 xxxx */
        if ((insn & 0x0f900010) == 0x01000000) {

                /* MRS cpsr : cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
                if ((insn & 0x0ff000f0) == 0x01000000) {
                        if (is_r15(insn, 12))
                                return INSN_REJECTED;   /* Rd is PC */
                        asi->insn_handler = simulate_mrs;
                        return INSN_GOOD_NO_SLOT;
                }

                /* SMLALxy : cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
                if ((insn & 0x0ff00090) == 0x01400080)
                        return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
                                                                      asi);

                /* SMULWy : cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
                /* SMULxy : cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
                if ((insn & 0x0ff000b0) == 0x012000a0 ||
                    (insn & 0x0ff00090) == 0x01600080)
                        return prep_emulate_rd16rs8rm0_wflags(insn, asi);

                /* SMLAxy : cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx : Q */
                /* SMLAWy : cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx : Q */
                if ((insn & 0x0ff00090) == 0x01000080 ||
                    (insn & 0x0ff000b0) == 0x01200080)
                        return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

                /* BXJ : cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
                /* MSR : cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
                /* MRS spsr : cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */

                /* Other instruction encodings aren't yet defined */
                return INSN_REJECTED;
        }

        /* cccc 0001 0xx0 xxxx xxxx xxxx 0xx1 xxxx */
        else if ((insn & 0x0f900090) == 0x01000010) {

                /* BLX(2) : cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
                /* BX : cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
                if ((insn & 0x0ff000d0) == 0x01200010) {
                        if ((insn & 0x0ff000ff) == 0x0120003f)
                                return INSN_REJECTED; /* BLX pc */
                        asi->insn_handler = simulate_blx2bx;
                        return INSN_GOOD_NO_SLOT;
                }

                /* CLZ : cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
                if ((insn & 0x0ff000f0) == 0x01600010)
                        return prep_emulate_rd12rm0(insn, asi);

                /* QADD : cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx :Q */
                /* QSUB : cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx :Q */
                /* QDADD : cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx :Q */
                /* QDSUB : cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx :Q */
                if ((insn & 0x0f9000f0) == 0x01000050)
                        return prep_emulate_rd12rn16rm0_wflags(insn, asi);

                /* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
                /* SMC : cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */

                /* Other instruction encodings aren't yet defined */
                return INSN_REJECTED;
        }

        /* cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx */
        else if ((insn & 0x0f0000f0) == 0x00000090) {

                /* MUL : cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx : */
                /* MULS : cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx :cc */
                /* MLA : cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx : */
                /* MLAS : cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx :cc */
                /* UMAAL : cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx : */
                /* undef : cccc 0000 0101 xxxx xxxx xxxx 1001 xxxx : */
                /* MLS : cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx : */
                /* undef : cccc 0000 0111 xxxx xxxx xxxx 1001 xxxx : */
                /* UMULL : cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx : */
                /* UMULLS : cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx :cc */
                /* UMLAL : cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx : */
                /* UMLALS : cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx :cc */
                /* SMULL : cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx : */
                /* SMULLS : cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx :cc */
                /* SMLAL : cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx : */
                /* SMLALS : cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx :cc */
                if ((insn & 0x00d00000) == 0x00500000)
                        return INSN_REJECTED;
                else if ((insn & 0x00e00000) == 0x00000000)
                        return prep_emulate_rd16rs8rm0_wflags(insn, asi);
                else if ((insn & 0x00a00000) == 0x00200000)
                        return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
                else
                        return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
                                                                      asi);
        }

        /* cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx */
        else if ((insn & 0x0e000090) == 0x00000090) {

                /* SWP : cccc 0001 0000 xxxx xxxx xxxx 1001 xxxx */
                /* SWPB : cccc 0001 0100 xxxx xxxx xxxx 1001 xxxx */
                /* ??? : cccc 0001 0x01 xxxx xxxx xxxx 1001 xxxx */
                /* ??? : cccc 0001 0x10 xxxx xxxx xxxx 1001 xxxx */
                /* ??? : cccc 0001 0x11 xxxx xxxx xxxx 1001 xxxx */
                /* STREX : cccc 0001 1000 xxxx xxxx xxxx 1001 xxxx */
                /* LDREX : cccc 0001 1001 xxxx xxxx xxxx 1001 xxxx */
                /* STREXD: cccc 0001 1010 xxxx xxxx xxxx 1001 xxxx */
                /* LDREXD: cccc 0001 1011 xxxx xxxx xxxx 1001 xxxx */
                /* STREXB: cccc 0001 1100 xxxx xxxx xxxx 1001 xxxx */
                /* LDREXB: cccc 0001 1101 xxxx xxxx xxxx 1001 xxxx */
                /* STREXH: cccc 0001 1110 xxxx xxxx xxxx 1001 xxxx */
                /* LDREXH: cccc 0001 1111 xxxx xxxx xxxx 1001 xxxx */
                /* LDRD : cccc 000x xxx0 xxxx xxxx xxxx 1101 xxxx */
                /* STRD : cccc 000x xxx0 xxxx xxxx xxxx 1111 xxxx */
                /* LDRH : cccc 000x xxx1 xxxx xxxx xxxx 1011 xxxx */
                /* STRH : cccc 000x xxx0 xxxx xxxx xxxx 1011 xxxx */
                /* LDRSB : cccc 000x xxx1 xxxx xxxx xxxx 1101 xxxx */
                /* LDRSH : cccc 000x xxx1 xxxx xxxx xxxx 1111 xxxx */
                if ((insn & 0x0f0000f0) == 0x01000090) {
                        if ((insn & 0x0fb000f0) == 0x01000090) {
                                /* SWP/SWPB */
                                return prep_emulate_rd12rn16rm0_wflags(insn,
                                                                       asi);
                        } else {
                                /* STREX/LDREX variants and unallocated space */
                                return INSN_REJECTED;
                        }
                } else if ((insn & 0x0e1000d0) == 0x00000d0) {
                        /* STRD/LDRD */
                        if ((insn & 0x0000e000) == 0x0000e000)
                                return INSN_REJECTED;   /* Rd is LR or PC */
                        if (is_writeback(insn) && is_r15(insn, 16))
                                return INSN_REJECTED;   /* Writeback to PC */

                        insn &= 0xfff00fff;
                        insn |= 0x00002000;     /* Rn = r0, Rd = r2 */
                        if (!(insn & (1 << 22))) {
                                /* Register index */
                                insn &= ~0xf;
                                insn |= 1;      /* Rm = r1 */
                        }
                        asi->insn[0] = insn;
                        asi->insn_handler =
                                (insn & (1 << 5)) ? emulate_strd : emulate_ldrd;
                        return INSN_GOOD;
                }

                /* LDRH/STRH/LDRSB/LDRSH */
                if (is_r15(insn, 12))
                        return INSN_REJECTED;   /* Rd is PC */
                return prep_emulate_ldr_str(insn, asi);
        }

        /* cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx */

        /*
         * ALU op with S bit and Rd == 15 :
         *      cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx
         */
        if ((insn & 0x0e10f000) == 0x0010f000)
                return INSN_REJECTED;

        /*
         * "mov ip, sp" is the most common kprobe'd instruction by far.
         * Check and optimize for it explicitly.
         */
        if (insn == 0xe1a0c00d) {
                asi->insn_handler = simulate_mov_ipsp;
                return INSN_GOOD_NO_SLOT;
        }

        /*
         * Data processing: Immediate-shift / Register-shift
         * ALU op : cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx
         * CPY : cccc 0001 1010 xxxx xxxx 0000 0000 xxxx
         * MOV : cccc 0001 101x xxxx xxxx xxxx xxxx xxxx
         * *S (bit 20) updates condition codes
         * ADC/SBC/RSC reads the C flag
         */
        insn &= 0xfff00ff0;     /* Rn = r0, Rd = r0 */
        insn |= 0x00000001;     /* Rm = r1 */
        if (insn & 0x010) {
                insn &= 0xfffff0ff;     /* register shift */
                insn |= 0x00000200;     /* Rs = r2 */
        }
        asi->insn[0] = insn;

        if ((insn & 0x0f900000) == 0x01100000) {
                /*
                 * TST : cccc 0001 0001 xxxx xxxx xxxx xxxx xxxx
                 * TEQ : cccc 0001 0011 xxxx xxxx xxxx xxxx xxxx
                 * CMP : cccc 0001 0101 xxxx xxxx xxxx xxxx xxxx
                 * CMN : cccc 0001 0111 xxxx xxxx xxxx xxxx xxxx
                 */
                asi->insn_handler = emulate_alu_tests;
        } else {
                /* ALU ops which write to Rd */
                asi->insn_handler = (insn & (1 << 20)) ?        /* S-bit */
                                emulate_alu_rwflags : emulate_alu_rflags;
        }
        return INSN_GOOD;
}
static enum kprobe_insn __kprobes
space_cccc_001x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* MOVW : cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */
        /* MOVT : cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */
        if ((insn & 0x0fb00000) == 0x03000000)
                return prep_emulate_rd12_modify(insn, asi);

        /* hints : cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */
        if ((insn & 0x0fff0000) == 0x03200000) {
                unsigned op2 = insn & 0x000000ff;
                if (op2 == 0x01 || op2 == 0x04) {
                        /* YIELD : cccc 0011 0010 0000 xxxx xxxx 0000 0001 */
                        /* SEV : cccc 0011 0010 0000 xxxx xxxx 0000 0100 */
                        asi->insn[0] = insn;
                        asi->insn_handler = emulate_none;
                        return INSN_GOOD;
                } else if (op2 <= 0x03) {
                        /* NOP : cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
                        /* WFE : cccc 0011 0010 0000 xxxx xxxx 0000 0010 */
                        /* WFI : cccc 0011 0010 0000 xxxx xxxx 0000 0011 */
                        /*
                         * We make WFE and WFI true NOPs to avoid stalls due
                         * to missing events whilst processing the probe.
                         */
                        asi->insn_handler = emulate_nop;
                        return INSN_GOOD_NO_SLOT;
                }
                /* For DBG and unallocated hints it's safest to reject them */
                return INSN_REJECTED;
        }

        /*
         * MSR : cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx
         * ALU op with S bit and Rd == 15 :
         *      cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx
         */
        if ((insn & 0x0fb00000) == 0x03200000 ||        /* MSR */
            (insn & 0x0e10f000) == 0x0210f000)          /* ALU s-bit, R15 */
                return INSN_REJECTED;

        /*
         * Data processing: 32-bit Immediate
         * ALU op : cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
         * MOV : cccc 0011 101x xxxx xxxx xxxx xxxx xxxx
         * *S (bit 20) updates condition codes
         * ADC/SBC/RSC reads the C flag
         */
        insn &= 0xfff00fff;     /* Rn = r0 and Rd = r0 */
        asi->insn[0] = insn;

        if ((insn & 0x0f900000) == 0x03100000) {
                /*
                 * TST : cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx
                 * TEQ : cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx
                 * CMP : cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx
                 * CMN : cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx
                 */
                asi->insn_handler = emulate_alu_tests_imm;
        } else {
                /* ALU ops which write to Rd */
                asi->insn_handler = (insn & (1 << 20)) ?        /* S-bit */
                                emulate_alu_imm_rwflags : emulate_alu_imm_rflags;
        }
        return INSN_GOOD;
}
static enum kprobe_insn __kprobes
space_cccc_0110__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* SEL : cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx GE: !!! */
        if ((insn & 0x0ff000f0) == 0x068000b0) {
                if (is_r15(insn, 12))
                        return INSN_REJECTED;   /* Rd is PC */
                insn &= 0xfff00ff0;     /* Rd = r0, Rn = r0 */
                insn |= 0x00000001;     /* Rm = r1 */
                asi->insn[0] = insn;
                asi->insn_handler = emulate_sel;
                return INSN_GOOD;
        }

        /* SSAT : cccc 0110 101x xxxx xxxx xxxx xx01 xxxx :Q */
        /* USAT : cccc 0110 111x xxxx xxxx xxxx xx01 xxxx :Q */
        /* SSAT16 : cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx :Q */
        /* USAT16 : cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx :Q */
        if ((insn & 0x0fa00030) == 0x06a00010 ||
            (insn & 0x0fb000f0) == 0x06a00030) {
                if (is_r15(insn, 12))
                        return INSN_REJECTED;   /* Rd is PC */
                insn &= 0xffff0ff0;     /* Rd = r0, Rm = r0 */
                asi->insn[0] = insn;
                asi->insn_handler = emulate_sat;
                return INSN_GOOD;
        }

        /* REV : cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
        /* REV16 : cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
        /* RBIT : cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */
        /* REVSH : cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
        if ((insn & 0x0ff00070) == 0x06b00030 ||
            (insn & 0x0ff00070) == 0x06f00030)
                return prep_emulate_rd12rm0(insn, asi);

        /* ??? : cccc 0110 0000 xxxx xxxx xxxx xxx1 xxxx : */
        /* SADD16 : cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx :GE */
        /* SADDSUBX : cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx :GE */
        /* SSUBADDX : cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx :GE */
        /* SSUB16 : cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx :GE */
        /* SADD8 : cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx :GE */
        /* ??? : cccc 0110 0001 xxxx xxxx xxxx 1011 xxxx : */
        /* ??? : cccc 0110 0001 xxxx xxxx xxxx 1101 xxxx : */
        /* SSUB8 : cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx :GE */
        /* QADD16 : cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx : */
        /* QADDSUBX : cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx : */
        /* QSUBADDX : cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx : */
        /* QSUB16 : cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx : */
        /* QADD8 : cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx : */
        /* ??? : cccc 0110 0010 xxxx xxxx xxxx 1011 xxxx : */
        /* ??? : cccc 0110 0010 xxxx xxxx xxxx 1101 xxxx : */
        /* QSUB8 : cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx : */
        /* SHADD16 : cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx : */
        /* SHADDSUBX : cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx : */
        /* SHSUBADDX : cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx : */
        /* SHSUB16 : cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx : */
        /* SHADD8 : cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx : */
        /* ??? : cccc 0110 0011 xxxx xxxx xxxx 1011 xxxx : */
        /* ??? : cccc 0110 0011 xxxx xxxx xxxx 1101 xxxx : */
        /* SHSUB8 : cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx : */
        /* ??? : cccc 0110 0100 xxxx xxxx xxxx xxx1 xxxx : */
        /* UADD16 : cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx :GE */
        /* UADDSUBX : cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx :GE */
        /* USUBADDX : cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx :GE */
        /* USUB16 : cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx :GE */
        /* UADD8 : cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx :GE */
        /* ??? : cccc 0110 0101 xxxx xxxx xxxx 1011 xxxx : */
        /* ??? : cccc 0110 0101 xxxx xxxx xxxx 1101 xxxx : */
        /* USUB8 : cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx :GE */
        /* UQADD16 : cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx : */
        /* UQADDSUBX : cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx : */
        /* UQSUBADDX : cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx : */
        /* UQSUB16 : cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx : */
        /* UQADD8 : cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx : */
        /* ??? : cccc 0110 0110 xxxx xxxx xxxx 1011 xxxx : */
        /* ??? : cccc 0110 0110 xxxx xxxx xxxx 1101 xxxx : */
        /* UQSUB8 : cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx : */
        /* UHADD16 : cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx : */
        /* UHADDSUBX : cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx : */
        /* UHSUBADDX : cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx : */
        /* UHSUB16 : cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx : */
        /* UHADD8 : cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx : */
        /* ??? : cccc 0110 0111 xxxx xxxx xxxx 1011 xxxx : */
        /* ??? : cccc 0110 0111 xxxx xxxx xxxx 1101 xxxx : */
        /* UHSUB8 : cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx : */
        if ((insn & 0x0f800010) == 0x06000010) {
                if ((insn & 0x00300000) == 0x00000000 ||
                    (insn & 0x000000e0) == 0x000000a0 ||
                    (insn & 0x000000e0) == 0x000000c0)
                        return INSN_REJECTED;   /* Unallocated space */
                return prep_emulate_rd12rn16rm0_wflags(insn, asi);
        }

        /* PKHBT : cccc 0110 1000 xxxx xxxx xxxx x001 xxxx : */
        /* PKHTB : cccc 0110 1000 xxxx xxxx xxxx x101 xxxx : */
        if ((insn & 0x0ff00030) == 0x06800010)
                return prep_emulate_rd12rn16rm0_wflags(insn, asi);

        /* SXTAB16 : cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx : */
        /* SXTB16 : cccc 0110 1000 1111 xxxx xxxx 0111 xxxx : */
        /* ??? : cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx : */
        /* SXTAB : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx : */
        /* SXTB : cccc 0110 1010 1111 xxxx xxxx 0111 xxxx : */
        /* SXTAH : cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx : */
        /* SXTH : cccc 0110 1011 1111 xxxx xxxx 0111 xxxx : */
        /* UXTAB16 : cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx : */
        /* UXTB16 : cccc 0110 1100 1111 xxxx xxxx 0111 xxxx : */
        /* ??? : cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx : */
        /* UXTAB : cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx : */
        /* UXTB : cccc 0110 1110 1111 xxxx xxxx 0111 xxxx : */
        /* UXTAH : cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx : */
        /* UXTH : cccc 0110 1111 1111 xxxx xxxx 0111 xxxx : */
        if ((insn & 0x0f8000f0) == 0x06800070) {
                if ((insn & 0x00300000) == 0x00100000)
                        return INSN_REJECTED;   /* Unallocated space */
                if ((insn & 0x000f0000) == 0x000f0000)
                        return prep_emulate_rd12rm0(insn, asi);
                else
                        return prep_emulate_rd12rn16rm0_wflags(insn, asi);
        }

        /* Other instruction encodings aren't yet defined */
        return INSN_REJECTED;
}
static enum kprobe_insn __kprobes
space_cccc_0111__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* Undef : cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
        if ((insn & 0x0ff000f0) == 0x07f000f0)
                return INSN_REJECTED;

        /* SMLALD : cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
        /* SMLSLD : cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
        if ((insn & 0x0ff00090) == 0x07400010)
                return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn, asi);

        /* SMLAD : cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx :Q */
        /* SMUAD : cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx :Q */
        /* SMLSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :Q */
        /* SMUSD : cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx : */
        /* SMMLA : cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx : */
        /* SMMUL : cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx : */
        /* USADA8 : cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx : */
        /* USAD8 : cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx : */
        if ((insn & 0x0ff00090) == 0x07000010 ||
            (insn & 0x0ff000d0) == 0x07500010 ||
            (insn & 0x0ff000f0) == 0x07800010) {
                if ((insn & 0x0000f000) == 0x0000f000)
                        return prep_emulate_rd16rs8rm0_wflags(insn, asi);
                else
                        return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
        }

        /* SMMLS : cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx : */
        if ((insn & 0x0ff000d0) == 0x075000d0)
                return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

        /* SBFX : cccc 0111 101x xxxx xxxx xxxx x101 xxxx : */
        /* UBFX : cccc 0111 111x xxxx xxxx xxxx x101 xxxx : */
        if ((insn & 0x0fa00070) == 0x07a00050)
                return prep_emulate_rd12rm0(insn, asi);

        /* BFI : cccc 0111 110x xxxx xxxx xxxx x001 xxxx : */
        /* BFC : cccc 0111 110x xxxx xxxx xxxx x001 1111 : */
        if ((insn & 0x0fe00070) == 0x07c00010) {
                if ((insn & 0x0000000f) == 0x0000000f)
                        return prep_emulate_rd12_modify(insn, asi);
                else
                        return prep_emulate_rd12rn0_modify(insn, asi);
        }

        return INSN_REJECTED;
}

static enum kprobe_insn __kprobes
space_cccc_01xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDR : cccc 01xx x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* LDRB : cccc 01xx x1x1 xxxx xxxx xxxx xxxx xxxx */
        /* LDRBT : cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
        /* LDRT : cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
        /* STR : cccc 01xx x0x0 xxxx xxxx xxxx xxxx xxxx */
        /* STRB : cccc 01xx x1x0 xxxx xxxx xxxx xxxx xxxx */
        /* STRBT : cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
        /* STRT : cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
        if ((insn & 0x00500000) == 0x00500000 && is_r15(insn, 12))
                return INSN_REJECTED;   /* LDRB into PC */

        return prep_emulate_ldr_str(insn, asi);
}

static enum kprobe_insn __kprobes
space_cccc_100x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDM(2) : cccc 100x x101 xxxx 0xxx xxxx xxxx xxxx */
        /* LDM(3) : cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
        if ((insn & 0x0e708000) == 0x08500000 ||
            (insn & 0x0e508000) == 0x08508000)
                return INSN_REJECTED;

        /* LDM(1) : cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* STM(1) : cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
        asi->insn_handler = ((insn & 0x108000) == 0x008000) ?   /* STM & R15 */
                                simulate_stm1_pc : simulate_ldm1stm1;
        return INSN_GOOD_NO_SLOT;
}

static enum kprobe_insn __kprobes
space_cccc_101x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* B : cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
        /* BL : cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
        asi->insn_handler = simulate_bbl;
        return INSN_GOOD_NO_SLOT;
}

static enum kprobe_insn __kprobes
space_cccc_11xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* Coprocessor instructions... */
        /* MCRR : cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
        /* MRRC : cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
        /* LDC : cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
        /* STC : cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
        /* CDP : cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
        /* MCR : cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
        /* MRC : cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
        /* SVC : cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */

        return INSN_REJECTED;
}
/* Return:
 *   INSN_REJECTED     If instruction is one not allowed to kprobe,
 *   INSN_GOOD         If instruction is supported and uses instruction slot,
 *   INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
 *
 * For instructions we don't want to kprobe (INSN_REJECTED return result):
 *   These are generally ones that modify the processor state, making
 *   them "hard" to simulate, such as ones that switch processor modes or
 *   make accesses in alternate modes.  Any of these could be simulated
 *   if the work was put into it, but the return would be low considering
 *   they should also be very rare.
 */
enum kprobe_insn __kprobes
arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        asi->insn_check_cc = kprobe_condition_checks[insn>>28];
        asi->insn[1] = KPROBE_RETURN_INSTRUCTION;

        if ((insn & 0xf0000000) == 0xf0000000)
                return space_1111(insn, asi);
        else if ((insn & 0x0e000000) == 0x00000000)
                return space_cccc_000x(insn, asi);
        else if ((insn & 0x0e000000) == 0x02000000)
                return space_cccc_001x(insn, asi);
        else if ((insn & 0x0f000010) == 0x06000010)
                return space_cccc_0110__1(insn, asi);
        else if ((insn & 0x0f000010) == 0x07000010)
                return space_cccc_0111__1(insn, asi);
        else if ((insn & 0x0c000000) == 0x04000000)
                return space_cccc_01xx(insn, asi);
        else if ((insn & 0x0e000000) == 0x08000000)
                return space_cccc_100x(insn, asi);
        else if ((insn & 0x0e000000) == 0x0a000000)
                return space_cccc_101x(insn, asi);

        return space_cccc_11xx(insn, asi);
}

void __init arm_kprobe_decode_init(void)
{
        find_str_pc_offset();
}