/*
 * arch/arm/kernel/kprobes-decode.c
 *
 * Copyright (C) 2006, 2007 Motorola Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 */

/*
 * We do not have hardware single-stepping on ARM.  This
 * effort is further complicated by the ARM not having a
 * "next PC" register.  Instructions that change the PC
 * can't be safely single-stepped in an MP environment, so
 * we have a lot of work to do:
 *
 * In the prepare phase:
 *   *) If it is an instruction that does anything
 *      with the CPU mode, we reject it for a kprobe.
 *      (This is out of laziness rather than need.  The
 *      instructions could be simulated.)
 *
 *   *) Otherwise, decode the instruction rewriting its
 *      registers to take fixed, ordered registers and
 *      setting a handler for it to run the instruction.
 *
 * In the execution phase by an instruction's handler:
 *
 *   *) If the PC is written to by the instruction, the
 *      instruction must be fully simulated in software.
 *      If it is a conditional instruction, the handler
 *      will use insn[0] to copy its condition code to
 *      set r0 to 1 and insn[1] to "mov pc, lr" to return.
 *
 *   *) Otherwise, a modified form of the instruction is
 *      directly executed.  Its handler calls the
 *      instruction in insn[0].  In insn[1] is a
 *      "mov pc, lr" to return.
 *
 *      Before calling, load up the reordered registers
 *      from the original instruction's registers.  If one
 *      of the original input registers is the PC, compute
 *      and adjust the appropriate input register.
 *
 *      After the call completes, copy the output registers back to
 *      the original instruction's original registers.
 *
 * We don't use a real breakpoint instruction since that
 * would, in the kernel, take us from SVC mode to SVC mode
 * and lose the link register.  Instead we use an
 * undefined instruction.  To simplify processing, the
 * undefined instruction used for kprobes must be reserved
 * exclusively for kprobes use.
 *
 * TODO: ifdef out some instruction decoding based on architecture.
 */

#include <linux/kernel.h>
#include <linux/kprobes.h>

#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))

#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)

#define is_r15(insn, bitpos) (((insn) & (0xf << bitpos)) == (0xf << bitpos))

#define PSR_fs	(PSR_f|PSR_s)

#define KPROBE_RETURN_INSTRUCTION	0xe1a0f00e	/* mov pc, lr */
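
/*
 * Worked example (illustrative, not from the original source): a
 * branch-to-self "b ." encodes imm24 = 0xfffffe.  (0xfffffe & 0xffffff)
 * << 2 = 0x03fffff8, which has bit 25 set, so sign_extend() ORs in
 * 0xfe000000 giving -8; simulate_bbl() below then computes
 * iaddr + 8 + (-8) = iaddr, i.e. the branch lands back on itself.
 */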

typedef long (insn_0arg_fn_t)(void);
typedef long (insn_1arg_fn_t)(long);
typedef long (insn_2arg_fn_t)(long, long);
typedef long (insn_3arg_fn_t)(long, long, long);
typedef long (insn_4arg_fn_t)(long, long, long, long);
typedef long long (insn_llret_0arg_fn_t)(void);
typedef long long (insn_llret_3arg_fn_t)(long, long, long);
typedef long long (insn_llret_4arg_fn_t)(long, long, long, long);

union reg_pair {
	long long	dr;
#ifdef __LITTLE_ENDIAN
	struct { long	r0, r1; };
#else
	struct { long	r1, r0; };
#endif
};

/*
 * For STR and STM instructions, an ARM core may choose to use either
 * a +8 or a +12 displacement from the current instruction's address.
 * Whichever value is chosen for a given core, it must be the same for
 * both instructions and may not change.  This function measures it.
 */

static int str_pc_offset;

static void __init find_str_pc_offset(void)
{
	int addr, scratch, ret;

	__asm__ (
		"sub	%[ret], pc, #4		\n\t"
		"str	pc, %[addr]		\n\t"
		"ldr	%[scr], %[addr]		\n\t"
		"sub	%[ret], %[scr], %[ret]	\n\t"
		: [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));

	str_pc_offset = ret;
}
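
/*
 * The offset measured above is used by emulate_str() and
 * simulate_stm1_pc() below, which substitute (instruction address +
 * str_pc_offset) for the PC whenever the probed instruction stores r15.
 */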

/*
 * The insnslot_?arg_r[w]flags() functions below are to keep the
 * msr -> *fn -> mrs instruction sequences indivisible so that
 * the state of the CPSR flags isn't inadvertently modified
 * just before or just after the call.
 */

static inline long __kprobes
insnslot_0arg_rflags(long cpsr, insn_0arg_fn_t *fn)
{
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret)
		: [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}
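
/*
 * Note on the call sequence above: "mov lr, pc" followed by
 * "mov pc, %[fn]" is the classic pre-ARMv5 call idiom.  Reading the PC
 * yields the address of the current instruction plus 8, so lr ends up
 * pointing just past the jump, and the slot's trailing "mov pc, lr"
 * (KPROBE_RETURN_INSTRUCTION) returns control here.
 */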

static inline long long __kprobes
insnslot_llret_0arg_rflags(long cpsr, insn_llret_0arg_fn_t *fn)
{
	register long ret0 asm("r0");
	register long ret1 asm("r1");
	union reg_pair fnr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret0), "=r" (ret1)
		: [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	fnr.r0 = ret0;
	fnr.r1 = ret1;
	return fnr.dr;
}

static inline long __kprobes
insnslot_1arg_rflags(long r0, long cpsr, insn_1arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret)
		: "0" (rr0), [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long __kprobes
insnslot_2arg_rflags(long r0, long r1, long cpsr, insn_2arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret)
		: "0" (rr0), "r" (rr1),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long __kprobes
insnslot_3arg_rflags(long r0, long r1, long r2, long cpsr, insn_3arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret)
		: "0" (rr0), "r" (rr1), "r" (rr2),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long long __kprobes
insnslot_llret_3arg_rflags(long r0, long r1, long r2, long cpsr,
			   insn_llret_3arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long ret0 asm("r0");
	register long ret1 asm("r1");
	union reg_pair fnr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret0), "=r" (ret1)
		: "0" (rr0), "r" (rr1), "r" (rr2),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	fnr.r0 = ret0;
	fnr.r1 = ret1;
	return fnr.dr;
}

static inline long __kprobes
insnslot_4arg_rflags(long r0, long r1, long r2, long r3, long cpsr,
		     insn_4arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long rr3 asm("r3") = r3;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[cpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		: "=r" (ret)
		: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long __kprobes
insnslot_1arg_rwflags(long r0, long *cpsr, insn_1arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[oldcpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		"mrs	%[newcpsr], cpsr	\n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long __kprobes
insnslot_2arg_rwflags(long r0, long r1, long *cpsr, insn_2arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[oldcpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		"mrs	%[newcpsr], cpsr	\n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long __kprobes
insnslot_3arg_rwflags(long r0, long r1, long r2, long *cpsr,
		      insn_3arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[oldcpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		"mrs	%[newcpsr], cpsr	\n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), "r" (rr2),
		  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long __kprobes
insnslot_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
		      insn_4arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long rr3 asm("r3") = r3;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[oldcpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		"mrs	%[newcpsr], cpsr	\n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
		  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long long __kprobes
insnslot_llret_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
			    insn_llret_4arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long rr3 asm("r3") = r3;
	register long ret0 asm("r0");
	register long ret1 asm("r1");
	long oldcpsr = *cpsr;
	long newcpsr;
	union reg_pair fnr;

	__asm__ __volatile__ (
		"msr	cpsr_fs, %[oldcpsr]	\n\t"
		"mov	lr, pc			\n\t"
		"mov	pc, %[fn]		\n\t"
		"mrs	%[newcpsr], cpsr	\n\t"
		: "=r" (ret0), "=r" (ret1), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
		  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	fnr.r0 = ret0;
	fnr.r1 = ret1;
	return fnr.dr;
}

/*
 * To avoid the complications of mimicking single-stepping on a
 * processor without a Next-PC or a single-step mode, and to
 * avoid having to deal with the side-effects of boosting, we
 * simulate or emulate (almost) all ARM instructions.
 *
 * "Simulation" is where the instruction's behavior is duplicated in
 * C code.  "Emulation" is where the original instruction is rewritten
 * and executed, often by altering its registers.
 *
 * By having all behavior of the kprobe'd instruction completed before
 * returning from the kprobe_handler(), all locks (scheduler and
 * interrupt) can safely be released.  There is no need for secondary
 * breakpoints, no race with MP or preemptable kernels, and no need to
 * clean up resource counts at a later time, which would impact overall
 * system performance.  By rewriting the instruction, only the minimum
 * number of registers needs to be loaded and saved back, optimizing
 * performance.
 *
 * Calling the insnslot_*_rwflags version of a function doesn't hurt
 * anything even when the CPSR flags aren't updated by the
 * instruction.  It's just a little slower in return for saving
 * a little space by not having a duplicate function that doesn't
 * update the flags.  (The same trade-off applies to instructions that
 * do or don't perform register writeback.)
 * Also, instructions can either read the flags, only write the
 * flags, or read and write the flags.  To limit the number of
 * combinations rather than for sheer performance, the flag functions
 * just assume the flags are both read and written.
 */
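
/*
 * For example, a branch such as "bl" writes the PC and so is simulated:
 * simulate_bbl() below computes the target entirely in C.  An ALU
 * instruction such as "add r5, r7, r9" is instead emulated: the decoder
 * rewrites it to "add r0, r0, r1", runs it from the instruction slot,
 * and shuffles the original registers in and out around the call.
 */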

static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	long iaddr = (long)p->addr;
	int disp = branch_displacement(insn);

	if (insn & (1 << 24))
		regs->ARM_lr = iaddr + 4;

	regs->ARM_pc = iaddr + 8 + disp;
}

static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	long iaddr = (long)p->addr;
	int disp = branch_displacement(insn);

	regs->ARM_lr = iaddr + 4;
	regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
	regs->ARM_cpsr |= PSR_T_BIT;
}
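
/*
 * BLX(1) always enters Thumb state, hence the unconditional PSR_T_BIT
 * above.  The "(insn >> 23) & 0x2" term pulls the H bit (bit 24) down
 * to a halfword offset, giving the extra 2-byte granularity that Thumb
 * targets require.
 */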

static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rm = insn & 0xf;
	long rmv = regs->uregs[rm];

	if (insn & (1 << 5))
		regs->ARM_lr = (long)p->addr + 4;

	regs->ARM_pc = rmv & ~0x1;
	regs->ARM_cpsr &= ~PSR_T_BIT;
	if (rmv & 0x1)
		regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_mrs(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	unsigned long mask = 0xf8ff03df; /* Mask out execution state */

	regs->uregs[rd] = regs->ARM_cpsr & mask;
}

static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	int lbit = insn & (1 << 20);
	int wbit = insn & (1 << 21);
	int ubit = insn & (1 << 23);
	int pbit = insn & (1 << 24);
	long *addr = (long *)regs->uregs[rn];
	int reg_bit_vector;
	int reg_count;

	reg_count = 0;
	reg_bit_vector = insn & 0xffff;
	while (reg_bit_vector) {
		reg_bit_vector &= (reg_bit_vector - 1);
		++reg_count;
	}

	if (!ubit)
		addr -= reg_count;
	addr += (!pbit == !ubit);

	reg_bit_vector = insn & 0xffff;
	while (reg_bit_vector) {
		int reg = __ffs(reg_bit_vector);
		reg_bit_vector &= (reg_bit_vector - 1);
		if (lbit)
			regs->uregs[reg] = *addr++;
		else
			*addr++ = regs->uregs[reg];
	}

	if (wbit) {
		if (!ubit)
			addr -= reg_count;
		addr -= (!pbit == !ubit);
		regs->uregs[rn] = (long)addr;
	}
}
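
/*
 * simulate_stm1_pc() handles an STM whose register list includes the PC:
 * it temporarily rewrites regs->ARM_pc to the value the probed
 * instruction would have stored (the instruction address plus the
 * str_pc_offset measured at init), lets simulate_ldm1stm1() do the
 * transfer, then restores the PC to the next instruction.
 */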

static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
{
	regs->ARM_pc = (long)p->addr + str_pc_offset;
	simulate_ldm1stm1(p, regs);
	regs->ARM_pc = (long)p->addr + 4;
}

static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
{
	regs->uregs[12] = regs->uregs[13];
}

static void __kprobes emulate_ldcstc(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	long rnv = regs->uregs[rn];

	/* Save Rn in case of writeback. */
	regs->uregs[rn] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_ldrd(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;	/* rm may be invalid, don't care. */
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];

	/* Not following the C calling convention here, so need asm(). */
	__asm__ __volatile__ (
		"ldr	r0, %[rn]	\n\t"
		"ldr	r1, %[rm]	\n\t"
		"msr	cpsr_fs, %[cpsr]\n\t"
		"mov	lr, pc		\n\t"
		"mov	pc, %[i_fn]	\n\t"
		"str	r0, %[rn]	\n\t"	/* in case of writeback */
		"str	r2, %[rd0]	\n\t"
		"str	r3, %[rd1]	\n\t"
		: [rn]  "+m" (rnv),
		  [rd0] "=m" (regs->uregs[rd]),
		  [rd1] "=m" (regs->uregs[rd+1])
		: [rm]   "m" (rmv),
		  [cpsr] "r" (regs->ARM_cpsr),
		  [i_fn] "r" (i_fn)
		: "r0", "r1", "r2", "r3", "lr", "cc"
	);
	if (rn != 15)
		regs->uregs[rn] = rnv;	/* Save Rn in case of writeback. */
}
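
/*
 * emulate_ldrd() above needs hand-written asm because the rewritten
 * instruction leaves its results in r2/r3 (Rd = r2) with any writeback
 * in r0, a register layout no C calling convention provides; the str
 * instructions after the slot call scatter those registers back into
 * pt_regs.
 */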

static void __kprobes emulate_strd(struct kprobe *p, struct pt_regs *regs)
{
	insn_4arg_fn_t *i_fn = (insn_4arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	/* rm/rmv may be invalid, don't care. */
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rnv_wb;

	rnv_wb = insnslot_4arg_rflags(rnv, rmv, regs->uregs[rd],
				      regs->uregs[rd+1],
				      regs->ARM_cpsr, i_fn);
	if (rn != 15)
		regs->uregs[rn] = rnv_wb;	/* Save Rn in case of writeback. */
}

static void __kprobes emulate_ldr(struct kprobe *p, struct pt_regs *regs)
{
	insn_llret_3arg_fn_t *i_fn = (insn_llret_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	union reg_pair fnr;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rdv;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long cpsr = regs->ARM_cpsr;

	fnr.dr = insnslot_llret_3arg_rflags(rnv, 0, rmv, cpsr, i_fn);
	if (rn != 15)
		regs->uregs[rn] = fnr.r0;	/* Save Rn in case of writeback. */
	rdv = fnr.r1;

	if (rd == 15) {
#if __LINUX_ARM_ARCH__ >= 5
		cpsr &= ~PSR_T_BIT;
		if (rdv & 0x1)
			cpsr |= PSR_T_BIT;
		regs->ARM_cpsr = cpsr;
		rdv &= ~0x1;
#else
		rdv &= ~0x2;
#endif
	}
	regs->uregs[rd] = rdv;
}

static void __kprobes emulate_str(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long iaddr = (long)p->addr;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rdv = (rd == 15) ? iaddr + str_pc_offset : regs->uregs[rd];
	long rnv = (rn == 15) ? iaddr + 8 : regs->uregs[rn];
	long rmv = regs->uregs[rm];	/* rm/rmv may be invalid, don't care. */
	long rnv_wb;

	rnv_wb = insnslot_3arg_rflags(rnv, rdv, rmv, regs->ARM_cpsr, i_fn);
	if (rn != 15)
		regs->uregs[rn] = rnv_wb;	/* Save Rn in case of writeback. */
}

static void __kprobes emulate_mrrc(struct kprobe *p, struct pt_regs *regs)
{
	insn_llret_0arg_fn_t *i_fn = (insn_llret_0arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	union reg_pair fnr;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;

	fnr.dr = insnslot_llret_0arg_rflags(regs->ARM_cpsr, i_fn);
	regs->uregs[rn] = fnr.r0;
	regs->uregs[rd] = fnr.r1;
}

static void __kprobes emulate_mcrr(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	long rnv = regs->uregs[rn];
	long rdv = regs->uregs[rd];

	insnslot_2arg_rflags(rnv, rdv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_sat(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rm = insn & 0xf;
	long rmv = regs->uregs[rm];

	/* Writes Q flag */
	regs->uregs[rd] = insnslot_1arg_rwflags(rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_sel(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rnv = regs->uregs[rn];
	long rmv = regs->uregs[rm];

	/* Reads GE bits */
	regs->uregs[rd] = insnslot_2arg_rflags(rnv, rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_none(struct kprobe *p, struct pt_regs *regs)
{
	insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];

	insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rd12(struct kprobe *p, struct pt_regs *regs)
{
	insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;

	regs->uregs[rd] = insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_ird12(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int ird = (insn >> 12) & 0xf;

	insnslot_1arg_rflags(regs->uregs[ird], regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rn16(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	long rnv = regs->uregs[rn];

	insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rd12rm0(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rm = insn & 0xf;
	long rmv = regs->uregs[rm];

	regs->uregs[rd] = insnslot_1arg_rflags(rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd12rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rnv = regs->uregs[rn];
	long rmv = regs->uregs[rm];

	regs->uregs[rd] =
		insnslot_2arg_rwflags(rnv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rn12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 16) & 0xf;
	int rn = (insn >> 12) & 0xf;
	int rs = (insn >> 8) & 0xf;
	int rm = insn & 0xf;
	long rnv = regs->uregs[rn];
	long rsv = regs->uregs[rs];
	long rmv = regs->uregs[rm];

	regs->uregs[rd] =
		insnslot_3arg_rwflags(rnv, rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 16) & 0xf;
	int rs = (insn >> 8) & 0xf;
	int rm = insn & 0xf;
	long rsv = regs->uregs[rs];
	long rmv = regs->uregs[rm];

	regs->uregs[rd] =
		insnslot_2arg_rwflags(rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rdhi16rdlo12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_llret_4arg_fn_t *i_fn = (insn_llret_4arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	union reg_pair fnr;
	int rdhi = (insn >> 16) & 0xf;
	int rdlo = (insn >> 12) & 0xf;
	int rs = (insn >> 8) & 0xf;
	int rm = insn & 0xf;
	long rsv = regs->uregs[rs];
	long rmv = regs->uregs[rm];

	fnr.dr = insnslot_llret_4arg_rwflags(regs->uregs[rdhi],
					     regs->uregs[rdlo], rsv, rmv,
					     &regs->ARM_cpsr, i_fn);
	regs->uregs[rdhi] = fnr.r0;
	regs->uregs[rdlo] = fnr.r1;
}

static void __kprobes
emulate_alu_imm_rflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

	regs->uregs[rd] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_imm_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

	regs->uregs[rd] = insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_tests_imm(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

	insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;	/* rn/rnv/rs/rsv may be */
	int rs = (insn >> 8) & 0xf;	/* invalid, don't care. */
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rsv = regs->uregs[rs];

	regs->uregs[rd] =
		insnslot_3arg_rflags(rnv, rmv, rsv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;	/* rn/rnv/rs/rsv may be */
	int rs = (insn >> 8) & 0xf;	/* invalid, don't care. */
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rsv = regs->uregs[rs];

	regs->uregs[rd] =
		insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_tests(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rn = (insn >> 16) & 0xf;
	int rs = (insn >> 8) & 0xf;	/* rs/rsv may be invalid, don't care. */
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rsv = regs->uregs[rs];

	insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}
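
/*
 * The prep_emulate_*() helpers below all follow the same pattern: mask
 * the probed instruction so its register fields are renumbered down to
 * a fixed set of low registers (r0-r3), store the rewritten instruction
 * in the slot, and select the emulate_*() handler that knows which
 * original registers to copy in and out around the slot call.
 */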

static enum kprobe_insn __kprobes
prep_emulate_ldr_str(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	int ibit = (insn & (1 << 26)) ? 25 : 22;

	insn &= 0xfff00fff;
	insn |= 0x00001000;	/* Rn = r0, Rd = r1 */
	if (insn & (1 << ibit)) {
		insn &= ~0xf;
		insn |= 2;	/* Rm = r2 */
	}
	asi->insn[0] = insn;
	asi->insn_handler = (insn & (1 << 20)) ? emulate_ldr : emulate_str;
	return INSN_GOOD;
}
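
/*
 * For example, "ldr r5, [r7, r9]" (register offset, so bit 25 is set)
 * is rewritten above to "ldr r1, [r0, r2]".  emulate_ldr() then feeds
 * the original r7 and r9 values in through r0 and r2, copies the loaded
 * value back from r1 to r5, and copies any base writeback from r0 back
 * to r7.
 */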

static enum kprobe_insn __kprobes
prep_emulate_rd12rm0(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	if (is_r15(insn, 12))
		return INSN_REJECTED;	/* Rd is PC */
	insn &= 0xffff0ff0;	/* Rd = r0, Rm = r0 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd12rm0;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rn16rm0_wflags(kprobe_opcode_t insn,
				struct arch_specific_insn *asi)
{
	if (is_r15(insn, 12))
		return INSN_REJECTED;	/* Rd is PC */
	insn &= 0xfff00ff0;	/* Rd = r0, Rn = r0 */
	insn |= 0x00000001;	/* Rm = r1 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd12rn16rm0_rwflags;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rs8rm0_wflags(kprobe_opcode_t insn,
			       struct arch_specific_insn *asi)
{
	if (is_r15(insn, 16))
		return INSN_REJECTED;	/* Rd is PC */
	insn &= 0xfff0f0f0;	/* Rd = r0, Rs = r0 */
	insn |= 0x00000001;	/* Rm = r1 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd16rs8rm0_rwflags;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rn12rs8rm0_wflags(kprobe_opcode_t insn,
				   struct arch_specific_insn *asi)
{
	if (is_r15(insn, 16))
		return INSN_REJECTED;	/* Rd is PC */
	insn &= 0xfff000f0;	/* Rd = r0, Rn = r0 */
	insn |= 0x00000102;	/* Rs = r1, Rm = r2 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd16rn12rs8rm0_rwflags;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rdhi16rdlo12rs8rm0_wflags(kprobe_opcode_t insn,
				       struct arch_specific_insn *asi)
{
	if (is_r15(insn, 16) || is_r15(insn, 12))
		return INSN_REJECTED;	/* RdHi or RdLo is PC */
	insn &= 0xfff000f0;	/* RdHi = r0, RdLo = r1 */
	insn |= 0x00001203;	/* Rs = r2, Rm = r3 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rdhi16rdlo12rs8rm0_rwflags;
	return INSN_GOOD;
}

/*
 * For the instruction masking and comparisons in all the "space_*"
 * functions below, do _not_ rearrange the order of tests unless
 * you're very, very sure of what you are doing.  For the sake of
 * efficiency, the masks for some tests assume that other tests have
 * been done before them, so each pattern can be kept as broad as
 * possible to reduce the total number of tests needed.
 */
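
/*
 * For example, the MCR2/MRC2 case at the end of space_1111() below
 * performs no test of its own; it relies on every preceding check in
 * that function having already matched or rejected the other
 * unconditional (1111) encodings.
 */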

static enum kprobe_insn __kprobes
space_1111(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* CPS mmod == 1 : 1111 0001 0000 xx10 xxxx xxxx xx0x xxxx */
	/* RFE           : 1111 100x x0x1 xxxx xxxx 1010 xxxx xxxx */
	/* SRS           : 1111 100x x1x0 1101 xxxx 0101 xxxx xxxx */
	if ((insn & 0xfff30020) == 0xf1020000 ||
	    (insn & 0xfe500f00) == 0xf8100a00 ||
	    (insn & 0xfe5f0f00) == 0xf84d0500)
		return INSN_REJECTED;

	/* PLD : 1111 01x1 x101 xxxx xxxx xxxx xxxx xxxx : */
	if ((insn & 0xfd700000) == 0xf4500000) {
		insn &= 0xfff0ffff;	/* Rn = r0 */
		asi->insn[0] = insn;
		asi->insn_handler = emulate_rn16;
		return INSN_GOOD;
	}

	/* BLX(1) : 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx : */
	if ((insn & 0xfe000000) == 0xfa000000) {
		asi->insn_handler = simulate_blx1;
		return INSN_GOOD_NO_SLOT;
	}

	/* SETEND : 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */
	/* CDP2   : 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
	if ((insn & 0xffff00f0) == 0xf1010000 ||
	    (insn & 0xff000010) == 0xfe000000) {
		asi->insn[0] = insn;
		asi->insn_handler = emulate_none;
		return INSN_GOOD;
	}

	/* MCRR2 : 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
	/* MRRC2 : 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
	if ((insn & 0xffe00000) == 0xfc400000) {
		insn &= 0xfff00fff;	/* Rn = r0 */
		insn |= 0x00001000;	/* Rd = r1 */
		asi->insn[0] = insn;
		asi->insn_handler =
			(insn & (1 << 20)) ? emulate_mrrc : emulate_mcrr;
		return INSN_GOOD;
	}

	/* LDC2 : 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
	/* STC2 : 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
	if ((insn & 0xfe000000) == 0xfc000000) {
		insn &= 0xfff0ffff;	/* Rn = r0 */
		asi->insn[0] = insn;
		asi->insn_handler = emulate_ldcstc;
		return INSN_GOOD;
	}

	/* MCR2 : 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
	/* MRC2 : 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
	insn &= 0xffff0fff;	/* Rd = r0 */
	asi->insn[0] = insn;
	asi->insn_handler = (insn & (1 << 20)) ? emulate_rd12 : emulate_ird12;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_000x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* cccc 0001 0xx0 xxxx xxxx xxxx xxx0 xxxx */
	if ((insn & 0x0f900010) == 0x01000000) {

		/* BXJ      : cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
		/* MSR      : cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
		/* MRS spsr : cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */
		if ((insn & 0x0ff000f0) == 0x01200020 ||
		    (insn & 0x0fb000f0) == 0x01200000 ||
		    (insn & 0x0ff000f0) == 0x01400000)
			return INSN_REJECTED;

		/* MRS cpsr : cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
		if ((insn & 0x0ff000f0) == 0x01000000) {
			if (is_r15(insn, 12))
				return INSN_REJECTED;	/* Rd is PC */
			asi->insn_handler = simulate_mrs;
			return INSN_GOOD_NO_SLOT;
		}

		/* SMLALxy : cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
		if ((insn & 0x0ff00090) == 0x01400080)
			return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
									asi);

		/* SMULWy : cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
		/* SMULxy : cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
		if ((insn & 0x0ff000b0) == 0x012000a0 ||
		    (insn & 0x0ff00090) == 0x01600080)
			return prep_emulate_rd16rs8rm0_wflags(insn, asi);

		/* SMLAxy : cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx : Q */
		/* SMLAWy : cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx : Q */
		return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

	}
	/* cccc 0001 0xx0 xxxx xxxx xxxx 0xx1 xxxx */
	else if ((insn & 0x0f900090) == 0x01000010) {

		/* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
		if ((insn & 0xfff000f0) == 0xe1200070)
			return INSN_REJECTED;

		/* BLX(2) : cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
		/* BX     : cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
		if ((insn & 0x0ff000d0) == 0x01200010) {
			if ((insn & 0x0ff000ff) == 0x0120003f)
				return INSN_REJECTED;	/* BLX pc */
			asi->insn_handler = simulate_blx2bx;
			return INSN_GOOD_NO_SLOT;
		}

		/* CLZ : cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
		if ((insn & 0x0ff000f0) == 0x01600010)
			return prep_emulate_rd12rm0(insn, asi);

		/* QADD  : cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx :Q */
		/* QSUB  : cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx :Q */
		/* QDADD : cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx :Q */
		/* QDSUB : cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx :Q */
		return prep_emulate_rd12rn16rm0_wflags(insn, asi);

	}
	/* cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx */
	else if ((insn & 0x0f0000f0) == 0x00000090) {

		/* MUL    : cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx :   */
		/* MULS   : cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx :cc */
		/* MLA    : cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx :   */
		/* MLAS   : cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx :cc */
		/* UMAAL  : cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx :   */
		/* undef  : cccc 0000 0101 xxxx xxxx xxxx 1001 xxxx :   */
		/* MLS    : cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx :   */
		/* undef  : cccc 0000 0111 xxxx xxxx xxxx 1001 xxxx :   */
		/* UMULL  : cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx :   */
		/* UMULLS : cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx :cc */
		/* UMLAL  : cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx :   */
		/* UMLALS : cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx :cc */
		/* SMULL  : cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx :   */
		/* SMULLS : cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx :cc */
		/* SMLAL  : cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx :   */
		/* SMLALS : cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx :cc */
		if ((insn & 0x00d00000) == 0x00500000) {
			return INSN_REJECTED;
		} else if ((insn & 0x00e00000) == 0x00000000) {
			return prep_emulate_rd16rs8rm0_wflags(insn, asi);
		} else if ((insn & 0x00a00000) == 0x00200000) {
			return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
		} else {
			return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
									asi);
		}

	}
	/* cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx */
	else if ((insn & 0x0e000090) == 0x00000090) {

		/* SWP   : cccc 0001 0000 xxxx xxxx xxxx 1001 xxxx */
		/* SWPB  : cccc 0001 0100 xxxx xxxx xxxx 1001 xxxx */
		/* ???   : cccc 0001 0x01 xxxx xxxx xxxx 1001 xxxx */
		/* ???   : cccc 0001 0x10 xxxx xxxx xxxx 1001 xxxx */
		/* ???   : cccc 0001 0x11 xxxx xxxx xxxx 1001 xxxx */
		/* STREX : cccc 0001 1000 xxxx xxxx xxxx 1001 xxxx */
		/* LDREX : cccc 0001 1001 xxxx xxxx xxxx 1001 xxxx */
		/* STREXD: cccc 0001 1010 xxxx xxxx xxxx 1001 xxxx */
		/* LDREXD: cccc 0001 1011 xxxx xxxx xxxx 1001 xxxx */
		/* STREXB: cccc 0001 1100 xxxx xxxx xxxx 1001 xxxx */
		/* LDREXB: cccc 0001 1101 xxxx xxxx xxxx 1001 xxxx */
		/* STREXH: cccc 0001 1110 xxxx xxxx xxxx 1001 xxxx */
		/* LDREXH: cccc 0001 1111 xxxx xxxx xxxx 1001 xxxx */
		/* LDRD  : cccc 000x xxx0 xxxx xxxx xxxx 1101 xxxx */
		/* STRD  : cccc 000x xxx0 xxxx xxxx xxxx 1111 xxxx */
		/* LDRH  : cccc 000x xxx1 xxxx xxxx xxxx 1011 xxxx */
		/* STRH  : cccc 000x xxx0 xxxx xxxx xxxx 1011 xxxx */
		/* LDRSB : cccc 000x xxx1 xxxx xxxx xxxx 1101 xxxx */
		/* LDRSH : cccc 000x xxx1 xxxx xxxx xxxx 1111 xxxx */
		if ((insn & 0x0f0000f0) == 0x01000090) {
			if ((insn & 0x0fb000f0) == 0x01000090) {
				/* SWP/SWPB */
				return prep_emulate_rd12rn16rm0_wflags(insn,
									asi);
			} else {
				/* STREX/LDREX variants and unallocated space */
				return INSN_REJECTED;
			}
		} else if ((insn & 0x0e1000d0) == 0x000000d0) {
			/* STRD/LDRD */
			insn &= 0xfff00fff;
			insn |= 0x00002000;	/* Rn = r0, Rd = r2 */
			if (insn & (1 << 22)) {
				/* I bit */
				insn &= ~0xf;
				insn |= 1;	/* Rm = r1 */
			}
			asi->insn[0] = insn;
			asi->insn_handler =
				(insn & (1 << 5)) ? emulate_strd : emulate_ldrd;
			return INSN_GOOD;
		}

		return prep_emulate_ldr_str(insn, asi);
	}

	/* cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx */

	/*
	 * ALU op with S bit and Rd == 15 :
	 *	cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx
	 */
	if ((insn & 0x0e10f000) == 0x0010f000)
		return INSN_REJECTED;

	/*
	 * "mov ip, sp" is the most common kprobe'd instruction by far.
	 * Check and optimize for it explicitly.
	 */
	if (insn == 0xe1a0c00d) {
		asi->insn_handler = simulate_mov_ipsp;
		return INSN_GOOD_NO_SLOT;
	}

	/*
	 * Data processing: Immediate-shift / Register-shift
	 * ALU op : cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx
	 * CPY    : cccc 0001 1010 xxxx xxxx 0000 0000 xxxx
	 * MOV    : cccc 0001 101x xxxx xxxx xxxx xxxx xxxx
	 * *S (bit 20) updates condition codes
	 * ADC/SBC/RSC reads the C flag
	 */
	insn &= 0xfff00ff0;	/* Rn = r0, Rd = r0 */
	insn |= 0x00000001;	/* Rm = r1 */
	if (insn & 0x010) {
		insn &= 0xfffff0ff;	/* register shift */
		insn |= 0x00000200;	/* Rs = r2 */
	}
	asi->insn[0] = insn;

	if ((insn & 0x0f900000) == 0x01100000) {
		/*
		 * TST : cccc 0001 0001 xxxx xxxx xxxx xxxx xxxx
		 * TEQ : cccc 0001 0011 xxxx xxxx xxxx xxxx xxxx
		 * CMP : cccc 0001 0101 xxxx xxxx xxxx xxxx xxxx
		 * CMN : cccc 0001 0111 xxxx xxxx xxxx xxxx xxxx
		 */
		asi->insn_handler = emulate_alu_tests;
	} else {
		/* ALU ops which write to Rd */
		asi->insn_handler = (insn & (1 << 20)) ?	/* S-bit */
				emulate_alu_rwflags : emulate_alu_rflags;
	}
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_001x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/*
	 * MSR   : cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx
	 * Undef : cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx
	 * ALU op with S bit and Rd == 15 :
	 *	   cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx
	 */
	if ((insn & 0x0fb00000) == 0x03200000 ||	/* MSR */
	    (insn & 0x0ff00000) == 0x03400000 ||	/* Undef */
	    (insn & 0x0e10f000) == 0x0210f000)		/* ALU s-bit, R15 */
		return INSN_REJECTED;

	/*
	 * Data processing: 32-bit Immediate
	 * ALU op : cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
	 * MOV    : cccc 0011 101x xxxx xxxx xxxx xxxx xxxx
	 * *S (bit 20) updates condition codes
	 * ADC/SBC/RSC reads the C flag
	 */
	insn &= 0xfff00fff;	/* Rn = r0 and Rd = r0 */
	asi->insn[0] = insn;

	if ((insn & 0x0f900000) == 0x03100000) {
		/*
		 * TST : cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx
		 * TEQ : cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx
		 * CMP : cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx
		 * CMN : cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx
		 */
		asi->insn_handler = emulate_alu_tests_imm;
	} else {
		/* ALU ops which write to Rd */
		asi->insn_handler = (insn & (1 << 20)) ?	/* S-bit */
			emulate_alu_imm_rwflags : emulate_alu_imm_rflags;
	}
	return INSN_GOOD;
}
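
/*
 * For example, "adds r5, r7, #1" is rewritten above to "adds r0, r0, #1";
 * emulate_alu_imm_rwflags() loads the original r7 value into r0, runs
 * the slot with the probed task's CPSR, writes r0 back to r5, and folds
 * the resulting condition flags back into the saved CPSR.
 */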

static enum kprobe_insn __kprobes
space_cccc_0110__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* SEL : cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx GE: !!! */
	if ((insn & 0x0ff000f0) == 0x068000b0) {
		if (is_r15(insn, 12))
			return INSN_REJECTED;	/* Rd is PC */
		insn &= 0xfff00ff0;	/* Rd = r0, Rn = r0 */
		insn |= 0x00000001;	/* Rm = r1 */
		asi->insn[0] = insn;
		asi->insn_handler = emulate_sel;
		return INSN_GOOD;
	}

	/* SSAT   : cccc 0110 101x xxxx xxxx xxxx xx01 xxxx :Q */
	/* USAT   : cccc 0110 111x xxxx xxxx xxxx xx01 xxxx :Q */
	/* SSAT16 : cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx :Q */
	/* USAT16 : cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx :Q */
	if ((insn & 0x0fa00030) == 0x06a00010 ||
	    (insn & 0x0fb000f0) == 0x06a00030) {
		if (is_r15(insn, 12))
			return INSN_REJECTED;	/* Rd is PC */
		insn &= 0xffff0ff0;	/* Rd = r0, Rm = r0 */
		asi->insn[0] = insn;
		asi->insn_handler = emulate_sat;
		return INSN_GOOD;
	}

	/* REV   : cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
	/* REV16 : cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
	/* REVSH : cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
	if ((insn & 0x0ff00070) == 0x06b00030 ||
	    (insn & 0x0ff000f0) == 0x06f000b0)
		return prep_emulate_rd12rm0(insn, asi);

	/* SADD16    : cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx :GE */
	/* SADDSUBX  : cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx :GE */
	/* SSUBADDX  : cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx :GE */
	/* SSUB16    : cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx :GE */
	/* SADD8     : cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx :GE */
	/* SSUB8     : cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx :GE */
	/* QADD16    : cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx :   */
	/* QADDSUBX  : cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx :   */
	/* QSUBADDX  : cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx :   */
	/* QSUB16    : cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx :   */
	/* QADD8     : cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx :   */
	/* QSUB8     : cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx :   */
	/* SHADD16   : cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx :   */
	/* SHADDSUBX : cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx :   */
	/* SHSUBADDX : cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx :   */
	/* SHSUB16   : cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx :   */
	/* SHADD8    : cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx :   */
	/* SHSUB8    : cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx :   */
	/* UADD16    : cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx :GE */
	/* UADDSUBX  : cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx :GE */
	/* USUBADDX  : cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx :GE */
	/* USUB16    : cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx :GE */
	/* UADD8     : cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx :GE */
	/* USUB8     : cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx :GE */
	/* UQADD16   : cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx :   */
	/* UQADDSUBX : cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx :   */
	/* UQSUBADDX : cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx :   */
	/* UQSUB16   : cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx :   */
	/* UQADD8    : cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx :   */
	/* UQSUB8    : cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx :   */
	/* UHADD16   : cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx :   */
	/* UHADDSUBX : cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx :   */
	/* UHSUBADDX : cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx :   */
	/* UHSUB16   : cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx :   */
	/* UHADD8    : cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx :   */
	/* UHSUB8    : cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx :   */
	/* PKHBT     : cccc 0110 1000 xxxx xxxx xxxx x001 xxxx :   */
	/* PKHTB     : cccc 0110 1000 xxxx xxxx xxxx x101 xxxx :   */
	/* SXTAB16   : cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx :   */
	/* SXTB      : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx :   */
	/* SXTAB     : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx :   */
	/* SXTAH     : cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx :   */
	/* UXTAB16   : cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx :   */
	/* UXTAB     : cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx :   */
	/* UXTAH     : cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx :   */
	return prep_emulate_rd12rn16rm0_wflags(insn, asi);
}

static enum kprobe_insn __kprobes
space_cccc_0111__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* Undef : cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
	if ((insn & 0x0ff000f0) == 0x07f000f0)
		return INSN_REJECTED;

	/* USADA8 : cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */
	/* USAD8  : cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */
	if ((insn & 0x0ff000f0) == 0x07800010)
		return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

	/* SMLALD : cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
	/* SMLSLD : cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
	if ((insn & 0x0ff00090) == 0x07400010)
		return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn, asi);

	/* SMLAD : cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx :Q */
	/* SMLSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :Q */
	/* SMMLA : cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx :  */
	/* SMMLS : cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx :  */
	if ((insn & 0x0ff00090) == 0x07000010 ||
	    (insn & 0x0ff000d0) == 0x07500010 ||
	    (insn & 0x0ff000d0) == 0x075000d0)
		return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

	/* SMUSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :  */
	/* SMUAD : cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx :Q */
	/* SMMUL : cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx :  */
	return prep_emulate_rd16rs8rm0_wflags(insn, asi);
}

static enum kprobe_insn __kprobes
space_cccc_01xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* LDR   : cccc 01xx x0x1 xxxx xxxx xxxx xxxx xxxx */
	/* LDRB  : cccc 01xx x1x1 xxxx xxxx xxxx xxxx xxxx */
	/* LDRBT : cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
	/* LDRT  : cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
	/* STR   : cccc 01xx x0x0 xxxx xxxx xxxx xxxx xxxx */
	/* STRB  : cccc 01xx x1x0 xxxx xxxx xxxx xxxx xxxx */
	/* STRBT : cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
	/* STRT  : cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
	return prep_emulate_ldr_str(insn, asi);
}
static enum kprobe_insn __kprobes
space_cccc_100x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDM(2) : cccc 100x x101 xxxx 0xxx xxxx xxxx xxxx */
        /* LDM(3) : cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
        if ((insn & 0x0e708000) == 0x08500000 ||
            (insn & 0x0e508000) == 0x08508000)
                return INSN_REJECTED;

        /* LDM(1) : cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* STM(1) : cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
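        /*
         * An STM that includes R15 in the register list must store the PC
         * value the un-probed instruction would have stored, so it gets the
         * PC-adjusting simulation routine; everything else in this space
         * goes through the common LDM/STM simulation.
         */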
        asi->insn_handler = ((insn & 0x108000) == 0x008000) ? /* STM & R15 */
                                simulate_stm1_pc : simulate_ldm1stm1;
        return INSN_GOOD_NO_SLOT;
}
static enum kprobe_insn __kprobes
space_cccc_101x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* B  : cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
        /* BL : cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
        asi->insn_handler = simulate_bbl;
        return INSN_GOOD_NO_SLOT;
}
static enum kprobe_insn __kprobes
space_cccc_1100_010x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* MCRR : cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
        /* MRRC : cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
        if (is_r15(insn, 16) || is_r15(insn, 12))
                return INSN_REJECTED;   /* Rn or Rd is PC */
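        /*
         * Rewrite the slot copy to use r0/r1 for Rn/Rd; the emulation
         * handler substitutes the probed context's real Rn and Rd values
         * (taken from the original opcode) through those registers.
         */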
        insn &= 0xfff00fff;
        insn |= 0x00001000;     /* Rn = r0, Rd = r1 */
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_mrrc : emulate_mcrr;
        return INSN_GOOD;
}
static enum kprobe_insn __kprobes
space_cccc_110x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDC : cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
        /* STC : cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
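        /*
         * As in the MCRR/MRRC case above, renumber the base register so the
         * emulation handler can feed the probed Rn value to the slot copy
         * through r0.
         */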
        insn &= 0xfff0ffff;     /* Rn = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = emulate_ldcstc;
        return INSN_GOOD;
}
static enum kprobe_insn __kprobes
space_cccc_111x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
        /* SWI  : cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
        if ((insn & 0xfff000f0) == 0xe1200070 ||
            (insn & 0x0f000000) == 0x0f000000)
                return INSN_REJECTED;

        /* CDP : cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
        if ((insn & 0x0f000010) == 0x0e000000) {
                asi->insn[0] = insn;
                asi->insn_handler = emulate_none;
                return INSN_GOOD;
        }

        /* MCR : cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
        /* MRC : cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
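        /*
         * Bit 20 distinguishes the two: MRC (bit 20 set) loads Rd from the
         * coprocessor, MCR (bit 20 clear) sends Rd to it, hence the two
         * different emulation handlers below.
         */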
        insn &= 0xffff0fff;     /* Rd = r0 */
        asi->insn[0] = insn;
        asi->insn_handler = (insn & (1 << 20)) ? emulate_rd12 : emulate_ird12;
        return INSN_GOOD;
}
static unsigned long __kprobes __check_eq(unsigned long cpsr)
{
        return cpsr & PSR_Z_BIT;
}

static unsigned long __kprobes __check_ne(unsigned long cpsr)
{
        return (~cpsr) & PSR_Z_BIT;
}

static unsigned long __kprobes __check_cs(unsigned long cpsr)
{
        return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_cc(unsigned long cpsr)
{
        return (~cpsr) & PSR_C_BIT;
}

static unsigned long __kprobes __check_mi(unsigned long cpsr)
{
        return cpsr & PSR_N_BIT;
}

static unsigned long __kprobes __check_pl(unsigned long cpsr)
{
        return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_vs(unsigned long cpsr)
{
        return cpsr & PSR_V_BIT;
}

static unsigned long __kprobes __check_vc(unsigned long cpsr)
{
        return (~cpsr) & PSR_V_BIT;
}
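/*
 * The compound conditions below fold two flag tests into a single mask by
 * shifting the CPSR: N, Z, C and V live in bits 31, 30, 29 and 28, so e.g.
 * "cpsr >> 1" lines the Z flag up under the C flag (HI is C && !Z) and
 * "cpsr << 3" lines the V flag up under the N flag (GE is N == V).
 */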
static unsigned long __kprobes __check_hi(unsigned long cpsr)
{
        cpsr &= ~(cpsr >> 1);   /* PSR_C_BIT &= ~PSR_Z_BIT */
        return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_ls(unsigned long cpsr)
{
        cpsr &= ~(cpsr >> 1);   /* PSR_C_BIT &= ~PSR_Z_BIT */
        return (~cpsr) & PSR_C_BIT;
}

static unsigned long __kprobes __check_ge(unsigned long cpsr)
{
        cpsr ^= (cpsr << 3);    /* PSR_N_BIT ^= PSR_V_BIT */
        return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_lt(unsigned long cpsr)
{
        cpsr ^= (cpsr << 3);    /* PSR_N_BIT ^= PSR_V_BIT */
        return cpsr & PSR_N_BIT;
}

static unsigned long __kprobes __check_gt(unsigned long cpsr)
{
        unsigned long temp = cpsr ^ (cpsr << 3);        /* PSR_N_BIT ^= PSR_V_BIT */
        temp |= (cpsr << 1);                            /* PSR_N_BIT |= PSR_Z_BIT */
        return (~temp) & PSR_N_BIT;
}

static unsigned long __kprobes __check_le(unsigned long cpsr)
{
        unsigned long temp = cpsr ^ (cpsr << 3);        /* PSR_N_BIT ^= PSR_V_BIT */
        temp |= (cpsr << 1);                            /* PSR_N_BIT |= PSR_Z_BIT */
        return temp & PSR_N_BIT;
}
static unsigned long __kprobes __check_al(unsigned long cpsr)
{
        return true;
}
static kprobe_check_cc * const condition_checks[16] = {
        &__check_eq, &__check_ne, &__check_cs, &__check_cc,
        &__check_mi, &__check_pl, &__check_vs, &__check_vc,
        &__check_hi, &__check_ls, &__check_ge, &__check_lt,
        &__check_gt, &__check_le, &__check_al, &__check_al
};
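/*
 * Indexed by the condition field (bits 31:28) of the probed instruction.
 * Index 14 is AL; index 15 is the 0b1111 "unconditional" space, which is
 * dispatched separately through space_1111(), so mapping it to __check_al
 * is harmless here.
 */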
/* Return:
 *   INSN_REJECTED     If instruction is one not allowed to kprobe,
 *   INSN_GOOD         If instruction is supported and uses instruction slot,
 *   INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
 *
 * For instructions we don't want to kprobe (INSN_REJECTED return result):
 *   These are generally instructions that modify the processor state, making
 *   them "hard" to simulate, such as ones that switch processor modes or
 *   make accesses in alternate modes.  Any of these could be simulated
 *   if the work were put into it, but the return would be low considering
 *   they should also be very rare.
 */
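/*
 * Illustrative example: 0xe5912000 (LDR r2, [r1]) has condition field 0xe,
 * so insn_check_cc becomes __check_al, and the 0x0c000000 test below routes
 * it through space_cccc_01xx() to prep_emulate_ldr_str().
 */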
enum kprobe_insn __kprobes
arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        asi->insn_check_cc = condition_checks[insn>>28];
        asi->insn[1] = KPROBE_RETURN_INSTRUCTION;
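        /*
         * Note the ordering: the media-instruction tests (0x06/0x07 with
         * bit 4 set) must come before the more general 01xx load/store
         * test, which would otherwise also match them.
         */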
        if ((insn & 0xf0000000) == 0xf0000000) {
                return space_1111(insn, asi);
        } else if ((insn & 0x0e000000) == 0x00000000) {
                return space_cccc_000x(insn, asi);
        } else if ((insn & 0x0e000000) == 0x02000000) {
                return space_cccc_001x(insn, asi);
        } else if ((insn & 0x0f000010) == 0x06000010) {
                return space_cccc_0110__1(insn, asi);
        } else if ((insn & 0x0f000010) == 0x07000010) {
                return space_cccc_0111__1(insn, asi);
        } else if ((insn & 0x0c000000) == 0x04000000) {
                return space_cccc_01xx(insn, asi);
        } else if ((insn & 0x0e000000) == 0x08000000) {
                return space_cccc_100x(insn, asi);
        } else if ((insn & 0x0e000000) == 0x0a000000) {
                return space_cccc_101x(insn, asi);
        } else if ((insn & 0x0fe00000) == 0x0c400000) {
                return space_cccc_1100_010x(insn, asi);
        } else if ((insn & 0x0e000000) == 0x0c000000) {
                return space_cccc_110x(insn, asi);
        }

        return space_cccc_111x(insn, asi);
}
void __init arm_kprobe_decode_init(void)
{
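        /*
         * Determine at run time whether a store of the PC on this
         * implementation writes PC+8 or PC+12; the STR/STM simulation
         * code uses the result.
         */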
        find_str_pc_offset();
}
/*
 * All ARM instructions listed below.
 *
 * Instructions and their general purpose registers are given.
 * If a particular register may not use R15, it is prefixed with a "!".
 * If marked with a "*", the value returned by reading R15
 * is implementation defined.
 *
 * ADC/ADD/AND/BIC/CMN/CMP/EOR/MOV/MVN/ORR/RSB/RSC/SBC/SUB/TEQ
 *     TST: Rd, Rn, Rm, !Rs
 * BX: Rm (R15 legal, but discouraged)
 * BLX(2): !Rm
 * BXJ: !Rm
 * CLZ: !Rd, !Rm
 * CPY: Rd, Rm
 * LDC/2,STC/2 immediate offset & unindex: Rn
 * LDC/2,STC/2 immediate pre/post-indexed: !Rn
 * LDM(1/3): !Rn, register_list
 * LDM(2): !Rn, !register_list
 * LDR,STR,PLD immediate offset: Rd, Rn
 * LDR,STR,PLD register offset: Rd, Rn, !Rm
 * LDR,STR,PLD scaled register offset: Rd, !Rn, !Rm
 * LDR,STR immediate pre/post-indexed: Rd, !Rn
 * LDR,STR register pre/post-indexed: Rd, !Rn, !Rm
 * LDR,STR scaled register pre/post-indexed: Rd, !Rn, !Rm
 * LDRB,STRB immediate offset: !Rd, Rn
 * LDRB,STRB register offset: !Rd, Rn, !Rm
 * LDRB,STRB scaled register offset: !Rd, !Rn, !Rm
 * LDRB,STRB immediate pre/post-indexed: !Rd, !Rn
 * LDRB,STRB register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRB,STRB scaled register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRT,LDRBT,STRBT immediate pre/post-indexed: !Rd, !Rn
 * LDRT,LDRBT,STRBT register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRT,LDRBT,STRBT scaled register pre/post-indexed: !Rd, !Rn, !Rm
 * LDRH/SH/SB/D,STRH/SH/SB/D immediate offset: !Rd, Rn
 * LDRH/SH/SB/D,STRH/SH/SB/D register offset: !Rd, Rn, !Rm
 * LDRH/SH/SB/D,STRH/SH/SB/D immediate pre/post-indexed: !Rd, !Rn
 * LDRH/SH/SB/D,STRH/SH/SB/D register pre/post-indexed: !Rd, !Rn, !Rm
 * LDREX: !Rd, !Rn
 * MCR/2: !Rd
 * MCRR/2,MRRC/2: !Rd, !Rn
 * MLA: !Rd, !Rn, !Rm, !Rs
 * MOV: Rd
 * MRC/2: !Rd (if Rd==15, only changes cond codes, not the register)
 * MRS,MSR: !Rd
 * MUL: !Rd, !Rm, !Rs
 * PKH{BT,TB}: !Rd, !Rn, !Rm
 * QDADD,[U]QADD/16/8/SUBX: !Rd, !Rm, !Rn
 * QDSUB,[U]QSUB/16/8/ADDX: !Rd, !Rm, !Rn
 * REV/16/SH: !Rd, !Rm
 * RFE: !Rn
 * {S,U}[H]ADD{16,8,SUBX},{S,U}[H]SUB{16,8,ADDX}: !Rd, !Rn, !Rm
 * SEL: !Rd, !Rn, !Rm
 * SMLA<x><y>,SMLA{D,W<y>},SMLSD,SMML{A,S}: !Rd, !Rn, !Rm, !Rs
 * SMLAL<x><y>,SMLA{D,LD},SMLSLD,SMMULL,SMULW<y>: !RdHi, !RdLo, !Rm, !Rs
 * SMMUL,SMUAD,SMUL<x><y>,SMUSD: !Rd, !Rm, !Rs
 * SSAT/16: !Rd, !Rm
 * STM(1/2): !Rn, register_list* (R15 in reg list not recommended)
 * STRT immediate pre/post-indexed: Rd*, !Rn
 * STRT register pre/post-indexed: Rd*, !Rn, !Rm
 * STRT scaled register pre/post-indexed: Rd*, !Rn, !Rm
 * STREX: !Rd, !Rn, !Rm
 * SWP/B: !Rd, !Rn, !Rm
 * {S,U}XTA{B,B16,H}: !Rd, !Rn, !Rm
 * {S,U}XT{B,B16,H}: !Rd, !Rm
 * UM{AA,LA,UL}L: !RdHi, !RdLo, !Rm, !Rs
 * USA{D8,A8,T,T16}: !Rd, !Rm, !Rs
 *
 * May transfer control by writing R15 (possible mode changes or alternate
 * mode accesses marked by "*"):
 * ALU op (* with s-bit), B, BL, BKPT, BLX(1/2), BX, BXJ, CPS*, CPY,
 * LDM(1), LDM(2/3)*, LDR, MOV, RFE*, SWI*
 *
 * Instructions that do not take general registers, nor transfer control:
 * CDP/2, SETEND, SRS*
 */