/*
 * arch/arm/kernel/kprobes-decode.c
 *
 * Copyright (C) 2006, 2007 Motorola Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 */

/*
 * We do not have hardware single-stepping on ARM.  This
 * effort is further complicated by the ARM not having a
 * "next PC" register.  Instructions that change the PC
 * can't be safely single-stepped in an MP environment, so
 * we have a lot of work to do:
 *
 * In the prepare phase:
 *   *) If it is an instruction that does anything
 *      with the CPU mode, we reject it for a kprobe.
 *      (This is out of laziness rather than need.  The
 *      instructions could be simulated.)
 *
 *   *) Otherwise, decode the instruction rewriting its
 *      registers to take fixed, ordered registers and
 *      setting a handler for it to run the instruction.
 *
 * In the execution phase by an instruction's handler:
 *
 *   *) If the PC is written to by the instruction, the
 *      instruction must be fully simulated in software.
 *
 *   *) Otherwise, a modified form of the instruction is
 *      directly executed.  Its handler calls the
 *      instruction in insn[0].  In insn[1] is a
 *      "mov pc, lr" to return.
 *
 *      Before calling, load up the reordered registers
 *      from the original instruction's registers.  If one
 *      of the original input registers is the PC, compute
 *      and adjust the appropriate input register.
 *
 *      After the call completes, copy the output registers to
 *      the original instruction's original registers.
 *
 * We don't use a real breakpoint instruction since that
 * would have us in the kernel go from SVC mode to SVC
 * mode, losing the link register.  Instead we use an
 * undefined instruction.  To simplify processing, the
 * undefined instruction used for kprobes must be reserved
 * exclusively for kprobes use.
 *
 * TODO: ifdef out some instruction decoding based on architecture.
 */

#include <linux/kernel.h>
#include <linux/kprobes.h>

#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))

#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)

#define is_r15(insn, bitpos) (((insn) & (0xf << bitpos)) == (0xf << bitpos))
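
/*
 * Worked example of branch_displacement(): for a 24-bit immediate of
 * 0x00000f the displacement is 0x0f << 2 = +60; for 0xfffffe, bit 25 of
 * the shifted value is set and sign_extend() yields -8.  The branch
 * target is then the instruction's address + 8 + displacement.
 */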

/*
 * Test if load/store instructions writeback the address register.
 * if P (bit 24) == 0 or W (bit 21) == 1
 */
#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000)

#define PSR_fs	(PSR_f|PSR_s)

#define KPROBE_RETURN_INSTRUCTION	0xe1a0f00e	/* mov pc, lr */

typedef long (insn_0arg_fn_t)(void);
typedef long (insn_1arg_fn_t)(long);
typedef long (insn_2arg_fn_t)(long, long);
typedef long (insn_3arg_fn_t)(long, long, long);
typedef long (insn_4arg_fn_t)(long, long, long, long);
typedef long long (insn_llret_0arg_fn_t)(void);
typedef long long (insn_llret_3arg_fn_t)(long, long, long);
typedef long long (insn_llret_4arg_fn_t)(long, long, long, long);

union reg_pair {
	long long	dr;
#ifdef __LITTLE_ENDIAN
	struct { long	r0, r1; };
#else
	struct { long	r1, r0; };
#endif
};

/*
 * For STR and STM instructions, an ARM core may choose to use either
 * a +8 or a +12 displacement from the current instruction's address.
 * Whichever value is chosen for a given core, it must be the same for
 * both instructions and may not change.  This function measures it.
 */
static int str_pc_offset;

static void __init find_str_pc_offset(void)
{
	int addr, scratch, ret;

	__asm__ (
		"sub %[ret], pc, #4 \n\t"
		"str pc, %[addr] \n\t"
		"ldr %[scr], %[addr] \n\t"
		"sub %[ret], %[scr], %[ret] \n\t"
		: [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));

	str_pc_offset = ret;
}
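
/*
 * str_pc_offset is consulted later by emulate_str() and simulate_stm1_pc()
 * whenever the PC is one of the registers being stored, so the stored
 * value matches what the probed instruction would have written
 * (+8 or +12 depending on the core).
 */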

/*
 * The insnslot_?arg_r[w]flags() functions below are to keep the
 * msr -> *fn -> mrs instruction sequences indivisible so that
 * the state of the CPSR flags isn't inadvertently modified
 * just before or just after the call.
 */

static inline long __kprobes
insnslot_0arg_rflags(long cpsr, insn_0arg_fn_t *fn)
{
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret)
		: [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}
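
/*
 * Calling convention for the instruction slot: the rewritten instruction
 * in insn[0] expects its (renumbered) operands in r0-r3, exactly as they
 * are passed to the insn_*arg_fn_t call below.  "mov lr, pc; mov pc, %[fn]"
 * branches to the slot, and the "mov pc, lr" in insn[1] returns here with
 * the result in r0 (r0/r1 for the long long variants).
 */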

static inline long long __kprobes
insnslot_llret_0arg_rflags(long cpsr, insn_llret_0arg_fn_t *fn)
{
	register long ret0 asm("r0");
	register long ret1 asm("r1");
	union reg_pair fnr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret0), "=r" (ret1)
		: [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	fnr.r0 = ret0;
	fnr.r1 = ret1;
	return fnr.dr;
}

static inline long __kprobes
insnslot_1arg_rflags(long r0, long cpsr, insn_1arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret)
		: "0" (rr0), [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long __kprobes
insnslot_2arg_rflags(long r0, long r1, long cpsr, insn_2arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret)
		: "0" (rr0), "r" (rr1),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long __kprobes
insnslot_3arg_rflags(long r0, long r1, long r2, long cpsr, insn_3arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret)
		: "0" (rr0), "r" (rr1), "r" (rr2),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long long __kprobes
insnslot_llret_3arg_rflags(long r0, long r1, long r2, long cpsr,
			   insn_llret_3arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long ret0 asm("r0");
	register long ret1 asm("r1");
	union reg_pair fnr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret0), "=r" (ret1)
		: "0" (rr0), "r" (rr1), "r" (rr2),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	fnr.r0 = ret0;
	fnr.r1 = ret1;
	return fnr.dr;
}

static inline long __kprobes
insnslot_4arg_rflags(long r0, long r1, long r2, long r3, long cpsr,
		     insn_4arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long rr3 asm("r3") = r3;
	register long ret asm("r0");

	__asm__ __volatile__ (
		"msr cpsr_fs, %[cpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		: "=r" (ret)
		: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
		  [cpsr] "r" (cpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	return ret;
}

static inline long __kprobes
insnslot_1arg_rwflags(long r0, long *cpsr, insn_1arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[oldcpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		"mrs %[newcpsr], cpsr \n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long __kprobes
insnslot_2arg_rwflags(long r0, long r1, long *cpsr, insn_2arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[oldcpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		"mrs %[newcpsr], cpsr \n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long __kprobes
insnslot_3arg_rwflags(long r0, long r1, long r2, long *cpsr,
		      insn_3arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[oldcpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		"mrs %[newcpsr], cpsr \n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), "r" (rr2),
		  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long __kprobes
insnslot_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
		      insn_4arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long rr3 asm("r3") = r3;
	register long ret asm("r0");
	long oldcpsr = *cpsr;
	long newcpsr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[oldcpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		"mrs %[newcpsr], cpsr \n\t"
		: "=r" (ret), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
		  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	return ret;
}

static inline long long __kprobes
insnslot_llret_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
			    insn_llret_4arg_fn_t *fn)
{
	register long rr0 asm("r0") = r0;
	register long rr1 asm("r1") = r1;
	register long rr2 asm("r2") = r2;
	register long rr3 asm("r3") = r3;
	register long ret0 asm("r0");
	register long ret1 asm("r1");
	long oldcpsr = *cpsr;
	long newcpsr;
	union reg_pair fnr;

	__asm__ __volatile__ (
		"msr cpsr_fs, %[oldcpsr] \n\t"
		"mov lr, pc \n\t"
		"mov pc, %[fn] \n\t"
		"mrs %[newcpsr], cpsr \n\t"
		: "=r" (ret0), "=r" (ret1), [newcpsr] "=r" (newcpsr)
		: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
		  [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
		: "lr", "cc"
	);
	*cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
	fnr.r0 = ret0;
	fnr.r1 = ret1;
	return fnr.dr;
}

/*
 * To avoid the complications of mimicking single-stepping on a
 * processor without a Next-PC or a single-step mode, and to
 * avoid having to deal with the side-effects of boosting, we
 * simulate or emulate (almost) all ARM instructions.
 *
 * "Simulation" is where the instruction's behavior is duplicated in
 * C code.  "Emulation" is where the original instruction is rewritten
 * and executed, often by altering its registers.
 *
 * By having all behavior of the kprobe'd instruction completed before
 * returning from the kprobe_handler(), all locks (scheduler and
 * interrupt) can safely be released.  There is no need for secondary
 * breakpoints, no race with MP or preemptable kernels, nor having to
 * clean up resource counts at a later time impacting overall system
 * performance.  By rewriting the instruction, only the minimum registers
 * need to be loaded and saved back, optimizing performance.
 *
 * Calling the insnslot_*_rwflags version of a function doesn't hurt
 * anything even when the CPSR flags aren't updated by the
 * instruction.  It's just a little slower in return for saving
 * a little space by not having a duplicate function that doesn't
 * update the flags.  (The same optimization can be said for
 * instructions that do or don't perform register writeback.)
 * Also, instructions can either read the flags, only write the
 * flags, or read and write the flags.  To save combinations
 * rather than for sheer performance, flag functions just assume
 * read and write of flags.
 */
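
/*
 * A concrete example of the emulation scheme: a probed "adds r5, r7, r9"
 * is rewritten by the decoder below to "adds r0, r0, r1" and placed in
 * the instruction slot.  Its handler (emulate_alu_rwflags) loads r0 from
 * uregs[7] and r1 from uregs[9], runs the slot under the probed task's
 * flags, stores the r0 result back to uregs[5], and folds the updated
 * condition flags back into the saved CPSR.
 */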

static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	long iaddr = (long)p->addr;
	int disp = branch_displacement(insn);

	if (insn & (1 << 24))
		regs->ARM_lr = iaddr + 4;

	regs->ARM_pc = iaddr + 8 + disp;
}

static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	long iaddr = (long)p->addr;
	int disp = branch_displacement(insn);

	regs->ARM_lr = iaddr + 4;
	regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
	regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rm = insn & 0xf;
	long rmv = regs->uregs[rm];

	if (insn & (1 << 5))
		regs->ARM_lr = (long)p->addr + 4;

	regs->ARM_pc = rmv & ~0x1;
	regs->ARM_cpsr &= ~PSR_T_BIT;
	if (rmv & 0x1)
		regs->ARM_cpsr |= PSR_T_BIT;
}

static void __kprobes simulate_mrs(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	unsigned long mask = 0xf8ff03df; /* Mask out execution state */
	regs->uregs[rd] = regs->ARM_cpsr & mask;
}

static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	int lbit = insn & (1 << 20);
	int wbit = insn & (1 << 21);
	int ubit = insn & (1 << 23);
	int pbit = insn & (1 << 24);
	long *addr = (long *)regs->uregs[rn];
	int reg_bit_vector;
	int reg_count;

	reg_count = 0;
	reg_bit_vector = insn & 0xffff;
	while (reg_bit_vector) {
		reg_bit_vector &= (reg_bit_vector - 1);
		++reg_count;
	}

	if (!ubit)
		addr -= reg_count;
	addr += (!pbit == !ubit);

	reg_bit_vector = insn & 0xffff;
	while (reg_bit_vector) {
		int reg = __ffs(reg_bit_vector);
		reg_bit_vector &= (reg_bit_vector - 1);
		if (lbit)
			regs->uregs[reg] = *addr++;
		else
			*addr++ = regs->uregs[reg];
	}

	if (wbit) {
		if (!ubit)
			addr -= reg_count;
		addr -= (!pbit == !ubit);
		regs->uregs[rn] = (long)addr;
	}
}
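
/*
 * Note on the address arithmetic above: for an increment (U set) the
 * transfer starts at the base register, for a decrement it starts
 * reg_count words below it; adding (!pbit == !ubit) then nudges the
 * start up one word for the IB and DA forms, giving the usual
 * IA/IB/DA/DB start addresses.  The writeback block undoes the same
 * adjustment to recover the final base value.
 */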

static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
{
	regs->ARM_pc = (long)p->addr + str_pc_offset;
	simulate_ldm1stm1(p, regs);
	regs->ARM_pc = (long)p->addr + 4;
}

static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
{
	regs->uregs[12] = regs->uregs[13];
}

static void __kprobes emulate_ldrd(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;	/* rm may be invalid, don't care. */
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];

	/* Not following the C calling convention here, so need asm(). */
	__asm__ __volatile__ (
		"ldr r0, %[rn] \n\t"
		"ldr r1, %[rm] \n\t"
		"msr cpsr_fs, %[cpsr]\n\t"
		"mov lr, pc \n\t"
		"mov pc, %[i_fn] \n\t"
		"str r0, %[rn] \n\t"	/* in case of writeback */
		"str r2, %[rd0] \n\t"
		"str r3, %[rd1] \n\t"
		: [rn]  "+m" (rnv),
		  [rd0] "=m" (regs->uregs[rd]),
		  [rd1] "=m" (regs->uregs[rd+1])
		: [rm]   "m" (rmv),
		  [cpsr] "r" (regs->ARM_cpsr),
		  [i_fn] "r" (i_fn)
		: "r0", "r1", "r2", "r3", "lr", "cc"
	);
	if (is_writeback(insn))
		regs->uregs[rn] = rnv;
}

static void __kprobes emulate_strd(struct kprobe *p, struct pt_regs *regs)
{
	insn_4arg_fn_t *i_fn = (insn_4arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	/* rm/rmv may be invalid, don't care. */
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rnv_wb;

	rnv_wb = insnslot_4arg_rflags(rnv, rmv, regs->uregs[rd],
				      regs->uregs[rd+1],
				      regs->ARM_cpsr, i_fn);
	if (is_writeback(insn))
		regs->uregs[rn] = rnv_wb;
}

static void __kprobes emulate_ldr(struct kprobe *p, struct pt_regs *regs)
{
	insn_llret_3arg_fn_t *i_fn = (insn_llret_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	union reg_pair fnr;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rdv;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long cpsr = regs->ARM_cpsr;

	fnr.dr = insnslot_llret_3arg_rflags(rnv, 0, rmv, cpsr, i_fn);
	if (rn != 15)
		regs->uregs[rn] = fnr.r0;	/* Save Rn in case of writeback. */
	rdv = fnr.r1;

	if (rd == 15) {
#if __LINUX_ARM_ARCH__ >= 5
		cpsr &= ~PSR_T_BIT;
		if (rdv & 0x1)
			cpsr |= PSR_T_BIT;
		regs->ARM_cpsr = cpsr;
		rdv &= ~0x1;
#else
		rdv &= ~0x2;
#endif
	}
	regs->uregs[rd] = rdv;
}

static void __kprobes emulate_str(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long iaddr = (long)p->addr;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rdv = (rd == 15) ? iaddr + str_pc_offset : regs->uregs[rd];
	long rnv = (rn == 15) ? iaddr + 8 : regs->uregs[rn];
	long rmv = regs->uregs[rm];	/* rm/rmv may be invalid, don't care. */
	long rnv_wb;

	rnv_wb = insnslot_3arg_rflags(rnv, rdv, rmv, regs->ARM_cpsr, i_fn);
	if (rn != 15)
		regs->uregs[rn] = rnv_wb;	/* Save Rn in case of writeback. */
}

static void __kprobes emulate_sat(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rm = insn & 0xf;
	long rmv = regs->uregs[rm];

	/* Writes Q flag */
	regs->uregs[rd] = insnslot_1arg_rwflags(rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_sel(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rnv = regs->uregs[rn];
	long rmv = regs->uregs[rm];

	/* Reads GE bits */
	regs->uregs[rd] = insnslot_2arg_rflags(rnv, rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_none(struct kprobe *p, struct pt_regs *regs)
{
	insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];

	insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_nop(struct kprobe *p, struct pt_regs *regs)
{
}

static void __kprobes
emulate_rd12_modify(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	long rdv = regs->uregs[rd];

	regs->uregs[rd] = insnslot_1arg_rflags(rdv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd12rn0_modify(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = insn & 0xf;
	long rdv = regs->uregs[rd];
	long rnv = regs->uregs[rn];

	regs->uregs[rd] = insnslot_2arg_rflags(rdv, rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes emulate_rd12rm0(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rm = insn & 0xf;
	long rmv = regs->uregs[rm];

	regs->uregs[rd] = insnslot_1arg_rflags(rmv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd12rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	int rm = insn & 0xf;
	long rnv = regs->uregs[rn];
	long rmv = regs->uregs[rm];

	regs->uregs[rd] =
		insnslot_2arg_rwflags(rnv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rn12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 16) & 0xf;
	int rn = (insn >> 12) & 0xf;
	int rs = (insn >> 8) & 0xf;
	int rm = insn & 0xf;
	long rnv = regs->uregs[rn];
	long rsv = regs->uregs[rs];
	long rmv = regs->uregs[rm];

	regs->uregs[rd] =
		insnslot_3arg_rwflags(rnv, rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rd16rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 16) & 0xf;
	int rs = (insn >> 8) & 0xf;
	int rm = insn & 0xf;
	long rsv = regs->uregs[rs];
	long rmv = regs->uregs[rm];

	regs->uregs[rd] =
		insnslot_2arg_rwflags(rsv, rmv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_rdhi16rdlo12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_llret_4arg_fn_t *i_fn = (insn_llret_4arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	union reg_pair fnr;
	int rdhi = (insn >> 16) & 0xf;
	int rdlo = (insn >> 12) & 0xf;
	int rs = (insn >> 8) & 0xf;
	int rm = insn & 0xf;
	long rsv = regs->uregs[rs];
	long rmv = regs->uregs[rm];

	fnr.dr = insnslot_llret_4arg_rwflags(regs->uregs[rdhi],
					     regs->uregs[rdlo], rsv, rmv,
					     &regs->ARM_cpsr, i_fn);
	regs->uregs[rdhi] = fnr.r0;
	regs->uregs[rdlo] = fnr.r1;
}

static void __kprobes
emulate_alu_imm_rflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

	regs->uregs[rd] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_imm_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;
	long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

	regs->uregs[rd] = insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_tests_imm(struct kprobe *p, struct pt_regs *regs)
{
	insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];

	insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;	/* rn/rnv/rs/rsv may be */
	int rs = (insn >> 8) & 0xf;	/* invalid, don't care. */
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rsv = regs->uregs[rs];

	regs->uregs[rd] =
		insnslot_3arg_rflags(rnv, rmv, rsv, regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_rwflags(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rd = (insn >> 12) & 0xf;
	int rn = (insn >> 16) & 0xf;	/* rn/rnv/rs/rsv may be */
	int rs = (insn >> 8) & 0xf;	/* invalid, don't care. */
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rsv = regs->uregs[rs];

	regs->uregs[rd] =
		insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}

static void __kprobes
emulate_alu_tests(struct kprobe *p, struct pt_regs *regs)
{
	insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
	kprobe_opcode_t insn = p->opcode;
	long ppc = (long)p->addr + 8;
	int rn = (insn >> 16) & 0xf;
	int rs = (insn >> 8) & 0xf;	/* rs/rsv may be invalid, don't care. */
	int rm = insn & 0xf;
	long rnv = (rn == 15) ? ppc : regs->uregs[rn];
	long rmv = (rm == 15) ? ppc : regs->uregs[rm];
	long rsv = regs->uregs[rs];

	insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
}

static enum kprobe_insn __kprobes
prep_emulate_ldr_str(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	int not_imm = (insn & (1 << 26)) ? (insn & (1 << 25))
					 : (~insn & (1 << 22));

	if (is_writeback(insn) && is_r15(insn, 16))
		return INSN_REJECTED;	/* Writeback to PC */

	insn &= 0xfff00fff;
	insn |= 0x00001000;	/* Rn = r0, Rd = r1 */
	if (not_imm) {
		insn &= ~0xf;
		insn |= 2;	/* Rm = r2 */
	}
	asi->insn[0] = insn;
	asi->insn_handler = (insn & (1 << 20)) ? emulate_ldr : emulate_str;
	return INSN_GOOD;
}
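
/*
 * For example, a probed "ldr r6, [r7, r9]" becomes "ldr r1, [r0, r2]" in
 * the slot: emulate_ldr() loads r0 from uregs[7] and r2 from uregs[9],
 * executes the slot, then writes the (possibly updated) base back to
 * uregs[7] and the loaded value to uregs[6].
 */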

static enum kprobe_insn __kprobes
prep_emulate_rd12_modify(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	if (is_r15(insn, 12))
		return INSN_REJECTED;	/* Rd is PC */

	insn &= 0xffff0fff;	/* Rd = r0 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd12_modify;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rn0_modify(kprobe_opcode_t insn,
			    struct arch_specific_insn *asi)
{
	if (is_r15(insn, 12))
		return INSN_REJECTED;	/* Rd is PC */

	insn &= 0xffff0ff0;	/* Rd = r0 */
	insn |= 0x00000001;	/* Rn = r1 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd12rn0_modify;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rm0(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	if (is_r15(insn, 12))
		return INSN_REJECTED;	/* Rd is PC */

	insn &= 0xffff0ff0;	/* Rd = r0, Rm = r0 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd12rm0;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd12rn16rm0_wflags(kprobe_opcode_t insn,
				struct arch_specific_insn *asi)
{
	if (is_r15(insn, 12))
		return INSN_REJECTED;	/* Rd is PC */

	insn &= 0xfff00ff0;	/* Rd = r0, Rn = r0 */
	insn |= 0x00000001;	/* Rm = r1 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd12rn16rm0_rwflags;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rs8rm0_wflags(kprobe_opcode_t insn,
			       struct arch_specific_insn *asi)
{
	if (is_r15(insn, 16))
		return INSN_REJECTED;	/* Rd is PC */

	insn &= 0xfff0f0f0;	/* Rd = r0, Rs = r0 */
	insn |= 0x00000001;	/* Rm = r1 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd16rs8rm0_rwflags;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rd16rn12rs8rm0_wflags(kprobe_opcode_t insn,
				   struct arch_specific_insn *asi)
{
	if (is_r15(insn, 16))
		return INSN_REJECTED;	/* Rd is PC */

	insn &= 0xfff000f0;	/* Rd = r0, Rn = r0 */
	insn |= 0x00000102;	/* Rs = r1, Rm = r2 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rd16rn12rs8rm0_rwflags;
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
prep_emulate_rdhi16rdlo12rs8rm0_wflags(kprobe_opcode_t insn,
				       struct arch_specific_insn *asi)
{
	if (is_r15(insn, 16) || is_r15(insn, 12))
		return INSN_REJECTED;	/* RdHi or RdLo is PC */

	insn &= 0xfff000f0;	/* RdHi = r0, RdLo = r1 */
	insn |= 0x00001203;	/* Rs = r2, Rm = r3 */
	asi->insn[0] = insn;
	asi->insn_handler = emulate_rdhi16rdlo12rs8rm0_rwflags;
	return INSN_GOOD;
}
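
/*
 * For example, "umull r4, r5, r6, r7" (RdLo=r4, RdHi=r5, Rm=r6, Rs=r7)
 * is rewritten as "umull r1, r0, r3, r2"; its handler passes the old
 * RdHi/RdLo values in r0/r1 and Rs/Rm in r2/r3, then copies the r0/r1
 * results back to uregs[5] and uregs[4].
 */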

/*
 * For the instruction masking and comparisons in all the "space_*"
 * functions below, do _not_ rearrange the order of tests unless
 * you're very, very sure of what you are doing.  For the sake of
 * efficiency, the masks for some tests sometimes assume other tests
 * have been done prior to them so the number of patterns to test
 * for an instruction set can be as broad as possible to reduce the
 * number of tests needed.
 */

static enum kprobe_insn __kprobes
space_1111(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* memory hint : 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx : */
	/* PLDI : 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx : */
	/* PLDW : 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx : */
	/* PLD  : 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx : */
	if ((insn & 0xfe300000) == 0xf4100000) {
		asi->insn_handler = emulate_nop;
		return INSN_GOOD_NO_SLOT;
	}

	/* BLX(1) : 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx : */
	if ((insn & 0xfe000000) == 0xfa000000) {
		asi->insn_handler = simulate_blx1;
		return INSN_GOOD_NO_SLOT;
	}

	/* CPS   : 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */
	/* SETEND: 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */

	/* SRS   : 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
	/* RFE   : 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */

	/* Coprocessor instructions... */
	/* MCRR2 : 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
	/* MRRC2 : 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
	/* LDC2  : 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
	/* STC2  : 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
	/* CDP2  : 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
	/* MCR2  : 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
	/* MRC2  : 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */

	return INSN_REJECTED;
}

static enum kprobe_insn __kprobes
space_cccc_000x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* cccc 0001 0xx0 xxxx xxxx xxxx xxx0 xxxx */
	if ((insn & 0x0f900010) == 0x01000000) {

		/* MRS cpsr : cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
		if ((insn & 0x0ff000f0) == 0x01000000) {
			if (is_r15(insn, 12))
				return INSN_REJECTED;	/* Rd is PC */
			asi->insn_handler = simulate_mrs;
			return INSN_GOOD_NO_SLOT;
		}

		/* SMLALxy : cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
		if ((insn & 0x0ff00090) == 0x01400080)
			return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
								      asi);

		/* SMULWy : cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
		/* SMULxy : cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
		if ((insn & 0x0ff000b0) == 0x012000a0 ||
		    (insn & 0x0ff00090) == 0x01600080)
			return prep_emulate_rd16rs8rm0_wflags(insn, asi);

		/* SMLAxy : cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx : Q */
		/* SMLAWy : cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx : Q */
		if ((insn & 0x0ff00090) == 0x01000080 ||
		    (insn & 0x0ff000b0) == 0x01200080)
			return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

		/* BXJ      : cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
		/* MSR      : cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
		/* MRS spsr : cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */

		/* Other instruction encodings aren't yet defined */
		return INSN_REJECTED;
	}

	/* cccc 0001 0xx0 xxxx xxxx xxxx 0xx1 xxxx */
	else if ((insn & 0x0f900090) == 0x01000010) {

		/* BLX(2) : cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
		/* BX     : cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
		if ((insn & 0x0ff000d0) == 0x01200010) {
			if ((insn & 0x0ff000ff) == 0x0120003f)
				return INSN_REJECTED; /* BLX pc */
			asi->insn_handler = simulate_blx2bx;
			return INSN_GOOD_NO_SLOT;
		}

		/* CLZ : cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
		if ((insn & 0x0ff000f0) == 0x01600010)
			return prep_emulate_rd12rm0(insn, asi);

		/* QADD  : cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx :Q */
		/* QSUB  : cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx :Q */
		/* QDADD : cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx :Q */
		/* QDSUB : cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx :Q */
		if ((insn & 0x0f9000f0) == 0x01000050)
			return prep_emulate_rd12rn16rm0_wflags(insn, asi);

		/* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
		/* SMC  : cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */

		/* Other instruction encodings aren't yet defined */
		return INSN_REJECTED;
	}

	/* cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx */
	else if ((insn & 0x0f0000f0) == 0x00000090) {

		/* MUL    : cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx :    */
		/* MULS   : cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx :cc  */
		/* MLA    : cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx :    */
		/* MLAS   : cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx :cc  */
		/* UMAAL  : cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx :    */
		/* undef  : cccc 0000 0101 xxxx xxxx xxxx 1001 xxxx :    */
		/* MLS    : cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx :    */
		/* undef  : cccc 0000 0111 xxxx xxxx xxxx 1001 xxxx :    */
		/* UMULL  : cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx :    */
		/* UMULLS : cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx :cc  */
		/* UMLAL  : cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx :    */
		/* UMLALS : cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx :cc  */
		/* SMULL  : cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx :    */
		/* SMULLS : cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx :cc  */
		/* SMLAL  : cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx :    */
		/* SMLALS : cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx :cc  */
		if ((insn & 0x00d00000) == 0x00500000)
			return INSN_REJECTED;
		else if ((insn & 0x00e00000) == 0x00000000)
			return prep_emulate_rd16rs8rm0_wflags(insn, asi);
		else if ((insn & 0x00a00000) == 0x00200000)
			return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
		else
			return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
								      asi);
	}

	/* cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx */
	else if ((insn & 0x0e000090) == 0x00000090) {

		/* SWP   : cccc 0001 0000 xxxx xxxx xxxx 1001 xxxx */
		/* SWPB  : cccc 0001 0100 xxxx xxxx xxxx 1001 xxxx */
		/* ???   : cccc 0001 0x01 xxxx xxxx xxxx 1001 xxxx */
		/* ???   : cccc 0001 0x10 xxxx xxxx xxxx 1001 xxxx */
		/* ???   : cccc 0001 0x11 xxxx xxxx xxxx 1001 xxxx */
		/* STREX : cccc 0001 1000 xxxx xxxx xxxx 1001 xxxx */
		/* LDREX : cccc 0001 1001 xxxx xxxx xxxx 1001 xxxx */
		/* STREXD: cccc 0001 1010 xxxx xxxx xxxx 1001 xxxx */
		/* LDREXD: cccc 0001 1011 xxxx xxxx xxxx 1001 xxxx */
		/* STREXB: cccc 0001 1100 xxxx xxxx xxxx 1001 xxxx */
		/* LDREXB: cccc 0001 1101 xxxx xxxx xxxx 1001 xxxx */
		/* STREXH: cccc 0001 1110 xxxx xxxx xxxx 1001 xxxx */
		/* LDREXH: cccc 0001 1111 xxxx xxxx xxxx 1001 xxxx */

		/* LDRD  : cccc 000x xxx0 xxxx xxxx xxxx 1101 xxxx */
		/* STRD  : cccc 000x xxx0 xxxx xxxx xxxx 1111 xxxx */
		/* LDRH  : cccc 000x xxx1 xxxx xxxx xxxx 1011 xxxx */
		/* STRH  : cccc 000x xxx0 xxxx xxxx xxxx 1011 xxxx */
		/* LDRSB : cccc 000x xxx1 xxxx xxxx xxxx 1101 xxxx */
		/* LDRSH : cccc 000x xxx1 xxxx xxxx xxxx 1111 xxxx */
		if ((insn & 0x0f0000f0) == 0x01000090) {
			if ((insn & 0x0fb000f0) == 0x01000090) {
				/* SWP/SWPB */
				return prep_emulate_rd12rn16rm0_wflags(insn,
								       asi);
			} else {
				/* STREX/LDREX variants and unallocated space */
				return INSN_REJECTED;
			}
		} else if ((insn & 0x0e1000d0) == 0x00000d0) {
			/* STRD/LDRD */
			if ((insn & 0x0000e000) == 0x0000e000)
				return INSN_REJECTED;	/* Rd is LR or PC */
			if (is_writeback(insn) && is_r15(insn, 16))
				return INSN_REJECTED;	/* Writeback to PC */

			insn &= 0xfff00fff;
			insn |= 0x00002000;	/* Rn = r0, Rd = r2 */
			if (!(insn & (1 << 22))) {
				/* Register index */
				insn &= ~0xf;
				insn |= 1;	/* Rm = r1 */
			}
			asi->insn[0] = insn;
			asi->insn_handler =
				(insn & (1 << 5)) ? emulate_strd : emulate_ldrd;
			return INSN_GOOD;
		}

		/* LDRH/STRH/LDRSB/LDRSH */
		if (is_r15(insn, 12))
			return INSN_REJECTED;	/* Rd is PC */
		return prep_emulate_ldr_str(insn, asi);
	}

	/* cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx */

	/*
	 * ALU op with S bit and Rd == 15 :
	 *	cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx
	 */
	if ((insn & 0x0e10f000) == 0x0010f000)
		return INSN_REJECTED;

	/*
	 * "mov ip, sp" is the most common kprobe'd instruction by far.
	 * Check and optimize for it explicitly.
	 */
	if (insn == 0xe1a0c00d) {
		asi->insn_handler = simulate_mov_ipsp;
		return INSN_GOOD_NO_SLOT;
	}

	/*
	 * Data processing: Immediate-shift / Register-shift
	 * ALU op : cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx
	 * CPY    : cccc 0001 1010 xxxx xxxx 0000 0000 xxxx
	 * MOV    : cccc 0001 101x xxxx xxxx xxxx xxxx xxxx
	 * *S (bit 20) updates condition codes
	 * ADC/SBC/RSC reads the C flag
	 */
	insn &= 0xfff00ff0;	/* Rn = r0, Rd = r0 */
	insn |= 0x00000001;	/* Rm = r1 */
	if (insn & 0x010) {
		insn &= 0xfffff0ff;	/* register shift */
		insn |= 0x00000200;	/* Rs = r2 */
	}
	asi->insn[0] = insn;

	if ((insn & 0x0f900000) == 0x01100000) {
		/*
		 * TST : cccc 0001 0001 xxxx xxxx xxxx xxxx xxxx
		 * TEQ : cccc 0001 0011 xxxx xxxx xxxx xxxx xxxx
		 * CMP : cccc 0001 0101 xxxx xxxx xxxx xxxx xxxx
		 * CMN : cccc 0001 0111 xxxx xxxx xxxx xxxx xxxx
		 */
		asi->insn_handler = emulate_alu_tests;
	} else {
		/* ALU ops which write to Rd */
		asi->insn_handler = (insn & (1 << 20)) ?  /* S-bit */
				emulate_alu_rwflags : emulate_alu_rflags;
	}
	return INSN_GOOD;
}

static enum kprobe_insn __kprobes
space_cccc_001x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* MOVW : cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */
	/* MOVT : cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */
	if ((insn & 0x0fb00000) == 0x03000000)
		return prep_emulate_rd12_modify(insn, asi);

	/* hints : cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */
	if ((insn & 0x0fff0000) == 0x03200000) {
		unsigned op2 = insn & 0x000000ff;
		if (op2 == 0x01 || op2 == 0x04) {
			/* YIELD : cccc 0011 0010 0000 xxxx xxxx 0000 0001 */
			/* SEV   : cccc 0011 0010 0000 xxxx xxxx 0000 0100 */
			asi->insn[0] = insn;
			asi->insn_handler = emulate_none;
			return INSN_GOOD;
		} else if (op2 <= 0x03) {
			/* NOP : cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
			/* WFE : cccc 0011 0010 0000 xxxx xxxx 0000 0010 */
			/* WFI : cccc 0011 0010 0000 xxxx xxxx 0000 0011 */
			/*
			 * We make WFE and WFI true NOPs to avoid stalls due
			 * to missing events whilst processing the probe.
			 */
			asi->insn_handler = emulate_nop;
			return INSN_GOOD_NO_SLOT;
		}
		/* For DBG and unallocated hints it's safest to reject them */
		return INSN_REJECTED;
	}

	/*
	 * MSR : cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx
	 * ALU op with S bit and Rd == 15 :
	 *	cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx
	 */
	if ((insn & 0x0fb00000) == 0x03200000 ||	/* MSR */
	    (insn & 0x0e10f000) == 0x0210f000)		/* ALU s-bit, R15 */
		return INSN_REJECTED;

	/*
	 * Data processing: 32-bit Immediate
	 * ALU op : cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
	 * MOV    : cccc 0011 101x xxxx xxxx xxxx xxxx xxxx
	 * *S (bit 20) updates condition codes
	 * ADC/SBC/RSC reads the C flag
	 */
	insn &= 0xfff00fff;	/* Rn = r0 and Rd = r0 */
	asi->insn[0] = insn;

	if ((insn & 0x0f900000) == 0x03100000) {
		/*
		 * TST : cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx
		 * TEQ : cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx
		 * CMP : cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx
		 * CMN : cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx
		 */
		asi->insn_handler = emulate_alu_tests_imm;
	} else {
		/* ALU ops which write to Rd */
		asi->insn_handler = (insn & (1 << 20)) ?  /* S-bit */
			emulate_alu_imm_rwflags : emulate_alu_imm_rflags;
	}
	return INSN_GOOD;
}
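
/*
 * For example, "add r3, r5, #20" is rewritten above as "add r0, r0, #20"
 * (the immediate and rotate fields are preserved); emulate_alu_imm_rflags()
 * supplies uregs[5] in r0 and stores the r0 result to uregs[3].
 */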

static enum kprobe_insn __kprobes
space_cccc_0110__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	/* SEL : cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx GE: !!! */
	if ((insn & 0x0ff000f0) == 0x068000b0) {
		if (is_r15(insn, 12))
			return INSN_REJECTED;	/* Rd is PC */
		insn &= 0xfff00ff0;	/* Rd = r0, Rn = r0 */
		insn |= 0x00000001;	/* Rm = r1 */
		asi->insn[0] = insn;
		asi->insn_handler = emulate_sel;
		return INSN_GOOD;
	}

	/* SSAT   : cccc 0110 101x xxxx xxxx xxxx xx01 xxxx :Q */
	/* USAT   : cccc 0110 111x xxxx xxxx xxxx xx01 xxxx :Q */
	/* SSAT16 : cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx :Q */
	/* USAT16 : cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx :Q */
	if ((insn & 0x0fa00030) == 0x06a00010 ||
	    (insn & 0x0fb000f0) == 0x06a00030) {
		if (is_r15(insn, 12))
			return INSN_REJECTED;	/* Rd is PC */
		insn &= 0xffff0ff0;	/* Rd = r0, Rm = r0 */
		asi->insn[0] = insn;
		asi->insn_handler = emulate_sat;
		return INSN_GOOD;
	}

	/* REV   : cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
	/* REV16 : cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
	/* RBIT  : cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */
	/* REVSH : cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
	if ((insn & 0x0ff00070) == 0x06b00030 ||
	    (insn & 0x0ff00070) == 0x06f00030)
		return prep_emulate_rd12rm0(insn, asi);

	/* ???       : cccc 0110 0000 xxxx xxxx xxxx xxx1 xxxx :   */
	/* SADD16    : cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx :GE */
	/* SADDSUBX  : cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx :GE */
	/* SSUBADDX  : cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx :GE */
	/* SSUB16    : cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx :GE */
	/* SADD8     : cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx :GE */
	/* ???       : cccc 0110 0001 xxxx xxxx xxxx 1011 xxxx :   */
	/* ???       : cccc 0110 0001 xxxx xxxx xxxx 1101 xxxx :   */
	/* SSUB8     : cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx :GE */
	/* QADD16    : cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx :   */
	/* QADDSUBX  : cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx :   */
	/* QSUBADDX  : cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx :   */
	/* QSUB16    : cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx :   */
	/* QADD8     : cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx :   */
	/* ???       : cccc 0110 0010 xxxx xxxx xxxx 1011 xxxx :   */
	/* ???       : cccc 0110 0010 xxxx xxxx xxxx 1101 xxxx :   */
	/* QSUB8     : cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx :   */
	/* SHADD16   : cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx :   */
	/* SHADDSUBX : cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx :   */
	/* SHSUBADDX : cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx :   */
	/* SHSUB16   : cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx :   */
	/* SHADD8    : cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx :   */
	/* ???       : cccc 0110 0011 xxxx xxxx xxxx 1011 xxxx :   */
	/* ???       : cccc 0110 0011 xxxx xxxx xxxx 1101 xxxx :   */
	/* SHSUB8    : cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx :   */
	/* ???       : cccc 0110 0100 xxxx xxxx xxxx xxx1 xxxx :   */
	/* UADD16    : cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx :GE */
	/* UADDSUBX  : cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx :GE */
	/* USUBADDX  : cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx :GE */
	/* USUB16    : cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx :GE */
	/* UADD8     : cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx :GE */
	/* ???       : cccc 0110 0101 xxxx xxxx xxxx 1011 xxxx :   */
	/* ???       : cccc 0110 0101 xxxx xxxx xxxx 1101 xxxx :   */
	/* USUB8     : cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx :GE */
	/* UQADD16   : cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx :   */
	/* UQADDSUBX : cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx :   */
	/* UQSUBADDX : cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx :   */
	/* UQSUB16   : cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx :   */
	/* UQADD8    : cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx :   */
	/* ???       : cccc 0110 0110 xxxx xxxx xxxx 1011 xxxx :   */
	/* ???       : cccc 0110 0110 xxxx xxxx xxxx 1101 xxxx :   */
	/* UQSUB8    : cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx :   */
	/* UHADD16   : cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx :   */
	/* UHADDSUBX : cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx :   */
	/* UHSUBADDX : cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx :   */
	/* UHSUB16   : cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx :   */
	/* UHADD8    : cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx :   */
	/* ???       : cccc 0110 0111 xxxx xxxx xxxx 1011 xxxx :   */
	/* ???       : cccc 0110 0111 xxxx xxxx xxxx 1101 xxxx :   */
	/* UHSUB8    : cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx :   */
	if ((insn & 0x0f800010) == 0x06000010) {
		if ((insn & 0x00300000) == 0x00000000 ||
		    (insn & 0x000000e0) == 0x000000a0 ||
		    (insn & 0x000000e0) == 0x000000c0)
			return INSN_REJECTED;	/* Unallocated space */
		return prep_emulate_rd12rn16rm0_wflags(insn, asi);
	}

	/* PKHBT : cccc 0110 1000 xxxx xxxx xxxx x001 xxxx : */
	/* PKHTB : cccc 0110 1000 xxxx xxxx xxxx x101 xxxx : */
	if ((insn & 0x0ff00030) == 0x06800010)
		return prep_emulate_rd12rn16rm0_wflags(insn, asi);

	/* SXTAB16 : cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx : */
	/* SXTB16  : cccc 0110 1000 1111 xxxx xxxx 0111 xxxx : */
	/* ???     : cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx : */
	/* SXTAB   : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx : */
	/* SXTB    : cccc 0110 1010 1111 xxxx xxxx 0111 xxxx : */
	/* SXTAH   : cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx : */
	/* SXTH    : cccc 0110 1011 1111 xxxx xxxx 0111 xxxx : */
	/* UXTAB16 : cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx : */
	/* UXTB16  : cccc 0110 1100 1111 xxxx xxxx 0111 xxxx : */
	/* ???     : cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx : */
	/* UXTAB   : cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx : */
	/* UXTB    : cccc 0110 1110 1111 xxxx xxxx 0111 xxxx : */
	/* UXTAH   : cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx : */
	/* UXTH    : cccc 0110 1111 1111 xxxx xxxx 0111 xxxx : */
	if ((insn & 0x0f8000f0) == 0x06800070) {
		if ((insn & 0x00300000) == 0x00100000)
			return INSN_REJECTED;	/* Unallocated space */

		if ((insn & 0x000f0000) == 0x000f0000)
			return prep_emulate_rd12rm0(insn, asi);
		else
			return prep_emulate_rd12rn16rm0_wflags(insn, asi);
	}

	/* Other instruction encodings aren't yet defined */
	return INSN_REJECTED;
}
  1249. static enum kprobe_insn __kprobes
  1250. space_cccc_0111__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
  1251. {
        /* Undef : cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
        if ((insn & 0x0ff000f0) == 0x07f000f0)
                return INSN_REJECTED;

        /* SMLALD : cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
        /* SMLSLD : cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
        if ((insn & 0x0ff00090) == 0x07400010)
                return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn, asi);

        /* SMLAD : cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx :Q */
        /* SMUAD : cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx :Q */
        /* SMLSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :Q */
        /* SMUSD : cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx : */
        /* SMMLA : cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx : */
        /* SMMUL : cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx : */
        /* USADA8 : cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx : */
        /* USAD8 : cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx : */
        if ((insn & 0x0ff00090) == 0x07000010 ||
            (insn & 0x0ff000d0) == 0x07500010 ||
            (insn & 0x0ff000f0) == 0x07800010) {
                if ((insn & 0x0000f000) == 0x0000f000)
                        return prep_emulate_rd16rs8rm0_wflags(insn, asi);
                else
                        return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
        }

        /* SMMLS : cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx : */
        if ((insn & 0x0ff000d0) == 0x075000d0)
                return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);

        /* SBFX : cccc 0111 101x xxxx xxxx xxxx x101 xxxx : */
        /* UBFX : cccc 0111 111x xxxx xxxx xxxx x101 xxxx : */
        if ((insn & 0x0fa00070) == 0x07a00050)
                return prep_emulate_rd12rm0(insn, asi);

        /* BFI : cccc 0111 110x xxxx xxxx xxxx x001 xxxx : */
        /* BFC : cccc 0111 110x xxxx xxxx xxxx x001 1111 : */
        if ((insn & 0x0fe00070) == 0x07c00010) {
                if ((insn & 0x0000000f) == 0x0000000f)
                        return prep_emulate_rd12_modify(insn, asi);
                else
                        return prep_emulate_rd12rn0_modify(insn, asi);
        }

        return INSN_REJECTED;
}

static enum kprobe_insn __kprobes
space_cccc_01xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDR : cccc 01xx x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* LDRB : cccc 01xx x1x1 xxxx xxxx xxxx xxxx xxxx */
        /* LDRBT : cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
        /* LDRT : cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
        /* STR : cccc 01xx x0x0 xxxx xxxx xxxx xxxx xxxx */
        /* STRB : cccc 01xx x1x0 xxxx xxxx xxxx xxxx xxxx */
        /* STRBT : cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
        /* STRT : cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
        if ((insn & 0x00500000) == 0x00500000 && is_r15(insn, 12))
                return INSN_REJECTED;   /* LDRB into PC */

        return prep_emulate_ldr_str(insn, asi);
}

static enum kprobe_insn __kprobes
space_cccc_100x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* LDM(2) : cccc 100x x101 xxxx 0xxx xxxx xxxx xxxx */
        /* LDM(3) : cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
        if ((insn & 0x0e708000) == 0x85000000 ||
            (insn & 0x0e508000) == 0x85010000)
                return INSN_REJECTED;

        /* LDM(1) : cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
        /* STM(1) : cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
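        /*
         * An STM whose register list includes R15 stores the PC with an
         * implementation-defined offset, so it gets a dedicated simulation
         * routine; all other LDM(1)/STM(1) forms share simulate_ldm1stm1.
         */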
        asi->insn_handler = ((insn & 0x108000) == 0x008000) ? /* STM & R15 */
                                simulate_stm1_pc : simulate_ldm1stm1;
        return INSN_GOOD_NO_SLOT;
}

static enum kprobe_insn __kprobes
space_cccc_101x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* B : cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
        /* BL : cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
        asi->insn_handler = simulate_bbl;
        return INSN_GOOD_NO_SLOT;
}

static enum kprobe_insn __kprobes
space_cccc_11xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /* Coprocessor instructions... */
        /* MCRR : cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
        /* MRRC : cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
        /* LDC : cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
        /* STC : cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
        /* CDP : cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
        /* MCR : cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
        /* MRC : cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
        /* SVC : cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
        return INSN_REJECTED;
}
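
/*
 * Condition-code checkers.  Each helper takes the CPSR and returns a
 * non-zero value when the instruction's condition holds.  The bit games
 * below rely on the flag layout in the CPSR: N is bit 31, Z is bit 30,
 * C is bit 29 and V is bit 28.
 */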

static unsigned long __kprobes __check_eq(unsigned long cpsr)
{
        return cpsr & PSR_Z_BIT;
}

static unsigned long __kprobes __check_ne(unsigned long cpsr)
{
        return (~cpsr) & PSR_Z_BIT;
}

static unsigned long __kprobes __check_cs(unsigned long cpsr)
{
        return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_cc(unsigned long cpsr)
{
        return (~cpsr) & PSR_C_BIT;
}

static unsigned long __kprobes __check_mi(unsigned long cpsr)
{
        return cpsr & PSR_N_BIT;
}

static unsigned long __kprobes __check_pl(unsigned long cpsr)
{
        return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_vs(unsigned long cpsr)
{
        return cpsr & PSR_V_BIT;
}

static unsigned long __kprobes __check_vc(unsigned long cpsr)
{
        return (~cpsr) & PSR_V_BIT;
}
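
/*
 * HI is "C set and Z clear"; LS is its complement.  Shifting the CPSR
 * right by one moves Z (bit 30) onto the C position (bit 29), so a
 * single mask tests both flags at once.
 */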

static unsigned long __kprobes __check_hi(unsigned long cpsr)
{
        cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
        return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_ls(unsigned long cpsr)
{
        cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
        return (~cpsr) & PSR_C_BIT;
}
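
/*
 * GE is "N == V"; LT is "N != V".  Shifting the CPSR left by three moves
 * V (bit 28) onto the N position (bit 31), so the XOR leaves N ^ V in
 * bit 31.
 */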

static unsigned long __kprobes __check_ge(unsigned long cpsr)
{
        cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
        return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_lt(unsigned long cpsr)
{
        cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
        return cpsr & PSR_N_BIT;
}
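
/*
 * GT is "Z clear and N == V"; LE is its complement.  On top of the
 * N ^ V trick above, shifting the CPSR left by one moves Z (bit 30)
 * onto the N position so it can be folded into the same bit-31 test.
 */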

static unsigned long __kprobes __check_gt(unsigned long cpsr)
{
        unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
        temp |= (cpsr << 1);                     /* PSR_N_BIT |= PSR_Z_BIT */
        return (~temp) & PSR_N_BIT;
}

static unsigned long __kprobes __check_le(unsigned long cpsr)
{
        unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
        temp |= (cpsr << 1);                     /* PSR_N_BIT |= PSR_Z_BIT */
        return temp & PSR_N_BIT;
}

static unsigned long __kprobes __check_al(unsigned long cpsr)
{
        return true;
}
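
/*
 * Indexed by the condition field in bits [31:28] of the instruction.
 * Entry 15 (the 0b1111 "unconditional" space) also maps to __check_al;
 * those encodings are dispatched separately through space_1111().
 */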
static kprobe_check_cc * const condition_checks[16] = {
        &__check_eq, &__check_ne, &__check_cs, &__check_cc,
        &__check_mi, &__check_pl, &__check_vs, &__check_vc,
        &__check_hi, &__check_ls, &__check_ge, &__check_lt,
        &__check_gt, &__check_le, &__check_al, &__check_al
};

/* Return:
 *   INSN_REJECTED     If instruction is one not allowed to kprobe,
 *   INSN_GOOD         If instruction is supported and uses instruction slot,
 *   INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
 *
 * For instructions we don't want to kprobe (INSN_REJECTED return result):
 *   These are generally ones that modify processor state, making them
 *   hard to simulate, such as instructions that switch processor modes
 *   or make accesses in alternate modes.  Any of these could be simulated
 *   if the work were put into it, but the payoff would be low given that
 *   they should also be very rare.
 */
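
/*
 * asi->insn[1] is set to KPROBE_RETURN_INSTRUCTION so that, after the
 * instruction copy placed in insn[0] by the prep_emulate_* helpers has
 * been single-stepped, control returns to the kprobes core.
 */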
enum kprobe_insn __kprobes
arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        asi->insn_check_cc = condition_checks[insn>>28];
        asi->insn[1] = KPROBE_RETURN_INSTRUCTION;

        if ((insn & 0xf0000000) == 0xf0000000)
                return space_1111(insn, asi);
        else if ((insn & 0x0e000000) == 0x00000000)
                return space_cccc_000x(insn, asi);
        else if ((insn & 0x0e000000) == 0x02000000)
                return space_cccc_001x(insn, asi);
        else if ((insn & 0x0f000010) == 0x06000010)
                return space_cccc_0110__1(insn, asi);
        else if ((insn & 0x0f000010) == 0x07000010)
                return space_cccc_0111__1(insn, asi);
        else if ((insn & 0x0c000000) == 0x04000000)
                return space_cccc_01xx(insn, asi);
        else if ((insn & 0x0e000000) == 0x08000000)
                return space_cccc_100x(insn, asi);
        else if ((insn & 0x0e000000) == 0x0a000000)
                return space_cccc_101x(insn, asi);

        return space_cccc_11xx(insn, asi);
}
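
/*
 * find_str_pc_offset() (defined earlier in this file) works out at init
 * time whether a store of the PC writes the instruction address plus 8
 * or plus 12, which the store/STM simulation routines rely on.
 */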

void __init arm_kprobe_decode_init(void)
{
        find_str_pc_offset();
}