
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
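
/*
 * A minimal sketch of how one of these attribute bytes unpacks; the
 * helper below is hypothetical and compiled out, and exists only to show
 * which bit-fields the decoder extracts from the tables that follow.
 */
#if 0
static void example_unpack_attributes(u8 d)
{
	int byte_op    = d & ByteOp;	/* 8-bit operand size */
	int dst_kind   = d & DstMask;	/* ImplicitOps, DstReg or DstMem */
	int src_kind   = d & SrcMask;	/* SrcNone ... SrcImmByte */
	int has_modrm  = d & ModRM;	/* a ModRM byte follows the opcode */
	int write_only = d & Mov;	/* destination is written, never read */
}
#endif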
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, 0, 0,
	0, 0, ByteOp | DstMem | SrcImm | ModRM | Mov,
	DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
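
/*
 * Reading the table, assuming opcode 0x89 (mov r/m, r): the entry above
 * decodes to DstMem | SrcReg | ModRM | Mov, i.e. a register source, a
 * memory destination addressed via ModRM, and no read of the old
 * destination value. A hypothetical, compiled-out check:
 */
#if 0
static int example_check_mov_entry(void)
{
	return opcode_table[0x89] == (DstMem | SrcReg | ModRM | Mov);
}
#endif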
static u8 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that, of the Group 7 instructions (sgdt, lidt, etc.),
 * we are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
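
/*
 * After the call above, twobyte_table[1] drops from
 * SrcMem | ModRM | DstReg to SrcNone | ModRM | DstReg: group 7 decode
 * still parses the ModRM byte but no longer fetches the memory operand,
 * which is what invlpg needs, since the address it names may be unmapped.
 */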
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"push %"_sav"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	/* _sav &= ~_msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		\
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		\
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"b %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"w %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"l %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"q %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
	} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt); \
	if ( rc != 0 ) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
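
/*
 * Note that insn_fetch() is not self-contained: it expects 'rc', 'ops',
 * 'ctxt' and a 'done' label in the enclosing scope, and it advances
 * _eip as a side effect. A typical call site inside x86_emulate_memop()
 * below reads 'b = insn_fetch(u8, 1, _eip);'.
 */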
/* Access/update address held in a register, based on addressing mode. */
#define register_address(base, reg) \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))

#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if ( ad_bytes == sizeof(unsigned long) ) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
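
/*
 * A sketch of the wrap-around these macros preserve, assuming
 * ad_bytes == 2 on a wider build: the low 16 bits wrap while the upper
 * bits of the shadow register stay untouched. Hypothetical, compiled-out
 * example:
 */
#if 0
static void example_wrap16(void)
{
	int ad_bytes = 2;			/* 16-bit addressing */
	unsigned long si = 0x1234ffff;

	register_address_increment(si, 1);	/* si is now 0x12340000 */
}
#endif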
/*
 * Return a pointer into the shadow register file for ModRM register
 * operand 'modrm_reg'; with highbyte_regs (legacy byte encoding, no REX
 * prefix), indices 4-7 address the high bytes AH/CH/DH/BH.
 */
void *decode_register(u8 modrm_reg, unsigned long *regs,
		      int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
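
/*
 * A hypothetical, compiled-out example of the high-byte mapping: with
 * highbyte_regs set, index 4 points one byte into regs[0] (RAX), i.e. at
 * AH on a little-endian x86 register image.
 */
#if 0
static u8 example_read_ah(unsigned long *regs)
{
	return *(u8 *)decode_register(4, regs, 1);	/* AH */
}
#endif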
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
	return rc;
}
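
/*
 * The pseudo-descriptor read above is a 16-bit limit followed by the
 * base address, which is the memory image lgdt/lidt expect. The
 * op_bytes == 2 case still reads three base bytes because a 16-bit
 * lgdt/lidt loads only a 24-bit base.
 */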
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	u8 b, d, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}

done_prefixes:

	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
		b = insn_fetch(u8, 1, _eip);
	}
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}
		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}

	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}

		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;

		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else
					if (op_bytes == 8)
						modrm_ea += 4;
					else
						modrm_ea += op_bytes;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
		cr2 = modrm_ea;
	modrm_done:
		;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
	srcmem_common:
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
	if (twobyte)
		goto twobyte_insn;

	switch (b) {
	case 0x00 ... 0x05:
	add:		/* add */
		emulate_2op_SrcV("add", src, dst, _eflags);
		break;
	case 0x08 ... 0x0d:
	or:		/* or */
		emulate_2op_SrcV("or", src, dst, _eflags);
		break;
	case 0x10 ... 0x15:
	adc:		/* adc */
		emulate_2op_SrcV("adc", src, dst, _eflags);
		break;
	case 0x18 ... 0x1d:
	sbb:		/* sbb */
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		break;
	case 0x20 ... 0x25:
	and:		/* and */
		emulate_2op_SrcV("and", src, dst, _eflags);
		break;
	case 0x28 ... 0x2d:
	sub:		/* sub */
		emulate_2op_SrcV("sub", src, dst, _eflags);
		break;
	case 0x30 ... 0x35:
	xor:		/* xor */
		emulate_2op_SrcV("xor", src, dst, _eflags);
		break;
	case 0x38 ... 0x3d:
	cmp:		/* cmp */
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		break;
	case 0x63:		/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		dst.val = (s32) src.val;
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
	test:		/* test */
		emulate_2op_SrcV("test", src, dst, _eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1:
			*(u8 *) src.ptr = (u8) dst.val;
			break;
		case 2:
			*(u16 *) src.ptr = (u16) dst.val;
			break;
		case 4:
			*src.ptr = (u32) dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*src.ptr = dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
	case 0xc0 ... 0xc1:
	grp2:		/* Grp2 */
		switch (modrm_reg) {
		case 0:	/* rol */
			emulate_2op_SrcB("rol", src, dst, _eflags);
			break;
		case 1:	/* ror */
			emulate_2op_SrcB("ror", src, dst, _eflags);
			break;
		case 2:	/* rcl */
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			break;
		case 3:	/* rcr */
			emulate_2op_SrcB("rcr", src, dst, _eflags);
			break;
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			break;
		case 5:	/* shr */
			emulate_2op_SrcB("shr", src, dst, _eflags);
			break;
		case 7:	/* sar */
			emulate_2op_SrcB("sar", src, dst, _eflags);
			break;
		}
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		src.val = 1;
		goto grp2;
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
		goto grp2;
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 2:	/* not */
			dst.val = ~dst.val;
			break;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     dst.val, dst.bytes, ctxt)) != 0)
				goto done;
			dst.val = dst.orig_val;	/* skanky: disable writeback */
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}
writeback:
	if ((d & Mov) || (dst.orig_val != dst.val)) {
		switch (dst.type) {
		case OP_REG:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(u8 *)dst.ptr = (u8)dst.val;
				break;
			case 2:
				*(u16 *)dst.ptr = (u16)dst.val;
				break;
			case 4:
				*dst.ptr = (u32)dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case OP_MEM:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   dst.orig_val,
							   dst.val, dst.bytes,
							   ctxt);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 dst.val, dst.bytes,
							 ctxt);
			if (rc != 0)
				goto done;
		default:
			break;
		}
	}

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
				override_base ? *override_base : ctxt->ds_base,
				_regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	}
	goto writeback;
twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2:	/* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3:	/* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4:	/* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6:	/* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7:	/* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21:	/* mov from dr to reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23:	/* mov from reg to dr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		d &= ~Mov;	/* default to no move */
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			d |= (_eflags & EFLG_OF) ? Mov : 0;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			d |= (_eflags & EFLG_CF) ? Mov : 0;
			break;
		case 2:	/* cmovz/cmove */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			break;
		case 3:	/* cmovbe/cmovna */
			d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
			break;
		case 4:	/* cmovs */
			d |= (_eflags & EFLG_SF) ? Mov : 0;
			break;
		case 5:	/* cmovp/cmovpe */
			d |= (_eflags & EFLG_PF) ? Mov : 0;
			break;
		case 7:	/* cmovle/cmovng */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			d |= (!(_eflags & EFLG_SF) !=
			      !(_eflags & EFLG_OF)) ? Mov : 0;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		d ^= (b & 1) ? Mov : 0;
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		/* Always write back. The question is: where to? */
		d |= Mov;
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xb3:
	btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xbb:
	btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	dst.orig_val = dst.val;
	switch (b) {
	case 0x0d:	/* GrpP (prefetch) */
	case 0x18:	/* Grp16 (prefetch/nop) */
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		break;
	case 0x20:	/* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22:	/* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0xc7:	/* Grp9 (cmpxchg8b) */
#if defined(__i386__)
		{
			unsigned long old_lo, old_hi;

			if (((rc = ops->read_emulated(cr2 + 0, &old_lo, 4,
						      ctxt)) != 0)
			    || ((rc = ops->read_emulated(cr2 + 4, &old_hi, 4,
							 ctxt)) != 0))
				goto done;
			if ((old_lo != _regs[VCPU_REGS_RAX])
			    || (old_hi != _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = old_lo;
				_regs[VCPU_REGS_RDX] = old_hi;
				_eflags &= ~EFLG_ZF;
			} else if (ops->cmpxchg8b_emulated == NULL) {
				rc = X86EMUL_UNHANDLEABLE;
				goto done;
			} else {
				if ((rc = ops->cmpxchg8b_emulated(cr2, old_lo,
								  old_hi,
								  _regs[VCPU_REGS_RBX],
								  _regs[VCPU_REGS_RCX],
								  ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#elif defined(CONFIG_X86_64)
		{
			unsigned long old, new;

			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = (_regs[VCPU_REGS_RCX] << 32) | (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, old,
								new, 8, ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#endif
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
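
/*
 * A minimal sketch of driving the emulator, assuming the caller has
 * filled in an x86_emulate_ctxt (cr2, mode, segment bases, vcpu state)
 * and an x86_emulate_ops table whose read_std/write_std and
 * read_emulated/write_emulated/cmpxchg_emulated callbacks are wired up
 * to guest memory. Hypothetical and compiled out:
 */
#if 0
static int example_emulate_one(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	/* 0 on success; -1 if the instruction cannot be emulated. */
	return x86_emulate_memop(ctxt, ops);
}
#endif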
#ifdef __XEN__

#include <asm/mm.h>
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	*val = 0;
	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}
	return X86EMUL_CONTINUE;
}

int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}
	return X86EMUL_CONTINUE;
}

#endif