x86_emulate.c

  1. /******************************************************************************
  2. * x86_emulate.c
  3. *
  4. * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
  5. *
  6. * Copyright (c) 2005 Keir Fraser
  7. *
  8. * Linux coding style, mod r/m decoder, segment base fixes, real-mode
  9. * privileged instructions:
  10. *
  11. * Copyright (C) 2006 Qumranet
  12. *
  13. * Avi Kivity <avi@qumranet.com>
  14. * Yaniv Kamay <yaniv@qumranet.com>
  15. *
  16. * This work is licensed under the terms of the GNU GPL, version 2. See
  17. * the COPYING file in the top-level directory.
  18. *
  19. * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
  20. */
  21. #ifndef __KERNEL__
  22. #include <stdio.h>
  23. #include <stdint.h>
  24. #include <public/xen.h>
  25. #define DPRINTF(_f, _a ...) printf( _f , ## _a )
  26. #else
  27. #include "kvm.h"
  28. #define DPRINTF(x...) do {} while (0)
  29. #endif
  30. #include "x86_emulate.h"
  31. #include <linux/module.h>
  32. /*
  33. * Opcode effective-address decode tables.
  34. * Note that we only emulate instructions that have at least one memory
  35. * operand (excluding implicit stack references). We assume that stack
  36. * references and instruction fetches will never occur in special memory
  37. * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
  38. * not be handled.
  39. */
  40. /* Operand sizes: 8-bit operands or specified/overridden size. */
  41. #define ByteOp (1<<0) /* 8-bit operands. */
  42. /* Destination operand type. */
  43. #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
  44. #define DstReg (2<<1) /* Register operand. */
  45. #define DstMem (3<<1) /* Memory operand. */
  46. #define DstMask (3<<1)
  47. /* Source operand type. */
  48. #define SrcNone (0<<3) /* No source operand. */
  49. #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
  50. #define SrcReg (1<<3) /* Register operand. */
  51. #define SrcMem (2<<3) /* Memory operand. */
  52. #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
  53. #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
  54. #define SrcImm (5<<3) /* Immediate operand. */
  55. #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
  56. #define SrcMask (7<<3)
  57. /* Generic ModRM decode. */
  58. #define ModRM (1<<6)
  59. /* Destination is only written; never read. */
  60. #define Mov (1<<7)
  61. #define BitOp (1<<8)
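/*
 * Illustrative note (added for exposition; not in the original source):
 * each opcode_table[] entry below ORs the flags above together to
 * describe one instruction. For example, opcode 0x00 (ADD r/m8, r8) is
 * encoded as ByteOp | DstMem | SrcReg | ModRM: an 8-bit operation whose
 * destination is named by the ModRM r/m field (register or memory),
 * whose source is the ModRM reg field, and which therefore needs a
 * ModRM byte to be decoded.
 */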
  62. static u8 opcode_table[256] = {
  63. /* 0x00 - 0x07 */
  64. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  65. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  66. 0, 0, 0, 0,
  67. /* 0x08 - 0x0F */
  68. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  69. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  70. 0, 0, 0, 0,
  71. /* 0x10 - 0x17 */
  72. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  73. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  74. 0, 0, 0, 0,
  75. /* 0x18 - 0x1F */
  76. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  77. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  78. 0, 0, 0, 0,
  79. /* 0x20 - 0x27 */
  80. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  81. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  82. 0, 0, 0, 0,
  83. /* 0x28 - 0x2F */
  84. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  85. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  86. 0, 0, 0, 0,
  87. /* 0x30 - 0x37 */
  88. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  89. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  90. 0, 0, 0, 0,
  91. /* 0x38 - 0x3F */
  92. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  93. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  94. 0, 0, 0, 0,
  95. /* 0x40 - 0x4F */
  96. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  97. /* 0x50 - 0x57 */
  98. 0, 0, 0, 0, 0, 0, 0, 0,
  99. /* 0x58 - 0x5F */
  100. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  101. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  102. /* 0x60 - 0x6F */
  103. 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
  104. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  105. /* 0x70 - 0x7F */
  106. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  107. /* 0x80 - 0x87 */
  108. ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
  109. ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
  110. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  111. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  112. /* 0x88 - 0x8F */
  113. ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
  114. ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  115. 0, 0, 0, DstMem | SrcNone | ModRM | Mov,
  116. /* 0x90 - 0x9F */
  117. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  118. /* 0xA0 - 0xA7 */
  119. ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
  120. ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
  121. ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
  122. ByteOp | ImplicitOps, ImplicitOps,
  123. /* 0xA8 - 0xAF */
  124. 0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
  125. ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
  126. ByteOp | ImplicitOps, ImplicitOps,
  127. /* 0xB0 - 0xBF */
  128. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  129. /* 0xC0 - 0xC7 */
  130. ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
  131. 0, ImplicitOps, 0, 0,
  132. ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
  133. /* 0xC8 - 0xCF */
  134. 0, 0, 0, 0, 0, 0, 0, 0,
  135. /* 0xD0 - 0xD7 */
  136. ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
  137. ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
  138. 0, 0, 0, 0,
  139. /* 0xD8 - 0xDF */
  140. 0, 0, 0, 0, 0, 0, 0, 0,
  141. /* 0xE0 - 0xEF */
  142. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  143. /* 0xF0 - 0xF7 */
  144. 0, 0, 0, 0,
  145. ImplicitOps, 0,
  146. ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
  147. /* 0xF8 - 0xFF */
  148. 0, 0, 0, 0,
  149. 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
  150. };
  151. static u16 twobyte_table[256] = {
  152. /* 0x00 - 0x0F */
  153. 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
  154. 0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
  155. /* 0x10 - 0x1F */
  156. 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
  157. /* 0x20 - 0x2F */
  158. ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
  159. 0, 0, 0, 0, 0, 0, 0, 0,
  160. /* 0x30 - 0x3F */
  161. ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  162. /* 0x40 - 0x47 */
  163. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  164. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  165. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  166. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  167. /* 0x48 - 0x4F */
  168. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  169. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  170. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  171. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  172. /* 0x50 - 0x5F */
  173. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  174. /* 0x60 - 0x6F */
  175. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  176. /* 0x70 - 0x7F */
  177. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  178. /* 0x80 - 0x8F */
  179. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  180. /* 0x90 - 0x9F */
  181. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  182. /* 0xA0 - 0xA7 */
  183. 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
  184. /* 0xA8 - 0xAF */
  185. 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
  186. /* 0xB0 - 0xB7 */
  187. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
  188. DstMem | SrcReg | ModRM | BitOp,
  189. 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
  190. DstReg | SrcMem16 | ModRM | Mov,
  191. /* 0xB8 - 0xBF */
  192. 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
  193. 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
  194. DstReg | SrcMem16 | ModRM | Mov,
  195. /* 0xC0 - 0xCF */
  196. 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
  197. /* 0xD0 - 0xDF */
  198. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  199. /* 0xE0 - 0xEF */
  200. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  201. /* 0xF0 - 0xFF */
  202. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
  203. };
  204. /*
  205. * Tell the emulator that, of the Group 7 instructions (sgdt, lidt, etc.), we
  206. * are interested only in invlpg and not in any of the rest.
  207. *
  208. * invlpg is a special instruction in that the data it references may not
  209. * be mapped.
  210. */
  211. void kvm_emulator_want_group7_invlpg(void)
  212. {
  213. twobyte_table[1] &= ~SrcMem;
  214. }
  215. EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
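/*
 * Explanatory note (added): twobyte_table[1] is the entry for opcode
 * 0x0f 0x01, i.e. the Group 7 sgdt/sidt/lgdt/lidt/smsw/lmsw/invlpg
 * family. Clearing SrcMem stops the generic decoder from reading the
 * memory operand, which for invlpg may be unmapped; the invlpg handler
 * only needs the effective address itself, not the data at it.
 */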
  216. /* Type, address-of, and value of an instruction's operand. */
  217. struct operand {
  218. enum { OP_REG, OP_MEM, OP_IMM } type;
  219. unsigned int bytes;
  220. unsigned long val, orig_val, *ptr;
  221. };
  222. /* EFLAGS bit definitions. */
  223. #define EFLG_OF (1<<11)
  224. #define EFLG_DF (1<<10)
  225. #define EFLG_SF (1<<7)
  226. #define EFLG_ZF (1<<6)
  227. #define EFLG_AF (1<<4)
  228. #define EFLG_PF (1<<2)
  229. #define EFLG_CF (1<<0)
  230. /*
  231. * Instruction emulation:
  232. * Most instructions are emulated directly via a fragment of inline assembly
  233. * code. This allows us to save/restore EFLAGS and thus very easily pick up
  234. * any modified flags.
  235. */
  236. #if defined(CONFIG_X86_64)
  237. #define _LO32 "k" /* force 32-bit operand */
  238. #define _STK "%%rsp" /* stack pointer */
  239. #elif defined(__i386__)
  240. #define _LO32 "" /* force 32-bit operand */
  241. #define _STK "%%esp" /* stack pointer */
  242. #endif
  243. /*
  244. * These EFLAGS bits are restored from saved value during emulation, and
  245. * any changes are written back to the saved value after emulation.
  246. */
  247. #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
  248. /* Before executing instruction: restore necessary bits in EFLAGS. */
  249. #define _PRE_EFLAGS(_sav, _msk, _tmp) \
  250. /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
  251. "push %"_sav"; " \
  252. "movl %"_msk",%"_LO32 _tmp"; " \
  253. "andl %"_LO32 _tmp",("_STK"); " \
  254. "pushf; " \
  255. "notl %"_LO32 _tmp"; " \
  256. "andl %"_LO32 _tmp",("_STK"); " \
  257. "pop %"_tmp"; " \
  258. "orl %"_LO32 _tmp",("_STK"); " \
  259. "popf; " \
  260. /* _sav &= ~msk; */ \
  261. "movl %"_msk",%"_LO32 _tmp"; " \
  262. "notl %"_LO32 _tmp"; " \
  263. "andl %"_LO32 _tmp",%"_sav"; "
  264. /* After executing instruction: write-back necessary bits in EFLAGS. */
  265. #define _POST_EFLAGS(_sav, _msk, _tmp) \
  266. /* _sav |= EFLAGS & _msk; */ \
  267. "pushf; " \
  268. "pop %"_tmp"; " \
  269. "andl %"_msk",%"_LO32 _tmp"; " \
  270. "orl %"_LO32 _tmp",%"_sav"; "
  271. /* Raw emulation: instruction has two explicit operands. */
  272. #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
  273. do { \
  274. unsigned long _tmp; \
  275. \
  276. switch ((_dst).bytes) { \
  277. case 2: \
  278. __asm__ __volatile__ ( \
  279. _PRE_EFLAGS("0","4","2") \
  280. _op"w %"_wx"3,%1; " \
  281. _POST_EFLAGS("0","4","2") \
  282. : "=m" (_eflags), "=m" ((_dst).val), \
  283. "=&r" (_tmp) \
  284. : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
  285. break; \
  286. case 4: \
  287. __asm__ __volatile__ ( \
  288. _PRE_EFLAGS("0","4","2") \
  289. _op"l %"_lx"3,%1; " \
  290. _POST_EFLAGS("0","4","2") \
  291. : "=m" (_eflags), "=m" ((_dst).val), \
  292. "=&r" (_tmp) \
  293. : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
  294. break; \
  295. case 8: \
  296. __emulate_2op_8byte(_op, _src, _dst, \
  297. _eflags, _qx, _qy); \
  298. break; \
  299. } \
  300. } while (0)
  301. #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
  302. do { \
  303. unsigned long _tmp; \
  304. switch ( (_dst).bytes ) \
  305. { \
  306. case 1: \
  307. __asm__ __volatile__ ( \
  308. _PRE_EFLAGS("0","4","2") \
  309. _op"b %"_bx"3,%1; " \
  310. _POST_EFLAGS("0","4","2") \
  311. : "=m" (_eflags), "=m" ((_dst).val), \
  312. "=&r" (_tmp) \
  313. : _by ((_src).val), "i" (EFLAGS_MASK) ); \
  314. break; \
  315. default: \
  316. __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
  317. _wx, _wy, _lx, _ly, _qx, _qy); \
  318. break; \
  319. } \
  320. } while (0)
  321. /* Source operand is byte-sized and may be restricted to just %cl. */
  322. #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
  323. __emulate_2op(_op, _src, _dst, _eflags, \
  324. "b", "c", "b", "c", "b", "c", "b", "c")
  325. /* Source operand is byte, word, long or quad sized. */
  326. #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
  327. __emulate_2op(_op, _src, _dst, _eflags, \
  328. "b", "q", "w", "r", _LO32, "r", "", "r")
  329. /* Source operand is word, long or quad sized. */
  330. #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
  331. __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
  332. "w", "r", _LO32, "r", "", "r")
  333. /* Instruction has only one explicit operand (no source operand). */
  334. #define emulate_1op(_op, _dst, _eflags) \
  335. do { \
  336. unsigned long _tmp; \
  337. \
  338. switch ( (_dst).bytes ) \
  339. { \
  340. case 1: \
  341. __asm__ __volatile__ ( \
  342. _PRE_EFLAGS("0","3","2") \
  343. _op"b %1; " \
  344. _POST_EFLAGS("0","3","2") \
  345. : "=m" (_eflags), "=m" ((_dst).val), \
  346. "=&r" (_tmp) \
  347. : "i" (EFLAGS_MASK) ); \
  348. break; \
  349. case 2: \
  350. __asm__ __volatile__ ( \
  351. _PRE_EFLAGS("0","3","2") \
  352. _op"w %1; " \
  353. _POST_EFLAGS("0","3","2") \
  354. : "=m" (_eflags), "=m" ((_dst).val), \
  355. "=&r" (_tmp) \
  356. : "i" (EFLAGS_MASK) ); \
  357. break; \
  358. case 4: \
  359. __asm__ __volatile__ ( \
  360. _PRE_EFLAGS("0","3","2") \
  361. _op"l %1; " \
  362. _POST_EFLAGS("0","3","2") \
  363. : "=m" (_eflags), "=m" ((_dst).val), \
  364. "=&r" (_tmp) \
  365. : "i" (EFLAGS_MASK) ); \
  366. break; \
  367. case 8: \
  368. __emulate_1op_8byte(_op, _dst, _eflags); \
  369. break; \
  370. } \
  371. } while (0)
  372. /* Emulate an instruction with quadword operands (x86/64 only). */
  373. #if defined(CONFIG_X86_64)
  374. #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
  375. do { \
  376. __asm__ __volatile__ ( \
  377. _PRE_EFLAGS("0","4","2") \
  378. _op"q %"_qx"3,%1; " \
  379. _POST_EFLAGS("0","4","2") \
  380. : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
  381. : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
  382. } while (0)
  383. #define __emulate_1op_8byte(_op, _dst, _eflags) \
  384. do { \
  385. __asm__ __volatile__ ( \
  386. _PRE_EFLAGS("0","3","2") \
  387. _op"q %1; " \
  388. _POST_EFLAGS("0","3","2") \
  389. : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
  390. : "i" (EFLAGS_MASK) ); \
  391. } while (0)
  392. #elif defined(__i386__)
  393. #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
  394. #define __emulate_1op_8byte(_op, _dst, _eflags)
  395. #endif /* __i386__ */
  396. /* Fetch next part of the instruction being emulated. */
  397. #define insn_fetch(_type, _size, _eip) \
  398. ({ unsigned long _x; \
  399. rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
  400. (_size), ctxt); \
  401. if ( rc != 0 ) \
  402. goto done; \
  403. (_eip) += (_size); \
  404. (_type)_x; \
  405. })
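/*
 * Usage note (added for exposition): insn_fetch(_type, _size, _eip)
 * reads _size bytes at cs_base + _eip via ops->read_std(), jumps to the
 * "done" label on failure, and advances _eip by _size. Typical uses
 * below look like:
 *
 *     b = insn_fetch(u8, 1, _eip);     // next opcode or prefix byte
 *     disp = insn_fetch(s16, 2, _eip); // sign-extended displacement
 *
 * ("disp" is only an illustrative name, not a variable in this file.)
 */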
  406. /* Access/update address held in a register, based on addressing mode. */
  407. #define register_address(base, reg) \
  408. ((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
  409. ((reg) & ((1UL << (ad_bytes << 3)) - 1))))
  410. #define register_address_increment(reg, inc) \
  411. do { \
  412. /* signed type ensures sign extension to long */ \
  413. int _inc = (inc); \
  414. if ( ad_bytes == sizeof(unsigned long) ) \
  415. (reg) += _inc; \
  416. else \
  417. (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
  418. (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
  419. } while (0)
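/*
 * Worked example (added): with a 16-bit address size (ad_bytes == 2),
 * register_address_increment(_regs[VCPU_REGS_RSI], 1) only rewrites the
 * low 16 bits, so SI wraps from 0xffff to 0x0000 while the upper bits of
 * the shadow register are preserved; when ad_bytes equals
 * sizeof(unsigned long) the register is simply incremented.
 */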
  420. void *decode_register(u8 modrm_reg, unsigned long *regs,
  421. int highbyte_regs)
  422. {
  423. void *p;
  424. p = &regs[modrm_reg];
  425. if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
  426. p = (unsigned char *)&regs[modrm_reg & 3] + 1;
  427. return p;
  428. }
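/*
 * Note (added): with highbyte_regs set (byte-sized operand and no REX
 * prefix), register encodings 4-7 select the legacy high-byte registers
 * AH, CH, DH and BH, i.e. byte 1 of RAX/RCX/RDX/RBX, instead of
 * SPL/BPL/SIL/DIL.
 */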
  429. static int read_descriptor(struct x86_emulate_ctxt *ctxt,
  430. struct x86_emulate_ops *ops,
  431. void *ptr,
  432. u16 *size, unsigned long *address, int op_bytes)
  433. {
  434. int rc;
  435. if (op_bytes == 2)
  436. op_bytes = 3;
  437. *address = 0;
  438. rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
  439. if (rc)
  440. return rc;
  441. rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
  442. return rc;
  443. }
  444. int
  445. x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
  446. {
  447. unsigned d;
  448. u8 b, sib, twobyte = 0, rex_prefix = 0;
  449. u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
  450. unsigned long *override_base = NULL;
  451. unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
  452. int rc = 0;
  453. struct operand src, dst;
  454. unsigned long cr2 = ctxt->cr2;
  455. int mode = ctxt->mode;
  456. unsigned long modrm_ea;
  457. int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
  458. int no_wb = 0;
  459. u64 msr_data;
  460. /* Shadow copy of register state. Committed on successful emulation. */
  461. unsigned long _regs[NR_VCPU_REGS];
  462. unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
  463. unsigned long modrm_val = 0;
  464. memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
  465. switch (mode) {
  466. case X86EMUL_MODE_REAL:
  467. case X86EMUL_MODE_PROT16:
  468. op_bytes = ad_bytes = 2;
  469. break;
  470. case X86EMUL_MODE_PROT32:
  471. op_bytes = ad_bytes = 4;
  472. break;
  473. #ifdef CONFIG_X86_64
  474. case X86EMUL_MODE_PROT64:
  475. op_bytes = 4;
  476. ad_bytes = 8;
  477. break;
  478. #endif
  479. default:
  480. return -1;
  481. }
  482. /* Legacy prefixes. */
  483. for (i = 0; i < 8; i++) {
  484. switch (b = insn_fetch(u8, 1, _eip)) {
  485. case 0x66: /* operand-size override */
  486. op_bytes ^= 6; /* switch between 2/4 bytes */
  487. break;
  488. case 0x67: /* address-size override */
  489. if (mode == X86EMUL_MODE_PROT64)
  490. ad_bytes ^= 12; /* switch between 4/8 bytes */
  491. else
  492. ad_bytes ^= 6; /* switch between 2/4 bytes */
  493. break;
  494. case 0x2e: /* CS override */
  495. override_base = &ctxt->cs_base;
  496. break;
  497. case 0x3e: /* DS override */
  498. override_base = &ctxt->ds_base;
  499. break;
  500. case 0x26: /* ES override */
  501. override_base = &ctxt->es_base;
  502. break;
  503. case 0x64: /* FS override */
  504. override_base = &ctxt->fs_base;
  505. break;
  506. case 0x65: /* GS override */
  507. override_base = &ctxt->gs_base;
  508. break;
  509. case 0x36: /* SS override */
  510. override_base = &ctxt->ss_base;
  511. break;
  512. case 0xf0: /* LOCK */
  513. lock_prefix = 1;
  514. break;
  515. case 0xf3: /* REP/REPE/REPZ */
  516. rep_prefix = 1;
  517. break;
  518. case 0xf2: /* REPNE/REPNZ */
  519. break;
  520. default:
  521. goto done_prefixes;
  522. }
  523. }
  524. done_prefixes:
  525. /* REX prefix. */
  526. if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
  527. rex_prefix = b;
  528. if (b & 8)
  529. op_bytes = 8; /* REX.W */
  530. modrm_reg = (b & 4) << 1; /* REX.R */
  531. index_reg = (b & 2) << 2; /* REX.X */
  532. modrm_rm = base_reg = (b & 1) << 3; /* REX.B */
  533. b = insn_fetch(u8, 1, _eip);
  534. }
  535. /* Opcode byte(s). */
  536. d = opcode_table[b];
  537. if (d == 0) {
  538. /* Two-byte opcode? */
  539. if (b == 0x0f) {
  540. twobyte = 1;
  541. b = insn_fetch(u8, 1, _eip);
  542. d = twobyte_table[b];
  543. }
  544. /* Unrecognised? */
  545. if (d == 0)
  546. goto cannot_emulate;
  547. }
  548. /* ModRM and SIB bytes. */
  549. if (d & ModRM) {
  550. modrm = insn_fetch(u8, 1, _eip);
  551. modrm_mod |= (modrm & 0xc0) >> 6;
  552. modrm_reg |= (modrm & 0x38) >> 3;
  553. modrm_rm |= (modrm & 0x07);
  554. modrm_ea = 0;
  555. use_modrm_ea = 1;
  556. if (modrm_mod == 3) {
  557. modrm_val = *(unsigned long *)
  558. decode_register(modrm_rm, _regs, d & ByteOp);
  559. goto modrm_done;
  560. }
  561. if (ad_bytes == 2) {
  562. unsigned bx = _regs[VCPU_REGS_RBX];
  563. unsigned bp = _regs[VCPU_REGS_RBP];
  564. unsigned si = _regs[VCPU_REGS_RSI];
  565. unsigned di = _regs[VCPU_REGS_RDI];
  566. /* 16-bit ModR/M decode. */
  567. switch (modrm_mod) {
  568. case 0:
  569. if (modrm_rm == 6)
  570. modrm_ea += insn_fetch(u16, 2, _eip);
  571. break;
  572. case 1:
  573. modrm_ea += insn_fetch(s8, 1, _eip);
  574. break;
  575. case 2:
  576. modrm_ea += insn_fetch(u16, 2, _eip);
  577. break;
  578. }
  579. switch (modrm_rm) {
  580. case 0:
  581. modrm_ea += bx + si;
  582. break;
  583. case 1:
  584. modrm_ea += bx + di;
  585. break;
  586. case 2:
  587. modrm_ea += bp + si;
  588. break;
  589. case 3:
  590. modrm_ea += bp + di;
  591. break;
  592. case 4:
  593. modrm_ea += si;
  594. break;
  595. case 5:
  596. modrm_ea += di;
  597. break;
  598. case 6:
  599. if (modrm_mod != 0)
  600. modrm_ea += bp;
  601. break;
  602. case 7:
  603. modrm_ea += bx;
  604. break;
  605. }
  606. if (modrm_rm == 2 || modrm_rm == 3 ||
  607. (modrm_rm == 6 && modrm_mod != 0))
  608. if (!override_base)
  609. override_base = &ctxt->ss_base;
  610. modrm_ea = (u16)modrm_ea;
  611. } else {
  612. /* 32/64-bit ModR/M decode. */
  613. switch (modrm_rm) {
  614. case 4:
  615. case 12:
  616. sib = insn_fetch(u8, 1, _eip);
  617. index_reg |= (sib >> 3) & 7;
  618. base_reg |= sib & 7;
  619. scale = sib >> 6;
  620. switch (base_reg) {
  621. case 5:
  622. if (modrm_mod != 0)
  623. modrm_ea += _regs[base_reg];
  624. else
  625. modrm_ea += insn_fetch(s32, 4, _eip);
  626. break;
  627. default:
  628. modrm_ea += _regs[base_reg];
  629. }
  630. switch (index_reg) {
  631. case 4:
  632. break;
  633. default:
  634. modrm_ea += _regs[index_reg] << scale;
  635. }
  636. break;
  637. case 5:
  638. if (modrm_mod != 0)
  639. modrm_ea += _regs[modrm_rm];
  640. else if (mode == X86EMUL_MODE_PROT64)
  641. rip_relative = 1;
  642. break;
  643. default:
  644. modrm_ea += _regs[modrm_rm];
  645. break;
  646. }
  647. switch (modrm_mod) {
  648. case 0:
  649. if (modrm_rm == 5)
  650. modrm_ea += insn_fetch(s32, 4, _eip);
  651. break;
  652. case 1:
  653. modrm_ea += insn_fetch(s8, 1, _eip);
  654. break;
  655. case 2:
  656. modrm_ea += insn_fetch(s32, 4, _eip);
  657. break;
  658. }
  659. }
  660. if (!override_base)
  661. override_base = &ctxt->ds_base;
  662. if (mode == X86EMUL_MODE_PROT64 &&
  663. override_base != &ctxt->fs_base &&
  664. override_base != &ctxt->gs_base)
  665. override_base = NULL;
  666. if (override_base)
  667. modrm_ea += *override_base;
  668. if (rip_relative) {
  669. modrm_ea += _eip;
  670. switch (d & SrcMask) {
  671. case SrcImmByte:
  672. modrm_ea += 1;
  673. break;
  674. case SrcImm:
  675. if (d & ByteOp)
  676. modrm_ea += 1;
  677. else
  678. if (op_bytes == 8)
  679. modrm_ea += 4;
  680. else
  681. modrm_ea += op_bytes;
  682. }
  683. }
  684. if (ad_bytes != 8)
  685. modrm_ea = (u32)modrm_ea;
  686. cr2 = modrm_ea;
  687. modrm_done:
  688. ;
  689. }
  690. /*
  691. * Decode and fetch the source operand: register, memory
  692. * or immediate.
  693. */
  694. switch (d & SrcMask) {
  695. case SrcNone:
  696. break;
  697. case SrcReg:
  698. src.type = OP_REG;
  699. if (d & ByteOp) {
  700. src.ptr = decode_register(modrm_reg, _regs,
  701. (rex_prefix == 0));
  702. src.val = src.orig_val = *(u8 *) src.ptr;
  703. src.bytes = 1;
  704. } else {
  705. src.ptr = decode_register(modrm_reg, _regs, 0);
  706. switch ((src.bytes = op_bytes)) {
  707. case 2:
  708. src.val = src.orig_val = *(u16 *) src.ptr;
  709. break;
  710. case 4:
  711. src.val = src.orig_val = *(u32 *) src.ptr;
  712. break;
  713. case 8:
  714. src.val = src.orig_val = *(u64 *) src.ptr;
  715. break;
  716. }
  717. }
  718. break;
  719. case SrcMem16:
  720. src.bytes = 2;
  721. goto srcmem_common;
  722. case SrcMem32:
  723. src.bytes = 4;
  724. goto srcmem_common;
  725. case SrcMem:
  726. src.bytes = (d & ByteOp) ? 1 : op_bytes;
  727. srcmem_common:
  728. src.type = OP_MEM;
  729. src.ptr = (unsigned long *)cr2;
  730. if ((rc = ops->read_emulated((unsigned long)src.ptr,
  731. &src.val, src.bytes, ctxt)) != 0)
  732. goto done;
  733. src.orig_val = src.val;
  734. break;
  735. case SrcImm:
  736. src.type = OP_IMM;
  737. src.ptr = (unsigned long *)_eip;
  738. src.bytes = (d & ByteOp) ? 1 : op_bytes;
  739. if (src.bytes == 8)
  740. src.bytes = 4;
  741. /* NB. Immediates are sign-extended as necessary. */
  742. switch (src.bytes) {
  743. case 1:
  744. src.val = insn_fetch(s8, 1, _eip);
  745. break;
  746. case 2:
  747. src.val = insn_fetch(s16, 2, _eip);
  748. break;
  749. case 4:
  750. src.val = insn_fetch(s32, 4, _eip);
  751. break;
  752. }
  753. break;
  754. case SrcImmByte:
  755. src.type = OP_IMM;
  756. src.ptr = (unsigned long *)_eip;
  757. src.bytes = 1;
  758. src.val = insn_fetch(s8, 1, _eip);
  759. break;
  760. }
  761. /* Decode and fetch the destination operand: register or memory. */
  762. switch (d & DstMask) {
  763. case ImplicitOps:
  764. /* Special instructions do their own operand decoding. */
  765. goto special_insn;
  766. case DstReg:
  767. dst.type = OP_REG;
  768. if ((d & ByteOp)
  769. && !(twobyte && (b == 0xb6 || b == 0xb7))) {
  770. dst.ptr = decode_register(modrm_reg, _regs,
  771. (rex_prefix == 0));
  772. dst.val = *(u8 *) dst.ptr;
  773. dst.bytes = 1;
  774. } else {
  775. dst.ptr = decode_register(modrm_reg, _regs, 0);
  776. switch ((dst.bytes = op_bytes)) {
  777. case 2:
  778. dst.val = *(u16 *)dst.ptr;
  779. break;
  780. case 4:
  781. dst.val = *(u32 *)dst.ptr;
  782. break;
  783. case 8:
  784. dst.val = *(u64 *)dst.ptr;
  785. break;
  786. }
  787. }
  788. break;
  789. case DstMem:
  790. dst.type = OP_MEM;
  791. dst.ptr = (unsigned long *)cr2;
  792. dst.bytes = (d & ByteOp) ? 1 : op_bytes;
  793. if (d & BitOp) {
  794. unsigned long mask = ~(dst.bytes * 8 - 1);
  795. dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
  796. }
  797. if (!(d & Mov) && /* optimisation - avoid slow emulated read */
  798. ((rc = ops->read_emulated((unsigned long)dst.ptr,
  799. &dst.val, dst.bytes, ctxt)) != 0))
  800. goto done;
  801. break;
  802. }
  803. dst.orig_val = dst.val;
  804. if (twobyte)
  805. goto twobyte_insn;
  806. switch (b) {
  807. case 0x00 ... 0x05:
  808. add: /* add */
  809. emulate_2op_SrcV("add", src, dst, _eflags);
  810. break;
  811. case 0x08 ... 0x0d:
  812. or: /* or */
  813. emulate_2op_SrcV("or", src, dst, _eflags);
  814. break;
  815. case 0x10 ... 0x15:
  816. adc: /* adc */
  817. emulate_2op_SrcV("adc", src, dst, _eflags);
  818. break;
  819. case 0x18 ... 0x1d:
  820. sbb: /* sbb */
  821. emulate_2op_SrcV("sbb", src, dst, _eflags);
  822. break;
  823. case 0x20 ... 0x25:
  824. and: /* and */
  825. emulate_2op_SrcV("and", src, dst, _eflags);
  826. break;
  827. case 0x28 ... 0x2d:
  828. sub: /* sub */
  829. emulate_2op_SrcV("sub", src, dst, _eflags);
  830. break;
  831. case 0x30 ... 0x35:
  832. xor: /* xor */
  833. emulate_2op_SrcV("xor", src, dst, _eflags);
  834. break;
  835. case 0x38 ... 0x3d:
  836. cmp: /* cmp */
  837. emulate_2op_SrcV("cmp", src, dst, _eflags);
  838. break;
  839. case 0x63: /* movsxd */
  840. if (mode != X86EMUL_MODE_PROT64)
  841. goto cannot_emulate;
  842. dst.val = (s32) src.val;
  843. break;
  844. case 0x80 ... 0x83: /* Grp1 */
  845. switch (modrm_reg) {
  846. case 0:
  847. goto add;
  848. case 1:
  849. goto or;
  850. case 2:
  851. goto adc;
  852. case 3:
  853. goto sbb;
  854. case 4:
  855. goto and;
  856. case 5:
  857. goto sub;
  858. case 6:
  859. goto xor;
  860. case 7:
  861. goto cmp;
  862. }
  863. break;
  864. case 0x84 ... 0x85:
  865. test: /* test */
  866. emulate_2op_SrcV("test", src, dst, _eflags);
  867. break;
  868. case 0x86 ... 0x87: /* xchg */
  869. /* Write back the register source. */
  870. switch (dst.bytes) {
  871. case 1:
  872. *(u8 *) src.ptr = (u8) dst.val;
  873. break;
  874. case 2:
  875. *(u16 *) src.ptr = (u16) dst.val;
  876. break;
  877. case 4:
  878. *src.ptr = (u32) dst.val;
  879. break; /* 64b reg: zero-extend */
  880. case 8:
  881. *src.ptr = dst.val;
  882. break;
  883. }
  884. /*
  885. * Write back the memory destination with implicit LOCK
  886. * prefix.
  887. */
  888. dst.val = src.val;
  889. lock_prefix = 1;
  890. break;
  891. case 0xa0 ... 0xa1: /* mov */
  892. dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
  893. dst.val = src.val;
  894. _eip += ad_bytes; /* skip src displacement */
  895. break;
  896. case 0xa2 ... 0xa3: /* mov */
  897. dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
  898. _eip += ad_bytes; /* skip dst displacement */
  899. break;
  900. case 0x88 ... 0x8b: /* mov */
  901. case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
  902. dst.val = src.val;
  903. break;
  904. case 0x8f: /* pop (sole member of Grp1a) */
  905. /* 64-bit mode: POP always pops a 64-bit operand. */
  906. if (mode == X86EMUL_MODE_PROT64)
  907. dst.bytes = 8;
  908. if ((rc = ops->read_std(register_address(ctxt->ss_base,
  909. _regs[VCPU_REGS_RSP]),
  910. &dst.val, dst.bytes, ctxt)) != 0)
  911. goto done;
  912. register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
  913. break;
  914. case 0xc0 ... 0xc1:
  915. grp2: /* Grp2 */
  916. switch (modrm_reg) {
  917. case 0: /* rol */
  918. emulate_2op_SrcB("rol", src, dst, _eflags);
  919. break;
  920. case 1: /* ror */
  921. emulate_2op_SrcB("ror", src, dst, _eflags);
  922. break;
  923. case 2: /* rcl */
  924. emulate_2op_SrcB("rcl", src, dst, _eflags);
  925. break;
  926. case 3: /* rcr */
  927. emulate_2op_SrcB("rcr", src, dst, _eflags);
  928. break;
  929. case 4: /* sal/shl */
  930. case 6: /* sal/shl */
  931. emulate_2op_SrcB("sal", src, dst, _eflags);
  932. break;
  933. case 5: /* shr */
  934. emulate_2op_SrcB("shr", src, dst, _eflags);
  935. break;
  936. case 7: /* sar */
  937. emulate_2op_SrcB("sar", src, dst, _eflags);
  938. break;
  939. }
  940. break;
  941. case 0xd0 ... 0xd1: /* Grp2 */
  942. src.val = 1;
  943. goto grp2;
  944. case 0xd2 ... 0xd3: /* Grp2 */
  945. src.val = _regs[VCPU_REGS_RCX];
  946. goto grp2;
  947. case 0xf6 ... 0xf7: /* Grp3 */
  948. switch (modrm_reg) {
  949. case 0 ... 1: /* test */
  950. /*
  951. * Special case in Grp3: test has an immediate
  952. * source operand.
  953. */
  954. src.type = OP_IMM;
  955. src.ptr = (unsigned long *)_eip;
  956. src.bytes = (d & ByteOp) ? 1 : op_bytes;
  957. if (src.bytes == 8)
  958. src.bytes = 4;
  959. switch (src.bytes) {
  960. case 1:
  961. src.val = insn_fetch(s8, 1, _eip);
  962. break;
  963. case 2:
  964. src.val = insn_fetch(s16, 2, _eip);
  965. break;
  966. case 4:
  967. src.val = insn_fetch(s32, 4, _eip);
  968. break;
  969. }
  970. goto test;
  971. case 2: /* not */
  972. dst.val = ~dst.val;
  973. break;
  974. case 3: /* neg */
  975. emulate_1op("neg", dst, _eflags);
  976. break;
  977. default:
  978. goto cannot_emulate;
  979. }
  980. break;
  981. case 0xfe ... 0xff: /* Grp4/Grp5 */
  982. switch (modrm_reg) {
  983. case 0: /* inc */
  984. emulate_1op("inc", dst, _eflags);
  985. break;
  986. case 1: /* dec */
  987. emulate_1op("dec", dst, _eflags);
  988. break;
  989. case 6: /* push */
  990. /* 64-bit mode: PUSH always pushes a 64-bit operand. */
  991. if (mode == X86EMUL_MODE_PROT64) {
  992. dst.bytes = 8;
  993. if ((rc = ops->read_std((unsigned long)dst.ptr,
  994. &dst.val, 8,
  995. ctxt)) != 0)
  996. goto done;
  997. }
  998. register_address_increment(_regs[VCPU_REGS_RSP],
  999. -dst.bytes);
  1000. if ((rc = ops->write_std(
  1001. register_address(ctxt->ss_base,
  1002. _regs[VCPU_REGS_RSP]),
  1003. &dst.val, dst.bytes, ctxt)) != 0)
  1004. goto done;
  1005. no_wb = 1;
  1006. break;
  1007. default:
  1008. goto cannot_emulate;
  1009. }
  1010. break;
  1011. }
  1012. writeback:
  1013. if (!no_wb) {
  1014. switch (dst.type) {
  1015. case OP_REG:
  1016. /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
  1017. switch (dst.bytes) {
  1018. case 1:
  1019. *(u8 *)dst.ptr = (u8)dst.val;
  1020. break;
  1021. case 2:
  1022. *(u16 *)dst.ptr = (u16)dst.val;
  1023. break;
  1024. case 4:
  1025. *dst.ptr = (u32)dst.val;
  1026. break; /* 64b: zero-ext */
  1027. case 8:
  1028. *dst.ptr = dst.val;
  1029. break;
  1030. }
  1031. break;
  1032. case OP_MEM:
  1033. if (lock_prefix)
  1034. rc = ops->cmpxchg_emulated((unsigned long)dst.
  1035. ptr, &dst.orig_val,
  1036. &dst.val, dst.bytes,
  1037. ctxt);
  1038. else
  1039. rc = ops->write_emulated((unsigned long)dst.ptr,
  1040. &dst.val, dst.bytes,
  1041. ctxt);
  1042. if (rc != 0)
  1043. goto done;
  1044. default:
  1045. break;
  1046. }
  1047. }
  1048. /* Commit shadow register state. */
  1049. memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
  1050. ctxt->eflags = _eflags;
  1051. ctxt->vcpu->rip = _eip;
  1052. done:
  1053. return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
  1054. special_insn:
  1055. if (twobyte)
  1056. goto twobyte_special_insn;
  1057. if (rep_prefix) {
  1058. if (_regs[VCPU_REGS_RCX] == 0) {
  1059. ctxt->vcpu->rip = _eip;
  1060. goto done;
  1061. }
  1062. _regs[VCPU_REGS_RCX]--;
  1063. _eip = ctxt->vcpu->rip;
  1064. }
  1065. switch (b) {
  1066. case 0xa4 ... 0xa5: /* movs */
  1067. dst.type = OP_MEM;
  1068. dst.bytes = (d & ByteOp) ? 1 : op_bytes;
  1069. dst.ptr = (unsigned long *)register_address(ctxt->es_base,
  1070. _regs[VCPU_REGS_RDI]);
  1071. if ((rc = ops->read_emulated(register_address(
  1072. override_base ? *override_base : ctxt->ds_base,
  1073. _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt)) != 0)
  1074. goto done;
  1075. register_address_increment(_regs[VCPU_REGS_RSI],
  1076. (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
  1077. register_address_increment(_regs[VCPU_REGS_RDI],
  1078. (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
  1079. break;
  1080. case 0xa6 ... 0xa7: /* cmps */
  1081. DPRINTF("Urk! I don't handle CMPS.\n");
  1082. goto cannot_emulate;
  1083. case 0xaa ... 0xab: /* stos */
  1084. dst.type = OP_MEM;
  1085. dst.bytes = (d & ByteOp) ? 1 : op_bytes;
  1086. dst.ptr = (unsigned long *)cr2;
  1087. dst.val = _regs[VCPU_REGS_RAX];
  1088. register_address_increment(_regs[VCPU_REGS_RDI],
  1089. (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
  1090. break;
  1091. case 0xac ... 0xad: /* lods */
  1092. dst.type = OP_REG;
  1093. dst.bytes = (d & ByteOp) ? 1 : op_bytes;
  1094. dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
  1095. if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
  1096. goto done;
  1097. register_address_increment(_regs[VCPU_REGS_RSI],
  1098. (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
  1099. break;
  1100. case 0xae ... 0xaf: /* scas */
  1101. DPRINTF("Urk! I don't handle SCAS.\n");
  1102. goto cannot_emulate;
  1103. case 0xf4: /* hlt */
  1104. ctxt->vcpu->halt_request = 1;
  1105. goto done;
  1106. case 0xc3: /* ret */
  1107. dst.ptr = &_eip;
  1108. goto pop_instruction;
  1109. case 0x58 ... 0x5f: /* pop reg */
  1110. dst.ptr = (unsigned long *)&_regs[b & 0x7];
  1111. pop_instruction:
  1112. if ((rc = ops->read_std(register_address(ctxt->ss_base,
  1113. _regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt)) != 0)
  1114. goto done;
  1115. register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
  1116. no_wb = 1; /* Disable writeback. */
  1117. break;
  1118. }
  1119. goto writeback;
  1120. twobyte_insn:
  1121. switch (b) {
  1122. case 0x01: /* lgdt, lidt, lmsw */
  1123. /* Disable writeback. */
  1124. no_wb = 1;
  1125. switch (modrm_reg) {
  1126. u16 size;
  1127. unsigned long address;
  1128. case 2: /* lgdt */
  1129. rc = read_descriptor(ctxt, ops, src.ptr,
  1130. &size, &address, op_bytes);
  1131. if (rc)
  1132. goto done;
  1133. realmode_lgdt(ctxt->vcpu, size, address);
  1134. break;
  1135. case 3: /* lidt */
  1136. rc = read_descriptor(ctxt, ops, src.ptr,
  1137. &size, &address, op_bytes);
  1138. if (rc)
  1139. goto done;
  1140. realmode_lidt(ctxt->vcpu, size, address);
  1141. break;
  1142. case 4: /* smsw */
  1143. if (modrm_mod != 3)
  1144. goto cannot_emulate;
  1145. *(u16 *)&_regs[modrm_rm]
  1146. = realmode_get_cr(ctxt->vcpu, 0);
  1147. break;
  1148. case 6: /* lmsw */
  1149. if (modrm_mod != 3)
  1150. goto cannot_emulate;
  1151. realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
  1152. break;
  1153. case 7: /* invlpg*/
  1154. emulate_invlpg(ctxt->vcpu, cr2);
  1155. break;
  1156. default:
  1157. goto cannot_emulate;
  1158. }
  1159. break;
  1160. case 0x21: /* mov from dr to reg */
  1161. no_wb = 1;
  1162. if (modrm_mod != 3)
  1163. goto cannot_emulate;
  1164. rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
  1165. break;
  1166. case 0x23: /* mov from reg to dr */
  1167. no_wb = 1;
  1168. if (modrm_mod != 3)
  1169. goto cannot_emulate;
  1170. rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
  1171. break;
  1172. case 0x40 ... 0x4f: /* cmov */
  1173. dst.val = dst.orig_val = src.val;
  1174. d &= ~Mov; /* default to no move */
  1175. /*
  1176. * First, assume we're decoding an even cmov opcode
  1177. * (lsb == 0).
  1178. */
  1179. switch ((b & 15) >> 1) {
  1180. case 0: /* cmovo */
  1181. d |= (_eflags & EFLG_OF) ? Mov : 0;
  1182. break;
  1183. case 1: /* cmovb/cmovc/cmovnae */
  1184. d |= (_eflags & EFLG_CF) ? Mov : 0;
  1185. break;
  1186. case 2: /* cmovz/cmove */
  1187. d |= (_eflags & EFLG_ZF) ? Mov : 0;
  1188. break;
  1189. case 3: /* cmovbe/cmovna */
  1190. d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
  1191. break;
  1192. case 4: /* cmovs */
  1193. d |= (_eflags & EFLG_SF) ? Mov : 0;
  1194. break;
  1195. case 5: /* cmovp/cmovpe */
  1196. d |= (_eflags & EFLG_PF) ? Mov : 0;
  1197. break;
  1198. case 7: /* cmovle/cmovng */
  1199. d |= (_eflags & EFLG_ZF) ? Mov : 0;
  1200. /* fall through */
  1201. case 6: /* cmovl/cmovnge */
  1202. d |= (!(_eflags & EFLG_SF) !=
  1203. !(_eflags & EFLG_OF)) ? Mov : 0;
  1204. break;
  1205. }
  1206. /* Odd cmov opcodes (lsb == 1) have inverted sense. */
  1207. d ^= (b & 1) ? Mov : 0;
  1208. break;
  1209. case 0xb0 ... 0xb1: /* cmpxchg */
  1210. /*
  1211. * Save real source value, then compare EAX against
  1212. * destination.
  1213. */
  1214. src.orig_val = src.val;
  1215. src.val = _regs[VCPU_REGS_RAX];
  1216. emulate_2op_SrcV("cmp", src, dst, _eflags);
  1217. /* Always write back. The question is: where to? */
  1218. d |= Mov;
  1219. if (_eflags & EFLG_ZF) {
  1220. /* Success: write back to memory. */
  1221. dst.val = src.orig_val;
  1222. } else {
  1223. /* Failure: write the value we saw to EAX. */
  1224. dst.type = OP_REG;
  1225. dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
  1226. }
  1227. break;
  1228. case 0xa3:
  1229. bt: /* bt */
  1230. src.val &= (dst.bytes << 3) - 1; /* only subword offset */
  1231. emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
  1232. break;
  1233. case 0xb3:
  1234. btr: /* btr */
  1235. src.val &= (dst.bytes << 3) - 1; /* only subword offset */
  1236. emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
  1237. break;
  1238. case 0xab:
  1239. bts: /* bts */
  1240. src.val &= (dst.bytes << 3) - 1; /* only subword offset */
  1241. emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
  1242. break;
  1243. case 0xb6 ... 0xb7: /* movzx */
  1244. dst.bytes = op_bytes;
  1245. dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
  1246. break;
  1247. case 0xbb:
  1248. btc: /* btc */
  1249. src.val &= (dst.bytes << 3) - 1; /* only subword offset */
  1250. emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
  1251. break;
  1252. case 0xba: /* Grp8 */
  1253. switch (modrm_reg & 3) {
  1254. case 0:
  1255. goto bt;
  1256. case 1:
  1257. goto bts;
  1258. case 2:
  1259. goto btr;
  1260. case 3:
  1261. goto btc;
  1262. }
  1263. break;
  1264. case 0xbe ... 0xbf: /* movsx */
  1265. dst.bytes = op_bytes;
  1266. dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
  1267. break;
  1268. }
  1269. goto writeback;
  1270. twobyte_special_insn:
  1271. /* Disable writeback. */
  1272. no_wb = 1;
  1273. switch (b) {
  1274. case 0x09: /* wbinvd */
  1275. break;
  1276. case 0x0d: /* GrpP (prefetch) */
  1277. case 0x18: /* Grp16 (prefetch/nop) */
  1278. break;
  1279. case 0x06:
  1280. emulate_clts(ctxt->vcpu);
  1281. break;
  1282. case 0x20: /* mov cr, reg */
  1283. if (modrm_mod != 3)
  1284. goto cannot_emulate;
  1285. _regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
  1286. break;
  1287. case 0x22: /* mov reg, cr */
  1288. if (modrm_mod != 3)
  1289. goto cannot_emulate;
  1290. realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
  1291. break;
  1292. case 0x30:
  1293. /* wrmsr */
  1294. msr_data = (u32)_regs[VCPU_REGS_RAX]
  1295. | ((u64)_regs[VCPU_REGS_RDX] << 32);
  1296. rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
  1297. if (rc) {
  1298. kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
  1299. _eip = ctxt->vcpu->rip;
  1300. }
  1301. rc = X86EMUL_CONTINUE;
  1302. break;
  1303. case 0x32:
  1304. /* rdmsr */
  1305. rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
  1306. if (rc) {
  1307. kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
  1308. _eip = ctxt->vcpu->rip;
  1309. } else {
  1310. _regs[VCPU_REGS_RAX] = (u32)msr_data;
  1311. _regs[VCPU_REGS_RDX] = msr_data >> 32;
  1312. }
  1313. rc = X86EMUL_CONTINUE;
  1314. break;
  1315. case 0xc7: /* Grp9 (cmpxchg8b) */
  1316. {
  1317. u64 old, new;
  1318. if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
  1319. goto done;
  1320. if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
  1321. ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
  1322. _regs[VCPU_REGS_RAX] = (u32) (old >> 0);
  1323. _regs[VCPU_REGS_RDX] = (u32) (old >> 32);
  1324. _eflags &= ~EFLG_ZF;
  1325. } else {
  1326. new = ((u64)_regs[VCPU_REGS_RCX] << 32)
  1327. | (u32) _regs[VCPU_REGS_RBX];
  1328. if ((rc = ops->cmpxchg_emulated(cr2, &old,
  1329. &new, 8, ctxt)) != 0)
  1330. goto done;
  1331. _eflags |= EFLG_ZF;
  1332. }
  1333. break;
  1334. }
  1335. }
  1336. goto writeback;
  1337. cannot_emulate:
  1338. DPRINTF("Cannot emulate %02x\n", b);
  1339. return -1;
  1340. }
  1341. #ifdef __XEN__
  1342. #include <asm/mm.h>
  1343. #include <asm/uaccess.h>
  1344. int
  1345. x86_emulate_read_std(unsigned long addr,
  1346. unsigned long *val,
  1347. unsigned int bytes, struct x86_emulate_ctxt *ctxt)
  1348. {
  1349. unsigned int rc;
  1350. *val = 0;
  1351. if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
  1352. propagate_page_fault(addr + bytes - rc, 0); /* read fault */
  1353. return X86EMUL_PROPAGATE_FAULT;
  1354. }
  1355. return X86EMUL_CONTINUE;
  1356. }
  1357. int
  1358. x86_emulate_write_std(unsigned long addr,
  1359. unsigned long val,
  1360. unsigned int bytes, struct x86_emulate_ctxt *ctxt)
  1361. {
  1362. unsigned int rc;
  1363. if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
  1364. propagate_page_fault(addr + bytes - rc, PGERR_write_access);
  1365. return X86EMUL_PROPAGATE_FAULT;
  1366. }
  1367. return X86EMUL_CONTINUE;
  1368. }
  1369. #endif