/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */

enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
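
/*
 * Worked example of reading a table entry (illustrative comment only):
 * opcode 0x00 is ADD r/m8, r8, so opcode_table[0x00] below is
 * ByteOp | DstMem | SrcReg | ModRM -- byte operands, destination taken
 * from the ModRM r/m field (register or memory), source taken from the
 * ModRM reg field.
 */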
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcImm | Mov, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, Group | Group4, Group | Group5,
};

static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
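
/*
 * Illustrative note on indexing: a group entry lives at
 * group_table[group * 8 + reg], where reg is bits 5:3 of the ModRM byte
 * (x86_decode_insn() computes (group << 3) + ((c->modrm >> 3) & 7)).
 * For example, opcode 0x80 with reg == 7 is CMP r/m8, imm8 and selects
 * the last Group1_80 entry above.
 */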
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; " \
	"push %"_tmp"; " \
	"push %"_tmp"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		\
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "4", "2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0", "4", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK)); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "4", "2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0", "4", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK)); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long __tmp; \
		switch ((_dst).bytes) { \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "4", "2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0", "4", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (__tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK)); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		\
		switch ((_dst).bytes) { \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "3", "2") \
				_op"b %1; " \
				_POST_EFLAGS("0", "3", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK)); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "3", "2") \
				_op"w %1; " \
				_POST_EFLAGS("0", "3", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK)); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "3", "2") \
				_op"l %1; " \
				_POST_EFLAGS("0", "3", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK)); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "4", "2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0", "4", "2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK)); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "3", "2") \
			_op"q %1; " \
			_POST_EFLAGS("0", "3", "2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK)); \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
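
/*
 * Typical use, as seen throughout the decoder below (the enclosing
 * function must provide 'rc', 'ctxt', 'ops' and a 'done' label):
 *
 *	c->b = insn_fetch(u8, 1, c->eip);
 */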
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
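
/*
 * Worked example of the masking above: with c->ad_bytes == 2, ad_mask()
 * is 0xffff, so incrementing a register holding 0xffff by 1 wraps the
 * low 16 bits to 0x0000 while preserving the upper bits -- exactly the
 * wrap-around behaviour of 16-bit addressing.
 */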
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}

/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
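
/*
 * Example (assuming the little-endian host layout this code relies on):
 * with highbyte_regs set, modrm_reg == 4 yields a pointer to byte 1 of
 * the RAX slot, i.e. the legacy AH register; without highbyte_regs it
 * would select RSP instead.
 */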
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}

static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
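
/*
 * Example: condition code 4 (opcode 0x74, "jz") gives
 * (condition & 15) >> 1 == 2, so case 2 tests ZF; condition 5 (0x75,
 * "jnz") hits the same case, and the odd low bit inverts the result.
 */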
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}
	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->override_base)
				c->override_base = &ctxt->ss_base;
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		switch (c->modrm_rm) {
		case 4:
		case 12:
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			switch (base_reg) {
			case 5:
				if (c->modrm_mod != 0)
					c->modrm_ea += c->regs[base_reg];
				else
					c->modrm_ea +=
						insn_fetch(s32, 4, c->eip);
				break;
			default:
				c->modrm_ea += c->regs[base_reg];
			}
			switch (index_reg) {
			case 4:
				break;
			default:
				c->modrm_ea += c->regs[index_reg] << scale;
			}
			break;
		case 5:
			if (c->modrm_mod != 0)
				c->modrm_ea += c->regs[c->modrm_rm];
			else if (ctxt->mode == X86EMUL_MODE_PROT64)
				rip_relative = 1;
			break;
		default:
			c->modrm_ea += c->regs[c->modrm_rm];
			break;
		}
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
	if (rip_relative) {
		c->modrm_ea += c->eip;
		switch (c->d & SrcMask) {
		case SrcImmByte:
			c->modrm_ea += 1;
			break;
		case SrcImm:
			if (c->d & ByteOp)
				c->modrm_ea += 1;
			else
				if (c->op_bytes == 8)
					c->modrm_ea += 4;
				else
					c->modrm_ea += c->op_bytes;
		}
	}
done:
	return rc;
}
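
/*
 * Worked example of the 16-bit path above: ModRM byte 0x42 decodes as
 * mod = 01, reg = 000, rm = 010, giving an effective address of
 * bp + si plus a sign-extended 8-bit displacement, with SS as the
 * default segment because the base register is BP.
 */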
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}

int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */
	memset(c, 0, sizeof(struct decode_cache));
	c->eip = ctxt->vcpu->arch.rip;
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x2e:	/* CS override */
			c->override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			c->override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			c->override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			c->override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			c->override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			c->override_base = &ctxt->ss_base;
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */
		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;
		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->override_base)
		c->override_base = &ctxt->ds_base;
	if (mode == X86EMUL_MODE_PROT64 &&
	    c->override_base != &ctxt->fs_base &&
	    c->override_base != &ctxt->gs_base)
		c->override_base = NULL;

	if (c->override_base)
		c->modrm_ea += *c->override_base;

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}

static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ctxt->ss_base,
					       c->regs[VCPU_REGS_RSP]);
}
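
/*
 * Note (explanatory only): emulate_push() does not store to guest
 * memory itself; it predecrements RSP and points c->dst at the new top
 * of stack, presumably leaving the actual store to the common
 * writeback() path below.
 */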
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ctxt->ss_base,
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
	return 0;
}

static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}

static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}

static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;
	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
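
/*
 * Example of the cmpxchg8b semantics implemented above: if the 64-bit
 * value at memop equals EDX:EAX it is replaced by ECX:EBX and ZF is
 * set; otherwise EDX:EAX is loaded from memory and ZF is cleared.
 */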
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
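
/*
 * Example of the OP_REG zero-extension rule: a 4-byte destination is
 * stored through the full unsigned long pointer, so emulating
 * "mov $1, %eax" in 64-bit mode clears bits 63:32 of RAX, matching
 * hardware behaviour.
 */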
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->arch.rip = c->eip;
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = ctxt->vcpu->arch.rip;
	}
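
	/*
	 * Example: "repe cmpsb" (f3 a6) takes the path above; RCX is
	 * decremented each iteration, and the loop also terminates, per
	 * the second condition, as soon as a byte comparison clears ZF.
	 */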
  1210. if (c->src.type == OP_MEM) {
  1211. c->src.ptr = (unsigned long *)memop;
  1212. c->src.val = 0;
  1213. rc = ops->read_emulated((unsigned long)c->src.ptr,
  1214. &c->src.val,
  1215. c->src.bytes,
  1216. ctxt->vcpu);
  1217. if (rc != 0)
  1218. goto done;
  1219. c->src.orig_val = c->src.val;
  1220. }
  1221. if ((c->d & DstMask) == ImplicitOps)
  1222. goto special_insn;
  1223. if (c->dst.type == OP_MEM) {
  1224. c->dst.ptr = (unsigned long *)memop;
  1225. c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
  1226. c->dst.val = 0;
  1227. if (c->d & BitOp) {
  1228. unsigned long mask = ~(c->dst.bytes * 8 - 1);
  1229. c->dst.ptr = (void *)c->dst.ptr +
  1230. (c->src.val & mask) / 8;
  1231. }
  1232. if (!(c->d & Mov) &&
  1233. /* optimisation - avoid slow emulated read */
  1234. ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
  1235. &c->dst.val,
  1236. c->dst.bytes, ctxt->vcpu)) != 0))
  1237. goto done;
  1238. }
  1239. c->dst.orig_val = c->dst.val;
  1240. special_insn:
  1241. if (c->twobyte)
  1242. goto twobyte_insn;
  1243. switch (c->b) {
  1244. case 0x00 ... 0x05:
  1245. add: /* add */
  1246. emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
  1247. break;
  1248. case 0x08 ... 0x0d:
  1249. or: /* or */
  1250. emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
  1251. break;
  1252. case 0x10 ... 0x15:
  1253. adc: /* adc */
  1254. emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
  1255. break;
  1256. case 0x18 ... 0x1d:
  1257. sbb: /* sbb */
  1258. emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
  1259. break;
  1260. case 0x20 ... 0x23:
  1261. and: /* and */
  1262. emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
  1263. break;
  1264. case 0x24: /* and al imm8 */
  1265. c->dst.type = OP_REG;
  1266. c->dst.ptr = &c->regs[VCPU_REGS_RAX];
  1267. c->dst.val = *(u8 *)c->dst.ptr;
  1268. c->dst.bytes = 1;
  1269. c->dst.orig_val = c->dst.val;
  1270. goto and;
  1271. case 0x25: /* and ax imm16, or eax imm32 */
  1272. c->dst.type = OP_REG;
  1273. c->dst.bytes = c->op_bytes;
  1274. c->dst.ptr = &c->regs[VCPU_REGS_RAX];
  1275. if (c->op_bytes == 2)
  1276. c->dst.val = *(u16 *)c->dst.ptr;
  1277. else
  1278. c->dst.val = *(u32 *)c->dst.ptr;
  1279. c->dst.orig_val = c->dst.val;
  1280. goto and;
  1281. case 0x28 ... 0x2d:
  1282. sub: /* sub */
  1283. emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
  1284. break;
  1285. case 0x30 ... 0x35:
  1286. xor: /* xor */
  1287. emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
  1288. break;
  1289. case 0x38 ... 0x3d:
  1290. cmp: /* cmp */
  1291. emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
  1292. break;
  1293. case 0x40 ... 0x47: /* inc r16/r32 */
  1294. emulate_1op("inc", c->dst, ctxt->eflags);
  1295. break;
  1296. case 0x48 ... 0x4f: /* dec r16/r32 */
  1297. emulate_1op("dec", c->dst, ctxt->eflags);
  1298. break;
  1299. case 0x50 ... 0x57: /* push reg */
  1300. c->dst.type = OP_MEM;
  1301. c->dst.bytes = c->op_bytes;
  1302. c->dst.val = c->src.val;
  1303. register_address_increment(c, &c->regs[VCPU_REGS_RSP],
  1304. -c->op_bytes);
  1305. c->dst.ptr = (void *) register_address(
  1306. c, ctxt->ss_base, c->regs[VCPU_REGS_RSP]);
  1307. break;
  1308. case 0x58 ... 0x5f: /* pop reg */
  1309. pop_instruction:
  1310. if ((rc = ops->read_std(register_address(c, ctxt->ss_base,
  1311. c->regs[VCPU_REGS_RSP]), c->dst.ptr,
  1312. c->op_bytes, ctxt->vcpu)) != 0)
  1313. goto done;
  1314. register_address_increment(c, &c->regs[VCPU_REGS_RSP],
  1315. c->op_bytes);
  1316. c->dst.type = OP_NONE; /* Disable writeback. */
  1317. break;
  1318. case 0x63: /* movsxd */
  1319. if (ctxt->mode != X86EMUL_MODE_PROT64)
  1320. goto cannot_emulate;
  1321. c->dst.val = (s32) c->src.val;
  1322. break;
  1323. case 0x68: /* push imm */
  1324. case 0x6a: /* push imm8 */
  1325. emulate_push(ctxt);
  1326. break;
  1327. case 0x6c: /* insb */
  1328. case 0x6d: /* insw/insd */
  1329. if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
  1330. 1,
  1331. (c->d & ByteOp) ? 1 : c->op_bytes,
  1332. c->rep_prefix ?
  1333. address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
  1334. (ctxt->eflags & EFLG_DF),
  1335. register_address(c, ctxt->es_base,
  1336. c->regs[VCPU_REGS_RDI]),
  1337. c->rep_prefix,
  1338. c->regs[VCPU_REGS_RDX]) == 0) {
  1339. c->eip = saved_eip;
  1340. return -1;
  1341. }
  1342. return 0;
  1343. case 0x6e: /* outsb */
  1344. case 0x6f: /* outsw/outsd */
  1345. if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
  1346. 0,
  1347. (c->d & ByteOp) ? 1 : c->op_bytes,
  1348. c->rep_prefix ?
  1349. address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
  1350. (ctxt->eflags & EFLG_DF),
  1351. register_address(c, c->override_base ?
  1352. *c->override_base :
  1353. ctxt->ds_base,
  1354. c->regs[VCPU_REGS_RSI]),
  1355. c->rep_prefix,
  1356. c->regs[VCPU_REGS_RDX]) == 0) {
  1357. c->eip = saved_eip;
  1358. return -1;
  1359. }
  1360. return 0;
  1361. case 0x70 ... 0x7f: /* jcc (short) */ {
  1362. int rel = insn_fetch(s8, 1, c->eip);
  1363. if (test_cc(c->b, ctxt->eflags))
  1364. jmp_rel(c, rel);
  1365. break;
  1366. }
  1367. case 0x80 ... 0x83: /* Grp1 */
  1368. switch (c->modrm_reg) {
  1369. case 0:
  1370. goto add;
  1371. case 1:
  1372. goto or;
  1373. case 2:
  1374. goto adc;
  1375. case 3:
  1376. goto sbb;
  1377. case 4:
  1378. goto and;
  1379. case 5:
  1380. goto sub;
  1381. case 6:
  1382. goto xor;
  1383. case 7:
  1384. goto cmp;
  1385. }
  1386. break;
  1387. case 0x84 ... 0x85:
  1388. emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
  1389. break;
  1390. case 0x86 ... 0x87: /* xchg */
  1391. xchg:
  1392. /* Write back the register source. */
  1393. switch (c->dst.bytes) {
  1394. case 1:
  1395. *(u8 *) c->src.ptr = (u8) c->dst.val;
  1396. break;
  1397. case 2:
  1398. *(u16 *) c->src.ptr = (u16) c->dst.val;
  1399. break;
  1400. case 4:
  1401. *c->src.ptr = (u32) c->dst.val;
  1402. break; /* 64b reg: zero-extend */
  1403. case 8:
  1404. *c->src.ptr = c->dst.val;
  1405. break;
  1406. }
  1407. /*
  1408. * Write back the memory destination with implicit LOCK
  1409. * prefix.
  1410. */
  1411. c->dst.val = c->src.val;
  1412. c->lock_prefix = 1;
  1413. break;
  1414. case 0x88 ... 0x8b: /* mov */
  1415. goto mov;
  1416. case 0x8c: { /* mov r/m, sreg */
  1417. struct kvm_segment segreg;
  1418. if (c->modrm_reg <= 5)
  1419. kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
  1420. else {
  1421. printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
  1422. c->modrm);
  1423. goto cannot_emulate;
  1424. }
  1425. c->dst.val = segreg.selector;
  1426. break;
  1427. }
  1428. case 0x8d: /* lea r16/r32, m */
  1429. c->dst.val = c->modrm_ea;
  1430. break;
  1431. case 0x8e: { /* mov seg, r/m16 */
  1432. uint16_t sel;
  1433. int type_bits;
  1434. int err;
  1435. sel = c->src.val;
  1436. if (c->modrm_reg <= 5) {
  1437. type_bits = (c->modrm_reg == 1) ? 9 : 1;
  1438. err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
  1439. type_bits, c->modrm_reg);
  1440. } else {
  1441. printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
  1442. c->modrm);
  1443. goto cannot_emulate;
  1444. }
  1445. if (err < 0)
  1446. goto cannot_emulate;
  1447. c->dst.type = OP_NONE; /* Disable writeback. */
  1448. break;
  1449. }
  1450. case 0x8f: /* pop (sole member of Grp1a) */
  1451. rc = emulate_grp1a(ctxt, ops);
  1452. if (rc != 0)
  1453. goto done;
  1454. break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* nop */
			c->dst.type = OP_NONE;
			break;
		}
		/* fall through */
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
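	/*
	 * MOVS: copy from DS:(E)SI (segment overridable) to ES:(E)DI
	 * (not overridable); both index registers then advance by the
	 * operand size, backwards when EFLAGS.DF is set.
	 */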
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						ctxt->es_base,
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					c->override_base ? *c->override_base :
							ctxt->ds_base,
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
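	/*
	 * CMPS: read both memory operands and compare them; only the
	 * flags are updated, so writeback is disabled for both sides.
	 */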
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
					c->override_base ? *c->override_base :
							ctxt->ds_base,
					c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						ctxt->es_base,
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->src.bytes
							 : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
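	/*
	 * STOS stores rAX at ES:(E)DI; LODS loads rAX from DS:(E)SI
	 * (segment overridable).  As with the other string ops, the
	 * index register steps by the operand size in the direction
	 * given by EFLAGS.DF.
	 */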
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						ctxt->es_base,
						c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
					c->override_base ? *c->override_base :
							ctxt->ds_base,
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb8: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:	/* Grp2 (shift/rotate, imm8) */
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 (shift/rotate by 1) */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 (shift/rotate by CL) */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
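	/*
	 * Near relative CALL: fetch the displacement, save the address
	 * of the next instruction as the return address, jump, and push
	 * the return address.  Note that op_bytes is set to the address
	 * size before the push, so the return address is pushed at that
	 * width.
	 */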
	case 0xe8: /* call (near) */ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
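	/*
	 * Far JMP: fetch the new (E)IP, then a 16-bit selector.  The CS
	 * descriptor must load successfully (type 9: code segment)
	 * before EIP is updated.
	 */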
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}
		c->eip = eip;
		break;
	}
	case 0xeb:
	jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:	/* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		goto done;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}
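/*
 * Common exit paths: writeback() commits the destination operand
 * (register or memory), after which the shadow register file and
 * instruction pointer are copied back into the vcpu.  "done" returns,
 * restoring the original eip if the instruction could not be handled.
 */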
writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	ctxt->vcpu->arch.rip = c->eip;

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;
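/*
 * Two-byte opcodes (0x0f escape): c->b holds the second opcode byte
 * here, decoded from the separate two-byte table.
 */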
twobyte_insn:
	switch (c->b) {
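	/*
	 * 0x0f 0x01 is Grp7: the ModRM reg field selects the sub-op
	 * (lgdt/lidt/smsw/lmsw/invlpg here).  The register forms of
	 * sub-ops 0 and 3 (mod == 3, rm == 1) encode vmcall and
	 * vmmcall; kvm_fix_hypercall() patches the guest instruction to
	 * the hypercall instruction native to the host.
	 */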
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = ctxt->vcpu->arch.rip;
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06: /* clts */
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
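	/*
	 * Moves to and from control and debug registers require a
	 * register operand (mod == 3) and bypass the normal writeback
	 * path.
	 */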
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
			realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
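	/*
	 * WRMSR/RDMSR: the MSR index is in ECX and the 64-bit value
	 * travels in EDX:EAX.  On failure a #GP(0) is injected and eip
	 * is wound back to the start of the instruction, so the fault
	 * is delivered at the right address.
	 */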
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
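	/*
	 * CMOVcc: the move is always computed; when the condition is
	 * false, writeback is simply suppressed, leaving the
	 * destination unchanged.
	 */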
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc */ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
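	/*
	 * BT/BTS/BTR/BTC: the bit offset is reduced modulo the operand
	 * width (hence the "only subword offset" notes); BT merely
	 * copies the selected bit into CF, while the others also set,
	 * clear or complement it.
	 */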
	case 0xa3:
	bt:			/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	bts:			/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
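	/*
	 * CMPXCHG: compare rAX with the destination.  If they are equal
	 * (ZF set), the source is stored to the destination; otherwise
	 * the destination value is loaded into rAX instead.
	 */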
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	btr:			/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
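	/*
	 * Grp8 (0x0f 0xba): bit test with an immediate bit offset.  The
	 * architectural encodings are reg = 4..7 for bt/bts/btr/btc;
	 * masking with 3 maps them onto the shared handlers (the
	 * undefined encodings 0..3 alias to the same ops here).
	 */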
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	btc:			/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val
					     : (s16) c->src.val;
		break;
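	/*
	 * MOVNTI is a non-temporal store; the caching hint has no
	 * meaning under emulation, so it is handled as a plain MOV,
	 * truncated to 32 bits when the operand size is 4.
	 */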
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val
						: (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}