x86_emulate.c

  1. /******************************************************************************
  2. * x86_emulate.c
  3. *
  4. * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
  5. *
  6. * Copyright (c) 2005 Keir Fraser
  7. *
  8. * Linux coding style, mod r/m decoder, segment base fixes, real-mode
  9. * privileged instructions:
  10. *
  11. * Copyright (C) 2006 Qumranet
  12. *
  13. * Avi Kivity <avi@qumranet.com>
  14. * Yaniv Kamay <yaniv@qumranet.com>
  15. *
  16. * This work is licensed under the terms of the GNU GPL, version 2. See
  17. * the COPYING file in the top-level directory.
  18. *
  19. * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
  20. */
  21. #ifndef __KERNEL__
  22. #include <stdio.h>
  23. #include <stdint.h>
  24. #include <public/xen.h>
  25. #define DPRINTF(_f, _a ...) printf(_f , ## _a)
  26. #else
  27. #include <linux/kvm_host.h>
  28. #include "kvm_cache_regs.h"
  29. #define DPRINTF(x...) do {} while (0)
  30. #endif
  31. #include <linux/module.h>
  32. #include <asm/kvm_x86_emulate.h>
  33. /*
  34. * Opcode effective-address decode tables.
  35. * Note that we only emulate instructions that have at least one memory
  36. * operand (excluding implicit stack references). We assume that stack
  37. * references and instruction fetches will never occur in special memory
  38. * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
  39. * not be handled.
  40. */
  41. /* Operand sizes: 8-bit operands or specified/overridden size. */
  42. #define ByteOp (1<<0) /* 8-bit operands. */
  43. /* Destination operand type. */
  44. #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
  45. #define DstReg (2<<1) /* Register operand. */
  46. #define DstMem (3<<1) /* Memory operand. */
  47. #define DstAcc (4<<1) /* Destination Accumulator */
  48. #define DstMask (7<<1)
  49. /* Source operand type. */
  50. #define SrcNone (0<<4) /* No source operand. */
  51. #define SrcImplicit (0<<4) /* Source operand is implicit in the opcode. */
  52. #define SrcReg (1<<4) /* Register operand. */
  53. #define SrcMem (2<<4) /* Memory operand. */
  54. #define SrcMem16 (3<<4) /* Memory operand (16-bit). */
  55. #define SrcMem32 (4<<4) /* Memory operand (32-bit). */
  56. #define SrcImm (5<<4) /* Immediate operand. */
  57. #define SrcImmByte (6<<4) /* 8-bit sign-extended immediate operand. */
  58. #define SrcMask (7<<4)
  59. /* Generic ModRM decode. */
  60. #define ModRM (1<<7)
  61. /* Destination is only written; never read. */
  62. #define Mov (1<<8)
  63. #define BitOp (1<<9)
  64. #define MemAbs (1<<10) /* Memory operand is absolute displacement */
  65. #define String (1<<12) /* String instruction (rep capable) */
  66. #define Stack (1<<13) /* Stack instruction (push/pop) */
  67. #define Group (1<<14) /* Bits 3:5 of modrm byte extend opcode */
  68. #define GroupDual (1<<15) /* Alternate decoding of mod == 3 */
  69. #define GroupMask 0xff /* Group number stored in bits 0:7 */
  70. enum {
  71. Group1_80, Group1_81, Group1_82, Group1_83,
  72. Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
  73. };
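/*
 * Example: the first opcode_table[] entry below, for opcode 0x00
 * (add r/m8, r8), is ByteOp | DstMem | SrcReg | ModRM -- the decoder
 * fetches a ModRM byte, treats its r/m field as an 8-bit destination
 * (register or memory) and its reg field as an 8-bit register source.
 * A zero entry means the opcode is not emulated.
 */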
  74. static u16 opcode_table[256] = {
  75. /* 0x00 - 0x07 */
  76. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  77. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  78. 0, 0, 0, 0,
  79. /* 0x08 - 0x0F */
  80. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  81. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  82. 0, 0, 0, 0,
  83. /* 0x10 - 0x17 */
  84. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  85. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  86. 0, 0, 0, 0,
  87. /* 0x18 - 0x1F */
  88. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  89. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  90. 0, 0, 0, 0,
  91. /* 0x20 - 0x27 */
  92. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  93. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  94. SrcImmByte, SrcImm, 0, 0,
  95. /* 0x28 - 0x2F */
  96. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  97. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  98. 0, 0, 0, 0,
  99. /* 0x30 - 0x37 */
  100. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  101. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  102. 0, 0, 0, 0,
  103. /* 0x38 - 0x3F */
  104. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  105. ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  106. 0, 0, 0, 0,
  107. /* 0x40 - 0x47 */
  108. DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
  109. /* 0x48 - 0x4F */
  110. DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
  111. /* 0x50 - 0x57 */
  112. SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
  113. SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
  114. /* 0x58 - 0x5F */
  115. DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
  116. DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
  117. /* 0x60 - 0x67 */
  118. 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
  119. 0, 0, 0, 0,
  120. /* 0x68 - 0x6F */
  121. SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
  122. SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
  123. SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
  124. /* 0x70 - 0x77 */
  125. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  126. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  127. /* 0x78 - 0x7F */
  128. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  129. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  130. /* 0x80 - 0x87 */
  131. Group | Group1_80, Group | Group1_81,
  132. Group | Group1_82, Group | Group1_83,
  133. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  134. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  135. /* 0x88 - 0x8F */
  136. ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
  137. ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  138. DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
  139. DstReg | SrcMem | ModRM | Mov, Group | Group1A,
  140. /* 0x90 - 0x97 */
  141. DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
  142. /* 0x98 - 0x9F */
  143. 0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
  144. /* 0xA0 - 0xA7 */
  145. ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
  146. ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
  147. ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
  148. ByteOp | ImplicitOps | String, ImplicitOps | String,
  149. /* 0xA8 - 0xAF */
  150. 0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
  151. ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
  152. ByteOp | ImplicitOps | String, ImplicitOps | String,
  153. /* 0xB0 - 0xB7 */
  154. ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
  155. ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
  156. ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
  157. ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
  158. /* 0xB8 - 0xBF */
  159. DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
  160. DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
  161. DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
  162. DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
  163. /* 0xC0 - 0xC7 */
  164. ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
  165. 0, ImplicitOps | Stack, 0, 0,
  166. ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
  167. /* 0xC8 - 0xCF */
  168. 0, 0, 0, 0, 0, 0, 0, 0,
  169. /* 0xD0 - 0xD7 */
  170. ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
  171. ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
  172. 0, 0, 0, 0,
  173. /* 0xD8 - 0xDF */
  174. 0, 0, 0, 0, 0, 0, 0, 0,
  175. /* 0xE0 - 0xE7 */
  176. 0, 0, 0, 0,
  177. SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
  178. SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
  179. /* 0xE8 - 0xEF */
  180. ImplicitOps | Stack, SrcImm | ImplicitOps,
  181. ImplicitOps, SrcImmByte | ImplicitOps,
  182. SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
  183. SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
  184. /* 0xF0 - 0xF7 */
  185. 0, 0, 0, 0,
  186. ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
  187. /* 0xF8 - 0xFF */
  188. ImplicitOps, 0, ImplicitOps, ImplicitOps,
  189. ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
  190. };
  191. static u16 twobyte_table[256] = {
  192. /* 0x00 - 0x0F */
  193. 0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
  194. ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
  195. /* 0x10 - 0x1F */
  196. 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
  197. /* 0x20 - 0x2F */
  198. ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
  199. 0, 0, 0, 0, 0, 0, 0, 0,
  200. /* 0x30 - 0x3F */
  201. ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  202. /* 0x40 - 0x47 */
  203. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  204. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  205. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  206. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  207. /* 0x48 - 0x4F */
  208. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  209. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  210. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  211. DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
  212. /* 0x50 - 0x5F */
  213. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  214. /* 0x60 - 0x6F */
  215. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  216. /* 0x70 - 0x7F */
  217. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  218. /* 0x80 - 0x8F */
  219. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  220. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  221. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  222. ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
  223. /* 0x90 - 0x9F */
  224. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  225. /* 0xA0 - 0xA7 */
  226. 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
  227. /* 0xA8 - 0xAF */
  228. 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
  229. /* 0xB0 - 0xB7 */
  230. ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
  231. DstMem | SrcReg | ModRM | BitOp,
  232. 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
  233. DstReg | SrcMem16 | ModRM | Mov,
  234. /* 0xB8 - 0xBF */
  235. 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
  236. 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
  237. DstReg | SrcMem16 | ModRM | Mov,
  238. /* 0xC0 - 0xCF */
  239. 0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
  240. 0, 0, 0, 0, 0, 0, 0, 0,
  241. /* 0xD0 - 0xDF */
  242. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  243. /* 0xE0 - 0xEF */
  244. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  245. /* 0xF0 - 0xFF */
  246. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
  247. };
  248. static u16 group_table[] = {
  249. [Group1_80*8] =
  250. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  251. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  252. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  253. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  254. [Group1_81*8] =
  255. DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
  256. DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
  257. DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
  258. DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
  259. [Group1_82*8] =
  260. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  261. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  262. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  263. ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
  264. [Group1_83*8] =
  265. DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
  266. DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
  267. DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
  268. DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
  269. [Group1A*8] =
  270. DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
  271. [Group3_Byte*8] =
  272. ByteOp | SrcImm | DstMem | ModRM, 0,
  273. ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
  274. 0, 0, 0, 0,
  275. [Group3*8] =
  276. DstMem | SrcImm | ModRM, 0,
  277. DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
  278. 0, 0, 0, 0,
  279. [Group4*8] =
  280. ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
  281. 0, 0, 0, 0, 0, 0,
  282. [Group5*8] =
  283. DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
  284. SrcMem | ModRM | Stack, 0,
  285. SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
  286. [Group7*8] =
  287. 0, 0, ModRM | SrcMem, ModRM | SrcMem,
  288. SrcNone | ModRM | DstMem | Mov, 0,
  289. SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
  290. };
  291. static u16 group2_table[] = {
  292. [Group7*8] =
  293. SrcNone | ModRM, 0, 0, 0,
  294. SrcNone | ModRM | DstMem | Mov, 0,
  295. SrcMem16 | ModRM | Mov, 0,
  296. };
  297. /* EFLAGS bit definitions. */
  298. #define EFLG_OF (1<<11)
  299. #define EFLG_DF (1<<10)
  300. #define EFLG_SF (1<<7)
  301. #define EFLG_ZF (1<<6)
  302. #define EFLG_AF (1<<4)
  303. #define EFLG_PF (1<<2)
  304. #define EFLG_CF (1<<0)
  305. /*
  306. * Instruction emulation:
  307. * Most instructions are emulated directly via a fragment of inline assembly
  308. * code. This allows us to save/restore EFLAGS and thus very easily pick up
  309. * any modified flags.
  310. */
  311. #if defined(CONFIG_X86_64)
  312. #define _LO32 "k" /* force 32-bit operand */
  313. #define _STK "%%rsp" /* stack pointer */
  314. #elif defined(__i386__)
  315. #define _LO32 "" /* force 32-bit operand */
  316. #define _STK "%%esp" /* stack pointer */
  317. #endif
  318. /*
  319. * These EFLAGS bits are restored from saved value during emulation, and
  320. * any changes are written back to the saved value after emulation.
  321. */
  322. #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
  323. /* Before executing instruction: restore necessary bits in EFLAGS. */
  324. #define _PRE_EFLAGS(_sav, _msk, _tmp) \
  325. /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
  326. "movl %"_sav",%"_LO32 _tmp"; " \
  327. "push %"_tmp"; " \
  328. "push %"_tmp"; " \
  329. "movl %"_msk",%"_LO32 _tmp"; " \
  330. "andl %"_LO32 _tmp",("_STK"); " \
  331. "pushf; " \
  332. "notl %"_LO32 _tmp"; " \
  333. "andl %"_LO32 _tmp",("_STK"); " \
  334. "andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
  335. "pop %"_tmp"; " \
  336. "orl %"_LO32 _tmp",("_STK"); " \
  337. "popf; " \
  338. "pop %"_sav"; "
  339. /* After executing instruction: write-back necessary bits in EFLAGS. */
  340. #define _POST_EFLAGS(_sav, _msk, _tmp) \
  341. /* _sav |= EFLAGS & _msk; */ \
  342. "pushf; " \
  343. "pop %"_tmp"; " \
  344. "andl %"_msk",%"_LO32 _tmp"; " \
  345. "orl %"_LO32 _tmp",%"_sav"; "
  346. /* Raw emulation: instruction has two explicit operands. */
  347. #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
  348. do { \
  349. unsigned long _tmp; \
  350. \
  351. switch ((_dst).bytes) { \
  352. case 2: \
  353. __asm__ __volatile__ ( \
  354. _PRE_EFLAGS("0", "4", "2") \
  355. _op"w %"_wx"3,%1; " \
  356. _POST_EFLAGS("0", "4", "2") \
  357. : "=m" (_eflags), "=m" ((_dst).val), \
  358. "=&r" (_tmp) \
  359. : _wy ((_src).val), "i" (EFLAGS_MASK)); \
  360. break; \
  361. case 4: \
  362. __asm__ __volatile__ ( \
  363. _PRE_EFLAGS("0", "4", "2") \
  364. _op"l %"_lx"3,%1; " \
  365. _POST_EFLAGS("0", "4", "2") \
  366. : "=m" (_eflags), "=m" ((_dst).val), \
  367. "=&r" (_tmp) \
  368. : _ly ((_src).val), "i" (EFLAGS_MASK)); \
  369. break; \
  370. case 8: \
  371. __emulate_2op_8byte(_op, _src, _dst, \
  372. _eflags, _qx, _qy); \
  373. break; \
  374. } \
  375. } while (0)
  376. #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
  377. do { \
  378. unsigned long __tmp; \
  379. switch ((_dst).bytes) { \
  380. case 1: \
  381. __asm__ __volatile__ ( \
  382. _PRE_EFLAGS("0", "4", "2") \
  383. _op"b %"_bx"3,%1; " \
  384. _POST_EFLAGS("0", "4", "2") \
  385. : "=m" (_eflags), "=m" ((_dst).val), \
  386. "=&r" (__tmp) \
  387. : _by ((_src).val), "i" (EFLAGS_MASK)); \
  388. break; \
  389. default: \
  390. __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
  391. _wx, _wy, _lx, _ly, _qx, _qy); \
  392. break; \
  393. } \
  394. } while (0)
  395. /* Source operand is byte-sized and may be restricted to just %cl. */
  396. #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
  397. __emulate_2op(_op, _src, _dst, _eflags, \
  398. "b", "c", "b", "c", "b", "c", "b", "c")
  399. /* Source operand is byte, word, long or quad sized. */
  400. #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
  401. __emulate_2op(_op, _src, _dst, _eflags, \
  402. "b", "q", "w", "r", _LO32, "r", "", "r")
  403. /* Source operand is word, long or quad sized. */
  404. #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
  405. __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
  406. "w", "r", _LO32, "r", "", "r")
  407. /* Instruction has only one explicit operand (no source operand). */
  408. #define emulate_1op(_op, _dst, _eflags) \
  409. do { \
  410. unsigned long _tmp; \
  411. \
  412. switch ((_dst).bytes) { \
  413. case 1: \
  414. __asm__ __volatile__ ( \
  415. _PRE_EFLAGS("0", "3", "2") \
  416. _op"b %1; " \
  417. _POST_EFLAGS("0", "3", "2") \
  418. : "=m" (_eflags), "=m" ((_dst).val), \
  419. "=&r" (_tmp) \
  420. : "i" (EFLAGS_MASK)); \
  421. break; \
  422. case 2: \
  423. __asm__ __volatile__ ( \
  424. _PRE_EFLAGS("0", "3", "2") \
  425. _op"w %1; " \
  426. _POST_EFLAGS("0", "3", "2") \
  427. : "=m" (_eflags), "=m" ((_dst).val), \
  428. "=&r" (_tmp) \
  429. : "i" (EFLAGS_MASK)); \
  430. break; \
  431. case 4: \
  432. __asm__ __volatile__ ( \
  433. _PRE_EFLAGS("0", "3", "2") \
  434. _op"l %1; " \
  435. _POST_EFLAGS("0", "3", "2") \
  436. : "=m" (_eflags), "=m" ((_dst).val), \
  437. "=&r" (_tmp) \
  438. : "i" (EFLAGS_MASK)); \
  439. break; \
  440. case 8: \
  441. __emulate_1op_8byte(_op, _dst, _eflags); \
  442. break; \
  443. } \
  444. } while (0)
  445. /* Emulate an instruction with quadword operands (x86/64 only). */
  446. #if defined(CONFIG_X86_64)
  447. #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
  448. do { \
  449. __asm__ __volatile__ ( \
  450. _PRE_EFLAGS("0", "4", "2") \
  451. _op"q %"_qx"3,%1; " \
  452. _POST_EFLAGS("0", "4", "2") \
  453. : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
  454. : _qy ((_src).val), "i" (EFLAGS_MASK)); \
  455. } while (0)
  456. #define __emulate_1op_8byte(_op, _dst, _eflags) \
  457. do { \
  458. __asm__ __volatile__ ( \
  459. _PRE_EFLAGS("0", "3", "2") \
  460. _op"q %1; " \
  461. _POST_EFLAGS("0", "3", "2") \
  462. : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
  463. : "i" (EFLAGS_MASK)); \
  464. } while (0)
  465. #elif defined(__i386__)
  466. #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
  467. #define __emulate_1op_8byte(_op, _dst, _eflags)
  468. #endif /* __i386__ */
  469. /* Fetch next part of the instruction being emulated. */
  470. #define insn_fetch(_type, _size, _eip) \
  471. ({ unsigned long _x; \
  472. rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \
  473. if (rc != 0) \
  474. goto done; \
  475. (_eip) += (_size); \
  476. (_type)_x; \
  477. })
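/*
 * Note that insn_fetch() evaluates to the fetched value and advances
 * _eip by _size; on a fetch failure it jumps to the enclosing
 * function's 'done' label, so every user must provide that label and
 * an 'rc' variable.
 */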
  478. static inline unsigned long ad_mask(struct decode_cache *c)
  479. {
  480. return (1UL << (c->ad_bytes << 3)) - 1;
  481. }
  482. /* Access/update address held in a register, based on addressing mode. */
  483. static inline unsigned long
  484. address_mask(struct decode_cache *c, unsigned long reg)
  485. {
  486. if (c->ad_bytes == sizeof(unsigned long))
  487. return reg;
  488. else
  489. return reg & ad_mask(c);
  490. }
  491. static inline unsigned long
  492. register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
  493. {
  494. return base + address_mask(c, reg);
  495. }
  496. static inline void
  497. register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
  498. {
  499. if (c->ad_bytes == sizeof(unsigned long))
  500. *reg += inc;
  501. else
  502. *reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
  503. }
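/*
 * For example, with a 16-bit address size (ad_bytes == 2) ad_mask()
 * is 0xffff, so register_address_increment(c, &c->regs[VCPU_REGS_RSI], 1)
 * advances only the low 16 bits of RSI and leaves the upper bits
 * untouched, matching 16-bit wrap-around semantics.
 */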
  504. static inline void jmp_rel(struct decode_cache *c, int rel)
  505. {
  506. register_address_increment(c, &c->eip, rel);
  507. }
  508. static void set_seg_override(struct decode_cache *c, int seg)
  509. {
  510. c->has_seg_override = true;
  511. c->seg_override = seg;
  512. }
  513. static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
  514. {
  515. if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
  516. return 0;
  517. return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
  518. }
  519. static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
  520. struct decode_cache *c)
  521. {
  522. if (!c->has_seg_override)
  523. return 0;
  524. return seg_base(ctxt, c->seg_override);
  525. }
  526. static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
  527. {
  528. return seg_base(ctxt, VCPU_SREG_ES);
  529. }
  530. static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
  531. {
  532. return seg_base(ctxt, VCPU_SREG_SS);
  533. }
  534. static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
  535. struct x86_emulate_ops *ops,
  536. unsigned long linear, u8 *dest)
  537. {
  538. struct fetch_cache *fc = &ctxt->decode.fetch;
  539. int rc;
  540. int size;
  541. if (linear < fc->start || linear >= fc->end) {
  542. size = min(15UL, PAGE_SIZE - offset_in_page(linear));
  543. rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
  544. if (rc)
  545. return rc;
  546. fc->start = linear;
  547. fc->end = linear + size;
  548. }
  549. *dest = fc->data[linear - fc->start];
  550. return 0;
  551. }
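/*
 * The fetch cache above reads at most 15 bytes at a time (the
 * architectural maximum length of a single x86 instruction) and never
 * crosses a page boundary, so one read_std() call is always enough.
 */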
  552. static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
  553. struct x86_emulate_ops *ops,
  554. unsigned long eip, void *dest, unsigned size)
  555. {
  556. int rc = 0;
  557. eip += ctxt->cs_base;
  558. while (size--) {
  559. rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
  560. if (rc)
  561. return rc;
  562. }
  563. return 0;
  564. }
  565. /*
  566. * Given the 'reg' portion of a ModRM byte, and a register block, return a
  567. * pointer into the block that addresses the relevant register.
  568. * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
  569. */
  570. static void *decode_register(u8 modrm_reg, unsigned long *regs,
  571. int highbyte_regs)
  572. {
  573. void *p;
  574. p = &regs[modrm_reg];
  575. if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
  576. p = (unsigned char *)&regs[modrm_reg & 3] + 1;
  577. return p;
  578. }
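/*
 * E.g. modrm_reg == 4 with highbyte_regs set selects AH (byte 1 of
 * regs[VCPU_REGS_RAX]); with highbyte_regs clear it selects RSP/ESP/SP.
 */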
  579. static int read_descriptor(struct x86_emulate_ctxt *ctxt,
  580. struct x86_emulate_ops *ops,
  581. void *ptr,
  582. u16 *size, unsigned long *address, int op_bytes)
  583. {
  584. int rc;
  585. if (op_bytes == 2)
  586. op_bytes = 3;
  587. *address = 0;
  588. rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
  589. ctxt->vcpu);
  590. if (rc)
  591. return rc;
  592. rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
  593. ctxt->vcpu);
  594. return rc;
  595. }
  596. static int test_cc(unsigned int condition, unsigned int flags)
  597. {
  598. int rc = 0;
  599. switch ((condition & 15) >> 1) {
  600. case 0: /* o */
  601. rc |= (flags & EFLG_OF);
  602. break;
  603. case 1: /* b/c/nae */
  604. rc |= (flags & EFLG_CF);
  605. break;
  606. case 2: /* z/e */
  607. rc |= (flags & EFLG_ZF);
  608. break;
  609. case 3: /* be/na */
  610. rc |= (flags & (EFLG_CF|EFLG_ZF));
  611. break;
  612. case 4: /* s */
  613. rc |= (flags & EFLG_SF);
  614. break;
  615. case 5: /* p/pe */
  616. rc |= (flags & EFLG_PF);
  617. break;
  618. case 7: /* le/ng */
  619. rc |= (flags & EFLG_ZF);
  620. /* fall through */
  621. case 6: /* l/nge */
  622. rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
  623. break;
  624. }
  625. /* Odd condition identifiers (lsb == 1) have inverted sense. */
  626. return (!!rc ^ (condition & 1));
  627. }
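/*
 * Example: for jz (opcode 0x74), (condition & 15) >> 1 == 2 selects
 * the ZF test and the low bit is clear, so the result is used as-is;
 * for jnz (0x75) the low bit is set and the sense is inverted.
 */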
  628. static void decode_register_operand(struct operand *op,
  629. struct decode_cache *c,
  630. int inhibit_bytereg)
  631. {
  632. unsigned reg = c->modrm_reg;
  633. int highbyte_regs = c->rex_prefix == 0;
  634. if (!(c->d & ModRM))
  635. reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
  636. op->type = OP_REG;
  637. if ((c->d & ByteOp) && !inhibit_bytereg) {
  638. op->ptr = decode_register(reg, c->regs, highbyte_regs);
  639. op->val = *(u8 *)op->ptr;
  640. op->bytes = 1;
  641. } else {
  642. op->ptr = decode_register(reg, c->regs, 0);
  643. op->bytes = c->op_bytes;
  644. switch (op->bytes) {
  645. case 2:
  646. op->val = *(u16 *)op->ptr;
  647. break;
  648. case 4:
  649. op->val = *(u32 *)op->ptr;
  650. break;
  651. case 8:
  652. op->val = *(u64 *) op->ptr;
  653. break;
  654. }
  655. }
  656. op->orig_val = op->val;
  657. }
  658. static int decode_modrm(struct x86_emulate_ctxt *ctxt,
  659. struct x86_emulate_ops *ops)
  660. {
  661. struct decode_cache *c = &ctxt->decode;
  662. u8 sib;
  663. int index_reg = 0, base_reg = 0, scale;
  664. int rc = 0;
  665. if (c->rex_prefix) {
  666. c->modrm_reg = (c->rex_prefix & 4) << 1; /* REX.R */
  667. index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
  668. c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
  669. }
  670. c->modrm = insn_fetch(u8, 1, c->eip);
  671. c->modrm_mod |= (c->modrm & 0xc0) >> 6;
  672. c->modrm_reg |= (c->modrm & 0x38) >> 3;
  673. c->modrm_rm |= (c->modrm & 0x07);
  674. c->modrm_ea = 0;
  675. c->use_modrm_ea = 1;
  676. if (c->modrm_mod == 3) {
  677. c->modrm_ptr = decode_register(c->modrm_rm,
  678. c->regs, c->d & ByteOp);
  679. c->modrm_val = *(unsigned long *)c->modrm_ptr;
  680. return rc;
  681. }
  682. if (c->ad_bytes == 2) {
  683. unsigned bx = c->regs[VCPU_REGS_RBX];
  684. unsigned bp = c->regs[VCPU_REGS_RBP];
  685. unsigned si = c->regs[VCPU_REGS_RSI];
  686. unsigned di = c->regs[VCPU_REGS_RDI];
  687. /* 16-bit ModR/M decode. */
  688. switch (c->modrm_mod) {
  689. case 0:
  690. if (c->modrm_rm == 6)
  691. c->modrm_ea += insn_fetch(u16, 2, c->eip);
  692. break;
  693. case 1:
  694. c->modrm_ea += insn_fetch(s8, 1, c->eip);
  695. break;
  696. case 2:
  697. c->modrm_ea += insn_fetch(u16, 2, c->eip);
  698. break;
  699. }
  700. switch (c->modrm_rm) {
  701. case 0:
  702. c->modrm_ea += bx + si;
  703. break;
  704. case 1:
  705. c->modrm_ea += bx + di;
  706. break;
  707. case 2:
  708. c->modrm_ea += bp + si;
  709. break;
  710. case 3:
  711. c->modrm_ea += bp + di;
  712. break;
  713. case 4:
  714. c->modrm_ea += si;
  715. break;
  716. case 5:
  717. c->modrm_ea += di;
  718. break;
  719. case 6:
  720. if (c->modrm_mod != 0)
  721. c->modrm_ea += bp;
  722. break;
  723. case 7:
  724. c->modrm_ea += bx;
  725. break;
  726. }
  727. if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
  728. (c->modrm_rm == 6 && c->modrm_mod != 0))
  729. if (!c->has_seg_override)
  730. set_seg_override(c, VCPU_SREG_SS);
  731. c->modrm_ea = (u16)c->modrm_ea;
  732. } else {
  733. /* 32/64-bit ModR/M decode. */
  734. if ((c->modrm_rm & 7) == 4) {
  735. sib = insn_fetch(u8, 1, c->eip);
  736. index_reg |= (sib >> 3) & 7;
  737. base_reg |= sib & 7;
  738. scale = sib >> 6;
  739. if ((base_reg & 7) == 5 && c->modrm_mod == 0)
  740. c->modrm_ea += insn_fetch(s32, 4, c->eip);
  741. else
  742. c->modrm_ea += c->regs[base_reg];
  743. if (index_reg != 4)
  744. c->modrm_ea += c->regs[index_reg] << scale;
  745. } else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
  746. if (ctxt->mode == X86EMUL_MODE_PROT64)
  747. c->rip_relative = 1;
  748. } else
  749. c->modrm_ea += c->regs[c->modrm_rm];
  750. switch (c->modrm_mod) {
  751. case 0:
  752. if (c->modrm_rm == 5)
  753. c->modrm_ea += insn_fetch(s32, 4, c->eip);
  754. break;
  755. case 1:
  756. c->modrm_ea += insn_fetch(s8, 1, c->eip);
  757. break;
  758. case 2:
  759. c->modrm_ea += insn_fetch(s32, 4, c->eip);
  760. break;
  761. }
  762. }
  763. done:
  764. return rc;
  765. }
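/*
 * 16-bit example: ModRM byte 0x46 (mod=1, reg=0, rm=6) decodes above
 * to disp8(%bp): an 8-bit displacement is fetched, BP is added to
 * modrm_ea, and SS becomes the default segment unless overridden.
 */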
  766. static int decode_abs(struct x86_emulate_ctxt *ctxt,
  767. struct x86_emulate_ops *ops)
  768. {
  769. struct decode_cache *c = &ctxt->decode;
  770. int rc = 0;
  771. switch (c->ad_bytes) {
  772. case 2:
  773. c->modrm_ea = insn_fetch(u16, 2, c->eip);
  774. break;
  775. case 4:
  776. c->modrm_ea = insn_fetch(u32, 4, c->eip);
  777. break;
  778. case 8:
  779. c->modrm_ea = insn_fetch(u64, 8, c->eip);
  780. break;
  781. }
  782. done:
  783. return rc;
  784. }
  785. int
  786. x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
  787. {
  788. struct decode_cache *c = &ctxt->decode;
  789. int rc = 0;
  790. int mode = ctxt->mode;
  791. int def_op_bytes, def_ad_bytes, group;
  792. /* Shadow copy of register state. Committed on successful emulation. */
  793. memset(c, 0, sizeof(struct decode_cache));
  794. c->eip = kvm_rip_read(ctxt->vcpu);
  795. ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
  796. memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
  797. switch (mode) {
  798. case X86EMUL_MODE_REAL:
  799. case X86EMUL_MODE_PROT16:
  800. def_op_bytes = def_ad_bytes = 2;
  801. break;
  802. case X86EMUL_MODE_PROT32:
  803. def_op_bytes = def_ad_bytes = 4;
  804. break;
  805. #ifdef CONFIG_X86_64
  806. case X86EMUL_MODE_PROT64:
  807. def_op_bytes = 4;
  808. def_ad_bytes = 8;
  809. break;
  810. #endif
  811. default:
  812. return -1;
  813. }
  814. c->op_bytes = def_op_bytes;
  815. c->ad_bytes = def_ad_bytes;
  816. /* Legacy prefixes. */
  817. for (;;) {
  818. switch (c->b = insn_fetch(u8, 1, c->eip)) {
  819. case 0x66: /* operand-size override */
  820. /* switch between 2/4 bytes */
  821. c->op_bytes = def_op_bytes ^ 6;
  822. break;
  823. case 0x67: /* address-size override */
  824. if (mode == X86EMUL_MODE_PROT64)
  825. /* switch between 4/8 bytes */
  826. c->ad_bytes = def_ad_bytes ^ 12;
  827. else
  828. /* switch between 2/4 bytes */
  829. c->ad_bytes = def_ad_bytes ^ 6;
  830. break;
  831. case 0x26: /* ES override */
  832. case 0x2e: /* CS override */
  833. case 0x36: /* SS override */
  834. case 0x3e: /* DS override */
  835. set_seg_override(c, (c->b >> 3) & 3);
  836. break;
  837. case 0x64: /* FS override */
  838. case 0x65: /* GS override */
  839. set_seg_override(c, c->b & 7);
  840. break;
  841. case 0x40 ... 0x4f: /* REX */
  842. if (mode != X86EMUL_MODE_PROT64)
  843. goto done_prefixes;
  844. c->rex_prefix = c->b;
  845. continue;
  846. case 0xf0: /* LOCK */
  847. c->lock_prefix = 1;
  848. break;
  849. case 0xf2: /* REPNE/REPNZ */
  850. c->rep_prefix = REPNE_PREFIX;
  851. break;
  852. case 0xf3: /* REP/REPE/REPZ */
  853. c->rep_prefix = REPE_PREFIX;
  854. break;
  855. default:
  856. goto done_prefixes;
  857. }
  858. /* Any legacy prefix after a REX prefix nullifies its effect. */
  859. c->rex_prefix = 0;
  860. }
  861. done_prefixes:
  862. /* REX prefix. */
  863. if (c->rex_prefix)
  864. if (c->rex_prefix & 8)
  865. c->op_bytes = 8; /* REX.W */
  866. /* Opcode byte(s). */
  867. c->d = opcode_table[c->b];
  868. if (c->d == 0) {
  869. /* Two-byte opcode? */
  870. if (c->b == 0x0f) {
  871. c->twobyte = 1;
  872. c->b = insn_fetch(u8, 1, c->eip);
  873. c->d = twobyte_table[c->b];
  874. }
  875. }
  876. if (c->d & Group) {
  877. group = c->d & GroupMask;
  878. c->modrm = insn_fetch(u8, 1, c->eip);
  879. --c->eip;
  880. group = (group << 3) + ((c->modrm >> 3) & 7);
  881. if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
  882. c->d = group2_table[group];
  883. else
  884. c->d = group_table[group];
  885. }
  886. /* Unrecognised? */
  887. if (c->d == 0) {
  888. DPRINTF("Cannot emulate %02x\n", c->b);
  889. return -1;
  890. }
  891. if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
  892. c->op_bytes = 8;
  893. /* ModRM and SIB bytes. */
  894. if (c->d & ModRM)
  895. rc = decode_modrm(ctxt, ops);
  896. else if (c->d & MemAbs)
  897. rc = decode_abs(ctxt, ops);
  898. if (rc)
  899. goto done;
  900. if (!c->has_seg_override)
  901. set_seg_override(c, VCPU_SREG_DS);
  902. if (!(!c->twobyte && c->b == 0x8d))
  903. c->modrm_ea += seg_override_base(ctxt, c);
  904. if (c->ad_bytes != 8)
  905. c->modrm_ea = (u32)c->modrm_ea;
  906. /*
  907. * Decode and fetch the source operand: register, memory
  908. * or immediate.
  909. */
  910. switch (c->d & SrcMask) {
  911. case SrcNone:
  912. break;
  913. case SrcReg:
  914. decode_register_operand(&c->src, c, 0);
  915. break;
  916. case SrcMem16:
  917. c->src.bytes = 2;
  918. goto srcmem_common;
  919. case SrcMem32:
  920. c->src.bytes = 4;
  921. goto srcmem_common;
  922. case SrcMem:
  923. c->src.bytes = (c->d & ByteOp) ? 1 :
  924. c->op_bytes;
  925. /* Don't fetch the address for invlpg: it could be unmapped. */
  926. if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
  927. break;
  928. srcmem_common:
  929. /*
  930. * For instructions with a ModR/M byte, switch to register
  931. * access if Mod = 3.
  932. */
  933. if ((c->d & ModRM) && c->modrm_mod == 3) {
  934. c->src.type = OP_REG;
  935. c->src.val = c->modrm_val;
  936. c->src.ptr = c->modrm_ptr;
  937. break;
  938. }
  939. c->src.type = OP_MEM;
  940. break;
  941. case SrcImm:
  942. c->src.type = OP_IMM;
  943. c->src.ptr = (unsigned long *)c->eip;
  944. c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
  945. if (c->src.bytes == 8)
  946. c->src.bytes = 4;
  947. /* NB. Immediates are sign-extended as necessary. */
  948. switch (c->src.bytes) {
  949. case 1:
  950. c->src.val = insn_fetch(s8, 1, c->eip);
  951. break;
  952. case 2:
  953. c->src.val = insn_fetch(s16, 2, c->eip);
  954. break;
  955. case 4:
  956. c->src.val = insn_fetch(s32, 4, c->eip);
  957. break;
  958. }
  959. break;
  960. case SrcImmByte:
  961. c->src.type = OP_IMM;
  962. c->src.ptr = (unsigned long *)c->eip;
  963. c->src.bytes = 1;
  964. c->src.val = insn_fetch(s8, 1, c->eip);
  965. break;
  966. }
  967. /* Decode and fetch the destination operand: register or memory. */
  968. switch (c->d & DstMask) {
  969. case ImplicitOps:
  970. /* Special instructions do their own operand decoding. */
  971. return 0;
  972. case DstReg:
  973. decode_register_operand(&c->dst, c,
  974. c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
  975. break;
  976. case DstMem:
  977. if ((c->d & ModRM) && c->modrm_mod == 3) {
  978. c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
  979. c->dst.type = OP_REG;
  980. c->dst.val = c->dst.orig_val = c->modrm_val;
  981. c->dst.ptr = c->modrm_ptr;
  982. break;
  983. }
  984. c->dst.type = OP_MEM;
  985. break;
  986. case DstAcc:
  987. c->dst.type = OP_REG;
  988. c->dst.bytes = c->op_bytes;
  989. c->dst.ptr = &c->regs[VCPU_REGS_RAX];
  990. switch (c->op_bytes) {
  991. case 1:
  992. c->dst.val = *(u8 *)c->dst.ptr;
  993. break;
  994. case 2:
  995. c->dst.val = *(u16 *)c->dst.ptr;
  996. break;
  997. case 4:
  998. c->dst.val = *(u32 *)c->dst.ptr;
  999. break;
  1000. }
  1001. c->dst.orig_val = c->dst.val;
  1002. break;
  1003. }
  1004. if (c->rip_relative)
  1005. c->modrm_ea += c->eip;
  1006. done:
  1007. return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
  1008. }
  1009. static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
  1010. {
  1011. struct decode_cache *c = &ctxt->decode;
  1012. c->dst.type = OP_MEM;
  1013. c->dst.bytes = c->op_bytes;
  1014. c->dst.val = c->src.val;
  1015. register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
  1016. c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
  1017. c->regs[VCPU_REGS_RSP]);
  1018. }
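/*
 * emulate_push() pre-decrements RSP by op_bytes and points dst.ptr at
 * SS:RSP; the actual memory write is deferred to writeback().
 */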
  1019. static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
  1020. struct x86_emulate_ops *ops)
  1021. {
  1022. struct decode_cache *c = &ctxt->decode;
  1023. int rc;
  1024. rc = ops->read_std(register_address(c, ss_base(ctxt),
  1025. c->regs[VCPU_REGS_RSP]),
  1026. &c->dst.val, c->dst.bytes, ctxt->vcpu);
  1027. if (rc != 0)
  1028. return rc;
  1029. register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
  1030. return 0;
  1031. }
  1032. static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
  1033. {
  1034. struct decode_cache *c = &ctxt->decode;
  1035. switch (c->modrm_reg) {
  1036. case 0: /* rol */
  1037. emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
  1038. break;
  1039. case 1: /* ror */
  1040. emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
  1041. break;
  1042. case 2: /* rcl */
  1043. emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
  1044. break;
  1045. case 3: /* rcr */
  1046. emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
  1047. break;
  1048. case 4: /* sal/shl */
  1049. case 6: /* sal/shl */
  1050. emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
  1051. break;
  1052. case 5: /* shr */
  1053. emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
  1054. break;
  1055. case 7: /* sar */
  1056. emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
  1057. break;
  1058. }
  1059. }
  1060. static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
  1061. struct x86_emulate_ops *ops)
  1062. {
  1063. struct decode_cache *c = &ctxt->decode;
  1064. int rc = 0;
  1065. switch (c->modrm_reg) {
  1066. case 0 ... 1: /* test */
  1067. emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
  1068. break;
  1069. case 2: /* not */
  1070. c->dst.val = ~c->dst.val;
  1071. break;
  1072. case 3: /* neg */
  1073. emulate_1op("neg", c->dst, ctxt->eflags);
  1074. break;
  1075. default:
  1076. DPRINTF("Cannot emulate %02x\n", c->b);
  1077. rc = X86EMUL_UNHANDLEABLE;
  1078. break;
  1079. }
  1080. return rc;
  1081. }
  1082. static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
  1083. struct x86_emulate_ops *ops)
  1084. {
  1085. struct decode_cache *c = &ctxt->decode;
  1086. switch (c->modrm_reg) {
  1087. case 0: /* inc */
  1088. emulate_1op("inc", c->dst, ctxt->eflags);
  1089. break;
  1090. case 1: /* dec */
  1091. emulate_1op("dec", c->dst, ctxt->eflags);
  1092. break;
  1093. case 2: /* call near abs */ {
  1094. long int old_eip;
  1095. old_eip = c->eip;
  1096. c->eip = c->src.val;
  1097. c->src.val = old_eip;
  1098. emulate_push(ctxt);
  1099. break;
  1100. }
  1101. case 4: /* jmp abs */
  1102. c->eip = c->src.val;
  1103. break;
  1104. case 6: /* push */
  1105. emulate_push(ctxt);
  1106. break;
  1107. }
  1108. return 0;
  1109. }
  1110. static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
  1111. struct x86_emulate_ops *ops,
  1112. unsigned long memop)
  1113. {
  1114. struct decode_cache *c = &ctxt->decode;
  1115. u64 old, new;
  1116. int rc;
  1117. rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
  1118. if (rc != 0)
  1119. return rc;
  1120. if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
  1121. ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
  1122. c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
  1123. c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
  1124. ctxt->eflags &= ~EFLG_ZF;
  1125. } else {
  1126. new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
  1127. (u32) c->regs[VCPU_REGS_RBX];
  1128. rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
  1129. if (rc != 0)
  1130. return rc;
  1131. ctxt->eflags |= EFLG_ZF;
  1132. }
  1133. return 0;
  1134. }
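/*
 * The above implements cmpxchg8b m64: if EDX:EAX equals the 64-bit
 * memory operand, ZF is set and ECX:EBX is stored to memory; otherwise
 * ZF is cleared and the memory value is loaded into EDX:EAX.
 */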
  1135. static inline int writeback(struct x86_emulate_ctxt *ctxt,
  1136. struct x86_emulate_ops *ops)
  1137. {
  1138. int rc;
  1139. struct decode_cache *c = &ctxt->decode;
  1140. switch (c->dst.type) {
  1141. case OP_REG:
  1142. /* The 4-byte case *is* correct:
  1143. * in 64-bit mode we zero-extend.
  1144. */
  1145. switch (c->dst.bytes) {
  1146. case 1:
  1147. *(u8 *)c->dst.ptr = (u8)c->dst.val;
  1148. break;
  1149. case 2:
  1150. *(u16 *)c->dst.ptr = (u16)c->dst.val;
  1151. break;
  1152. case 4:
  1153. *c->dst.ptr = (u32)c->dst.val;
  1154. break; /* 64b: zero-ext */
  1155. case 8:
  1156. *c->dst.ptr = c->dst.val;
  1157. break;
  1158. }
  1159. break;
  1160. case OP_MEM:
  1161. if (c->lock_prefix)
  1162. rc = ops->cmpxchg_emulated(
  1163. (unsigned long)c->dst.ptr,
  1164. &c->dst.orig_val,
  1165. &c->dst.val,
  1166. c->dst.bytes,
  1167. ctxt->vcpu);
  1168. else
  1169. rc = ops->write_emulated(
  1170. (unsigned long)c->dst.ptr,
  1171. &c->dst.val,
  1172. c->dst.bytes,
  1173. ctxt->vcpu);
  1174. if (rc != 0)
  1175. return rc;
  1176. break;
  1177. case OP_NONE:
  1178. /* no writeback */
  1179. break;
  1180. default:
  1181. break;
  1182. }
  1183. return 0;
  1184. }
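/*
 * Note: with a LOCK prefix the memory write-back above goes through
 * cmpxchg_emulated() against dst.orig_val rather than a plain
 * write_emulated(), so the locked read-modify-write is performed as a
 * compare-and-exchange.
 */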
  1185. int
  1186. x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
  1187. {
  1188. unsigned long memop = 0;
  1189. u64 msr_data;
  1190. unsigned long saved_eip = 0;
  1191. struct decode_cache *c = &ctxt->decode;
  1192. unsigned int port;
  1193. int io_dir_in;
  1194. int rc = 0;
  1195. /* Shadow copy of register state. Committed on successful emulation.
  1196. * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
  1197. * modify them.
  1198. */
  1199. memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
  1200. saved_eip = c->eip;
  1201. if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
  1202. memop = c->modrm_ea;
  1203. if (c->rep_prefix && (c->d & String)) {
  1204. /* All REP prefixes have the same first termination condition */
  1205. if (c->regs[VCPU_REGS_RCX] == 0) {
  1206. kvm_rip_write(ctxt->vcpu, c->eip);
  1207. goto done;
  1208. }
  1209. /* The second termination condition only applies for REPE
  1210. * and REPNE. Test if the repeat string operation prefix is
  1211. * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
  1212. * corresponding termination condition according to:
  1213. * - if REPE/REPZ and ZF = 0 then done
  1214. * - if REPNE/REPNZ and ZF = 1 then done
  1215. */
  1216. if ((c->b == 0xa6) || (c->b == 0xa7) ||
  1217. (c->b == 0xae) || (c->b == 0xaf)) {
  1218. if ((c->rep_prefix == REPE_PREFIX) &&
  1219. ((ctxt->eflags & EFLG_ZF) == 0)) {
  1220. kvm_rip_write(ctxt->vcpu, c->eip);
  1221. goto done;
  1222. }
  1223. if ((c->rep_prefix == REPNE_PREFIX) &&
  1224. ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
  1225. kvm_rip_write(ctxt->vcpu, c->eip);
  1226. goto done;
  1227. }
  1228. }
  1229. c->regs[VCPU_REGS_RCX]--;
  1230. c->eip = kvm_rip_read(ctxt->vcpu);
  1231. }
  1232. if (c->src.type == OP_MEM) {
  1233. c->src.ptr = (unsigned long *)memop;
  1234. c->src.val = 0;
  1235. rc = ops->read_emulated((unsigned long)c->src.ptr,
  1236. &c->src.val,
  1237. c->src.bytes,
  1238. ctxt->vcpu);
  1239. if (rc != 0)
  1240. goto done;
  1241. c->src.orig_val = c->src.val;
  1242. }
  1243. if ((c->d & DstMask) == ImplicitOps)
  1244. goto special_insn;
  1245. if (c->dst.type == OP_MEM) {
  1246. c->dst.ptr = (unsigned long *)memop;
  1247. c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
  1248. c->dst.val = 0;
  1249. if (c->d & BitOp) {
  1250. unsigned long mask = ~(c->dst.bytes * 8 - 1);
  1251. c->dst.ptr = (void *)c->dst.ptr +
  1252. (c->src.val & mask) / 8;
  1253. }
  1254. if (!(c->d & Mov) &&
  1255. /* optimisation - avoid slow emulated read */
  1256. ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
  1257. &c->dst.val,
  1258. c->dst.bytes, ctxt->vcpu)) != 0))
  1259. goto done;
  1260. }
  1261. c->dst.orig_val = c->dst.val;
  1262. special_insn:
  1263. if (c->twobyte)
  1264. goto twobyte_insn;
  1265. switch (c->b) {
  1266. case 0x00 ... 0x05:
  1267. add: /* add */
  1268. emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
  1269. break;
  1270. case 0x08 ... 0x0d:
  1271. or: /* or */
  1272. emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
  1273. break;
  1274. case 0x10 ... 0x15:
  1275. adc: /* adc */
  1276. emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
  1277. break;
  1278. case 0x18 ... 0x1d:
  1279. sbb: /* sbb */
  1280. emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
  1281. break;
  1282. case 0x20 ... 0x23:
  1283. and: /* and */
  1284. emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
  1285. break;
  1286. case 0x24: /* and al imm8 */
  1287. c->dst.type = OP_REG;
  1288. c->dst.ptr = &c->regs[VCPU_REGS_RAX];
  1289. c->dst.val = *(u8 *)c->dst.ptr;
  1290. c->dst.bytes = 1;
  1291. c->dst.orig_val = c->dst.val;
  1292. goto and;
  1293. case 0x25: /* and ax imm16, or eax imm32 */
  1294. c->dst.type = OP_REG;
  1295. c->dst.bytes = c->op_bytes;
  1296. c->dst.ptr = &c->regs[VCPU_REGS_RAX];
  1297. if (c->op_bytes == 2)
  1298. c->dst.val = *(u16 *)c->dst.ptr;
  1299. else
  1300. c->dst.val = *(u32 *)c->dst.ptr;
  1301. c->dst.orig_val = c->dst.val;
  1302. goto and;
  1303. case 0x28 ... 0x2d:
  1304. sub: /* sub */
  1305. emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
  1306. break;
  1307. case 0x30 ... 0x35:
  1308. xor: /* xor */
  1309. emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
  1310. break;
  1311. case 0x38 ... 0x3d:
  1312. cmp: /* cmp */
  1313. emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
  1314. break;
  1315. case 0x40 ... 0x47: /* inc r16/r32 */
  1316. emulate_1op("inc", c->dst, ctxt->eflags);
  1317. break;
  1318. case 0x48 ... 0x4f: /* dec r16/r32 */
  1319. emulate_1op("dec", c->dst, ctxt->eflags);
  1320. break;
  1321. case 0x50 ... 0x57: /* push reg */
  1322. c->dst.type = OP_MEM;
  1323. c->dst.bytes = c->op_bytes;
  1324. c->dst.val = c->src.val;
  1325. register_address_increment(c, &c->regs[VCPU_REGS_RSP],
  1326. -c->op_bytes);
  1327. c->dst.ptr = (void *) register_address(
  1328. c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
  1329. break;
  1330. case 0x58 ... 0x5f: /* pop reg */
  1331. pop_instruction:
  1332. if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
  1333. c->regs[VCPU_REGS_RSP]), c->dst.ptr,
  1334. c->op_bytes, ctxt->vcpu)) != 0)
  1335. goto done;
  1336. register_address_increment(c, &c->regs[VCPU_REGS_RSP],
  1337. c->op_bytes);
  1338. c->dst.type = OP_NONE; /* Disable writeback. */
  1339. break;
  1340. case 0x63: /* movsxd */
  1341. if (ctxt->mode != X86EMUL_MODE_PROT64)
  1342. goto cannot_emulate;
  1343. c->dst.val = (s32) c->src.val;
  1344. break;
  1345. case 0x68: /* push imm */
  1346. case 0x6a: /* push imm8 */
  1347. emulate_push(ctxt);
  1348. break;
  1349. case 0x6c: /* insb */
  1350. case 0x6d: /* insw/insd */
  1351. if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
  1352. 1,
  1353. (c->d & ByteOp) ? 1 : c->op_bytes,
  1354. c->rep_prefix ?
  1355. address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
  1356. (ctxt->eflags & EFLG_DF),
  1357. register_address(c, es_base(ctxt),
  1358. c->regs[VCPU_REGS_RDI]),
  1359. c->rep_prefix,
  1360. c->regs[VCPU_REGS_RDX]) == 0) {
  1361. c->eip = saved_eip;
  1362. return -1;
  1363. }
  1364. return 0;
  1365. case 0x6e: /* outsb */
  1366. case 0x6f: /* outsw/outsd */
  1367. if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
  1368. 0,
  1369. (c->d & ByteOp) ? 1 : c->op_bytes,
  1370. c->rep_prefix ?
  1371. address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
  1372. (ctxt->eflags & EFLG_DF),
  1373. register_address(c,
  1374. seg_override_base(ctxt, c),
  1375. c->regs[VCPU_REGS_RSI]),
  1376. c->rep_prefix,
  1377. c->regs[VCPU_REGS_RDX]) == 0) {
  1378. c->eip = saved_eip;
  1379. return -1;
  1380. }
  1381. return 0;
  1382. case 0x70 ... 0x7f: /* jcc (short) */ {
  1383. int rel = insn_fetch(s8, 1, c->eip);
  1384. if (test_cc(c->b, ctxt->eflags))
  1385. jmp_rel(c, rel);
  1386. break;
  1387. }
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	      xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
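	/*
	 * Segment register moves: 0x8c stores a selector, 0x8e loads one.
	 * Only segment register encodings 0..5 are accepted; the load
	 * goes through kvm_load_segment_descriptor so the cached
	 * descriptor is refreshed as well.
	 */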
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
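	/*
	 * 0x90 is a plain nop unless REX.B is set, in which case it is
	 * xchg r8,rax and falls through to the generic xchg handling.
	 */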
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
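	/*
	 * Moves between the accumulator and an absolute memory operand
	 * (moffs): 0xa0/0xa1 load into RAX, 0xa2/0xa3 store from RAX.
	 */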
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);
		break;
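	/*
	 * cmps reads both string operands, compares them like the cmp
	 * handler, then steps RSI/RDI forward or backward by the operand
	 * size depending on the direction flag; nothing is written back.
	 */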
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);
		break;
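	/*
	 * stos stores RAX to ES:RDI, lods loads from the (possibly
	 * segment-overridden) RSI into RAX; both advance the index
	 * register by the operand size, honouring the direction flag.
	 * scas is not implemented and bails out to cannot_emulate.
	 */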
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	      mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
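	/*
	 * Port I/O with an immediate port number: fetch the imm8 port,
	 * record the direction and share the kvm_emulate_pio call at the
	 * do_io label with the DX-relative forms further down.
	 */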
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
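	/*
	 * Near call: fetch the rel16/rel32 displacement, adjust eip and
	 * push the return address with emulate_push.
	 */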
	case 0xe8: /* call (near) */ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				    (c->d & ByteOp) ? 1 : c->op_bytes,
				    port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		return 0;
	case 0xf4:		/* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
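	/*
	 * Simple flag instructions (cmc/clc/cli/sti/cld/std) only touch
	 * eflags in the emulation context, so writeback is disabled for
	 * each of them.
	 */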
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}
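/*
 * Fall out of the one-byte opcode switch: commit the destination
 * operand, copy the shadow register file back into the vcpu and
 * advance rip past the emulated instruction.
 */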
writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;
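/*
 * Two-byte (0x0f-escaped) opcodes are handled below; execution reaches
 * this label when the decoded instruction carried the 0x0f prefix.
 */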
twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
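	/*
	 * wrmsr/rdmsr move EDX:EAX to or from the MSR selected by ECX;
	 * a failing kvm_set_msr/kvm_get_msr injects #GP into the guest
	 * instead of failing the emulation.
	 */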
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
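	/*
	 * cmov: the move is performed unconditionally into dst.val, but
	 * writeback is suppressed when the condition code evaluates
	 * false, leaving the destination untouched.
	 */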
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc */ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
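	/*
	 * Bit-test family (bt/bts/btr/btc): the bit offset is masked to
	 * the operand width, and Grp8 (0xba) dispatches its immediate
	 * forms to the same labels.
	 */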
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:		/* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val
					     : (s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val
						: (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}