/* opcode_tilegx.h */
/* TILE-Gx opcode information.
 *
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 */
  20. #ifndef __ARCH_OPCODE_H__
  21. #define __ARCH_OPCODE_H__
  22. #ifndef __ASSEMBLER__
  23. typedef unsigned long long tilegx_bundle_bits;
  24. /* These are the bits that determine if a bundle is in the X encoding. */
  25. #define TILEGX_BUNDLE_MODE_MASK ((tilegx_bundle_bits)3 << 62)
  26. enum
  27. {
  28. /* Maximum number of instructions in a bundle (2 for X, 3 for Y). */
  29. TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE = 3,
  30. /* How many different pipeline encodings are there? X0, X1, Y0, Y1, Y2. */
  31. TILEGX_NUM_PIPELINE_ENCODINGS = 5,
  32. /* Log base 2 of TILEGX_BUNDLE_SIZE_IN_BYTES. */
  33. TILEGX_LOG2_BUNDLE_SIZE_IN_BYTES = 3,
  34. /* Instructions take this many bytes. */
  35. TILEGX_BUNDLE_SIZE_IN_BYTES = 1 << TILEGX_LOG2_BUNDLE_SIZE_IN_BYTES,
  36. /* Log base 2 of TILEGX_BUNDLE_ALIGNMENT_IN_BYTES. */
  37. TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES = 3,
  38. /* Bundles should be aligned modulo this number of bytes. */
  39. TILEGX_BUNDLE_ALIGNMENT_IN_BYTES =
  40. (1 << TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES),
  41. /* Number of registers (some are magic, such as network I/O). */
  42. TILEGX_NUM_REGISTERS = 64,
  43. };
  44. /* Make a few "tile_" variables to simplify common code between
  45. architectures. */
  46. typedef tilegx_bundle_bits tile_bundle_bits;
  47. #define TILE_BUNDLE_SIZE_IN_BYTES TILEGX_BUNDLE_SIZE_IN_BYTES
  48. #define TILE_BUNDLE_ALIGNMENT_IN_BYTES TILEGX_BUNDLE_ALIGNMENT_IN_BYTES
  49. #define TILE_LOG2_BUNDLE_ALIGNMENT_IN_BYTES \
  50. TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES
  51. /* 64-bit pattern for a { bpt ; nop } bundle. */
  52. #define TILEGX_BPT_BUNDLE 0x286a44ae51485000ULL
  53. static __inline unsigned int
  54. get_BFEnd_X0(tilegx_bundle_bits num)
  55. {
  56. const unsigned int n = (unsigned int)num;
  57. return (((n >> 12)) & 0x3f);
  58. }
  59. static __inline unsigned int
  60. get_BFOpcodeExtension_X0(tilegx_bundle_bits num)
  61. {
  62. const unsigned int n = (unsigned int)num;
  63. return (((n >> 24)) & 0xf);
  64. }
  65. static __inline unsigned int
  66. get_BFStart_X0(tilegx_bundle_bits num)
  67. {
  68. const unsigned int n = (unsigned int)num;
  69. return (((n >> 18)) & 0x3f);
  70. }
  71. static __inline unsigned int
  72. get_BrOff_X1(tilegx_bundle_bits n)
  73. {
  74. return (((unsigned int)(n >> 31)) & 0x0000003f) |
  75. (((unsigned int)(n >> 37)) & 0x0001ffc0);
  76. }
  77. static __inline unsigned int
  78. get_BrType_X1(tilegx_bundle_bits n)
  79. {
  80. return (((unsigned int)(n >> 54)) & 0x1f);
  81. }
  82. static __inline unsigned int
  83. get_Dest_Imm8_X1(tilegx_bundle_bits n)
  84. {
  85. return (((unsigned int)(n >> 31)) & 0x0000003f) |
  86. (((unsigned int)(n >> 43)) & 0x000000c0);
  87. }
  88. static __inline unsigned int
  89. get_Dest_X0(tilegx_bundle_bits num)
  90. {
  91. const unsigned int n = (unsigned int)num;
  92. return (((n >> 0)) & 0x3f);
  93. }
  94. static __inline unsigned int
  95. get_Dest_X1(tilegx_bundle_bits n)
  96. {
  97. return (((unsigned int)(n >> 31)) & 0x3f);
  98. }
  99. static __inline unsigned int
  100. get_Dest_Y0(tilegx_bundle_bits num)
  101. {
  102. const unsigned int n = (unsigned int)num;
  103. return (((n >> 0)) & 0x3f);
  104. }
  105. static __inline unsigned int
  106. get_Dest_Y1(tilegx_bundle_bits n)
  107. {
  108. return (((unsigned int)(n >> 31)) & 0x3f);
  109. }
  110. static __inline unsigned int
  111. get_Imm16_X0(tilegx_bundle_bits num)
  112. {
  113. const unsigned int n = (unsigned int)num;
  114. return (((n >> 12)) & 0xffff);
  115. }
  116. static __inline unsigned int
  117. get_Imm16_X1(tilegx_bundle_bits n)
  118. {
  119. return (((unsigned int)(n >> 43)) & 0xffff);
  120. }
  121. static __inline unsigned int
  122. get_Imm8OpcodeExtension_X0(tilegx_bundle_bits num)
  123. {
  124. const unsigned int n = (unsigned int)num;
  125. return (((n >> 20)) & 0xff);
  126. }
  127. static __inline unsigned int
  128. get_Imm8OpcodeExtension_X1(tilegx_bundle_bits n)
  129. {
  130. return (((unsigned int)(n >> 51)) & 0xff);
  131. }
  132. static __inline unsigned int
  133. get_Imm8_X0(tilegx_bundle_bits num)
  134. {
  135. const unsigned int n = (unsigned int)num;
  136. return (((n >> 12)) & 0xff);
  137. }
  138. static __inline unsigned int
  139. get_Imm8_X1(tilegx_bundle_bits n)
  140. {
  141. return (((unsigned int)(n >> 43)) & 0xff);
  142. }
  143. static __inline unsigned int
  144. get_Imm8_Y0(tilegx_bundle_bits num)
  145. {
  146. const unsigned int n = (unsigned int)num;
  147. return (((n >> 12)) & 0xff);
  148. }
  149. static __inline unsigned int
  150. get_Imm8_Y1(tilegx_bundle_bits n)
  151. {
  152. return (((unsigned int)(n >> 43)) & 0xff);
  153. }
  154. static __inline unsigned int
  155. get_JumpOff_X1(tilegx_bundle_bits n)
  156. {
  157. return (((unsigned int)(n >> 31)) & 0x7ffffff);
  158. }
  159. static __inline unsigned int
  160. get_JumpOpcodeExtension_X1(tilegx_bundle_bits n)
  161. {
  162. return (((unsigned int)(n >> 58)) & 0x1);
  163. }
  164. static __inline unsigned int
  165. get_MF_Imm14_X1(tilegx_bundle_bits n)
  166. {
  167. return (((unsigned int)(n >> 37)) & 0x3fff);
  168. }
  169. static __inline unsigned int
  170. get_MT_Imm14_X1(tilegx_bundle_bits n)
  171. {
  172. return (((unsigned int)(n >> 31)) & 0x0000003f) |
  173. (((unsigned int)(n >> 37)) & 0x00003fc0);
  174. }
  175. static __inline unsigned int
  176. get_Mode(tilegx_bundle_bits n)
  177. {
  178. return (((unsigned int)(n >> 62)) & 0x3);
  179. }
  180. static __inline unsigned int
  181. get_Opcode_X0(tilegx_bundle_bits num)
  182. {
  183. const unsigned int n = (unsigned int)num;
  184. return (((n >> 28)) & 0x7);
  185. }
  186. static __inline unsigned int
  187. get_Opcode_X1(tilegx_bundle_bits n)
  188. {
  189. return (((unsigned int)(n >> 59)) & 0x7);
  190. }
  191. static __inline unsigned int
  192. get_Opcode_Y0(tilegx_bundle_bits num)
  193. {
  194. const unsigned int n = (unsigned int)num;
  195. return (((n >> 27)) & 0xf);
  196. }
  197. static __inline unsigned int
  198. get_Opcode_Y1(tilegx_bundle_bits n)
  199. {
  200. return (((unsigned int)(n >> 58)) & 0xf);
  201. }
  202. static __inline unsigned int
  203. get_Opcode_Y2(tilegx_bundle_bits n)
  204. {
  205. return (((n >> 26)) & 0x00000001) |
  206. (((unsigned int)(n >> 56)) & 0x00000002);
  207. }
  208. static __inline unsigned int
  209. get_RRROpcodeExtension_X0(tilegx_bundle_bits num)
  210. {
  211. const unsigned int n = (unsigned int)num;
  212. return (((n >> 18)) & 0x3ff);
  213. }
  214. static __inline unsigned int
  215. get_RRROpcodeExtension_X1(tilegx_bundle_bits n)
  216. {
  217. return (((unsigned int)(n >> 49)) & 0x3ff);
  218. }
  219. static __inline unsigned int
  220. get_RRROpcodeExtension_Y0(tilegx_bundle_bits num)
  221. {
  222. const unsigned int n = (unsigned int)num;
  223. return (((n >> 18)) & 0x3);
  224. }
  225. static __inline unsigned int
  226. get_RRROpcodeExtension_Y1(tilegx_bundle_bits n)
  227. {
  228. return (((unsigned int)(n >> 49)) & 0x3);
  229. }
  230. static __inline unsigned int
  231. get_ShAmt_X0(tilegx_bundle_bits num)
  232. {
  233. const unsigned int n = (unsigned int)num;
  234. return (((n >> 12)) & 0x3f);
  235. }
  236. static __inline unsigned int
  237. get_ShAmt_X1(tilegx_bundle_bits n)
  238. {
  239. return (((unsigned int)(n >> 43)) & 0x3f);
  240. }
  241. static __inline unsigned int
  242. get_ShAmt_Y0(tilegx_bundle_bits num)
  243. {
  244. const unsigned int n = (unsigned int)num;
  245. return (((n >> 12)) & 0x3f);
  246. }
  247. static __inline unsigned int
  248. get_ShAmt_Y1(tilegx_bundle_bits n)
  249. {
  250. return (((unsigned int)(n >> 43)) & 0x3f);
  251. }
  252. static __inline unsigned int
  253. get_ShiftOpcodeExtension_X0(tilegx_bundle_bits num)
  254. {
  255. const unsigned int n = (unsigned int)num;
  256. return (((n >> 18)) & 0x3ff);
  257. }
  258. static __inline unsigned int
  259. get_ShiftOpcodeExtension_X1(tilegx_bundle_bits n)
  260. {
  261. return (((unsigned int)(n >> 49)) & 0x3ff);
  262. }
  263. static __inline unsigned int
  264. get_ShiftOpcodeExtension_Y0(tilegx_bundle_bits num)
  265. {
  266. const unsigned int n = (unsigned int)num;
  267. return (((n >> 18)) & 0x3);
  268. }
  269. static __inline unsigned int
  270. get_ShiftOpcodeExtension_Y1(tilegx_bundle_bits n)
  271. {
  272. return (((unsigned int)(n >> 49)) & 0x3);
  273. }
  274. static __inline unsigned int
  275. get_SrcA_X0(tilegx_bundle_bits num)
  276. {
  277. const unsigned int n = (unsigned int)num;
  278. return (((n >> 6)) & 0x3f);
  279. }
  280. static __inline unsigned int
  281. get_SrcA_X1(tilegx_bundle_bits n)
  282. {
  283. return (((unsigned int)(n >> 37)) & 0x3f);
  284. }
  285. static __inline unsigned int
  286. get_SrcA_Y0(tilegx_bundle_bits num)
  287. {
  288. const unsigned int n = (unsigned int)num;
  289. return (((n >> 6)) & 0x3f);
  290. }
  291. static __inline unsigned int
  292. get_SrcA_Y1(tilegx_bundle_bits n)
  293. {
  294. return (((unsigned int)(n >> 37)) & 0x3f);
  295. }
  296. static __inline unsigned int
  297. get_SrcA_Y2(tilegx_bundle_bits num)
  298. {
  299. const unsigned int n = (unsigned int)num;
  300. return (((n >> 20)) & 0x3f);
  301. }
  302. static __inline unsigned int
  303. get_SrcBDest_Y2(tilegx_bundle_bits n)
  304. {
  305. return (((unsigned int)(n >> 51)) & 0x3f);
  306. }
  307. static __inline unsigned int
  308. get_SrcB_X0(tilegx_bundle_bits num)
  309. {
  310. const unsigned int n = (unsigned int)num;
  311. return (((n >> 12)) & 0x3f);
  312. }
  313. static __inline unsigned int
  314. get_SrcB_X1(tilegx_bundle_bits n)
  315. {
  316. return (((unsigned int)(n >> 43)) & 0x3f);
  317. }
  318. static __inline unsigned int
  319. get_SrcB_Y0(tilegx_bundle_bits num)
  320. {
  321. const unsigned int n = (unsigned int)num;
  322. return (((n >> 12)) & 0x3f);
  323. }
  324. static __inline unsigned int
  325. get_SrcB_Y1(tilegx_bundle_bits n)
  326. {
  327. return (((unsigned int)(n >> 43)) & 0x3f);
  328. }
  329. static __inline unsigned int
  330. get_UnaryOpcodeExtension_X0(tilegx_bundle_bits num)
  331. {
  332. const unsigned int n = (unsigned int)num;
  333. return (((n >> 12)) & 0x3f);
  334. }
  335. static __inline unsigned int
  336. get_UnaryOpcodeExtension_X1(tilegx_bundle_bits n)
  337. {
  338. return (((unsigned int)(n >> 43)) & 0x3f);
  339. }
  340. static __inline unsigned int
  341. get_UnaryOpcodeExtension_Y0(tilegx_bundle_bits num)
  342. {
  343. const unsigned int n = (unsigned int)num;
  344. return (((n >> 12)) & 0x3f);
  345. }
  346. static __inline unsigned int
  347. get_UnaryOpcodeExtension_Y1(tilegx_bundle_bits n)
  348. {
  349. return (((unsigned int)(n >> 43)) & 0x3f);
  350. }
  351. static __inline int
  352. sign_extend(int n, int num_bits)
  353. {
  354. int shift = (int)(sizeof(int) * 8 - num_bits);
  355. return (n << shift) >> shift;
  356. }
  357. static __inline tilegx_bundle_bits
  358. create_BFEnd_X0(int num)
  359. {
  360. const unsigned int n = (unsigned int)num;
  361. return ((n & 0x3f) << 12);
  362. }
  363. static __inline tilegx_bundle_bits
  364. create_BFOpcodeExtension_X0(int num)
  365. {
  366. const unsigned int n = (unsigned int)num;
  367. return ((n & 0xf) << 24);
  368. }
  369. static __inline tilegx_bundle_bits
  370. create_BFStart_X0(int num)
  371. {
  372. const unsigned int n = (unsigned int)num;
  373. return ((n & 0x3f) << 18);
  374. }
  375. static __inline tilegx_bundle_bits
  376. create_BrOff_X1(int num)
  377. {
  378. const unsigned int n = (unsigned int)num;
  379. return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
  380. (((tilegx_bundle_bits)(n & 0x0001ffc0)) << 37);
  381. }
  382. static __inline tilegx_bundle_bits
  383. create_BrType_X1(int num)
  384. {
  385. const unsigned int n = (unsigned int)num;
  386. return (((tilegx_bundle_bits)(n & 0x1f)) << 54);
  387. }
  388. static __inline tilegx_bundle_bits
  389. create_Dest_Imm8_X1(int num)
  390. {
  391. const unsigned int n = (unsigned int)num;
  392. return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
  393. (((tilegx_bundle_bits)(n & 0x000000c0)) << 43);
  394. }
  395. static __inline tilegx_bundle_bits
  396. create_Dest_X0(int num)
  397. {
  398. const unsigned int n = (unsigned int)num;
  399. return ((n & 0x3f) << 0);
  400. }
  401. static __inline tilegx_bundle_bits
  402. create_Dest_X1(int num)
  403. {
  404. const unsigned int n = (unsigned int)num;
  405. return (((tilegx_bundle_bits)(n & 0x3f)) << 31);
  406. }
  407. static __inline tilegx_bundle_bits
  408. create_Dest_Y0(int num)
  409. {
  410. const unsigned int n = (unsigned int)num;
  411. return ((n & 0x3f) << 0);
  412. }
  413. static __inline tilegx_bundle_bits
  414. create_Dest_Y1(int num)
  415. {
  416. const unsigned int n = (unsigned int)num;
  417. return (((tilegx_bundle_bits)(n & 0x3f)) << 31);
  418. }
  419. static __inline tilegx_bundle_bits
  420. create_Imm16_X0(int num)
  421. {
  422. const unsigned int n = (unsigned int)num;
  423. return ((n & 0xffff) << 12);
  424. }
  425. static __inline tilegx_bundle_bits
  426. create_Imm16_X1(int num)
  427. {
  428. const unsigned int n = (unsigned int)num;
  429. return (((tilegx_bundle_bits)(n & 0xffff)) << 43);
  430. }
  431. static __inline tilegx_bundle_bits
  432. create_Imm8OpcodeExtension_X0(int num)
  433. {
  434. const unsigned int n = (unsigned int)num;
  435. return ((n & 0xff) << 20);
  436. }
  437. static __inline tilegx_bundle_bits
  438. create_Imm8OpcodeExtension_X1(int num)
  439. {
  440. const unsigned int n = (unsigned int)num;
  441. return (((tilegx_bundle_bits)(n & 0xff)) << 51);
  442. }
  443. static __inline tilegx_bundle_bits
  444. create_Imm8_X0(int num)
  445. {
  446. const unsigned int n = (unsigned int)num;
  447. return ((n & 0xff) << 12);
  448. }
  449. static __inline tilegx_bundle_bits
  450. create_Imm8_X1(int num)
  451. {
  452. const unsigned int n = (unsigned int)num;
  453. return (((tilegx_bundle_bits)(n & 0xff)) << 43);
  454. }
  455. static __inline tilegx_bundle_bits
  456. create_Imm8_Y0(int num)
  457. {
  458. const unsigned int n = (unsigned int)num;
  459. return ((n & 0xff) << 12);
  460. }
  461. static __inline tilegx_bundle_bits
  462. create_Imm8_Y1(int num)
  463. {
  464. const unsigned int n = (unsigned int)num;
  465. return (((tilegx_bundle_bits)(n & 0xff)) << 43);
  466. }
  467. static __inline tilegx_bundle_bits
  468. create_JumpOff_X1(int num)
  469. {
  470. const unsigned int n = (unsigned int)num;
  471. return (((tilegx_bundle_bits)(n & 0x7ffffff)) << 31);
  472. }
  473. static __inline tilegx_bundle_bits
  474. create_JumpOpcodeExtension_X1(int num)
  475. {
  476. const unsigned int n = (unsigned int)num;
  477. return (((tilegx_bundle_bits)(n & 0x1)) << 58);
  478. }
  479. static __inline tilegx_bundle_bits
  480. create_MF_Imm14_X1(int num)
  481. {
  482. const unsigned int n = (unsigned int)num;
  483. return (((tilegx_bundle_bits)(n & 0x3fff)) << 37);
  484. }
  485. static __inline tilegx_bundle_bits
  486. create_MT_Imm14_X1(int num)
  487. {
  488. const unsigned int n = (unsigned int)num;
  489. return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
  490. (((tilegx_bundle_bits)(n & 0x00003fc0)) << 37);
  491. }
  492. static __inline tilegx_bundle_bits
  493. create_Mode(int num)
  494. {
  495. const unsigned int n = (unsigned int)num;
  496. return (((tilegx_bundle_bits)(n & 0x3)) << 62);
  497. }
  498. static __inline tilegx_bundle_bits
  499. create_Opcode_X0(int num)
  500. {
  501. const unsigned int n = (unsigned int)num;
  502. return ((n & 0x7) << 28);
  503. }
  504. static __inline tilegx_bundle_bits
  505. create_Opcode_X1(int num)
  506. {
  507. const unsigned int n = (unsigned int)num;
  508. return (((tilegx_bundle_bits)(n & 0x7)) << 59);
  509. }
  510. static __inline tilegx_bundle_bits
  511. create_Opcode_Y0(int num)
  512. {
  513. const unsigned int n = (unsigned int)num;
  514. return ((n & 0xf) << 27);
  515. }
  516. static __inline tilegx_bundle_bits
  517. create_Opcode_Y1(int num)
  518. {
  519. const unsigned int n = (unsigned int)num;
  520. return (((tilegx_bundle_bits)(n & 0xf)) << 58);
  521. }
  522. static __inline tilegx_bundle_bits
  523. create_Opcode_Y2(int num)
  524. {
  525. const unsigned int n = (unsigned int)num;
  526. return ((n & 0x00000001) << 26) |
  527. (((tilegx_bundle_bits)(n & 0x00000002)) << 56);
  528. }
  529. static __inline tilegx_bundle_bits
  530. create_RRROpcodeExtension_X0(int num)
  531. {
  532. const unsigned int n = (unsigned int)num;
  533. return ((n & 0x3ff) << 18);
  534. }
  535. static __inline tilegx_bundle_bits
  536. create_RRROpcodeExtension_X1(int num)
  537. {
  538. const unsigned int n = (unsigned int)num;
  539. return (((tilegx_bundle_bits)(n & 0x3ff)) << 49);
  540. }
  541. static __inline tilegx_bundle_bits
  542. create_RRROpcodeExtension_Y0(int num)
  543. {
  544. const unsigned int n = (unsigned int)num;
  545. return ((n & 0x3) << 18);
  546. }
  547. static __inline tilegx_bundle_bits
  548. create_RRROpcodeExtension_Y1(int num)
  549. {
  550. const unsigned int n = (unsigned int)num;
  551. return (((tilegx_bundle_bits)(n & 0x3)) << 49);
  552. }
  553. static __inline tilegx_bundle_bits
  554. create_ShAmt_X0(int num)
  555. {
  556. const unsigned int n = (unsigned int)num;
  557. return ((n & 0x3f) << 12);
  558. }
  559. static __inline tilegx_bundle_bits
  560. create_ShAmt_X1(int num)
  561. {
  562. const unsigned int n = (unsigned int)num;
  563. return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
  564. }
  565. static __inline tilegx_bundle_bits
  566. create_ShAmt_Y0(int num)
  567. {
  568. const unsigned int n = (unsigned int)num;
  569. return ((n & 0x3f) << 12);
  570. }
  571. static __inline tilegx_bundle_bits
  572. create_ShAmt_Y1(int num)
  573. {
  574. const unsigned int n = (unsigned int)num;
  575. return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
  576. }
  577. static __inline tilegx_bundle_bits
  578. create_ShiftOpcodeExtension_X0(int num)
  579. {
  580. const unsigned int n = (unsigned int)num;
  581. return ((n & 0x3ff) << 18);
  582. }
  583. static __inline tilegx_bundle_bits
  584. create_ShiftOpcodeExtension_X1(int num)
  585. {
  586. const unsigned int n = (unsigned int)num;
  587. return (((tilegx_bundle_bits)(n & 0x3ff)) << 49);
  588. }
  589. static __inline tilegx_bundle_bits
  590. create_ShiftOpcodeExtension_Y0(int num)
  591. {
  592. const unsigned int n = (unsigned int)num;
  593. return ((n & 0x3) << 18);
  594. }
  595. static __inline tilegx_bundle_bits
  596. create_ShiftOpcodeExtension_Y1(int num)
  597. {
  598. const unsigned int n = (unsigned int)num;
  599. return (((tilegx_bundle_bits)(n & 0x3)) << 49);
  600. }
  601. static __inline tilegx_bundle_bits
  602. create_SrcA_X0(int num)
  603. {
  604. const unsigned int n = (unsigned int)num;
  605. return ((n & 0x3f) << 6);
  606. }
  607. static __inline tilegx_bundle_bits
  608. create_SrcA_X1(int num)
  609. {
  610. const unsigned int n = (unsigned int)num;
  611. return (((tilegx_bundle_bits)(n & 0x3f)) << 37);
  612. }
  613. static __inline tilegx_bundle_bits
  614. create_SrcA_Y0(int num)
  615. {
  616. const unsigned int n = (unsigned int)num;
  617. return ((n & 0x3f) << 6);
  618. }
  619. static __inline tilegx_bundle_bits
  620. create_SrcA_Y1(int num)
  621. {
  622. const unsigned int n = (unsigned int)num;
  623. return (((tilegx_bundle_bits)(n & 0x3f)) << 37);
  624. }
  625. static __inline tilegx_bundle_bits
  626. create_SrcA_Y2(int num)
  627. {
  628. const unsigned int n = (unsigned int)num;
  629. return ((n & 0x3f) << 20);
  630. }
  631. static __inline tilegx_bundle_bits
  632. create_SrcBDest_Y2(int num)
  633. {
  634. const unsigned int n = (unsigned int)num;
  635. return (((tilegx_bundle_bits)(n & 0x3f)) << 51);
  636. }
  637. static __inline tilegx_bundle_bits
  638. create_SrcB_X0(int num)
  639. {
  640. const unsigned int n = (unsigned int)num;
  641. return ((n & 0x3f) << 12);
  642. }
  643. static __inline tilegx_bundle_bits
  644. create_SrcB_X1(int num)
  645. {
  646. const unsigned int n = (unsigned int)num;
  647. return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
  648. }
  649. static __inline tilegx_bundle_bits
  650. create_SrcB_Y0(int num)
  651. {
  652. const unsigned int n = (unsigned int)num;
  653. return ((n & 0x3f) << 12);
  654. }
  655. static __inline tilegx_bundle_bits
  656. create_SrcB_Y1(int num)
  657. {
  658. const unsigned int n = (unsigned int)num;
  659. return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
  660. }
  661. static __inline tilegx_bundle_bits
  662. create_UnaryOpcodeExtension_X0(int num)
  663. {
  664. const unsigned int n = (unsigned int)num;
  665. return ((n & 0x3f) << 12);
  666. }
  667. static __inline tilegx_bundle_bits
  668. create_UnaryOpcodeExtension_X1(int num)
  669. {
  670. const unsigned int n = (unsigned int)num;
  671. return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
  672. }
  673. static __inline tilegx_bundle_bits
  674. create_UnaryOpcodeExtension_Y0(int num)
  675. {
  676. const unsigned int n = (unsigned int)num;
  677. return ((n & 0x3f) << 12);
  678. }
  679. static __inline tilegx_bundle_bits
  680. create_UnaryOpcodeExtension_Y1(int num)
  681. {
  682. const unsigned int n = (unsigned int)num;
  683. return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
  684. }
  685. enum
  686. {
  687. ADDI_IMM8_OPCODE_X0 = 1,
  688. ADDI_IMM8_OPCODE_X1 = 1,
  689. ADDI_OPCODE_Y0 = 0,
  690. ADDI_OPCODE_Y1 = 1,
  691. ADDLI_OPCODE_X0 = 1,
  692. ADDLI_OPCODE_X1 = 0,
  693. ADDXI_IMM8_OPCODE_X0 = 2,
  694. ADDXI_IMM8_OPCODE_X1 = 2,
  695. ADDXI_OPCODE_Y0 = 1,
  696. ADDXI_OPCODE_Y1 = 2,
  697. ADDXLI_OPCODE_X0 = 2,
  698. ADDXLI_OPCODE_X1 = 1,
  699. ADDXSC_RRR_0_OPCODE_X0 = 1,
  700. ADDXSC_RRR_0_OPCODE_X1 = 1,
  701. ADDX_RRR_0_OPCODE_X0 = 2,
  702. ADDX_RRR_0_OPCODE_X1 = 2,
  703. ADDX_RRR_0_OPCODE_Y0 = 0,
  704. ADDX_SPECIAL_0_OPCODE_Y1 = 0,
  705. ADD_RRR_0_OPCODE_X0 = 3,
  706. ADD_RRR_0_OPCODE_X1 = 3,
  707. ADD_RRR_0_OPCODE_Y0 = 1,
  708. ADD_SPECIAL_0_OPCODE_Y1 = 1,
  709. ANDI_IMM8_OPCODE_X0 = 3,
  710. ANDI_IMM8_OPCODE_X1 = 3,
  711. ANDI_OPCODE_Y0 = 2,
  712. ANDI_OPCODE_Y1 = 3,
  713. AND_RRR_0_OPCODE_X0 = 4,
  714. AND_RRR_0_OPCODE_X1 = 4,
  715. AND_RRR_5_OPCODE_Y0 = 0,
  716. AND_RRR_5_OPCODE_Y1 = 0,
  717. BEQZT_BRANCH_OPCODE_X1 = 16,
  718. BEQZ_BRANCH_OPCODE_X1 = 17,
  719. BFEXTS_BF_OPCODE_X0 = 4,
  720. BFEXTU_BF_OPCODE_X0 = 5,
  721. BFINS_BF_OPCODE_X0 = 6,
  722. BF_OPCODE_X0 = 3,
  723. BGEZT_BRANCH_OPCODE_X1 = 18,
  724. BGEZ_BRANCH_OPCODE_X1 = 19,
  725. BGTZT_BRANCH_OPCODE_X1 = 20,
  726. BGTZ_BRANCH_OPCODE_X1 = 21,
  727. BLBCT_BRANCH_OPCODE_X1 = 22,
  728. BLBC_BRANCH_OPCODE_X1 = 23,
  729. BLBST_BRANCH_OPCODE_X1 = 24,
  730. BLBS_BRANCH_OPCODE_X1 = 25,
  731. BLEZT_BRANCH_OPCODE_X1 = 26,
  732. BLEZ_BRANCH_OPCODE_X1 = 27,
  733. BLTZT_BRANCH_OPCODE_X1 = 28,
  734. BLTZ_BRANCH_OPCODE_X1 = 29,
  735. BNEZT_BRANCH_OPCODE_X1 = 30,
  736. BNEZ_BRANCH_OPCODE_X1 = 31,
  737. BRANCH_OPCODE_X1 = 2,
  738. CMOVEQZ_RRR_0_OPCODE_X0 = 5,
  739. CMOVEQZ_RRR_4_OPCODE_Y0 = 0,
  740. CMOVNEZ_RRR_0_OPCODE_X0 = 6,
  741. CMOVNEZ_RRR_4_OPCODE_Y0 = 1,
  742. CMPEQI_IMM8_OPCODE_X0 = 4,
  743. CMPEQI_IMM8_OPCODE_X1 = 4,
  744. CMPEQI_OPCODE_Y0 = 3,
  745. CMPEQI_OPCODE_Y1 = 4,
  746. CMPEQ_RRR_0_OPCODE_X0 = 7,
  747. CMPEQ_RRR_0_OPCODE_X1 = 5,
  748. CMPEQ_RRR_3_OPCODE_Y0 = 0,
  749. CMPEQ_RRR_3_OPCODE_Y1 = 2,
  750. CMPEXCH4_RRR_0_OPCODE_X1 = 6,
  751. CMPEXCH_RRR_0_OPCODE_X1 = 7,
  752. CMPLES_RRR_0_OPCODE_X0 = 8,
  753. CMPLES_RRR_0_OPCODE_X1 = 8,
  754. CMPLES_RRR_2_OPCODE_Y0 = 0,
  755. CMPLES_RRR_2_OPCODE_Y1 = 0,
  756. CMPLEU_RRR_0_OPCODE_X0 = 9,
  757. CMPLEU_RRR_0_OPCODE_X1 = 9,
  758. CMPLEU_RRR_2_OPCODE_Y0 = 1,
  759. CMPLEU_RRR_2_OPCODE_Y1 = 1,
  760. CMPLTSI_IMM8_OPCODE_X0 = 5,
  761. CMPLTSI_IMM8_OPCODE_X1 = 5,
  762. CMPLTSI_OPCODE_Y0 = 4,
  763. CMPLTSI_OPCODE_Y1 = 5,
  764. CMPLTS_RRR_0_OPCODE_X0 = 10,
  765. CMPLTS_RRR_0_OPCODE_X1 = 10,
  766. CMPLTS_RRR_2_OPCODE_Y0 = 2,
  767. CMPLTS_RRR_2_OPCODE_Y1 = 2,
  768. CMPLTUI_IMM8_OPCODE_X0 = 6,
  769. CMPLTUI_IMM8_OPCODE_X1 = 6,
  770. CMPLTU_RRR_0_OPCODE_X0 = 11,
  771. CMPLTU_RRR_0_OPCODE_X1 = 11,
  772. CMPLTU_RRR_2_OPCODE_Y0 = 3,
  773. CMPLTU_RRR_2_OPCODE_Y1 = 3,
  774. CMPNE_RRR_0_OPCODE_X0 = 12,
  775. CMPNE_RRR_0_OPCODE_X1 = 12,
  776. CMPNE_RRR_3_OPCODE_Y0 = 1,
  777. CMPNE_RRR_3_OPCODE_Y1 = 3,
  778. CMULAF_RRR_0_OPCODE_X0 = 13,
  779. CMULA_RRR_0_OPCODE_X0 = 14,
  780. CMULFR_RRR_0_OPCODE_X0 = 15,
  781. CMULF_RRR_0_OPCODE_X0 = 16,
  782. CMULHR_RRR_0_OPCODE_X0 = 17,
  783. CMULH_RRR_0_OPCODE_X0 = 18,
  784. CMUL_RRR_0_OPCODE_X0 = 19,
  785. CNTLZ_UNARY_OPCODE_X0 = 1,
  786. CNTLZ_UNARY_OPCODE_Y0 = 1,
  787. CNTTZ_UNARY_OPCODE_X0 = 2,
  788. CNTTZ_UNARY_OPCODE_Y0 = 2,
  789. CRC32_32_RRR_0_OPCODE_X0 = 20,
  790. CRC32_8_RRR_0_OPCODE_X0 = 21,
  791. DBLALIGN2_RRR_0_OPCODE_X0 = 22,
  792. DBLALIGN2_RRR_0_OPCODE_X1 = 13,
  793. DBLALIGN4_RRR_0_OPCODE_X0 = 23,
  794. DBLALIGN4_RRR_0_OPCODE_X1 = 14,
  795. DBLALIGN6_RRR_0_OPCODE_X0 = 24,
  796. DBLALIGN6_RRR_0_OPCODE_X1 = 15,
  797. DBLALIGN_RRR_0_OPCODE_X0 = 25,
  798. DRAIN_UNARY_OPCODE_X1 = 1,
  799. DTLBPR_UNARY_OPCODE_X1 = 2,
  800. EXCH4_RRR_0_OPCODE_X1 = 16,
  801. EXCH_RRR_0_OPCODE_X1 = 17,
  802. FDOUBLE_ADDSUB_RRR_0_OPCODE_X0 = 26,
  803. FDOUBLE_ADD_FLAGS_RRR_0_OPCODE_X0 = 27,
  804. FDOUBLE_MUL_FLAGS_RRR_0_OPCODE_X0 = 28,
  805. FDOUBLE_PACK1_RRR_0_OPCODE_X0 = 29,
  806. FDOUBLE_PACK2_RRR_0_OPCODE_X0 = 30,
  807. FDOUBLE_SUB_FLAGS_RRR_0_OPCODE_X0 = 31,
  808. FDOUBLE_UNPACK_MAX_RRR_0_OPCODE_X0 = 32,
  809. FDOUBLE_UNPACK_MIN_RRR_0_OPCODE_X0 = 33,
  810. FETCHADD4_RRR_0_OPCODE_X1 = 18,
  811. FETCHADDGEZ4_RRR_0_OPCODE_X1 = 19,
  812. FETCHADDGEZ_RRR_0_OPCODE_X1 = 20,
  813. FETCHADD_RRR_0_OPCODE_X1 = 21,
  814. FETCHAND4_RRR_0_OPCODE_X1 = 22,
  815. FETCHAND_RRR_0_OPCODE_X1 = 23,
  816. FETCHOR4_RRR_0_OPCODE_X1 = 24,
  817. FETCHOR_RRR_0_OPCODE_X1 = 25,
  818. FINV_UNARY_OPCODE_X1 = 3,
  819. FLUSHWB_UNARY_OPCODE_X1 = 4,
  820. FLUSH_UNARY_OPCODE_X1 = 5,
  821. FNOP_UNARY_OPCODE_X0 = 3,
  822. FNOP_UNARY_OPCODE_X1 = 6,
  823. FNOP_UNARY_OPCODE_Y0 = 3,
  824. FNOP_UNARY_OPCODE_Y1 = 8,
  825. FSINGLE_ADD1_RRR_0_OPCODE_X0 = 34,
  826. FSINGLE_ADDSUB2_RRR_0_OPCODE_X0 = 35,
  827. FSINGLE_MUL1_RRR_0_OPCODE_X0 = 36,
  828. FSINGLE_MUL2_RRR_0_OPCODE_X0 = 37,
  829. FSINGLE_PACK1_UNARY_OPCODE_X0 = 4,
  830. FSINGLE_PACK1_UNARY_OPCODE_Y0 = 4,
  831. FSINGLE_PACK2_RRR_0_OPCODE_X0 = 38,
  832. FSINGLE_SUB1_RRR_0_OPCODE_X0 = 39,
  833. ICOH_UNARY_OPCODE_X1 = 7,
  834. ILL_UNARY_OPCODE_X1 = 8,
  835. ILL_UNARY_OPCODE_Y1 = 9,
  836. IMM8_OPCODE_X0 = 4,
  837. IMM8_OPCODE_X1 = 3,
  838. INV_UNARY_OPCODE_X1 = 9,
  839. IRET_UNARY_OPCODE_X1 = 10,
  840. JALRP_UNARY_OPCODE_X1 = 11,
  841. JALRP_UNARY_OPCODE_Y1 = 10,
  842. JALR_UNARY_OPCODE_X1 = 12,
  843. JALR_UNARY_OPCODE_Y1 = 11,
  844. JAL_JUMP_OPCODE_X1 = 0,
  845. JRP_UNARY_OPCODE_X1 = 13,
  846. JRP_UNARY_OPCODE_Y1 = 12,
  847. JR_UNARY_OPCODE_X1 = 14,
  848. JR_UNARY_OPCODE_Y1 = 13,
  849. JUMP_OPCODE_X1 = 4,
  850. J_JUMP_OPCODE_X1 = 1,
  851. LD1S_ADD_IMM8_OPCODE_X1 = 7,
  852. LD1S_OPCODE_Y2 = 0,
  853. LD1S_UNARY_OPCODE_X1 = 15,
  854. LD1U_ADD_IMM8_OPCODE_X1 = 8,
  855. LD1U_OPCODE_Y2 = 1,
  856. LD1U_UNARY_OPCODE_X1 = 16,
  857. LD2S_ADD_IMM8_OPCODE_X1 = 9,
  858. LD2S_OPCODE_Y2 = 2,
  859. LD2S_UNARY_OPCODE_X1 = 17,
  860. LD2U_ADD_IMM8_OPCODE_X1 = 10,
  861. LD2U_OPCODE_Y2 = 3,
  862. LD2U_UNARY_OPCODE_X1 = 18,
  863. LD4S_ADD_IMM8_OPCODE_X1 = 11,
  864. LD4S_OPCODE_Y2 = 1,
  865. LD4S_UNARY_OPCODE_X1 = 19,
  866. LD4U_ADD_IMM8_OPCODE_X1 = 12,
  867. LD4U_OPCODE_Y2 = 2,
  868. LD4U_UNARY_OPCODE_X1 = 20,
  869. LDNA_UNARY_OPCODE_X1 = 21,
  870. LDNT1S_ADD_IMM8_OPCODE_X1 = 13,
  871. LDNT1S_UNARY_OPCODE_X1 = 22,
  872. LDNT1U_ADD_IMM8_OPCODE_X1 = 14,
  873. LDNT1U_UNARY_OPCODE_X1 = 23,
  874. LDNT2S_ADD_IMM8_OPCODE_X1 = 15,
  875. LDNT2S_UNARY_OPCODE_X1 = 24,
  876. LDNT2U_ADD_IMM8_OPCODE_X1 = 16,
  877. LDNT2U_UNARY_OPCODE_X1 = 25,
  878. LDNT4S_ADD_IMM8_OPCODE_X1 = 17,
  879. LDNT4S_UNARY_OPCODE_X1 = 26,
  880. LDNT4U_ADD_IMM8_OPCODE_X1 = 18,
  881. LDNT4U_UNARY_OPCODE_X1 = 27,
  882. LDNT_ADD_IMM8_OPCODE_X1 = 19,
  883. LDNT_UNARY_OPCODE_X1 = 28,
  884. LD_ADD_IMM8_OPCODE_X1 = 20,
  885. LD_OPCODE_Y2 = 3,
  886. LD_UNARY_OPCODE_X1 = 29,
  887. LNK_UNARY_OPCODE_X1 = 30,
  888. LNK_UNARY_OPCODE_Y1 = 14,
  889. LWNA_ADD_IMM8_OPCODE_X1 = 21,
  890. MFSPR_IMM8_OPCODE_X1 = 22,
  891. MF_UNARY_OPCODE_X1 = 31,
  892. MM_BF_OPCODE_X0 = 7,
  893. MNZ_RRR_0_OPCODE_X0 = 40,
  894. MNZ_RRR_0_OPCODE_X1 = 26,
  895. MNZ_RRR_4_OPCODE_Y0 = 2,
  896. MNZ_RRR_4_OPCODE_Y1 = 2,
  897. MODE_OPCODE_YA2 = 1,
  898. MODE_OPCODE_YB2 = 2,
  899. MODE_OPCODE_YC2 = 3,
  900. MTSPR_IMM8_OPCODE_X1 = 23,
  901. MULAX_RRR_0_OPCODE_X0 = 41,
  902. MULAX_RRR_3_OPCODE_Y0 = 2,
  903. MULA_HS_HS_RRR_0_OPCODE_X0 = 42,
  904. MULA_HS_HS_RRR_9_OPCODE_Y0 = 0,
  905. MULA_HS_HU_RRR_0_OPCODE_X0 = 43,
  906. MULA_HS_LS_RRR_0_OPCODE_X0 = 44,
  907. MULA_HS_LU_RRR_0_OPCODE_X0 = 45,
  908. MULA_HU_HU_RRR_0_OPCODE_X0 = 46,
  909. MULA_HU_HU_RRR_9_OPCODE_Y0 = 1,
  910. MULA_HU_LS_RRR_0_OPCODE_X0 = 47,
  911. MULA_HU_LU_RRR_0_OPCODE_X0 = 48,
  912. MULA_LS_LS_RRR_0_OPCODE_X0 = 49,
  913. MULA_LS_LS_RRR_9_OPCODE_Y0 = 2,
  914. MULA_LS_LU_RRR_0_OPCODE_X0 = 50,
  915. MULA_LU_LU_RRR_0_OPCODE_X0 = 51,
  916. MULA_LU_LU_RRR_9_OPCODE_Y0 = 3,
  917. MULX_RRR_0_OPCODE_X0 = 52,
  918. MULX_RRR_3_OPCODE_Y0 = 3,
  919. MUL_HS_HS_RRR_0_OPCODE_X0 = 53,
  920. MUL_HS_HS_RRR_8_OPCODE_Y0 = 0,
  921. MUL_HS_HU_RRR_0_OPCODE_X0 = 54,
  922. MUL_HS_LS_RRR_0_OPCODE_X0 = 55,
  923. MUL_HS_LU_RRR_0_OPCODE_X0 = 56,
  924. MUL_HU_HU_RRR_0_OPCODE_X0 = 57,
  925. MUL_HU_HU_RRR_8_OPCODE_Y0 = 1,
  926. MUL_HU_LS_RRR_0_OPCODE_X0 = 58,
  927. MUL_HU_LU_RRR_0_OPCODE_X0 = 59,
  928. MUL_LS_LS_RRR_0_OPCODE_X0 = 60,
  929. MUL_LS_LS_RRR_8_OPCODE_Y0 = 2,
  930. MUL_LS_LU_RRR_0_OPCODE_X0 = 61,
  931. MUL_LU_LU_RRR_0_OPCODE_X0 = 62,
  932. MUL_LU_LU_RRR_8_OPCODE_Y0 = 3,
  933. MZ_RRR_0_OPCODE_X0 = 63,
  934. MZ_RRR_0_OPCODE_X1 = 27,
  935. MZ_RRR_4_OPCODE_Y0 = 3,
  936. MZ_RRR_4_OPCODE_Y1 = 3,
  937. NAP_UNARY_OPCODE_X1 = 32,
  938. NOP_UNARY_OPCODE_X0 = 5,
  939. NOP_UNARY_OPCODE_X1 = 33,
  940. NOP_UNARY_OPCODE_Y0 = 5,
  941. NOP_UNARY_OPCODE_Y1 = 15,
  942. NOR_RRR_0_OPCODE_X0 = 64,
  943. NOR_RRR_0_OPCODE_X1 = 28,
  944. NOR_RRR_5_OPCODE_Y0 = 1,
  945. NOR_RRR_5_OPCODE_Y1 = 1,
  946. ORI_IMM8_OPCODE_X0 = 7,
  947. ORI_IMM8_OPCODE_X1 = 24,
  948. OR_RRR_0_OPCODE_X0 = 65,
  949. OR_RRR_0_OPCODE_X1 = 29,
  950. OR_RRR_5_OPCODE_Y0 = 2,
  951. OR_RRR_5_OPCODE_Y1 = 2,
  952. PCNT_UNARY_OPCODE_X0 = 6,
  953. PCNT_UNARY_OPCODE_Y0 = 6,
  954. REVBITS_UNARY_OPCODE_X0 = 7,
  955. REVBITS_UNARY_OPCODE_Y0 = 7,
  956. REVBYTES_UNARY_OPCODE_X0 = 8,
  957. REVBYTES_UNARY_OPCODE_Y0 = 8,
  958. ROTLI_SHIFT_OPCODE_X0 = 1,
  959. ROTLI_SHIFT_OPCODE_X1 = 1,
  960. ROTLI_SHIFT_OPCODE_Y0 = 0,
  961. ROTLI_SHIFT_OPCODE_Y1 = 0,
  962. ROTL_RRR_0_OPCODE_X0 = 66,
  963. ROTL_RRR_0_OPCODE_X1 = 30,
  964. ROTL_RRR_6_OPCODE_Y0 = 0,
  965. ROTL_RRR_6_OPCODE_Y1 = 0,
  966. RRR_0_OPCODE_X0 = 5,
  967. RRR_0_OPCODE_X1 = 5,
  968. RRR_0_OPCODE_Y0 = 5,
  969. RRR_0_OPCODE_Y1 = 6,
  970. RRR_1_OPCODE_Y0 = 6,
  971. RRR_1_OPCODE_Y1 = 7,
  972. RRR_2_OPCODE_Y0 = 7,
  973. RRR_2_OPCODE_Y1 = 8,
  974. RRR_3_OPCODE_Y0 = 8,
  975. RRR_3_OPCODE_Y1 = 9,
  976. RRR_4_OPCODE_Y0 = 9,
  977. RRR_4_OPCODE_Y1 = 10,
  978. RRR_5_OPCODE_Y0 = 10,
  979. RRR_5_OPCODE_Y1 = 11,
  980. RRR_6_OPCODE_Y0 = 11,
  981. RRR_6_OPCODE_Y1 = 12,
  982. RRR_7_OPCODE_Y0 = 12,
  983. RRR_7_OPCODE_Y1 = 13,
  984. RRR_8_OPCODE_Y0 = 13,
  985. RRR_9_OPCODE_Y0 = 14,
  986. SHIFT_OPCODE_X0 = 6,
  987. SHIFT_OPCODE_X1 = 6,
  988. SHIFT_OPCODE_Y0 = 15,
  989. SHIFT_OPCODE_Y1 = 14,
  990. SHL16INSLI_OPCODE_X0 = 7,
  991. SHL16INSLI_OPCODE_X1 = 7,
  992. SHL1ADDX_RRR_0_OPCODE_X0 = 67,
  993. SHL1ADDX_RRR_0_OPCODE_X1 = 31,
  994. SHL1ADDX_RRR_7_OPCODE_Y0 = 1,
  995. SHL1ADDX_RRR_7_OPCODE_Y1 = 1,
  996. SHL1ADD_RRR_0_OPCODE_X0 = 68,
  997. SHL1ADD_RRR_0_OPCODE_X1 = 32,
  998. SHL1ADD_RRR_1_OPCODE_Y0 = 0,
  999. SHL1ADD_RRR_1_OPCODE_Y1 = 0,
  1000. SHL2ADDX_RRR_0_OPCODE_X0 = 69,
  1001. SHL2ADDX_RRR_0_OPCODE_X1 = 33,
  1002. SHL2ADDX_RRR_7_OPCODE_Y0 = 2,
  1003. SHL2ADDX_RRR_7_OPCODE_Y1 = 2,
  1004. SHL2ADD_RRR_0_OPCODE_X0 = 70,
  1005. SHL2ADD_RRR_0_OPCODE_X1 = 34,
  1006. SHL2ADD_RRR_1_OPCODE_Y0 = 1,
  1007. SHL2ADD_RRR_1_OPCODE_Y1 = 1,
  1008. SHL3ADDX_RRR_0_OPCODE_X0 = 71,
  1009. SHL3ADDX_RRR_0_OPCODE_X1 = 35,
  1010. SHL3ADDX_RRR_7_OPCODE_Y0 = 3,
  1011. SHL3ADDX_RRR_7_OPCODE_Y1 = 3,
  1012. SHL3ADD_RRR_0_OPCODE_X0 = 72,
  1013. SHL3ADD_RRR_0_OPCODE_X1 = 36,
  1014. SHL3ADD_RRR_1_OPCODE_Y0 = 2,
  1015. SHL3ADD_RRR_1_OPCODE_Y1 = 2,
  1016. SHLI_SHIFT_OPCODE_X0 = 2,
  1017. SHLI_SHIFT_OPCODE_X1 = 2,
  1018. SHLI_SHIFT_OPCODE_Y0 = 1,
  1019. SHLI_SHIFT_OPCODE_Y1 = 1,
  1020. SHLXI_SHIFT_OPCODE_X0 = 3,
  1021. SHLXI_SHIFT_OPCODE_X1 = 3,
  1022. SHLX_RRR_0_OPCODE_X0 = 73,
  1023. SHLX_RRR_0_OPCODE_X1 = 37,
  1024. SHL_RRR_0_OPCODE_X0 = 74,
  1025. SHL_RRR_0_OPCODE_X1 = 38,
  1026. SHL_RRR_6_OPCODE_Y0 = 1,
  1027. SHL_RRR_6_OPCODE_Y1 = 1,
  1028. SHRSI_SHIFT_OPCODE_X0 = 4,
  1029. SHRSI_SHIFT_OPCODE_X1 = 4,
  1030. SHRSI_SHIFT_OPCODE_Y0 = 2,
  1031. SHRSI_SHIFT_OPCODE_Y1 = 2,
  1032. SHRS_RRR_0_OPCODE_X0 = 75,
  1033. SHRS_RRR_0_OPCODE_X1 = 39,
  1034. SHRS_RRR_6_OPCODE_Y0 = 2,
  1035. SHRS_RRR_6_OPCODE_Y1 = 2,
  1036. SHRUI_SHIFT_OPCODE_X0 = 5,
  1037. SHRUI_SHIFT_OPCODE_X1 = 5,
  1038. SHRUI_SHIFT_OPCODE_Y0 = 3,
  1039. SHRUI_SHIFT_OPCODE_Y1 = 3,
  1040. SHRUXI_SHIFT_OPCODE_X0 = 6,
  1041. SHRUXI_SHIFT_OPCODE_X1 = 6,
  1042. SHRUX_RRR_0_OPCODE_X0 = 76,
  1043. SHRUX_RRR_0_OPCODE_X1 = 40,
  1044. SHRU_RRR_0_OPCODE_X0 = 77,
  1045. SHRU_RRR_0_OPCODE_X1 = 41,
  1046. SHRU_RRR_6_OPCODE_Y0 = 3,
  1047. SHRU_RRR_6_OPCODE_Y1 = 3,
  1048. SHUFFLEBYTES_RRR_0_OPCODE_X0 = 78,
  1049. ST1_ADD_IMM8_OPCODE_X1 = 25,
  1050. ST1_OPCODE_Y2 = 0,
  1051. ST1_RRR_0_OPCODE_X1 = 42,
  1052. ST2_ADD_IMM8_OPCODE_X1 = 26,
  1053. ST2_OPCODE_Y2 = 1,
  1054. ST2_RRR_0_OPCODE_X1 = 43,
  1055. ST4_ADD_IMM8_OPCODE_X1 = 27,
  1056. ST4_OPCODE_Y2 = 2,
  1057. ST4_RRR_0_OPCODE_X1 = 44,
  1058. STNT1_ADD_IMM8_OPCODE_X1 = 28,
  1059. STNT1_RRR_0_OPCODE_X1 = 45,
  1060. STNT2_ADD_IMM8_OPCODE_X1 = 29,
  1061. STNT2_RRR_0_OPCODE_X1 = 46,
  1062. STNT4_ADD_IMM8_OPCODE_X1 = 30,
  1063. STNT4_RRR_0_OPCODE_X1 = 47,
  1064. STNT_ADD_IMM8_OPCODE_X1 = 31,
  1065. STNT_RRR_0_OPCODE_X1 = 48,
  1066. ST_ADD_IMM8_OPCODE_X1 = 32,
  1067. ST_OPCODE_Y2 = 3,
  1068. ST_RRR_0_OPCODE_X1 = 49,
  1069. SUBXSC_RRR_0_OPCODE_X0 = 79,
  1070. SUBXSC_RRR_0_OPCODE_X1 = 50,
  1071. SUBX_RRR_0_OPCODE_X0 = 80,
  1072. SUBX_RRR_0_OPCODE_X1 = 51,
  1073. SUBX_RRR_0_OPCODE_Y0 = 2,
  1074. SUBX_RRR_0_OPCODE_Y1 = 2,
  1075. SUB_RRR_0_OPCODE_X0 = 81,
  1076. SUB_RRR_0_OPCODE_X1 = 52,
  1077. SUB_RRR_0_OPCODE_Y0 = 3,
  1078. SUB_RRR_0_OPCODE_Y1 = 3,
  1079. SWINT0_UNARY_OPCODE_X1 = 34,
  1080. SWINT1_UNARY_OPCODE_X1 = 35,
  1081. SWINT2_UNARY_OPCODE_X1 = 36,
  1082. SWINT3_UNARY_OPCODE_X1 = 37,
  1083. TBLIDXB0_UNARY_OPCODE_X0 = 9,
  1084. TBLIDXB0_UNARY_OPCODE_Y0 = 9,
  1085. TBLIDXB1_UNARY_OPCODE_X0 = 10,
  1086. TBLIDXB1_UNARY_OPCODE_Y0 = 10,
  1087. TBLIDXB2_UNARY_OPCODE_X0 = 11,
  1088. TBLIDXB2_UNARY_OPCODE_Y0 = 11,
  1089. TBLIDXB3_UNARY_OPCODE_X0 = 12,
  1090. TBLIDXB3_UNARY_OPCODE_Y0 = 12,
  1091. UNARY_RRR_0_OPCODE_X0 = 82,
  1092. UNARY_RRR_0_OPCODE_X1 = 53,
  1093. UNARY_RRR_1_OPCODE_Y0 = 3,
  1094. UNARY_RRR_1_OPCODE_Y1 = 3,
  1095. V1ADDI_IMM8_OPCODE_X0 = 8,
  1096. V1ADDI_IMM8_OPCODE_X1 = 33,
  1097. V1ADDUC_RRR_0_OPCODE_X0 = 83,
  1098. V1ADDUC_RRR_0_OPCODE_X1 = 54,
  1099. V1ADD_RRR_0_OPCODE_X0 = 84,
  1100. V1ADD_RRR_0_OPCODE_X1 = 55,
  1101. V1ADIFFU_RRR_0_OPCODE_X0 = 85,
  1102. V1AVGU_RRR_0_OPCODE_X0 = 86,
  1103. V1CMPEQI_IMM8_OPCODE_X0 = 9,
  1104. V1CMPEQI_IMM8_OPCODE_X1 = 34,
  1105. V1CMPEQ_RRR_0_OPCODE_X0 = 87,
  1106. V1CMPEQ_RRR_0_OPCODE_X1 = 56,
  1107. V1CMPLES_RRR_0_OPCODE_X0 = 88,
  1108. V1CMPLES_RRR_0_OPCODE_X1 = 57,
  1109. V1CMPLEU_RRR_0_OPCODE_X0 = 89,
  1110. V1CMPLEU_RRR_0_OPCODE_X1 = 58,
  1111. V1CMPLTSI_IMM8_OPCODE_X0 = 10,
  1112. V1CMPLTSI_IMM8_OPCODE_X1 = 35,
  1113. V1CMPLTS_RRR_0_OPCODE_X0 = 90,
  1114. V1CMPLTS_RRR_0_OPCODE_X1 = 59,
  1115. V1CMPLTUI_IMM8_OPCODE_X0 = 11,
  1116. V1CMPLTUI_IMM8_OPCODE_X1 = 36,
  1117. V1CMPLTU_RRR_0_OPCODE_X0 = 91,
  1118. V1CMPLTU_RRR_0_OPCODE_X1 = 60,
  1119. V1CMPNE_RRR_0_OPCODE_X0 = 92,
  1120. V1CMPNE_RRR_0_OPCODE_X1 = 61,
  1121. V1DDOTPUA_RRR_0_OPCODE_X0 = 161,
  1122. V1DDOTPUSA_RRR_0_OPCODE_X0 = 93,
  1123. V1DDOTPUS_RRR_0_OPCODE_X0 = 94,
  1124. V1DDOTPU_RRR_0_OPCODE_X0 = 162,
  1125. V1DOTPA_RRR_0_OPCODE_X0 = 95,
  1126. V1DOTPUA_RRR_0_OPCODE_X0 = 163,
  1127. V1DOTPUSA_RRR_0_OPCODE_X0 = 96,
  1128. V1DOTPUS_RRR_0_OPCODE_X0 = 97,
  1129. V1DOTPU_RRR_0_OPCODE_X0 = 164,
  1130. V1DOTP_RRR_0_OPCODE_X0 = 98,
  1131. V1INT_H_RRR_0_OPCODE_X0 = 99,
  1132. V1INT_H_RRR_0_OPCODE_X1 = 62,
  1133. V1INT_L_RRR_0_OPCODE_X0 = 100,
  1134. V1INT_L_RRR_0_OPCODE_X1 = 63,
  1135. V1MAXUI_IMM8_OPCODE_X0 = 12,
  1136. V1MAXUI_IMM8_OPCODE_X1 = 37,
  1137. V1MAXU_RRR_0_OPCODE_X0 = 101,
  1138. V1MAXU_RRR_0_OPCODE_X1 = 64,
  1139. V1MINUI_IMM8_OPCODE_X0 = 13,
  1140. V1MINUI_IMM8_OPCODE_X1 = 38,
  1141. V1MINU_RRR_0_OPCODE_X0 = 102,
  1142. V1MINU_RRR_0_OPCODE_X1 = 65,
  1143. V1MNZ_RRR_0_OPCODE_X0 = 103,
  1144. V1MNZ_RRR_0_OPCODE_X1 = 66,
  1145. V1MULTU_RRR_0_OPCODE_X0 = 104,
  1146. V1MULUS_RRR_0_OPCODE_X0 = 105,
  1147. V1MULU_RRR_0_OPCODE_X0 = 106,
  1148. V1MZ_RRR_0_OPCODE_X0 = 107,
  1149. V1MZ_RRR_0_OPCODE_X1 = 67,
  1150. V1SADAU_RRR_0_OPCODE_X0 = 108,
  1151. V1SADU_RRR_0_OPCODE_X0 = 109,
  1152. V1SHLI_SHIFT_OPCODE_X0 = 7,
  1153. V1SHLI_SHIFT_OPCODE_X1 = 7,
  1154. V1SHL_RRR_0_OPCODE_X0 = 110,
  1155. V1SHL_RRR_0_OPCODE_X1 = 68,
  1156. V1SHRSI_SHIFT_OPCODE_X0 = 8,
  1157. V1SHRSI_SHIFT_OPCODE_X1 = 8,
  1158. V1SHRS_RRR_0_OPCODE_X0 = 111,
  1159. V1SHRS_RRR_0_OPCODE_X1 = 69,
  1160. V1SHRUI_SHIFT_OPCODE_X0 = 9,
  1161. V1SHRUI_SHIFT_OPCODE_X1 = 9,
  1162. V1SHRU_RRR_0_OPCODE_X0 = 112,
  1163. V1SHRU_RRR_0_OPCODE_X1 = 70,
  1164. V1SUBUC_RRR_0_OPCODE_X0 = 113,
  1165. V1SUBUC_RRR_0_OPCODE_X1 = 71,
  1166. V1SUB_RRR_0_OPCODE_X0 = 114,
  1167. V1SUB_RRR_0_OPCODE_X1 = 72,
  1168. V2ADDI_IMM8_OPCODE_X0 = 14,
  1169. V2ADDI_IMM8_OPCODE_X1 = 39,
  1170. V2ADDSC_RRR_0_OPCODE_X0 = 115,
  1171. V2ADDSC_RRR_0_OPCODE_X1 = 73,
  1172. V2ADD_RRR_0_OPCODE_X0 = 116,
  1173. V2ADD_RRR_0_OPCODE_X1 = 74,
  1174. V2ADIFFS_RRR_0_OPCODE_X0 = 117,
  1175. V2AVGS_RRR_0_OPCODE_X0 = 118,
  1176. V2CMPEQI_IMM8_OPCODE_X0 = 15,
  1177. V2CMPEQI_IMM8_OPCODE_X1 = 40,
  1178. V2CMPEQ_RRR_0_OPCODE_X0 = 119,
  1179. V2CMPEQ_RRR_0_OPCODE_X1 = 75,
  1180. V2CMPLES_RRR_0_OPCODE_X0 = 120,
  1181. V2CMPLES_RRR_0_OPCODE_X1 = 76,
  1182. V2CMPLEU_RRR_0_OPCODE_X0 = 121,
  1183. V2CMPLEU_RRR_0_OPCODE_X1 = 77,
  1184. V2CMPLTSI_IMM8_OPCODE_X0 = 16,
  1185. V2CMPLTSI_IMM8_OPCODE_X1 = 41,
  1186. V2CMPLTS_RRR_0_OPCODE_X0 = 122,
  1187. V2CMPLTS_RRR_0_OPCODE_X1 = 78,
  1188. V2CMPLTUI_IMM8_OPCODE_X0 = 17,
  1189. V2CMPLTUI_IMM8_OPCODE_X1 = 42,
  1190. V2CMPLTU_RRR_0_OPCODE_X0 = 123,
  1191. V2CMPLTU_RRR_0_OPCODE_X1 = 79,
  1192. V2CMPNE_RRR_0_OPCODE_X0 = 124,
  1193. V2CMPNE_RRR_0_OPCODE_X1 = 80,
  1194. V2DOTPA_RRR_0_OPCODE_X0 = 125,
  1195. V2DOTP_RRR_0_OPCODE_X0 = 126,
  1196. V2INT_H_RRR_0_OPCODE_X0 = 127,
  1197. V2INT_H_RRR_0_OPCODE_X1 = 81,
  1198. V2INT_L_RRR_0_OPCODE_X0 = 128,
  1199. V2INT_L_RRR_0_OPCODE_X1 = 82,
  1200. V2MAXSI_IMM8_OPCODE_X0 = 18,
  1201. V2MAXSI_IMM8_OPCODE_X1 = 43,
  1202. V2MAXS_RRR_0_OPCODE_X0 = 129,
  1203. V2MAXS_RRR_0_OPCODE_X1 = 83,
  1204. V2MINSI_IMM8_OPCODE_X0 = 19,
  1205. V2MINSI_IMM8_OPCODE_X1 = 44,
  1206. V2MINS_RRR_0_OPCODE_X0 = 130,
  1207. V2MINS_RRR_0_OPCODE_X1 = 84,
  1208. V2MNZ_RRR_0_OPCODE_X0 = 131,
  1209. V2MNZ_RRR_0_OPCODE_X1 = 85,
  1210. V2MULFSC_RRR_0_OPCODE_X0 = 132,
  1211. V2MULS_RRR_0_OPCODE_X0 = 133,
  1212. V2MULTS_RRR_0_OPCODE_X0 = 134,
  1213. V2MZ_RRR_0_OPCODE_X0 = 135,
  1214. V2MZ_RRR_0_OPCODE_X1 = 86,
  1215. V2PACKH_RRR_0_OPCODE_X0 = 136,
  1216. V2PACKH_RRR_0_OPCODE_X1 = 87,
  1217. V2PACKL_RRR_0_OPCODE_X0 = 137,
  1218. V2PACKL_RRR_0_OPCODE_X1 = 88,
  1219. V2PACKUC_RRR_0_OPCODE_X0 = 138,
  1220. V2PACKUC_RRR_0_OPCODE_X1 = 89,
  1221. V2SADAS_RRR_0_OPCODE_X0 = 139,
  1222. V2SADAU_RRR_0_OPCODE_X0 = 140,
  1223. V2SADS_RRR_0_OPCODE_X0 = 141,
  1224. V2SADU_RRR_0_OPCODE_X0 = 142,
  1225. V2SHLI_SHIFT_OPCODE_X0 = 10,
  1226. V2SHLI_SHIFT_OPCODE_X1 = 10,
  1227. V2SHLSC_RRR_0_OPCODE_X0 = 143,
  1228. V2SHLSC_RRR_0_OPCODE_X1 = 90,
  1229. V2SHL_RRR_0_OPCODE_X0 = 144,
  1230. V2SHL_RRR_0_OPCODE_X1 = 91,
  1231. V2SHRSI_SHIFT_OPCODE_X0 = 11,
  1232. V2SHRSI_SHIFT_OPCODE_X1 = 11,
  1233. V2SHRS_RRR_0_OPCODE_X0 = 145,
  1234. V2SHRS_RRR_0_OPCODE_X1 = 92,
  1235. V2SHRUI_SHIFT_OPCODE_X0 = 12,
  1236. V2SHRUI_SHIFT_OPCODE_X1 = 12,
  1237. V2SHRU_RRR_0_OPCODE_X0 = 146,
  1238. V2SHRU_RRR_0_OPCODE_X1 = 93,
  1239. V2SUBSC_RRR_0_OPCODE_X0 = 147,
  1240. V2SUBSC_RRR_0_OPCODE_X1 = 94,
  1241. V2SUB_RRR_0_OPCODE_X0 = 148,
  1242. V2SUB_RRR_0_OPCODE_X1 = 95,
  1243. V4ADDSC_RRR_0_OPCODE_X0 = 149,
  1244. V4ADDSC_RRR_0_OPCODE_X1 = 96,
  1245. V4ADD_RRR_0_OPCODE_X0 = 150,
  1246. V4ADD_RRR_0_OPCODE_X1 = 97,
  1247. V4INT_H_RRR_0_OPCODE_X0 = 151,
  1248. V4INT_H_RRR_0_OPCODE_X1 = 98,
  1249. V4INT_L_RRR_0_OPCODE_X0 = 152,
  1250. V4INT_L_RRR_0_OPCODE_X1 = 99,
  1251. V4PACKSC_RRR_0_OPCODE_X0 = 153,
  1252. V4PACKSC_RRR_0_OPCODE_X1 = 100,
  1253. V4SHLSC_RRR_0_OPCODE_X0 = 154,
  1254. V4SHLSC_RRR_0_OPCODE_X1 = 101,
  1255. V4SHL_RRR_0_OPCODE_X0 = 155,
  1256. V4SHL_RRR_0_OPCODE_X1 = 102,
  1257. V4SHRS_RRR_0_OPCODE_X0 = 156,
  1258. V4SHRS_RRR_0_OPCODE_X1 = 103,
  1259. V4SHRU_RRR_0_OPCODE_X0 = 157,
  1260. V4SHRU_RRR_0_OPCODE_X1 = 104,
  1261. V4SUBSC_RRR_0_OPCODE_X0 = 158,
  1262. V4SUBSC_RRR_0_OPCODE_X1 = 105,
  1263. V4SUB_RRR_0_OPCODE_X0 = 159,
  1264. V4SUB_RRR_0_OPCODE_X1 = 106,
  1265. WH64_UNARY_OPCODE_X1 = 38,
  1266. XORI_IMM8_OPCODE_X0 = 20,
  1267. XORI_IMM8_OPCODE_X1 = 45,
  1268. XOR_RRR_0_OPCODE_X0 = 160,
  1269. XOR_RRR_0_OPCODE_X1 = 107,
  1270. XOR_RRR_5_OPCODE_Y0 = 3,
  1271. XOR_RRR_5_OPCODE_Y1 = 3
  1272. };
  1273. #endif /* __ASSEMBLER__ */
  1274. #endif /* __ARCH_OPCODE_H__ */