/*
 * Serpent Cipher 8-way parallel algorithm (x86_64/AVX)
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Based on arch/x86/crypto/serpent-sse2-x86_64-asm_64.S by
 *  Copyright (C) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */
.file "serpent-avx-x86_64-asm_64.S"
.text

#define CTX %rdi
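/* CTX is the context argument (%rdi, the first argument in the SysV
 * ABI): a pointer to the expanded key schedule.  The kernel's C glue
 * passes a struct serpent_ctx here (name assumed; the type is not
 * defined in this file). */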
/**********************************************************************
  8-way AVX serpent
 **********************************************************************/
#define RA1 %xmm0
#define RB1 %xmm1
#define RC1 %xmm2
#define RD1 %xmm3
#define RE1 %xmm4

#define tp  %xmm5

#define RA2 %xmm6
#define RB2 %xmm7
#define RC2 %xmm8
#define RD2 %xmm9
#define RE2 %xmm10

#define RNOT %xmm11

#define RK0 %xmm12
#define RK1 %xmm13
#define RK2 %xmm14
#define RK3 %xmm15
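/*
 * Eight 16-byte blocks are processed as two groups of four.  RA1..RE1
 * hold group 1 and RA2..RE2 hold group 2; after read_blocks() each
 * register holds the same 32-bit word position from four different
 * blocks, so every vector instruction below works on four blocks at
 * once.  tp is scratch for the S-box macros, RNOT is set to all-ones
 * at function entry (vpcmpeqd) so that NOT can be done as a vpxor, and
 * RK0..RK3 hold the four broadcast subkey words of the current round.
 *
 * The S-box macros that follow are bitsliced boolean-function versions
 * of the Serpent S-boxes, each split into a _1/_2 half so that key
 * loads can be interleaved between them (see the SP macro below).
 */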
#define S0_1(x0, x1, x2, x3, x4) \
	vpor x0, x3, tp; \
	vpxor x3, x0, x0; \
	vpxor x2, x3, x4; \
	vpxor RNOT, x4, x4; \
	vpxor x1, tp, x3; \
	vpand x0, x1, x1; \
	vpxor x4, x1, x1; \
	vpxor x0, x2, x2;
#define S0_2(x0, x1, x2, x3, x4) \
	vpxor x3, x0, x0; \
	vpor x0, x4, x4; \
	vpxor x2, x0, x0; \
	vpand x1, x2, x2; \
	vpxor x2, x3, x3; \
	vpxor RNOT, x1, x1; \
	vpxor x4, x2, x2; \
	vpxor x2, x1, x1;

#define S1_1(x0, x1, x2, x3, x4) \
	vpxor x0, x1, tp; \
	vpxor x3, x0, x0; \
	vpxor RNOT, x3, x3; \
	vpand tp, x1, x4; \
	vpor tp, x0, x0; \
	vpxor x2, x3, x3; \
	vpxor x3, x0, x0; \
	vpxor x3, tp, x1;
#define S1_2(x0, x1, x2, x3, x4) \
	vpxor x4, x3, x3; \
	vpor x4, x1, x1; \
	vpxor x2, x4, x4; \
	vpand x0, x2, x2; \
	vpxor x1, x2, x2; \
	vpor x0, x1, x1; \
	vpxor RNOT, x0, x0; \
	vpxor x2, x0, x0; \
	vpxor x1, x4, x4;

#define S2_1(x0, x1, x2, x3, x4) \
	vpxor RNOT, x3, x3; \
	vpxor x0, x1, x1; \
	vpand x2, x0, tp; \
	vpxor x3, tp, tp; \
	vpor x0, x3, x3; \
	vpxor x1, x2, x2; \
	vpxor x1, x3, x3; \
	vpand tp, x1, x1;
#define S2_2(x0, x1, x2, x3, x4) \
	vpxor x2, tp, tp; \
	vpand x3, x2, x2; \
	vpor x1, x3, x3; \
	vpxor RNOT, tp, tp; \
	vpxor tp, x3, x3; \
	vpxor tp, x0, x4; \
	vpxor x2, tp, x0; \
	vpor x2, x1, x1;

#define S3_1(x0, x1, x2, x3, x4) \
	vpxor x3, x1, tp; \
	vpor x0, x3, x3; \
	vpand x0, x1, x4; \
	vpxor x2, x0, x0; \
	vpxor tp, x2, x2; \
	vpand x3, tp, x1; \
	vpxor x3, x2, x2; \
	vpor x4, x0, x0; \
	vpxor x3, x4, x4;
#define S3_2(x0, x1, x2, x3, x4) \
	vpxor x0, x1, x1; \
	vpand x3, x0, x0; \
	vpand x4, x3, x3; \
	vpxor x2, x3, x3; \
	vpor x1, x4, x4; \
	vpand x1, x2, x2; \
	vpxor x3, x4, x4; \
	vpxor x3, x0, x0; \
	vpxor x2, x3, x3;

#define S4_1(x0, x1, x2, x3, x4) \
	vpand x0, x3, tp; \
	vpxor x3, x0, x0; \
	vpxor x2, tp, tp; \
	vpor x3, x2, x2; \
	vpxor x1, x0, x0; \
	vpxor tp, x3, x4; \
	vpor x0, x2, x2; \
	vpxor x1, x2, x2;
#define S4_2(x0, x1, x2, x3, x4) \
	vpand x0, x1, x1; \
	vpxor x4, x1, x1; \
	vpand x2, x4, x4; \
	vpxor tp, x2, x2; \
	vpxor x0, x4, x4; \
	vpor x1, tp, x3; \
	vpxor RNOT, x1, x1; \
	vpxor x0, x3, x3;

#define S5_1(x0, x1, x2, x3, x4) \
	vpor x0, x1, tp; \
	vpxor tp, x2, x2; \
	vpxor RNOT, x3, x3; \
	vpxor x0, x1, x4; \
	vpxor x2, x0, x0; \
	vpand x4, tp, x1; \
	vpor x3, x4, x4; \
	vpxor x0, x4, x4;
#define S5_2(x0, x1, x2, x3, x4) \
	vpand x3, x0, x0; \
	vpxor x3, x1, x1; \
	vpxor x2, x3, x3; \
	vpxor x1, x0, x0; \
	vpand x4, x2, x2; \
	vpxor x2, x1, x1; \
	vpand x0, x2, x2; \
	vpxor x2, x3, x3;

#define S6_1(x0, x1, x2, x3, x4) \
	vpxor x0, x3, x3; \
	vpxor x2, x1, tp; \
	vpxor x0, x2, x2; \
	vpand x3, x0, x0; \
	vpor x3, tp, tp; \
	vpxor RNOT, x1, x4; \
	vpxor tp, x0, x0; \
	vpxor x2, tp, x1;
#define S6_2(x0, x1, x2, x3, x4) \
	vpxor x4, x3, x3; \
	vpxor x0, x4, x4; \
	vpand x0, x2, x2; \
	vpxor x1, x4, x4; \
	vpxor x3, x2, x2; \
	vpand x1, x3, x3; \
	vpxor x0, x3, x3; \
	vpxor x2, x1, x1;

#define S7_1(x0, x1, x2, x3, x4) \
	vpxor RNOT, x1, tp; \
	vpxor RNOT, x0, x0; \
	vpand x2, tp, x1; \
	vpxor x3, x1, x1; \
	vpor tp, x3, x3; \
	vpxor x2, tp, x4; \
	vpxor x3, x2, x2; \
	vpxor x0, x3, x3; \
	vpor x1, x0, x0;
#define S7_2(x0, x1, x2, x3, x4) \
	vpand x0, x2, x2; \
	vpxor x4, x0, x0; \
	vpxor x3, x4, x4; \
	vpand x0, x3, x3; \
	vpxor x1, x4, x4; \
	vpxor x4, x2, x2; \
	vpxor x1, x3, x3; \
	vpor x0, x4, x4; \
	vpxor x1, x4, x4;
#define SI0_1(x0, x1, x2, x3, x4) \
	vpxor x0, x1, x1; \
	vpor x1, x3, tp; \
	vpxor x1, x3, x4; \
	vpxor RNOT, x0, x0; \
	vpxor tp, x2, x2; \
	vpxor x0, tp, x3; \
	vpand x1, x0, x0; \
	vpxor x2, x0, x0;
#define SI0_2(x0, x1, x2, x3, x4) \
	vpand x3, x2, x2; \
	vpxor x4, x3, x3; \
	vpxor x3, x2, x2; \
	vpxor x3, x1, x1; \
	vpand x0, x3, x3; \
	vpxor x0, x1, x1; \
	vpxor x2, x0, x0; \
	vpxor x3, x4, x4;

#define SI1_1(x0, x1, x2, x3, x4) \
	vpxor x3, x1, x1; \
	vpxor x2, x0, tp; \
	vpxor RNOT, x2, x2; \
	vpor x1, x0, x4; \
	vpxor x3, x4, x4; \
	vpand x1, x3, x3; \
	vpxor x2, x1, x1; \
	vpand x4, x2, x2;
#define SI1_2(x0, x1, x2, x3, x4) \
	vpxor x1, x4, x4; \
	vpor x3, x1, x1; \
	vpxor tp, x3, x3; \
	vpxor tp, x2, x2; \
	vpor x4, tp, x0; \
	vpxor x4, x2, x2; \
	vpxor x0, x1, x1; \
	vpxor x1, x4, x4;

#define SI2_1(x0, x1, x2, x3, x4) \
	vpxor x1, x2, x2; \
	vpxor RNOT, x3, tp; \
	vpor x2, tp, tp; \
	vpxor x3, x2, x2; \
	vpxor x0, x3, x4; \
	vpxor x1, tp, x3; \
	vpor x2, x1, x1; \
	vpxor x0, x2, x2;
#define SI2_2(x0, x1, x2, x3, x4) \
	vpxor x4, x1, x1; \
	vpor x3, x4, x4; \
	vpxor x3, x2, x2; \
	vpxor x2, x4, x4; \
	vpand x1, x2, x2; \
	vpxor x3, x2, x2; \
	vpxor x4, x3, x3; \
	vpxor x0, x4, x4;

#define SI3_1(x0, x1, x2, x3, x4) \
	vpxor x1, x2, x2; \
	vpand x2, x1, tp; \
	vpxor x0, tp, tp; \
	vpor x1, x0, x0; \
	vpxor x3, x1, x4; \
	vpxor x3, x0, x0; \
	vpor tp, x3, x3; \
	vpxor x2, tp, x1;
#define SI3_2(x0, x1, x2, x3, x4) \
	vpxor x3, x1, x1; \
	vpxor x2, x0, x0; \
	vpxor x3, x2, x2; \
	vpand x1, x3, x3; \
	vpxor x0, x1, x1; \
	vpand x2, x0, x0; \
	vpxor x3, x4, x4; \
	vpxor x0, x3, x3; \
	vpxor x1, x0, x0;

#define SI4_1(x0, x1, x2, x3, x4) \
	vpxor x3, x2, x2; \
	vpand x1, x0, tp; \
	vpxor x2, tp, tp; \
	vpor x3, x2, x2; \
	vpxor RNOT, x0, x4; \
	vpxor tp, x1, x1; \
	vpxor x2, tp, x0; \
	vpand x4, x2, x2;
#define SI4_2(x0, x1, x2, x3, x4) \
	vpxor x0, x2, x2; \
	vpor x4, x0, x0; \
	vpxor x3, x0, x0; \
	vpand x2, x3, x3; \
	vpxor x3, x4, x4; \
	vpxor x1, x3, x3; \
	vpand x0, x1, x1; \
	vpxor x1, x4, x4; \
	vpxor x3, x0, x0;

#define SI5_1(x0, x1, x2, x3, x4) \
	vpor x2, x1, tp; \
	vpxor x1, x2, x2; \
	vpxor x3, tp, tp; \
	vpand x1, x3, x3; \
	vpxor x3, x2, x2; \
	vpor x0, x3, x3; \
	vpxor RNOT, x0, x0; \
	vpxor x2, x3, x3; \
	vpor x0, x2, x2;
#define SI5_2(x0, x1, x2, x3, x4) \
	vpxor tp, x1, x4; \
	vpxor x4, x2, x2; \
	vpand x0, x4, x4; \
	vpxor tp, x0, x0; \
	vpxor x3, tp, x1; \
	vpand x2, x0, x0; \
	vpxor x3, x2, x2; \
	vpxor x2, x0, x0; \
	vpxor x4, x2, x2; \
	vpxor x3, x4, x4;

#define SI6_1(x0, x1, x2, x3, x4) \
	vpxor x2, x0, x0; \
	vpand x3, x0, tp; \
	vpxor x3, x2, x2; \
	vpxor x2, tp, tp; \
	vpxor x1, x3, x3; \
	vpor x0, x2, x2; \
	vpxor x3, x2, x2; \
	vpand tp, x3, x3;
#define SI6_2(x0, x1, x2, x3, x4) \
	vpxor RNOT, tp, tp; \
	vpxor x1, x3, x3; \
	vpand x2, x1, x1; \
	vpxor tp, x0, x4; \
	vpxor x4, x3, x3; \
	vpxor x2, x4, x4; \
	vpxor x1, tp, x0; \
	vpxor x0, x2, x2;

#define SI7_1(x0, x1, x2, x3, x4) \
	vpand x0, x3, tp; \
	vpxor x2, x0, x0; \
	vpor x3, x2, x2; \
	vpxor x1, x3, x4; \
	vpxor RNOT, x0, x0; \
	vpor tp, x1, x1; \
	vpxor x0, x4, x4; \
	vpand x2, x0, x0; \
	vpxor x1, x0, x0;
#define SI7_2(x0, x1, x2, x3, x4) \
	vpand x2, x1, x1; \
	vpxor x2, tp, x3; \
	vpxor x3, x4, x4; \
	vpand x3, x2, x2; \
	vpor x0, x3, x3; \
	vpxor x4, x1, x1; \
	vpxor x4, x3, x3; \
	vpand x0, x4, x4; \
	vpxor x2, x4, x4;
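/*
 * get_key: load one 32-bit subkey word and broadcast it to all four
 * lanes of t.  The expanded key is laid out as 33 rounds of 4 words of
 * 32 bits each, so word j of round i lives at byte offset
 * (4*i + j)*4 from CTX.  For example, get_key(2, 3, RK3) loads the
 * word at byte offset (4*2 + 3)*4 = 44, i.e. expanded-key word 11.
 */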
#define get_key(i, j, t) \
	vbroadcastss (4*(i)+(j))*4(CTX), t;
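/* K2: XOR the four subkey words of round i into both block groups
 * (the ## 1 / ## 2 token pasting selects group 1 or group 2
 * registers). */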
#define K2(x0, x1, x2, x3, x4, i) \
	get_key(i, 0, RK0); \
	get_key(i, 1, RK1); \
	get_key(i, 2, RK2); \
	get_key(i, 3, RK3); \
	vpxor RK0, x0 ## 1, x0 ## 1; \
	vpxor RK1, x1 ## 1, x1 ## 1; \
	vpxor RK2, x2 ## 1, x2 ## 1; \
	vpxor RK3, x3 ## 1, x3 ## 1; \
	vpxor RK0, x0 ## 2, x0 ## 2; \
	vpxor RK1, x1 ## 2, x1 ## 2; \
	vpxor RK2, x2 ## 2, x2 ## 2; \
	vpxor RK3, x3 ## 2, x3 ## 2;
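/*
 * LK2: Serpent's linear transformation followed by the key mixing of
 * round i, applied to both block groups.  AVX has no vector-rotate
 * instruction, so each 32-bit rotate (by 13, 3, 1, 7 and finally 5 and
 * 22 bits) is built from a shift-left/shift-right/vpor triple using
 * x4 as scratch.  The get_key() loads are interleaved with the
 * arithmetic so the broadcasts can execute in parallel with it.
 */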
#define LK2(x0, x1, x2, x3, x4, i) \
	vpslld $13, x0 ## 1, x4 ## 1; \
	vpsrld $(32 - 13), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x0 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $3, x2 ## 1, x4 ## 1; \
	vpsrld $(32 - 3), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpxor x2 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $13, x0 ## 2, x4 ## 2; \
	vpsrld $(32 - 13), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x0 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $3, x2 ## 2, x4 ## 2; \
	vpsrld $(32 - 3), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x2 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $1, x1 ## 1, x4 ## 1; \
	vpsrld $(32 - 1), x1 ## 1, x1 ## 1; \
	vpor x4 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $3, x0 ## 1, x4 ## 1; \
	vpxor x2 ## 1, x3 ## 1, x3 ## 1; \
	vpxor x4 ## 1, x3 ## 1, x3 ## 1; \
	get_key(i, 1, RK1); \
	vpslld $1, x1 ## 2, x4 ## 2; \
	vpsrld $(32 - 1), x1 ## 2, x1 ## 2; \
	vpor x4 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $3, x0 ## 2, x4 ## 2; \
	vpxor x2 ## 2, x3 ## 2, x3 ## 2; \
	vpxor x4 ## 2, x3 ## 2, x3 ## 2; \
	get_key(i, 3, RK3); \
	vpslld $7, x3 ## 1, x4 ## 1; \
	vpsrld $(32 - 7), x3 ## 1, x3 ## 1; \
	vpor x4 ## 1, x3 ## 1, x3 ## 1; \
	vpslld $7, x1 ## 1, x4 ## 1; \
	vpxor x1 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x3 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x3 ## 1, x2 ## 1, x2 ## 1; \
	vpxor x4 ## 1, x2 ## 1, x2 ## 1; \
	get_key(i, 0, RK0); \
	vpslld $7, x3 ## 2, x4 ## 2; \
	vpsrld $(32 - 7), x3 ## 2, x3 ## 2; \
	vpor x4 ## 2, x3 ## 2, x3 ## 2; \
	vpslld $7, x1 ## 2, x4 ## 2; \
	vpxor x1 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x3 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x3 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x4 ## 2, x2 ## 2, x2 ## 2; \
	get_key(i, 2, RK2); \
	vpxor RK1, x1 ## 1, x1 ## 1; \
	vpxor RK3, x3 ## 1, x3 ## 1; \
	vpslld $5, x0 ## 1, x4 ## 1; \
	vpsrld $(32 - 5), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpslld $22, x2 ## 1, x4 ## 1; \
	vpsrld $(32 - 22), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpxor RK0, x0 ## 1, x0 ## 1; \
	vpxor RK2, x2 ## 1, x2 ## 1; \
	vpxor RK1, x1 ## 2, x1 ## 2; \
	vpxor RK3, x3 ## 2, x3 ## 2; \
	vpslld $5, x0 ## 2, x4 ## 2; \
	vpsrld $(32 - 5), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpslld $22, x2 ## 2, x4 ## 2; \
	vpsrld $(32 - 22), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpxor RK0, x0 ## 2, x0 ## 2; \
	vpxor RK2, x2 ## 2, x2 ## 2;
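/*
 * KL2: the inverse of LK2, used for decryption: first undo the key
 * mixing of round i (the RK0..RK3 XORs, with the subkeys preloaded by
 * the preceding SP), then apply the inverse linear transformation,
 * i.e. the same rotates in the opposite direction and reverse order
 * (5 and 22 first, 13 and 3 last).
 */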
#define KL2(x0, x1, x2, x3, x4, i) \
	vpxor RK0, x0 ## 1, x0 ## 1; \
	vpxor RK2, x2 ## 1, x2 ## 1; \
	vpsrld $5, x0 ## 1, x4 ## 1; \
	vpslld $(32 - 5), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpxor RK3, x3 ## 1, x3 ## 1; \
	vpxor RK1, x1 ## 1, x1 ## 1; \
	vpsrld $22, x2 ## 1, x4 ## 1; \
	vpslld $(32 - 22), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpxor x3 ## 1, x2 ## 1, x2 ## 1; \
	vpxor RK0, x0 ## 2, x0 ## 2; \
	vpxor RK2, x2 ## 2, x2 ## 2; \
	vpsrld $5, x0 ## 2, x4 ## 2; \
	vpslld $(32 - 5), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpxor RK3, x3 ## 2, x3 ## 2; \
	vpxor RK1, x1 ## 2, x1 ## 2; \
	vpsrld $22, x2 ## 2, x4 ## 2; \
	vpslld $(32 - 22), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x3 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x3 ## 1, x0 ## 1, x0 ## 1; \
	vpslld $7, x1 ## 1, x4 ## 1; \
	vpxor x1 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpsrld $1, x1 ## 1, x4 ## 1; \
	vpslld $(32 - 1), x1 ## 1, x1 ## 1; \
	vpor x4 ## 1, x1 ## 1, x1 ## 1; \
	vpxor x3 ## 2, x0 ## 2, x0 ## 2; \
	vpslld $7, x1 ## 2, x4 ## 2; \
	vpxor x1 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpsrld $1, x1 ## 2, x4 ## 2; \
	vpslld $(32 - 1), x1 ## 2, x1 ## 2; \
	vpor x4 ## 2, x1 ## 2, x1 ## 2; \
	vpsrld $7, x3 ## 1, x4 ## 1; \
	vpslld $(32 - 7), x3 ## 1, x3 ## 1; \
	vpor x4 ## 1, x3 ## 1, x3 ## 1; \
	vpxor x0 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $3, x0 ## 1, x4 ## 1; \
	vpxor x4 ## 1, x3 ## 1, x3 ## 1; \
	vpsrld $7, x3 ## 2, x4 ## 2; \
	vpslld $(32 - 7), x3 ## 2, x3 ## 2; \
	vpor x4 ## 2, x3 ## 2, x3 ## 2; \
	vpxor x0 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $3, x0 ## 2, x4 ## 2; \
	vpxor x4 ## 2, x3 ## 2, x3 ## 2; \
	vpsrld $13, x0 ## 1, x4 ## 1; \
	vpslld $(32 - 13), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x2 ## 1, x1 ## 1, x1 ## 1; \
	vpxor x2 ## 1, x3 ## 1, x3 ## 1; \
	vpsrld $3, x2 ## 1, x4 ## 1; \
	vpslld $(32 - 3), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpsrld $13, x0 ## 2, x4 ## 2; \
	vpslld $(32 - 13), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x2 ## 2, x1 ## 2, x1 ## 2; \
	vpxor x2 ## 2, x3 ## 2, x3 ## 2; \
	vpsrld $3, x2 ## 2, x4 ## 2; \
	vpslld $(32 - 3), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2;
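/*
 * S: apply one S-box to both block groups.  SP does the same but also
 * preloads the four subkey words of round i between the S-box halves,
 * so that the KL2 that follows it finds them already broadcast in
 * RK0..RK3.
 */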
#define S(SBOX, x0, x1, x2, x3, x4) \
	SBOX ## _1(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	SBOX ## _2(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	SBOX ## _1(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2); \
	SBOX ## _2(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2);

#define SP(SBOX, x0, x1, x2, x3, x4, i) \
	get_key(i, 0, RK0); \
	SBOX ## _1(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	get_key(i, 2, RK2); \
	SBOX ## _2(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	get_key(i, 3, RK3); \
	SBOX ## _1(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2); \
	get_key(i, 1, RK1); \
	SBOX ## _2(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2);
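/*
 * transpose_4x4: 4x4 transpose of 32-bit words across four xmm
 * registers, built from dword and qword unpacks.  On input, x0..x3
 * each hold one complete 16-byte block; on output, x0 holds word 0 of
 * all four blocks, x1 word 1, and so on, which is the word-sliced
 * layout the S-box and linear-transformation macros operate on.
 */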
#define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \
	vpunpckldq x1, x0, t0; \
	vpunpckhdq x1, x0, t2; \
	vpunpckldq x3, x2, t1; \
	vpunpckhdq x3, x2, x3; \
	\
	vpunpcklqdq t1, t0, x0; \
	vpunpckhqdq t1, t0, x1; \
	vpunpcklqdq x3, t2, x2; \
	vpunpckhqdq x3, t2, x3;
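/*
 * read_blocks loads four 16-byte blocks (64 bytes) and transposes them
 * into word-sliced form; write_blocks transposes back and stores.
 * xor_blocks is the variant taken when the caller sets the xor flag of
 * __serpent_enc_blk_8way_avx: it XORs the result into the existing
 * destination contents instead of overwriting them.
 */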
#define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \
	vmovdqu (0*4*4)(in), x0; \
	vmovdqu (1*4*4)(in), x1; \
	vmovdqu (2*4*4)(in), x2; \
	vmovdqu (3*4*4)(in), x3; \
	\
	transpose_4x4(x0, x1, x2, x3, t0, t1, t2)

#define write_blocks(out, x0, x1, x2, x3, t0, t1, t2) \
	transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \
	\
	vmovdqu x0, (0*4*4)(out); \
	vmovdqu x1, (1*4*4)(out); \
	vmovdqu x2, (2*4*4)(out); \
	vmovdqu x3, (3*4*4)(out);

#define xor_blocks(out, x0, x1, x2, x3, t0, t1, t2) \
	transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \
	\
	vpxor (0*4*4)(out), x0, x0; \
	vmovdqu x0, (0*4*4)(out); \
	vpxor (1*4*4)(out), x1, x1; \
	vmovdqu x1, (1*4*4)(out); \
	vpxor (2*4*4)(out), x2, x2; \
	vmovdqu x2, (2*4*4)(out); \
	vpxor (3*4*4)(out), x3, x3; \
	vmovdqu x3, (3*4*4)(out);
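/*
 * Encrypt eight blocks.  From C this is called roughly as follows (a
 * sketch of the prototype assumed from the kernel's serpent-avx glue;
 * it is not defined in this file):
 *
 *	asmlinkage void __serpent_enc_blk_8way_avx(struct serpent_ctx *ctx,
 *						   u8 *dst, const u8 *src,
 *						   bool xor);
 *
 * All 32 rounds are unrolled below; the register names passed to
 * S()/LK2() rotate from round to round so the permuted outputs of each
 * round never have to be moved between registers.
 */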
.align 8
.global __serpent_enc_blk_8way_avx
.type   __serpent_enc_blk_8way_avx,@function;

__serpent_enc_blk_8way_avx:
	/* input:
	 *	%rdi: ctx, CTX
	 *	%rsi: dst
	 *	%rdx: src
	 *	%rcx: bool, if true: xor output
	 */

	vpcmpeqd RNOT, RNOT, RNOT;

	leaq (4*4*4)(%rdx), %rax;
	read_blocks(%rdx, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);

	K2(RA, RB, RC, RD, RE, 0);
	S(S0, RA, RB, RC, RD, RE);	LK2(RC, RB, RD, RA, RE, 1);
	S(S1, RC, RB, RD, RA, RE);	LK2(RE, RD, RA, RC, RB, 2);
	S(S2, RE, RD, RA, RC, RB);	LK2(RB, RD, RE, RC, RA, 3);
	S(S3, RB, RD, RE, RC, RA);	LK2(RC, RA, RD, RB, RE, 4);
	S(S4, RC, RA, RD, RB, RE);	LK2(RA, RD, RB, RE, RC, 5);
	S(S5, RA, RD, RB, RE, RC);	LK2(RC, RA, RD, RE, RB, 6);
	S(S6, RC, RA, RD, RE, RB);	LK2(RD, RB, RA, RE, RC, 7);
	S(S7, RD, RB, RA, RE, RC);	LK2(RC, RA, RE, RD, RB, 8);
	S(S0, RC, RA, RE, RD, RB);	LK2(RE, RA, RD, RC, RB, 9);
	S(S1, RE, RA, RD, RC, RB);	LK2(RB, RD, RC, RE, RA, 10);
	S(S2, RB, RD, RC, RE, RA);	LK2(RA, RD, RB, RE, RC, 11);
	S(S3, RA, RD, RB, RE, RC);	LK2(RE, RC, RD, RA, RB, 12);
	S(S4, RE, RC, RD, RA, RB);	LK2(RC, RD, RA, RB, RE, 13);
	S(S5, RC, RD, RA, RB, RE);	LK2(RE, RC, RD, RB, RA, 14);
	S(S6, RE, RC, RD, RB, RA);	LK2(RD, RA, RC, RB, RE, 15);
	S(S7, RD, RA, RC, RB, RE);	LK2(RE, RC, RB, RD, RA, 16);
	S(S0, RE, RC, RB, RD, RA);	LK2(RB, RC, RD, RE, RA, 17);
	S(S1, RB, RC, RD, RE, RA);	LK2(RA, RD, RE, RB, RC, 18);
	S(S2, RA, RD, RE, RB, RC);	LK2(RC, RD, RA, RB, RE, 19);
	S(S3, RC, RD, RA, RB, RE);	LK2(RB, RE, RD, RC, RA, 20);
	S(S4, RB, RE, RD, RC, RA);	LK2(RE, RD, RC, RA, RB, 21);
	S(S5, RE, RD, RC, RA, RB);	LK2(RB, RE, RD, RA, RC, 22);
	S(S6, RB, RE, RD, RA, RC);	LK2(RD, RC, RE, RA, RB, 23);
	S(S7, RD, RC, RE, RA, RB);	LK2(RB, RE, RA, RD, RC, 24);
	S(S0, RB, RE, RA, RD, RC);	LK2(RA, RE, RD, RB, RC, 25);
	S(S1, RA, RE, RD, RB, RC);	LK2(RC, RD, RB, RA, RE, 26);
	S(S2, RC, RD, RB, RA, RE);	LK2(RE, RD, RC, RA, RB, 27);
	S(S3, RE, RD, RC, RA, RB);	LK2(RA, RB, RD, RE, RC, 28);
	S(S4, RA, RB, RD, RE, RC);	LK2(RB, RD, RE, RC, RA, 29);
	S(S5, RB, RD, RE, RC, RA);	LK2(RA, RB, RD, RC, RE, 30);
	S(S6, RA, RB, RD, RC, RE);	LK2(RD, RE, RB, RC, RA, 31);
	S(S7, RD, RE, RB, RC, RA);	K2(RA, RB, RC, RD, RE, 32);

	leaq (4*4*4)(%rsi), %rax;

	testb %cl, %cl;
	jnz __enc_xor8;

	write_blocks(%rsi, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	write_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);

	ret;

__enc_xor8:
	xor_blocks(%rsi, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	xor_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);

	ret;
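/*
 * Decrypt eight blocks.  Assumed C-side prototype (a sketch based on
 * the kernel's serpent-avx glue; not defined in this file):
 *
 *	asmlinkage void serpent_dec_blk_8way_avx(struct serpent_ctx *ctx,
 *						 u8 *dst, const u8 *src);
 *
 * Decryption walks the key schedule backwards: K2 with index 32 undoes
 * the final key mixing, then each SP/KL2 pair applies an inverse S-box
 * and unwinds one round's key mixing and linear transformation.
 */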
.align 8
.global serpent_dec_blk_8way_avx
.type   serpent_dec_blk_8way_avx,@function;

serpent_dec_blk_8way_avx:
	/* input:
	 *	%rdi: ctx, CTX
	 *	%rsi: dst
	 *	%rdx: src
	 */

	vpcmpeqd RNOT, RNOT, RNOT;

	leaq (4*4*4)(%rdx), %rax;
	read_blocks(%rdx, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);

	K2(RA, RB, RC, RD, RE, 32);
	SP(SI7, RA, RB, RC, RD, RE, 31);	KL2(RB, RD, RA, RE, RC, 31);
	SP(SI6, RB, RD, RA, RE, RC, 30);	KL2(RA, RC, RE, RB, RD, 30);
	SP(SI5, RA, RC, RE, RB, RD, 29);	KL2(RC, RD, RA, RE, RB, 29);
	SP(SI4, RC, RD, RA, RE, RB, 28);	KL2(RC, RA, RB, RE, RD, 28);
	SP(SI3, RC, RA, RB, RE, RD, 27);	KL2(RB, RC, RD, RE, RA, 27);
	SP(SI2, RB, RC, RD, RE, RA, 26);	KL2(RC, RA, RE, RD, RB, 26);
	SP(SI1, RC, RA, RE, RD, RB, 25);	KL2(RB, RA, RE, RD, RC, 25);
	SP(SI0, RB, RA, RE, RD, RC, 24);	KL2(RE, RC, RA, RB, RD, 24);
	SP(SI7, RE, RC, RA, RB, RD, 23);	KL2(RC, RB, RE, RD, RA, 23);
	SP(SI6, RC, RB, RE, RD, RA, 22);	KL2(RE, RA, RD, RC, RB, 22);
	SP(SI5, RE, RA, RD, RC, RB, 21);	KL2(RA, RB, RE, RD, RC, 21);
	SP(SI4, RA, RB, RE, RD, RC, 20);	KL2(RA, RE, RC, RD, RB, 20);
	SP(SI3, RA, RE, RC, RD, RB, 19);	KL2(RC, RA, RB, RD, RE, 19);
	SP(SI2, RC, RA, RB, RD, RE, 18);	KL2(RA, RE, RD, RB, RC, 18);
	SP(SI1, RA, RE, RD, RB, RC, 17);	KL2(RC, RE, RD, RB, RA, 17);
	SP(SI0, RC, RE, RD, RB, RA, 16);	KL2(RD, RA, RE, RC, RB, 16);
	SP(SI7, RD, RA, RE, RC, RB, 15);	KL2(RA, RC, RD, RB, RE, 15);
	SP(SI6, RA, RC, RD, RB, RE, 14);	KL2(RD, RE, RB, RA, RC, 14);
	SP(SI5, RD, RE, RB, RA, RC, 13);	KL2(RE, RC, RD, RB, RA, 13);
	SP(SI4, RE, RC, RD, RB, RA, 12);	KL2(RE, RD, RA, RB, RC, 12);
	SP(SI3, RE, RD, RA, RB, RC, 11);	KL2(RA, RE, RC, RB, RD, 11);
	SP(SI2, RA, RE, RC, RB, RD, 10);	KL2(RE, RD, RB, RC, RA, 10);
	SP(SI1, RE, RD, RB, RC, RA, 9);		KL2(RA, RD, RB, RC, RE, 9);
	SP(SI0, RA, RD, RB, RC, RE, 8);		KL2(RB, RE, RD, RA, RC, 8);
	SP(SI7, RB, RE, RD, RA, RC, 7);		KL2(RE, RA, RB, RC, RD, 7);
	SP(SI6, RE, RA, RB, RC, RD, 6);		KL2(RB, RD, RC, RE, RA, 6);
	SP(SI5, RB, RD, RC, RE, RA, 5);		KL2(RD, RA, RB, RC, RE, 5);
	SP(SI4, RD, RA, RB, RC, RE, 4);		KL2(RD, RB, RE, RC, RA, 4);
	SP(SI3, RD, RB, RE, RC, RA, 3);		KL2(RE, RD, RA, RC, RB, 3);
	SP(SI2, RE, RD, RA, RC, RB, 2);		KL2(RD, RB, RC, RA, RE, 2);
	SP(SI1, RD, RB, RC, RA, RE, 1);		KL2(RE, RB, RC, RA, RD, 1);
	S(SI0, RE, RB, RC, RA, RD);		K2(RC, RD, RB, RE, RA, 0);

	leaq (4*4*4)(%rsi), %rax;
	write_blocks(%rsi, RC1, RD1, RB1, RE1, RK0, RK1, RK2);
	write_blocks(%rax, RC2, RD2, RB2, RE2, RK0, RK1, RK2);

	ret;