/* camellia_aesni_avx2_glue.c */
  1. /*
  2. * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
  3. *
  4. * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
  5. *
  6. * This program is free software; you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation; either version 2 of the License, or
  9. * (at your option) any later version.
  10. *
  11. */
  12. #include <linux/module.h>
  13. #include <linux/types.h>
  14. #include <linux/crypto.h>
  15. #include <linux/err.h>
  16. #include <crypto/algapi.h>
  17. #include <crypto/ctr.h>
  18. #include <crypto/lrw.h>
  19. #include <crypto/xts.h>
  20. #include <asm/xcr.h>
  21. #include <asm/xsave.h>
  22. #include <asm/crypto/camellia.h>
  23. #include <asm/crypto/ablk_helper.h>
  24. #include <asm/crypto/glue_helper.h>
/* Blocks processed per call by the 16-way AVX/AES-NI assembler routines */
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
/* Blocks processed per call by the 32-way AVX2/AES-NI assembler routines */
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32
/* 32-way AVX2/AES-NI parallel cipher functions (implemented in assembler) */

/* ECB encrypt/decrypt: 32 independent blocks, src -> dst */
asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);

/* CBC decrypt: 32 blocks (decryption parallelizes; encryption does not) */
asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);

/* CTR: 32 blocks keystream starting at *iv; iv is advanced by the callee */
asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

/* XTS encrypt/decrypt: 32 blocks, *iv is the running tweak */
asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
/*
 * ECB encryption dispatch table.  Entries are ordered widest-first;
 * glue_helper walks them and uses the largest num_blocks that still fits
 * the remaining data.  The FPU is only engaged for chunks of at least
 * fpu_blocks_limit (16) blocks.
 */
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		/* 2-way plain x86_64 assembler fallback */
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		/* single-block fallback for the tail */
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};
/*
 * CTR dispatch table: same widest-first layout as camellia_enc, but the
 * functions also consume/advance the 128-bit counter (le128 *iv).
 */
static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};
/*
 * XTS encryption dispatch table (3 widths: 32-way AVX2, 16-way AVX,
 * single-block fallback).  Each function also updates the running tweak.
 */
static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};
/* ECB decryption dispatch table; mirrors camellia_enc with _dec routines. */
static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};
/*
 * CBC decryption dispatch table.  Only decryption is parallelizable in CBC;
 * encryption goes through glue_cbc_encrypt_128bit() one block at a time.
 */
static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};
/* XTS decryption dispatch table; mirrors camellia_enc_xts with _dec routines. */
static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};
  136. static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  137. struct scatterlist *src, unsigned int nbytes)
  138. {
  139. return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
  140. }
  141. static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  142. struct scatterlist *src, unsigned int nbytes)
  143. {
  144. return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
  145. }
  146. static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  147. struct scatterlist *src, unsigned int nbytes)
  148. {
  149. return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
  150. dst, src, nbytes);
  151. }
  152. static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  153. struct scatterlist *src, unsigned int nbytes)
  154. {
  155. return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
  156. nbytes);
  157. }
  158. static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  159. struct scatterlist *src, unsigned int nbytes)
  160. {
  161. return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
  162. }
  163. static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
  164. {
  165. return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
  166. CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
  167. nbytes);
  168. }
  169. static inline void camellia_fpu_end(bool fpu_enabled)
  170. {
  171. glue_fpu_end(fpu_enabled);
  172. }
  173. static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
  174. unsigned int key_len)
  175. {
  176. return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
  177. &tfm->crt_flags);
  178. }
/*
 * Per-request state threaded through the LRW callbacks: the key context
 * plus whether the FPU is currently enabled (so it is acquired lazily and
 * released once at the end of the request).
 */
struct crypt_priv {
	struct camellia_ctx *ctx;
	bool fpu_enabled;
};
  183. static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  184. {
  185. const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
  186. struct crypt_priv *ctx = priv;
  187. int i;
  188. ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);
  189. if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
  190. camellia_ecb_enc_32way(ctx->ctx, srcdst, srcdst);
  191. srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
  192. nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
  193. }
  194. if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
  195. camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
  196. srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  197. nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  198. }
  199. while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
  200. camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
  201. srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
  202. nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
  203. }
  204. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  205. camellia_enc_blk(ctx->ctx, srcdst, srcdst);
  206. }
  207. static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  208. {
  209. const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
  210. struct crypt_priv *ctx = priv;
  211. int i;
  212. ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);
  213. if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
  214. camellia_ecb_dec_32way(ctx->ctx, srcdst, srcdst);
  215. srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
  216. nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
  217. }
  218. if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
  219. camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
  220. srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  221. nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  222. }
  223. while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
  224. camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
  225. srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
  226. nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
  227. }
  228. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  229. camellia_dec_blk(ctx->ctx, srcdst, srcdst);
  230. }
  231. static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  232. struct scatterlist *src, unsigned int nbytes)
  233. {
  234. struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  235. be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
  236. struct crypt_priv crypt_ctx = {
  237. .ctx = &ctx->camellia_ctx,
  238. .fpu_enabled = false,
  239. };
  240. struct lrw_crypt_req req = {
  241. .tbuf = buf,
  242. .tbuflen = sizeof(buf),
  243. .table_ctx = &ctx->lrw_table,
  244. .crypt_ctx = &crypt_ctx,
  245. .crypt_fn = encrypt_callback,
  246. };
  247. int ret;
  248. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  249. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  250. camellia_fpu_end(crypt_ctx.fpu_enabled);
  251. return ret;
  252. }
  253. static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  254. struct scatterlist *src, unsigned int nbytes)
  255. {
  256. struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  257. be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
  258. struct crypt_priv crypt_ctx = {
  259. .ctx = &ctx->camellia_ctx,
  260. .fpu_enabled = false,
  261. };
  262. struct lrw_crypt_req req = {
  263. .tbuf = buf,
  264. .tbuflen = sizeof(buf),
  265. .table_ctx = &ctx->lrw_table,
  266. .crypt_ctx = &crypt_ctx,
  267. .crypt_fn = decrypt_callback,
  268. };
  269. int ret;
  270. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  271. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  272. camellia_fpu_end(crypt_ctx.fpu_enabled);
  273. return ret;
  274. }
  275. static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  276. struct scatterlist *src, unsigned int nbytes)
  277. {
  278. struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  279. return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
  280. XTS_TWEAK_CAST(camellia_enc_blk),
  281. &ctx->tweak_ctx, &ctx->crypt_ctx);
  282. }
  283. static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  284. struct scatterlist *src, unsigned int nbytes)
  285. {
  286. struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  287. return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
  288. XTS_TWEAK_CAST(camellia_enc_blk),
  289. &ctx->tweak_ctx, &ctx->crypt_ctx);
  290. }
/*
 * Algorithm registrations.  The first five "__..." entries are the internal
 * synchronous blkcipher implementations (priority 0, never selected
 * directly); the last five are the user-visible async ablkcipher wrappers
 * (priority 500) that route through ablk_helper to the internal versions.
 */
static struct crypto_alg cmll_algs[10] = { {
	/* internal synchronous ECB implementation */
	.cra_name = "__ecb-camellia-aesni-avx2",
	.cra_driver_name = "__driver-ecb-camellia-aesni-avx2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = camellia_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	/* internal synchronous CBC implementation */
	.cra_name = "__cbc-camellia-aesni-avx2",
	.cra_driver_name = "__driver-cbc-camellia-aesni-avx2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = camellia_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	/* internal synchronous CTR implementation (stream mode: blocksize 1) */
	.cra_name = "__ctr-camellia-aesni-avx2",
	.cra_driver_name = "__driver-ctr-camellia-aesni-avx2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct camellia_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = camellia_setkey,
			/* CTR encrypt and decrypt are the same operation */
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	/* internal synchronous LRW implementation */
	.cra_name = "__lrw-camellia-aesni-avx2",
	.cra_driver_name = "__driver-lrw-camellia-aesni-avx2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			/* LRW key = cipher key + one block of tweak key */
			.min_keysize = CAMELLIA_MIN_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = lrw_camellia_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	/* internal synchronous XTS implementation */
	.cra_name = "__xts-camellia-aesni-avx2",
	.cra_driver_name = "__driver-xts-camellia-aesni-avx2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			/* XTS uses two full keys: data key + tweak key */
			.min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = xts_camellia_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	/* user-visible async ECB wrapper */
	.cra_name = "ecb(camellia)",
	.cra_driver_name = "ecb-camellia-aesni-avx2",
	.cra_priority = 500,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	/* user-visible async CBC wrapper */
	.cra_name = "cbc(camellia)",
	.cra_driver_name = "cbc-camellia-aesni-avx2",
	.cra_priority = 500,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			/*
			 * CBC encryption is serial and gains nothing from the
			 * async path, so it calls the inner blkcipher directly.
			 */
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	/* user-visible async CTR wrapper */
	.cra_name = "ctr(camellia)",
	.cra_driver_name = "ctr-camellia-aesni-avx2",
	.cra_priority = 500,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			/* CTR decrypt == encrypt, hence ablk_encrypt twice */
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
}, {
	/* user-visible async LRW wrapper */
	.cra_name = "lrw(camellia)",
	.cra_driver_name = "lrw-camellia-aesni-avx2",
	.cra_priority = 500,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	/* user-visible async XTS wrapper */
	.cra_name = "xts(camellia)",
	.cra_driver_name = "xts-camellia-aesni-avx2",
	.cra_priority = 500,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };
  505. static int __init camellia_aesni_init(void)
  506. {
  507. u64 xcr0;
  508. if (!cpu_has_avx2 || !cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
  509. pr_info("AVX2 or AES-NI instructions are not detected.\n");
  510. return -ENODEV;
  511. }
  512. xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
  513. if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
  514. pr_info("AVX2 detected but unusable.\n");
  515. return -ENODEV;
  516. }
  517. return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
  518. }
  519. static void __exit camellia_aesni_fini(void)
  520. {
  521. crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
  522. }
module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
/* Allow auto-loading when "camellia" is requested */
MODULE_ALIAS("camellia");
MODULE_ALIAS("camellia-asm");