/* camellia_aesni_avx_glue.c */
/*
 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */
  12. #include <linux/module.h>
  13. #include <linux/types.h>
  14. #include <linux/crypto.h>
  15. #include <linux/err.h>
  16. #include <crypto/algapi.h>
  17. #include <crypto/ctr.h>
  18. #include <crypto/lrw.h>
  19. #include <crypto/xts.h>
  20. #include <asm/xcr.h>
  21. #include <asm/xsave.h>
  22. #include <asm/crypto/camellia.h>
  23. #include <asm/crypto/ablk_helper.h>
  24. #include <asm/crypto/glue_helper.h>
  25. #define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
  26. /* 16-way AES-NI parallel cipher functions */
  27. asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
  28. const u8 *src);
  29. asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
  30. const u8 *src);
  31. asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
  32. const u8 *src);
  33. asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
  34. const u8 *src, le128 *iv);
  35. static const struct common_glue_ctx camellia_enc = {
  36. .num_funcs = 3,
  37. .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  38. .funcs = { {
  39. .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  40. .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
  41. }, {
  42. .num_blocks = 2,
  43. .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
  44. }, {
  45. .num_blocks = 1,
  46. .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
  47. } }
  48. };
  49. static const struct common_glue_ctx camellia_ctr = {
  50. .num_funcs = 3,
  51. .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  52. .funcs = { {
  53. .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  54. .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
  55. }, {
  56. .num_blocks = 2,
  57. .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
  58. }, {
  59. .num_blocks = 1,
  60. .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
  61. } }
  62. };
  63. static const struct common_glue_ctx camellia_dec = {
  64. .num_funcs = 3,
  65. .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  66. .funcs = { {
  67. .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  68. .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
  69. }, {
  70. .num_blocks = 2,
  71. .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
  72. }, {
  73. .num_blocks = 1,
  74. .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
  75. } }
  76. };
  77. static const struct common_glue_ctx camellia_dec_cbc = {
  78. .num_funcs = 3,
  79. .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  80. .funcs = { {
  81. .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
  82. .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
  83. }, {
  84. .num_blocks = 2,
  85. .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
  86. }, {
  87. .num_blocks = 1,
  88. .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
  89. } }
  90. };
  91. static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  92. struct scatterlist *src, unsigned int nbytes)
  93. {
  94. return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
  95. }
  96. static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  97. struct scatterlist *src, unsigned int nbytes)
  98. {
  99. return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
  100. }
  101. static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  102. struct scatterlist *src, unsigned int nbytes)
  103. {
  104. return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
  105. dst, src, nbytes);
  106. }
  107. static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  108. struct scatterlist *src, unsigned int nbytes)
  109. {
  110. return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
  111. nbytes);
  112. }
  113. static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  114. struct scatterlist *src, unsigned int nbytes)
  115. {
  116. return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
  117. }
  118. static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
  119. {
  120. return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
  121. CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
  122. nbytes);
  123. }
  124. static inline void camellia_fpu_end(bool fpu_enabled)
  125. {
  126. glue_fpu_end(fpu_enabled);
  127. }
  128. static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
  129. unsigned int key_len)
  130. {
  131. return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
  132. &tfm->crt_flags);
  133. }
  134. struct crypt_priv {
  135. struct camellia_ctx *ctx;
  136. bool fpu_enabled;
  137. };
  138. static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  139. {
  140. const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
  141. struct crypt_priv *ctx = priv;
  142. int i;
  143. ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);
  144. if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
  145. camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
  146. srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  147. nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  148. }
  149. while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
  150. camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
  151. srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
  152. nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
  153. }
  154. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  155. camellia_enc_blk(ctx->ctx, srcdst, srcdst);
  156. }
  157. static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  158. {
  159. const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
  160. struct crypt_priv *ctx = priv;
  161. int i;
  162. ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);
  163. if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
  164. camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
  165. srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  166. nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
  167. }
  168. while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
  169. camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
  170. srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
  171. nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
  172. }
  173. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  174. camellia_dec_blk(ctx->ctx, srcdst, srcdst);
  175. }
  176. static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  177. struct scatterlist *src, unsigned int nbytes)
  178. {
  179. struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  180. be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
  181. struct crypt_priv crypt_ctx = {
  182. .ctx = &ctx->camellia_ctx,
  183. .fpu_enabled = false,
  184. };
  185. struct lrw_crypt_req req = {
  186. .tbuf = buf,
  187. .tbuflen = sizeof(buf),
  188. .table_ctx = &ctx->lrw_table,
  189. .crypt_ctx = &crypt_ctx,
  190. .crypt_fn = encrypt_callback,
  191. };
  192. int ret;
  193. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  194. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  195. camellia_fpu_end(crypt_ctx.fpu_enabled);
  196. return ret;
  197. }
  198. static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  199. struct scatterlist *src, unsigned int nbytes)
  200. {
  201. struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  202. be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
  203. struct crypt_priv crypt_ctx = {
  204. .ctx = &ctx->camellia_ctx,
  205. .fpu_enabled = false,
  206. };
  207. struct lrw_crypt_req req = {
  208. .tbuf = buf,
  209. .tbuflen = sizeof(buf),
  210. .table_ctx = &ctx->lrw_table,
  211. .crypt_ctx = &crypt_ctx,
  212. .crypt_fn = decrypt_callback,
  213. };
  214. int ret;
  215. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  216. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  217. camellia_fpu_end(crypt_ctx.fpu_enabled);
  218. return ret;
  219. }
  220. static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  221. struct scatterlist *src, unsigned int nbytes)
  222. {
  223. struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  224. be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
  225. struct crypt_priv crypt_ctx = {
  226. .ctx = &ctx->crypt_ctx,
  227. .fpu_enabled = false,
  228. };
  229. struct xts_crypt_req req = {
  230. .tbuf = buf,
  231. .tbuflen = sizeof(buf),
  232. .tweak_ctx = &ctx->tweak_ctx,
  233. .tweak_fn = XTS_TWEAK_CAST(camellia_enc_blk),
  234. .crypt_ctx = &crypt_ctx,
  235. .crypt_fn = encrypt_callback,
  236. };
  237. int ret;
  238. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  239. ret = xts_crypt(desc, dst, src, nbytes, &req);
  240. camellia_fpu_end(crypt_ctx.fpu_enabled);
  241. return ret;
  242. }
  243. static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  244. struct scatterlist *src, unsigned int nbytes)
  245. {
  246. struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  247. be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
  248. struct crypt_priv crypt_ctx = {
  249. .ctx = &ctx->crypt_ctx,
  250. .fpu_enabled = false,
  251. };
  252. struct xts_crypt_req req = {
  253. .tbuf = buf,
  254. .tbuflen = sizeof(buf),
  255. .tweak_ctx = &ctx->tweak_ctx,
  256. .tweak_fn = XTS_TWEAK_CAST(camellia_enc_blk),
  257. .crypt_ctx = &crypt_ctx,
  258. .crypt_fn = decrypt_callback,
  259. };
  260. int ret;
  261. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  262. ret = xts_crypt(desc, dst, src, nbytes, &req);
  263. camellia_fpu_end(crypt_ctx.fpu_enabled);
  264. return ret;
  265. }
  266. static struct crypto_alg cmll_algs[10] = { {
  267. .cra_name = "__ecb-camellia-aesni",
  268. .cra_driver_name = "__driver-ecb-camellia-aesni",
  269. .cra_priority = 0,
  270. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
  271. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  272. .cra_ctxsize = sizeof(struct camellia_ctx),
  273. .cra_alignmask = 0,
  274. .cra_type = &crypto_blkcipher_type,
  275. .cra_module = THIS_MODULE,
  276. .cra_u = {
  277. .blkcipher = {
  278. .min_keysize = CAMELLIA_MIN_KEY_SIZE,
  279. .max_keysize = CAMELLIA_MAX_KEY_SIZE,
  280. .setkey = camellia_setkey,
  281. .encrypt = ecb_encrypt,
  282. .decrypt = ecb_decrypt,
  283. },
  284. },
  285. }, {
  286. .cra_name = "__cbc-camellia-aesni",
  287. .cra_driver_name = "__driver-cbc-camellia-aesni",
  288. .cra_priority = 0,
  289. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
  290. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  291. .cra_ctxsize = sizeof(struct camellia_ctx),
  292. .cra_alignmask = 0,
  293. .cra_type = &crypto_blkcipher_type,
  294. .cra_module = THIS_MODULE,
  295. .cra_u = {
  296. .blkcipher = {
  297. .min_keysize = CAMELLIA_MIN_KEY_SIZE,
  298. .max_keysize = CAMELLIA_MAX_KEY_SIZE,
  299. .setkey = camellia_setkey,
  300. .encrypt = cbc_encrypt,
  301. .decrypt = cbc_decrypt,
  302. },
  303. },
  304. }, {
  305. .cra_name = "__ctr-camellia-aesni",
  306. .cra_driver_name = "__driver-ctr-camellia-aesni",
  307. .cra_priority = 0,
  308. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
  309. .cra_blocksize = 1,
  310. .cra_ctxsize = sizeof(struct camellia_ctx),
  311. .cra_alignmask = 0,
  312. .cra_type = &crypto_blkcipher_type,
  313. .cra_module = THIS_MODULE,
  314. .cra_u = {
  315. .blkcipher = {
  316. .min_keysize = CAMELLIA_MIN_KEY_SIZE,
  317. .max_keysize = CAMELLIA_MAX_KEY_SIZE,
  318. .ivsize = CAMELLIA_BLOCK_SIZE,
  319. .setkey = camellia_setkey,
  320. .encrypt = ctr_crypt,
  321. .decrypt = ctr_crypt,
  322. },
  323. },
  324. }, {
  325. .cra_name = "__lrw-camellia-aesni",
  326. .cra_driver_name = "__driver-lrw-camellia-aesni",
  327. .cra_priority = 0,
  328. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
  329. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  330. .cra_ctxsize = sizeof(struct camellia_lrw_ctx),
  331. .cra_alignmask = 0,
  332. .cra_type = &crypto_blkcipher_type,
  333. .cra_module = THIS_MODULE,
  334. .cra_exit = lrw_camellia_exit_tfm,
  335. .cra_u = {
  336. .blkcipher = {
  337. .min_keysize = CAMELLIA_MIN_KEY_SIZE +
  338. CAMELLIA_BLOCK_SIZE,
  339. .max_keysize = CAMELLIA_MAX_KEY_SIZE +
  340. CAMELLIA_BLOCK_SIZE,
  341. .ivsize = CAMELLIA_BLOCK_SIZE,
  342. .setkey = lrw_camellia_setkey,
  343. .encrypt = lrw_encrypt,
  344. .decrypt = lrw_decrypt,
  345. },
  346. },
  347. }, {
  348. .cra_name = "__xts-camellia-aesni",
  349. .cra_driver_name = "__driver-xts-camellia-aesni",
  350. .cra_priority = 0,
  351. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
  352. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  353. .cra_ctxsize = sizeof(struct camellia_xts_ctx),
  354. .cra_alignmask = 0,
  355. .cra_type = &crypto_blkcipher_type,
  356. .cra_module = THIS_MODULE,
  357. .cra_u = {
  358. .blkcipher = {
  359. .min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
  360. .max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
  361. .ivsize = CAMELLIA_BLOCK_SIZE,
  362. .setkey = xts_camellia_setkey,
  363. .encrypt = xts_encrypt,
  364. .decrypt = xts_decrypt,
  365. },
  366. },
  367. }, {
  368. .cra_name = "ecb(camellia)",
  369. .cra_driver_name = "ecb-camellia-aesni",
  370. .cra_priority = 400,
  371. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  372. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  373. .cra_ctxsize = sizeof(struct async_helper_ctx),
  374. .cra_alignmask = 0,
  375. .cra_type = &crypto_ablkcipher_type,
  376. .cra_module = THIS_MODULE,
  377. .cra_init = ablk_init,
  378. .cra_exit = ablk_exit,
  379. .cra_u = {
  380. .ablkcipher = {
  381. .min_keysize = CAMELLIA_MIN_KEY_SIZE,
  382. .max_keysize = CAMELLIA_MAX_KEY_SIZE,
  383. .setkey = ablk_set_key,
  384. .encrypt = ablk_encrypt,
  385. .decrypt = ablk_decrypt,
  386. },
  387. },
  388. }, {
  389. .cra_name = "cbc(camellia)",
  390. .cra_driver_name = "cbc-camellia-aesni",
  391. .cra_priority = 400,
  392. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  393. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  394. .cra_ctxsize = sizeof(struct async_helper_ctx),
  395. .cra_alignmask = 0,
  396. .cra_type = &crypto_ablkcipher_type,
  397. .cra_module = THIS_MODULE,
  398. .cra_init = ablk_init,
  399. .cra_exit = ablk_exit,
  400. .cra_u = {
  401. .ablkcipher = {
  402. .min_keysize = CAMELLIA_MIN_KEY_SIZE,
  403. .max_keysize = CAMELLIA_MAX_KEY_SIZE,
  404. .ivsize = CAMELLIA_BLOCK_SIZE,
  405. .setkey = ablk_set_key,
  406. .encrypt = __ablk_encrypt,
  407. .decrypt = ablk_decrypt,
  408. },
  409. },
  410. }, {
  411. .cra_name = "ctr(camellia)",
  412. .cra_driver_name = "ctr-camellia-aesni",
  413. .cra_priority = 400,
  414. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  415. .cra_blocksize = 1,
  416. .cra_ctxsize = sizeof(struct async_helper_ctx),
  417. .cra_alignmask = 0,
  418. .cra_type = &crypto_ablkcipher_type,
  419. .cra_module = THIS_MODULE,
  420. .cra_init = ablk_init,
  421. .cra_exit = ablk_exit,
  422. .cra_u = {
  423. .ablkcipher = {
  424. .min_keysize = CAMELLIA_MIN_KEY_SIZE,
  425. .max_keysize = CAMELLIA_MAX_KEY_SIZE,
  426. .ivsize = CAMELLIA_BLOCK_SIZE,
  427. .setkey = ablk_set_key,
  428. .encrypt = ablk_encrypt,
  429. .decrypt = ablk_encrypt,
  430. .geniv = "chainiv",
  431. },
  432. },
  433. }, {
  434. .cra_name = "lrw(camellia)",
  435. .cra_driver_name = "lrw-camellia-aesni",
  436. .cra_priority = 400,
  437. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  438. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  439. .cra_ctxsize = sizeof(struct async_helper_ctx),
  440. .cra_alignmask = 0,
  441. .cra_type = &crypto_ablkcipher_type,
  442. .cra_module = THIS_MODULE,
  443. .cra_init = ablk_init,
  444. .cra_exit = ablk_exit,
  445. .cra_u = {
  446. .ablkcipher = {
  447. .min_keysize = CAMELLIA_MIN_KEY_SIZE +
  448. CAMELLIA_BLOCK_SIZE,
  449. .max_keysize = CAMELLIA_MAX_KEY_SIZE +
  450. CAMELLIA_BLOCK_SIZE,
  451. .ivsize = CAMELLIA_BLOCK_SIZE,
  452. .setkey = ablk_set_key,
  453. .encrypt = ablk_encrypt,
  454. .decrypt = ablk_decrypt,
  455. },
  456. },
  457. }, {
  458. .cra_name = "xts(camellia)",
  459. .cra_driver_name = "xts-camellia-aesni",
  460. .cra_priority = 400,
  461. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  462. .cra_blocksize = CAMELLIA_BLOCK_SIZE,
  463. .cra_ctxsize = sizeof(struct async_helper_ctx),
  464. .cra_alignmask = 0,
  465. .cra_type = &crypto_ablkcipher_type,
  466. .cra_module = THIS_MODULE,
  467. .cra_init = ablk_init,
  468. .cra_exit = ablk_exit,
  469. .cra_u = {
  470. .ablkcipher = {
  471. .min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
  472. .max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
  473. .ivsize = CAMELLIA_BLOCK_SIZE,
  474. .setkey = ablk_set_key,
  475. .encrypt = ablk_encrypt,
  476. .decrypt = ablk_decrypt,
  477. },
  478. },
  479. } };
  480. static int __init camellia_aesni_init(void)
  481. {
  482. u64 xcr0;
  483. if (!cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
  484. pr_info("AVX or AES-NI instructions are not detected.\n");
  485. return -ENODEV;
  486. }
  487. xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
  488. if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
  489. pr_info("AVX detected but unusable.\n");
  490. return -ENODEV;
  491. }
  492. return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
  493. }
  494. static void __exit camellia_aesni_fini(void)
  495. {
  496. crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
  497. }
  498. module_init(camellia_aesni_init);
  499. module_exit(camellia_aesni_fini);
  500. MODULE_LICENSE("GPL");
  501. MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
  502. MODULE_ALIAS("camellia");
  503. MODULE_ALIAS("camellia-asm");