aesni-intel_glue.c

/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};

#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
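
/*
 * The assembly routines declared above expect a 16-byte aligned
 * crypto_aes_ctx.  The context embedded in a crypto_tfm is only
 * guaranteed to be aligned to crypto_tfm_ctx_alignment(), so aes_ctx()
 * rounds the raw context pointer up to AESNI_ALIGN before it is handed
 * to the AES-NI code.
 */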
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
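
/*
 * AES-NI instructions use SSE registers, so they may only run when the
 * FPU is usable in the current context.  When irq_fpu_usable() reports
 * that it is not, the key expansion and single-block operations below
 * fall back to crypto_aes_expand_key()/crypto_aes_encrypt_x86()/
 * crypto_aes_decrypt_x86() instead of the AES-NI assembly routines.
 */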
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static struct crypto_alg aesni_alg = {
	.cra_name = "aes",
	.cra_driver_name = "aes-aesni",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask = 0,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = aes_encrypt,
			.cia_decrypt = aes_decrypt
		}
	}
};

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
	.cra_name = "__aes-aesni",
	.cra_driver_name = "__driver-aes-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask = 0,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = __aes_encrypt,
			.cia_decrypt = __aes_decrypt
		}
	}
};

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name = "__ecb-aes-aesni",
	.cra_driver_name = "__driver-ecb-aes-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_set_key,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
};

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name = "__cbc-aes-aesni",
	.cra_driver_name = "__driver-cbc-aes-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_set_key,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
};
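
/*
 * CTR mode does not require the input to be a multiple of the block size.
 * ctr_crypt() handles all full blocks with the aesni_ctr_enc() assembly
 * routine; ctr_crypt_final() then encrypts the counter block once more and
 * XORs the resulting keystream into the trailing partial block.
 */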
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_ctr_alg = {
	.cra_name = "__ctr-aes-aesni",
	.cra_driver_name = "__driver-ctr-aes-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = aes_set_key,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
};
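
/*
 * The ablk_* helpers below implement the exported asynchronous algorithms.
 * When the FPU is usable, a request is handled synchronously by the internal
 * "__driver-*-aes-aesni" blkcipher; otherwise the request is copied and
 * handed to cryptd, which processes it later in a context where the FPU
 * may be used.
 */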
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);
	return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}
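
/*
 * ablk_init_common() stores the allocated cryptd transform in the context
 * and reserves enough per-request memory to hold the nested
 * ablkcipher_request that is handed to cryptd when a request is deferred.
 */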
static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ecb_alg = {
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init = ablk_ecb_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_cbc_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init = ablk_cbc_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};

static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "ctr-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init = ablk_ctr_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
};

#ifdef HAS_CTR
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher(
		"rfc3686(__driver-ctr-aes-aesni)", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_rfc3686_ctr_alg = {
	.cra_name = "rfc3686(ctr(aes))",
	.cra_driver_name = "rfc3686-ctr-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
	.cra_init = ablk_rfc3686_ctr_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
			.geniv = "seqiv",
		},
	},
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_lrw_alg = {
	.cra_name = "lrw(aes)",
	.cra_driver_name = "lrw-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init = ablk_lrw_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
	.cra_name = "pcbc(aes)",
	.cra_driver_name = "pcbc-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init = ablk_pcbc_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "xts-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init = ablk_xts_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};
#endif
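
/*
 * Register the algorithms in order; on failure, the cascading error labels
 * below unregister everything that was already registered before returning
 * the error.
 */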
static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&blk_ctr_alg)))
		goto blk_ctr_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
		goto ablk_rfc3686_ctr_err;
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif
	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
	crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
#endif
	crypto_unregister_alg(&ablk_ctr_alg);
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_ctr_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&__aesni_alg);
	crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");