/* twofish_glue_3way.c - glue code for 3-way parallel Twofish (x86-64) */
  1. /*
  2. * Glue Code for 3-way parallel assembler optimized version of Twofish
  3. *
  4. * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
  5. *
  6. * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
  7. * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  8. * CTR part based on code (crypto/ctr.c) by:
  9. * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  10. *
  11. * This program is free software; you can redistribute it and/or modify
  12. * it under the terms of the GNU General Public License as published by
  13. * the Free Software Foundation; either version 2 of the License, or
  14. * (at your option) any later version.
  15. *
  16. * This program is distributed in the hope that it will be useful,
  17. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19. * GNU General Public License for more details.
  20. *
  21. * You should have received a copy of the GNU General Public License
  22. * along with this program; if not, write to the Free Software
  23. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
  24. * USA
  25. *
  26. */
  27. #include <asm/processor.h>
  28. #include <linux/crypto.h>
  29. #include <linux/init.h>
  30. #include <linux/module.h>
  31. #include <linux/types.h>
  32. #include <crypto/algapi.h>
  33. #include <crypto/twofish.h>
  34. #include <crypto/b128ops.h>
  35. #include <crypto/lrw.h>
  36. #include <crypto/xts.h>
/* regular block cipher functions from twofish_x86_64 module */
asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
				const u8 *src);
asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
				const u8 *src);

/* 3-way parallel cipher functions */
/*
 * When @xor is true the routine XORs the three encrypted blocks into
 * @dst rather than overwriting it (used by the CTR path below, which
 * preloads @dst with the plaintext).
 */
asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
				       const u8 *src, bool xor);
asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
  47. static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
  48. const u8 *src)
  49. {
  50. __twofish_enc_blk_3way(ctx, dst, src, false);
  51. }
  52. static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst,
  53. const u8 *src)
  54. {
  55. __twofish_enc_blk_3way(ctx, dst, src, true);
  56. }
/*
 * Common ECB walker: runs the request through @fn_3way in 3-block
 * batches and falls back to the single-block @fn for the remainder.
 * Returns 0 or a negative error from the blkcipher walk.
 */
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
		     void (*fn)(struct twofish_ctx *, u8 *, const u8 *),
		     void (*fn_3way)(struct twofish_ctx *, u8 *, const u8 *))
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes;
	int err;

	err = blkcipher_walk_virt(desc, walk);

	/* one iteration per contiguous segment supplied by the walk */
	while ((nbytes = walk->nbytes)) {
		u8 *wsrc = walk->src.virt.addr;
		u8 *wdst = walk->dst.virt.addr;

		/* Process three block batch */
		if (nbytes >= bsize * 3) {
			do {
				fn_3way(ctx, wdst, wsrc);

				wsrc += bsize * 3;
				wdst += bsize * 3;
				nbytes -= bsize * 3;
			} while (nbytes >= bsize * 3);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		/* any tail < bsize is handed back to the walk machinery */
		err = blkcipher_walk_done(desc, walk, nbytes);
	}

	return err;
}
  92. static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  93. struct scatterlist *src, unsigned int nbytes)
  94. {
  95. struct blkcipher_walk walk;
  96. blkcipher_walk_init(&walk, dst, src, nbytes);
  97. return ecb_crypt(desc, &walk, twofish_enc_blk, twofish_enc_blk_3way);
  98. }
  99. static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  100. struct scatterlist *src, unsigned int nbytes)
  101. {
  102. struct blkcipher_walk walk;
  103. blkcipher_walk_init(&walk, dst, src, nbytes);
  104. return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way);
  105. }
/* ECB mode descriptor: no IV, every block independent (best 3-way case). */
static struct crypto_alg blk_ecb_alg = {
	.cra_name = "ecb(twofish)",
	.cra_driver_name = "ecb-twofish-3way",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
};
  127. static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
  128. struct blkcipher_walk *walk)
  129. {
  130. struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  131. unsigned int bsize = TF_BLOCK_SIZE;
  132. unsigned int nbytes = walk->nbytes;
  133. u128 *src = (u128 *)walk->src.virt.addr;
  134. u128 *dst = (u128 *)walk->dst.virt.addr;
  135. u128 *iv = (u128 *)walk->iv;
  136. do {
  137. u128_xor(dst, src, iv);
  138. twofish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
  139. iv = dst;
  140. src += 1;
  141. dst += 1;
  142. nbytes -= bsize;
  143. } while (nbytes >= bsize);
  144. u128_xor((u128 *)walk->iv, (u128 *)walk->iv, iv);
  145. return nbytes;
  146. }
  147. static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  148. struct scatterlist *src, unsigned int nbytes)
  149. {
  150. struct blkcipher_walk walk;
  151. int err;
  152. blkcipher_walk_init(&walk, dst, src, nbytes);
  153. err = blkcipher_walk_virt(desc, &walk);
  154. while ((nbytes = walk.nbytes)) {
  155. nbytes = __cbc_encrypt(desc, &walk);
  156. err = blkcipher_walk_done(desc, &walk, nbytes);
  157. }
  158. return err;
  159. }
/*
 * CBC decryption for one walk segment, processed back to front so the
 * 3-way batches can run in place: each plaintext block needs the
 * *previous* ciphertext block, which would be overwritten by a
 * front-to-back in-place pass.  Returns unprocessed tail bytes.
 */
static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 ivs[3 - 1];	/* saved ciphertexts clobbered by in-place 3-way */
	u128 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	/* last ciphertext block becomes the IV for the next segment */
	last_iv = *src;

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			nbytes -= bsize * (3 - 1);
			src -= 3 - 1;
			dst -= 3 - 1;

			/* stash ciphertexts needed after in-place decrypt */
			ivs[0] = src[0];
			ivs[1] = src[1];

			twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

			u128_xor(dst + 1, dst + 1, ivs + 0);
			u128_xor(dst + 2, dst + 2, ivs + 1);

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			u128_xor(dst, dst, src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	for (;;) {
		twofish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		u128_xor(dst, dst, src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	/* first block of the segment is chained to the incoming IV */
	u128_xor(dst, dst, (u128 *)walk->iv);
	*(u128 *)walk->iv = last_iv;

	return nbytes;
}
  210. static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  211. struct scatterlist *src, unsigned int nbytes)
  212. {
  213. struct blkcipher_walk walk;
  214. int err;
  215. blkcipher_walk_init(&walk, dst, src, nbytes);
  216. err = blkcipher_walk_virt(desc, &walk);
  217. while ((nbytes = walk.nbytes)) {
  218. nbytes = __cbc_decrypt(desc, &walk);
  219. err = blkcipher_walk_done(desc, &walk, nbytes);
  220. }
  221. return err;
  222. }
/* CBC mode descriptor: carries a block-sized IV for chaining. */
static struct crypto_alg blk_cbc_alg = {
	.cra_name = "cbc(twofish)",
	.cra_driver_name = "cbc-twofish-3way",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = twofish_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
};
  245. static inline void u128_to_be128(be128 *dst, const u128 *src)
  246. {
  247. dst->a = cpu_to_be64(src->a);
  248. dst->b = cpu_to_be64(src->b);
  249. }
  250. static inline void be128_to_u128(u128 *dst, const be128 *src)
  251. {
  252. dst->a = be64_to_cpu(src->a);
  253. dst->b = be64_to_cpu(src->b);
  254. }
  255. static inline void u128_inc(u128 *i)
  256. {
  257. i->b++;
  258. if (!i->b)
  259. i->a++;
  260. }
  261. static void ctr_crypt_final(struct blkcipher_desc *desc,
  262. struct blkcipher_walk *walk)
  263. {
  264. struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  265. u8 *ctrblk = walk->iv;
  266. u8 keystream[TF_BLOCK_SIZE];
  267. u8 *src = walk->src.virt.addr;
  268. u8 *dst = walk->dst.virt.addr;
  269. unsigned int nbytes = walk->nbytes;
  270. twofish_enc_blk(ctx, keystream, ctrblk);
  271. crypto_xor(keystream, src, nbytes);
  272. memcpy(dst, keystream, nbytes);
  273. crypto_inc(ctrblk, TF_BLOCK_SIZE);
  274. }
/*
 * CTR core for full blocks in one walk segment: encrypts counter
 * blocks (three at a time when possible) and XORs the keystream into
 * dst.  Returns the number of unprocessed tail bytes (< bsize).
 */
static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 ctrblk;
	be128 ctrblocks[3];

	/* keep the counter in native endianness for cheap increments */
	be128_to_u128(&ctrblk, (be128 *)walk->iv);

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			/* preload dst with plaintext; the xor-variant of
			 * the 3-way encrypt XORs the keystream into it */
			if (dst != src) {
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
			}

			/* create ctrblks for parallel encrypt */
			u128_to_be128(&ctrblocks[0], &ctrblk);
			u128_inc(&ctrblk);
			u128_to_be128(&ctrblocks[1], &ctrblk);
			u128_inc(&ctrblk);
			u128_to_be128(&ctrblocks[2], &ctrblk);
			u128_inc(&ctrblk);

			twofish_enc_blk_xor_3way(ctx, (u8 *)dst,
						 (u8 *)ctrblocks);

			src += 3;
			dst += 3;
			nbytes -= bsize * 3;
		} while (nbytes >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		if (dst != src)
			*dst = *src;

		u128_to_be128(&ctrblocks[0], &ctrblk);
		u128_inc(&ctrblk);

		/* encrypt the counter in place, then XOR into dst */
		twofish_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);
		u128_xor(dst, dst, (u128 *)ctrblocks);

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

done:
	/* store the advanced counter back for the next segment */
	u128_to_be128((be128 *)walk->iv, &ctrblk);
	return nbytes;
}
  326. static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  327. struct scatterlist *src, unsigned int nbytes)
  328. {
  329. struct blkcipher_walk walk;
  330. int err;
  331. blkcipher_walk_init(&walk, dst, src, nbytes);
  332. err = blkcipher_walk_virt_block(desc, &walk, TF_BLOCK_SIZE);
  333. while ((nbytes = walk.nbytes) >= TF_BLOCK_SIZE) {
  334. nbytes = __ctr_crypt(desc, &walk);
  335. err = blkcipher_walk_done(desc, &walk, nbytes);
  336. }
  337. if (walk.nbytes) {
  338. ctr_crypt_final(desc, &walk);
  339. err = blkcipher_walk_done(desc, &walk, 0);
  340. }
  341. return err;
  342. }
/* CTR mode descriptor: stream cipher semantics, hence blocksize 1. */
static struct crypto_alg blk_ctr_alg = {
	.cra_name = "ctr(twofish)",
	.cra_driver_name = "ctr-twofish-3way",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
};
  365. static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  366. {
  367. const unsigned int bsize = TF_BLOCK_SIZE;
  368. struct twofish_ctx *ctx = priv;
  369. int i;
  370. if (nbytes == 3 * bsize) {
  371. twofish_enc_blk_3way(ctx, srcdst, srcdst);
  372. return;
  373. }
  374. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  375. twofish_enc_blk(ctx, srcdst, srcdst);
  376. }
  377. static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  378. {
  379. const unsigned int bsize = TF_BLOCK_SIZE;
  380. struct twofish_ctx *ctx = priv;
  381. int i;
  382. if (nbytes == 3 * bsize) {
  383. twofish_dec_blk_3way(ctx, srcdst, srcdst);
  384. return;
  385. }
  386. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  387. twofish_dec_blk(ctx, srcdst, srcdst);
  388. }
/* Per-tfm context for lrw(twofish): LRW tweak table plus cipher keys. */
struct twofish_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct twofish_ctx twofish_ctx;
};
  393. static int lrw_twofish_setkey(struct crypto_tfm *tfm, const u8 *key,
  394. unsigned int keylen)
  395. {
  396. struct twofish_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
  397. int err;
  398. err = __twofish_setkey(&ctx->twofish_ctx, key, keylen - TF_BLOCK_SIZE,
  399. &tfm->crt_flags);
  400. if (err)
  401. return err;
  402. return lrw_init_table(&ctx->lrw_table, key + keylen - TF_BLOCK_SIZE);
  403. }
  404. static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  405. struct scatterlist *src, unsigned int nbytes)
  406. {
  407. struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  408. be128 buf[3];
  409. struct lrw_crypt_req req = {
  410. .tbuf = buf,
  411. .tbuflen = sizeof(buf),
  412. .table_ctx = &ctx->lrw_table,
  413. .crypt_ctx = &ctx->twofish_ctx,
  414. .crypt_fn = encrypt_callback,
  415. };
  416. return lrw_crypt(desc, dst, src, nbytes, &req);
  417. }
  418. static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  419. struct scatterlist *src, unsigned int nbytes)
  420. {
  421. struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  422. be128 buf[3];
  423. struct lrw_crypt_req req = {
  424. .tbuf = buf,
  425. .tbuflen = sizeof(buf),
  426. .table_ctx = &ctx->lrw_table,
  427. .crypt_ctx = &ctx->twofish_ctx,
  428. .crypt_fn = decrypt_callback,
  429. };
  430. return lrw_crypt(desc, dst, src, nbytes, &req);
  431. }
  432. static void lrw_exit_tfm(struct crypto_tfm *tfm)
  433. {
  434. struct twofish_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
  435. lrw_free_table(&ctx->lrw_table);
  436. }
/* LRW mode descriptor: key sizes include the extra block-sized tweak key. */
static struct crypto_alg blk_lrw_alg = {
	.cra_name = "lrw(twofish)",
	.cra_driver_name = "lrw-twofish-3way",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_lrw_alg.cra_list),
	.cra_exit = lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE + TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE + TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = lrw_twofish_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
};
/* Per-tfm context for xts(twofish): separate tweak and data key schedules. */
struct twofish_xts_ctx {
	struct twofish_ctx tweak_ctx;
	struct twofish_ctx crypt_ctx;
};
  464. static int xts_twofish_setkey(struct crypto_tfm *tfm, const u8 *key,
  465. unsigned int keylen)
  466. {
  467. struct twofish_xts_ctx *ctx = crypto_tfm_ctx(tfm);
  468. u32 *flags = &tfm->crt_flags;
  469. int err;
  470. /* key consists of keys of equal size concatenated, therefore
  471. * the length must be even
  472. */
  473. if (keylen % 2) {
  474. *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
  475. return -EINVAL;
  476. }
  477. /* first half of xts-key is for crypt */
  478. err = __twofish_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
  479. if (err)
  480. return err;
  481. /* second half of xts-key is for tweak */
  482. return __twofish_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
  483. flags);
  484. }
  485. static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  486. struct scatterlist *src, unsigned int nbytes)
  487. {
  488. struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  489. be128 buf[3];
  490. struct xts_crypt_req req = {
  491. .tbuf = buf,
  492. .tbuflen = sizeof(buf),
  493. .tweak_ctx = &ctx->tweak_ctx,
  494. .tweak_fn = XTS_TWEAK_CAST(twofish_enc_blk),
  495. .crypt_ctx = &ctx->crypt_ctx,
  496. .crypt_fn = encrypt_callback,
  497. };
  498. return xts_crypt(desc, dst, src, nbytes, &req);
  499. }
  500. static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  501. struct scatterlist *src, unsigned int nbytes)
  502. {
  503. struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  504. be128 buf[3];
  505. struct xts_crypt_req req = {
  506. .tbuf = buf,
  507. .tbuflen = sizeof(buf),
  508. .tweak_ctx = &ctx->tweak_ctx,
  509. .tweak_fn = XTS_TWEAK_CAST(twofish_enc_blk),
  510. .crypt_ctx = &ctx->crypt_ctx,
  511. .crypt_fn = decrypt_callback,
  512. };
  513. return xts_crypt(desc, dst, src, nbytes, &req);
  514. }
/* XTS mode descriptor: key sizes doubled (data key + tweak key). */
static struct crypto_alg blk_xts_alg = {
	.cra_name = "xts(twofish)",
	.cra_driver_name = "xts-twofish-3way",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(blk_xts_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = xts_twofish_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
};
  537. static bool is_blacklisted_cpu(void)
  538. {
  539. if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
  540. return false;
  541. if (boot_cpu_data.x86 == 0x06 &&
  542. (boot_cpu_data.x86_model == 0x1c ||
  543. boot_cpu_data.x86_model == 0x26 ||
  544. boot_cpu_data.x86_model == 0x36)) {
  545. /*
  546. * On Atom, twofish-3way is slower than original assembler
  547. * implementation. Twofish-3way trades off some performance in
  548. * storing blocks in 64bit registers to allow three blocks to
  549. * be processed parallel. Parallel operation then allows gaining
  550. * more performance than was trade off, on out-of-order CPUs.
  551. * However Atom does not benefit from this parallellism and
  552. * should be blacklisted.
  553. */
  554. return true;
  555. }
  556. if (boot_cpu_data.x86 == 0x0f) {
  557. /*
  558. * On Pentium 4, twofish-3way is slower than original assembler
  559. * implementation because excessive uses of 64bit rotate and
  560. * left-shifts (which are really slow on P4) needed to store and
  561. * handle 128bit block in two 64bit registers.
  562. */
  563. return true;
  564. }
  565. return false;
  566. }
/* Module parameter: set force=1 to skip the CPU blacklist check in init. */
static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");
  570. int __init init(void)
  571. {
  572. int err;
  573. if (!force && is_blacklisted_cpu()) {
  574. printk(KERN_INFO
  575. "twofish-x86_64-3way: performance on this CPU "
  576. "would be suboptimal: disabling "
  577. "twofish-x86_64-3way.\n");
  578. return -ENODEV;
  579. }
  580. err = crypto_register_alg(&blk_ecb_alg);
  581. if (err)
  582. goto ecb_err;
  583. err = crypto_register_alg(&blk_cbc_alg);
  584. if (err)
  585. goto cbc_err;
  586. err = crypto_register_alg(&blk_ctr_alg);
  587. if (err)
  588. goto ctr_err;
  589. err = crypto_register_alg(&blk_lrw_alg);
  590. if (err)
  591. goto blk_lrw_err;
  592. err = crypto_register_alg(&blk_xts_alg);
  593. if (err)
  594. goto blk_xts_err;
  595. return 0;
  596. crypto_unregister_alg(&blk_xts_alg);
  597. blk_xts_err:
  598. crypto_unregister_alg(&blk_lrw_alg);
  599. blk_lrw_err:
  600. crypto_unregister_alg(&blk_ctr_alg);
  601. ctr_err:
  602. crypto_unregister_alg(&blk_cbc_alg);
  603. cbc_err:
  604. crypto_unregister_alg(&blk_ecb_alg);
  605. ecb_err:
  606. return err;
  607. }
  608. void __exit fini(void)
  609. {
  610. crypto_unregister_alg(&blk_xts_alg);
  611. crypto_unregister_alg(&blk_lrw_alg);
  612. crypto_unregister_alg(&blk_ctr_alg);
  613. crypto_unregister_alg(&blk_cbc_alg);
  614. crypto_unregister_alg(&blk_ecb_alg);
  615. }
/* Module entry points and metadata. */
module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Twofish Cipher Algorithm, 3-way parallel asm optimized");
MODULE_ALIAS("twofish");
MODULE_ALIAS("twofish-asm");