aes_s390.c

/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *              Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128  1
#define AES_KEYLEN_192  2
#define AES_KEYLEN_256  4
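/*
 * ctrblk is a page-sized scratch buffer of consecutive counter blocks,
 * shared by all CTR transforms and allocated at module init.
 * keylen_flag is a bit mask of the AES key lengths supported by the
 * hardware, probed at module init.
 */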
static u8 *ctrblk;
static char keylen_flag;

struct s390_aes_ctx {
        u8 key[AES_MAX_KEY_SIZE];
        long enc;
        long dec;
        int key_len;
        union {
                struct crypto_blkcipher *blk;
                struct crypto_cipher *cip;
        } fallback;
};
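/*
 * Parameter block for the PCC (perform cryptographic computation)
 * operation that derives the initial XTS tweak; field order and sizes
 * follow the layout expected by the hardware.
 */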
struct pcc_param {
        u8 key[32];
        u8 tweak[16];
        u8 block[16];
        u8 bit[16];
        u8 xts[16];
};

struct s390_xts_ctx {
        u8 key[32];
        u8 xts_param[16];
        struct pcc_param pcc;
        long enc;
        long dec;
        int key_len;
        struct crypto_blkcipher *fallback;
};
/*
 * Check if the key length is supported by the hardware.
 * Returns 0 if it is, a positive number if it is not and the software
 * fallback is required, or a negative number if the key size is not
 * valid at all.
 */
static int need_fallback(unsigned int key_len)
{
        switch (key_len) {
        case 16:
                if (!(keylen_flag & AES_KEYLEN_128))
                        return 1;
                break;
        case 24:
                if (!(keylen_flag & AES_KEYLEN_192))
                        return 1;
                break;
        case 32:
                if (!(keylen_flag & AES_KEYLEN_256))
                        return 1;
                break;
        default:
                return -1;
        }
        return 0;
}
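/*
 * Hand the key to the software fallback cipher.  The request flags of the
 * wrapping tfm are forwarded to the fallback, and any result flags set by
 * the fallback's setkey are copied back for the caller.
 */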
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int ret;

        ret = need_fallback(key_len);
        if (ret < 0) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        sctx->key_len = key_len;
        if (!ret) {
                memcpy(sctx->key, in_key, key_len);
                return 0;
        }

        return setkey_fallback_cip(tfm, in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}
static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.cip)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(sctx->fallback.cip);
        sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-s390",
        .cra_priority           = CRYPT_S390_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_module             = THIS_MODULE,
        .cra_init               = fallback_init_cip,
        .cra_exit               = fallback_exit_cip,
        .cra_u                  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt,
                }
        }
};
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
                unsigned int len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int fallback_blk_dec(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KM_AES_128_ENCRYPT;
                sctx->dec = KM_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KM_AES_192_ENCRYPT;
                sctx->dec = KM_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KM_AES_256_ENCRYPT;
                sctx->dec = KM_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
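/*
 * Walk the scatterlists and feed only complete AES blocks to the KM
 * operation.  KM reports the number of bytes it processed; anything other
 * than the requested length is treated as an error.
 */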
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_km(func, param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.blk)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.blk);
        }

        return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(sctx->fallback.blk);
        sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = fallback_init_blk,
        .cra_exit               = fallback_exit_blk,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ecb_aes_set_key,
                        .encrypt        = ecb_aes_encrypt,
                        .decrypt        = ecb_aes_decrypt,
                }
        }
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KMC_AES_128_ENCRYPT;
                sctx->dec = KMC_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMC_AES_192_ENCRYPT;
                sctx->dec = KMC_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMC_AES_256_ENCRYPT;
                sctx->dec = KMC_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
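/*
 * The KMC parameter block is the chaining value (IV) followed by the key.
 * The hardware updates the chaining value in place, so the IV is copied
 * back into the walk once all blocks have been processed.
 */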
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct blkcipher_walk *walk)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[AES_MAX_KEY_SIZE];
        } param;

        if (!nbytes)
                goto out;

        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
        memcpy(param.key, sctx->key, sctx->key_len);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_kmc(func, &param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
        return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, &walk);
}
static struct crypto_alg cbc_aes_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = fallback_init_blk,
        .cra_exit               = fallback_exit_blk,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = cbc_aes_set_key,
                        .encrypt        = cbc_aes_encrypt,
                        .decrypt        = cbc_aes_decrypt,
                }
        }
};
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct crypto_blkcipher *tfm;
        unsigned int ret;

        tfm = desc->tfm;
        desc->tfm = xts_ctx->fallback;

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct crypto_blkcipher *tfm;
        unsigned int ret;

        tfm = desc->tfm;
        desc->tfm = xts_ctx->fallback;

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}
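/*
 * XTS keys are twice the AES key size: the first half is the data
 * (encryption) key, the second half is the tweak key handed to PCC.
 * 2 x 192-bit keys are not supported by the hardware, so that case is
 * routed to the software fallback.
 */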
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        switch (key_len) {
        case 32:
                xts_ctx->enc = KM_XTS_128_ENCRYPT;
                xts_ctx->dec = KM_XTS_128_DECRYPT;
                memcpy(xts_ctx->key + 16, in_key, 16);
                memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
                break;
        case 48:
                xts_ctx->enc = 0;
                xts_ctx->dec = 0;
                xts_fallback_setkey(tfm, in_key, key_len);
                break;
        case 64:
                xts_ctx->enc = KM_XTS_256_ENCRYPT;
                xts_ctx->dec = KM_XTS_256_DECRYPT;
                memcpy(xts_ctx->key, in_key, 32);
                memcpy(xts_ctx->pcc.key, in_key + 32, 32);
                break;
        default:
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        xts_ctx->key_len = key_len;
        return 0;
}
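/*
 * offset is 0 for a 512-bit XTS key and 16 for a 256-bit XTS key, whose
 * halves are stored right-aligned in the 32-byte key buffers.  PCC
 * derives the initial tweak from the IV; the result is copied into
 * xts_param, which directly follows key in the context so that
 * key + offset forms the complete KM parameter block.
 */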
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_xts_ctx *xts_ctx,
                         struct blkcipher_walk *walk)
{
        unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        unsigned int n;
        u8 *in, *out;
        void *param;

        if (!nbytes)
                goto out;

        memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
        memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
        memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
        memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
        param = xts_ctx->pcc.key + offset;
        ret = crypt_s390_pcc(func, param);
        if (ret < 0)
                return -EIO;

        memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
        param = xts_ctx->key + offset;
        do {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;

                ret = crypt_s390_km(func, param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
out:
        return ret;
}
static int xts_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_encrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_decrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(xts_ctx->fallback)) {
                pr_err("Allocating XTS fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(xts_ctx->fallback);
        }
        return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(xts_ctx->fallback);
        xts_ctx->fallback = NULL;
}
static struct crypto_alg xts_aes_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_xts_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = xts_fallback_init,
        .cra_exit               = xts_fallback_exit,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = xts_aes_set_key,
                        .encrypt        = xts_aes_encrypt,
                        .decrypt        = xts_aes_decrypt,
                }
        }
};
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        switch (key_len) {
        case 16:
                sctx->enc = KMCTR_AES_128_ENCRYPT;
                sctx->dec = KMCTR_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMCTR_AES_192_ENCRYPT;
                sctx->dec = KMCTR_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMCTR_AES_256_ENCRYPT;
                sctx->dec = KMCTR_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
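/*
 * The shared ctrblk page is filled with consecutive counter values so
 * that KMCTR can process up to PAGE_SIZE bytes per invocation.  A final
 * partial block is encrypted into a stack buffer and only the needed
 * bytes are copied out.
 */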
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
        unsigned int i, n, nbytes;
        u8 buf[AES_BLOCK_SIZE];
        u8 *out, *in;

        if (!walk->nbytes)
                return ret;

        memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= AES_BLOCK_SIZE) {
                        /* only use complete blocks, max. PAGE_SIZE */
                        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
                                                   nbytes & ~(AES_BLOCK_SIZE - 1);
                        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                                memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                                crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
                        }
                        ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
                        if (ret < 0 || ret != n)
                                return -EIO;
                        if (n > AES_BLOCK_SIZE)
                                memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(ctrblk, AES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                ret = crypt_s390_kmctr(func, sctx->key, buf, in,
                                       AES_BLOCK_SIZE, ctrblk);
                if (ret < 0 || ret != AES_BLOCK_SIZE)
                        return -EIO;
                memcpy(out, buf, nbytes);
                crypto_inc(ctrblk, AES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
        }
        memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
        return ret;
}
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ctr_aes_set_key,
                        .encrypt        = ctr_aes_encrypt,
                        .decrypt        = ctr_aes_decrypt,
                }
        }
};
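/*
 * Probe the available KM/KMC/KMCTR functions at module load.  The basic
 * cipher, ECB and CBC algorithms fall back to software for unsupported
 * key lengths; XTS and CTR are only registered when the hardware provides
 * them, and CTR additionally needs the shared counter block page.
 */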
static int __init aes_s390_init(void)
{
        int ret;

        if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_128;
        if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_192;
        if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_256;

        if (!keylen_flag)
                return -EOPNOTSUPP;

        /* z9 109 and z9 BC/EC only support 128 bit key length */
        if (keylen_flag == AES_KEYLEN_128)
                pr_info("AES hardware acceleration is only available for"
                        " 128-bit keys\n");

        ret = crypto_register_alg(&aes_alg);
        if (ret)
                goto aes_err;

        ret = crypto_register_alg(&ecb_aes_alg);
        if (ret)
                goto ecb_aes_err;

        ret = crypto_register_alg(&cbc_aes_alg);
        if (ret)
                goto cbc_aes_err;

        if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KM_XTS_256_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ret = crypto_register_alg(&xts_aes_alg);
                if (ret)
                        goto xts_aes_err;
        }

        if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto ctr_aes_err;
                }
                ret = crypto_register_alg(&ctr_aes_alg);
                if (ret) {
                        free_page((unsigned long) ctrblk);
                        goto ctr_aes_err;
                }
        }

out:
        return ret;

ctr_aes_err:
        crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
        crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
        crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
        crypto_unregister_alg(&aes_alg);
aes_err:
        goto out;
}
static void __exit aes_s390_fini(void)
{
        crypto_unregister_alg(&ctr_aes_alg);
        free_page((unsigned long) ctrblk);
        crypto_unregister_alg(&xts_aes_alg);
        crypto_unregister_alg(&cbc_aes_alg);
        crypto_unregister_alg(&ecb_aes_alg);
        crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");