aes_s390.c

/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *              Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128          1
#define AES_KEYLEN_192          2
#define AES_KEYLEN_256          4

static u8 *ctrblk;
static char keylen_flag;

struct s390_aes_ctx {
        u8 key[AES_MAX_KEY_SIZE];
        long enc;
        long dec;
        int key_len;
        union {
                struct crypto_blkcipher *blk;
                struct crypto_cipher *cip;
        } fallback;
};

struct pcc_param {
        u8 key[32];
        u8 tweak[16];
        u8 block[16];
        u8 bit[16];
        u8 xts[16];
};

struct s390_xts_ctx {
        u8 key[32];
        u8 pcc_key[32];
        long enc;
        long dec;
        int key_len;
        struct crypto_blkcipher *fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback
 * is required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
        switch (key_len) {
        case 16:
                if (!(keylen_flag & AES_KEYLEN_128))
                        return 1;
                break;
        case 24:
                if (!(keylen_flag & AES_KEYLEN_192))
                        return 1;
                break;
        case 32:
                if (!(keylen_flag & AES_KEYLEN_256))
                        return 1;
                break;
        default:
                return -1;
        }
        return 0;
}
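
/*
 * Key setup for the software fallback cipher: forward the request flags
 * from the wrapping tfm to the fallback tfm, set the key there, and copy
 * any result flags back so callers see the usual crypto API semantics.
 */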
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int ret;

        ret = need_fallback(key_len);
        if (ret < 0) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        sctx->key_len = key_len;
        if (!ret) {
                memcpy(sctx->key, in_key, key_len);
                return 0;
        }

        return setkey_fallback_cip(tfm, in_key, key_len);
}
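
/*
 * Single-block encrypt/decrypt: hand the block to the KM (cipher message)
 * instruction with the function code matching the key length, or use the
 * allocated software fallback cipher when the machine lacks support for
 * this key size.
 */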
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.cip)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(sctx->fallback.cip);
        sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-s390",
        .cra_priority           = CRYPT_S390_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_module             = THIS_MODULE,
        .cra_init               = fallback_init_cip,
        .cra_exit               = fallback_exit_cip,
        .cra_u                  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt,
                }
        }
};
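
/*
 * Blkcipher fallback helpers: setkey mirrors the request/result flag
 * handling of the cipher fallback above, while fallback_blk_enc/dec
 * temporarily swap desc->tfm to the fallback transform, run the generic
 * implementation with the caller's IV, and then restore the original tfm.
 */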
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
                unsigned int len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int fallback_blk_dec(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KM_AES_128_ENCRYPT;
                sctx->dec = KM_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KM_AES_192_ENCRYPT;
                sctx->dec = KM_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KM_AES_256_ENCRYPT;
                sctx->dec = KM_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
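
/*
 * ECB bulk path: walk the scatterlists and feed each run of complete
 * AES blocks to KM with the function code selected at setkey time.
 * KM reports the number of bytes processed; anything else is an error.
 */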
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_km(func, param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.blk)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.blk);
        }

        return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(sctx->fallback.blk);
        sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = fallback_init_blk,
        .cra_exit               = fallback_exit_blk,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ecb_aes_set_key,
                        .encrypt        = ecb_aes_encrypt,
                        .decrypt        = ecb_aes_decrypt,
                }
        }
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KMC_AES_128_ENCRYPT;
                sctx->dec = KMC_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMC_AES_192_ENCRYPT;
                sctx->dec = KMC_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMC_AES_256_ENCRYPT;
                sctx->dec = KMC_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
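
/*
 * CBC bulk path: KMC takes a parameter block of chaining value (IV)
 * followed by the key. The IV is copied in before the loop, updated by
 * the instruction as it chains blocks, and written back to walk->iv so
 * the caller can continue the operation across requests.
 */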
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct blkcipher_walk *walk)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[AES_MAX_KEY_SIZE];
        } param;

        if (!nbytes)
                goto out;

        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
        memcpy(param.key, sctx->key, sctx->key_len);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_kmc(func, &param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
        return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = fallback_init_blk,
        .cra_exit               = fallback_exit_blk,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = cbc_aes_set_key,
                        .encrypt        = cbc_aes_encrypt,
                        .decrypt        = cbc_aes_decrypt,
                }
        }
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct crypto_blkcipher *tfm;
        unsigned int ret;

        tfm = desc->tfm;
        desc->tfm = xts_ctx->fallback;

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct crypto_blkcipher *tfm;
        unsigned int ret;

        tfm = desc->tfm;
        desc->tfm = xts_ctx->fallback;

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        switch (key_len) {
        case 32:
                xts_ctx->enc = KM_XTS_128_ENCRYPT;
                xts_ctx->dec = KM_XTS_128_DECRYPT;
                memcpy(xts_ctx->key + 16, in_key, 16);
                memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
                break;
        case 48:
                xts_ctx->enc = 0;
                xts_ctx->dec = 0;
                xts_fallback_setkey(tfm, in_key, key_len);
                break;
        case 64:
                xts_ctx->enc = KM_XTS_256_ENCRYPT;
                xts_ctx->dec = KM_XTS_256_DECRYPT;
                memcpy(xts_ctx->key, in_key, 32);
                memcpy(xts_ctx->pcc_key, in_key + 32, 32);
                break;
        default:
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        xts_ctx->key_len = key_len;
        return 0;
}
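
/*
 * XTS bulk path: PCC first derives the initial XTS parameter from the
 * tweak key and the IV; the result seeds the KM parameter block used for
 * the data blocks. Keys are stored right-aligned in the 32-byte key
 * fields, so the computed offset is 16 for XTS-128 and 0 for XTS-256.
 * 48-byte (AES-192) XTS keys have no hardware support and were already
 * routed to the software fallback at setkey time.
 */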
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_xts_ctx *xts_ctx,
                         struct blkcipher_walk *walk)
{
        unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        unsigned int n;
        u8 *in, *out;
        struct pcc_param pcc_param;
        struct {
                u8 key[32];
                u8 init[16];
        } xts_param;

        if (!nbytes)
                goto out;

        memset(pcc_param.block, 0, sizeof(pcc_param.block));
        memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
        memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
        memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
        memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
        ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
        if (ret < 0)
                return -EIO;

        memcpy(xts_param.key, xts_ctx->key, 32);
        memcpy(xts_param.init, pcc_param.xts, 16);
        do {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;

                ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
out:
        return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_encrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_decrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(xts_ctx->fallback)) {
                pr_err("Allocating XTS fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(xts_ctx->fallback);
        }
        return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(xts_ctx->fallback);
        xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_xts_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = xts_fallback_init,
        .cra_exit               = xts_fallback_exit,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = xts_aes_set_key,
                        .encrypt        = xts_aes_encrypt,
                        .decrypt        = xts_aes_decrypt,
                }
        }
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        switch (key_len) {
        case 16:
                sctx->enc = KMCTR_AES_128_ENCRYPT;
                sctx->dec = KMCTR_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMCTR_AES_192_ENCRYPT;
                sctx->dec = KMCTR_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMCTR_AES_256_ENCRYPT;
                sctx->dec = KMCTR_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
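
/*
 * CTR bulk path: the shared ctrblk page is pre-filled with successive
 * counter values so that KMCTR can process up to PAGE_SIZE of data per
 * call. A trailing partial block is encrypted into a stack buffer and
 * only nbytes of it are copied out; the updated counter is written back
 * to walk->iv.
 */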
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
        unsigned int i, n, nbytes;
        u8 buf[AES_BLOCK_SIZE];
        u8 *out, *in;

        if (!walk->nbytes)
                return ret;

        memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= AES_BLOCK_SIZE) {
                        /* only use complete blocks, max. PAGE_SIZE */
                        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
                                        nbytes & ~(AES_BLOCK_SIZE - 1);
                        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                                memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                                crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
                        }
                        ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
                        if (ret < 0 || ret != n)
                                return -EIO;
                        if (n > AES_BLOCK_SIZE)
                                memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(ctrblk, AES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                ret = crypt_s390_kmctr(func, sctx->key, buf, in,
                                       AES_BLOCK_SIZE, ctrblk);
                if (ret < 0 || ret != AES_BLOCK_SIZE)
                        return -EIO;
                memcpy(out, buf, nbytes);
                crypto_inc(ctrblk, AES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
        }
        memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
        return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ctr_aes_set_key,
                        .encrypt        = ctr_aes_encrypt,
                        .decrypt        = ctr_aes_decrypt,
                }
        }
};

static int ctr_aes_alg_reg;
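
/*
 * Module init: probe which AES key lengths and modes the hardware
 * facilities provide, register the corresponding algorithms, and unwind
 * all earlier registrations on failure. XTS and CTR are only registered
 * when their KM/KMCTR function codes are available; CTR additionally
 * needs the shared counter-block page.
 */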
static int __init aes_s390_init(void)
{
        int ret;

        if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_128;
        if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_192;
        if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_256;

        if (!keylen_flag)
                return -EOPNOTSUPP;

        /* z9 109 and z9 BC/EC only support 128 bit key length */
        if (keylen_flag == AES_KEYLEN_128)
                pr_info("AES hardware acceleration is only available for"
                        " 128-bit keys\n");

        ret = crypto_register_alg(&aes_alg);
        if (ret)
                goto aes_err;

        ret = crypto_register_alg(&ecb_aes_alg);
        if (ret)
                goto ecb_aes_err;

        ret = crypto_register_alg(&cbc_aes_alg);
        if (ret)
                goto cbc_aes_err;

        if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KM_XTS_256_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ret = crypto_register_alg(&xts_aes_alg);
                if (ret)
                        goto xts_aes_err;
                xts_aes_alg_reg = 1;
        }

        if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto ctr_aes_err;
                }
                ret = crypto_register_alg(&ctr_aes_alg);
                if (ret) {
                        free_page((unsigned long) ctrblk);
                        goto ctr_aes_err;
                }
                ctr_aes_alg_reg = 1;
        }

out:
        return ret;

ctr_aes_err:
        crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
        crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
        crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
        crypto_unregister_alg(&aes_alg);
aes_err:
        goto out;
}

static void __exit aes_s390_fini(void)
{
        if (ctr_aes_alg_reg) {
                crypto_unregister_alg(&ctr_aes_alg);
                free_page((unsigned long) ctrblk);
        }
        if (xts_aes_alg_reg)
                crypto_unregister_alg(&xts_aes_alg);
        crypto_unregister_alg(&cbc_aes_alg);
        crypto_unregister_alg(&ecb_aes_alg);
        crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");