aes_s390.c

/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_MIN_KEY_SIZE	16
#define AES_MAX_KEY_SIZE	32

/* data block size for all key lengths */
#define AES_BLOCK_SIZE		16
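
/*
 * Bit flags describing which AES key lengths the CPACF hardware
 * supports; aes_init() probes the machine and ORs these into
 * keylen_flag below.
 */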
#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static char keylen_flag = 0;
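
/*
 * Per-transform context: the CBC chaining value, the raw key, the
 * CPACF function codes selected at setkey time (enc/dec) and the key
 * length in bytes. The iv field sits directly in front of the key on
 * purpose; see cbc_aes_crypt() below.
 */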
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
};
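
/*
 * Accept a key only if the probed hardware supports its length;
 * otherwise report a bad key length so that callers can fall back to
 * a software implementation (the algorithms below are registered with
 * CRYPTO_ALG_NEED_FALLBACK for this reason).
 */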
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			goto fail;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			goto fail;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			goto fail;
		break;
	default:
		goto fail;
	}

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
fail:
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}
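
/*
 * Single-block encryption: dispatch to the KM (cipher message)
 * instruction with the function code matching the key length.
 */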
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
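
/* Single-block decryption, symmetric to aes_encrypt() above. */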
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
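
/*
 * Single-block cipher registration. CRYPTO_ALG_NEED_FALLBACK marks
 * this implementation as one that may need a software "aes" fallback
 * for key sizes the hardware cannot handle.
 */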
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};
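
/*
 * ECB mode: remember the KM function codes for this key length, then
 * reuse aes_set_key() for validation and key storage.
 */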
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
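
/*
 * Walk the scatterlists and hand each contiguous chunk to KM, which
 * processes any multiple of the block size in a single invocation; a
 * trailing partial block is left for blkcipher_walk_done() to report.
 */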
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
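
/*
 * The encrypt/decrypt entry points differ only in which KM function
 * code (sctx->enc or sctx->dec) they pass down.
 */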
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
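
/*
 * ECB blkcipher registration; the composite priority is meant to rank
 * this ahead of the generic ecb() template wrapped around a cipher.
 */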
static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};
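
/*
 * CBC mode uses KMC (cipher message with chaining), whose parameter
 * block holds the chaining value followed by the key.
 */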
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
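
/*
 * The callers pass sctx->iv as the KMC parameter block: because iv
 * directly precedes key in struct s390_aes_ctx, the hardware sees the
 * required "chaining value || key" layout. Copy the walk's IV in
 * before the loop, let KMC update the chaining value across chunks,
 * and copy the final chaining value back so consecutive calls chain
 * correctly.
 */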
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};
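
/*
 * Probe which AES key lengths the CPACF instructions support, clamp
 * the advertised maximum key size if only AES-128 is available (as on
 * z9 hardware), and register the three algorithms, unwinding on error.
 */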
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128) {
		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128 bit keys\n");
	}

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_ALIAS("aes");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");
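
/*
 * Usage sketch (illustrative only, not part of this driver): once the
 * module is loaded, "cbc(aes)" resolves to cbc-aes-s390 through the
 * normal kernel crypto API. A minimal synchronous caller, assuming
 * the blkcipher interface this file is written against and
 * caller-provided key/iv/buf variables, might look like:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_blkcipher_setkey(tfm, key, 16);		// 16/24/32 byte key
 *	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, buf, len);	// len: multiple of AES_BLOCK_SIZE
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 *	crypto_free_blkcipher(tfm);
 */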