/* nx-aes-gcm.c */
  1. /**
  2. * AES GCM routines supporting the Power 7+ Nest Accelerators driver
  3. *
  4. * Copyright (C) 2012 International Business Machines Inc.
  5. *
  6. * This program is free software; you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation; version 2 only.
  9. *
  10. * This program is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. * GNU General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU General Public License
  16. * along with this program; if not, write to the Free Software
  17. * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  18. *
  19. * Author: Kent Yoder <yoder1@us.ibm.com>
  20. */
  21. #include <crypto/internal/aead.h>
  22. #include <crypto/aes.h>
  23. #include <crypto/algapi.h>
  24. #include <crypto/scatterwalk.h>
  25. #include <linux/module.h>
  26. #include <linux/types.h>
  27. #include <linux/crypto.h>
  28. #include <asm/vio.h>
  29. #include "nx_csbcpb.h"
  30. #include "nx.h"
  31. static int gcm_aes_nx_set_key(struct crypto_aead *tfm,
  32. const u8 *in_key,
  33. unsigned int key_len)
  34. {
  35. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
  36. struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
  37. struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
  38. nx_ctx_init(nx_ctx, HCOP_FC_AES);
  39. switch (key_len) {
  40. case AES_KEYSIZE_128:
  41. NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
  42. NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_128);
  43. nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
  44. break;
  45. case AES_KEYSIZE_192:
  46. NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
  47. NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_192);
  48. nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
  49. break;
  50. case AES_KEYSIZE_256:
  51. NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
  52. NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_256);
  53. nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
  54. break;
  55. default:
  56. return -EINVAL;
  57. }
  58. csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;
  59. memcpy(csbcpb->cpb.aes_gcm.key, in_key, key_len);
  60. csbcpb_aead->cpb.hdr.mode = NX_MODE_AES_GCA;
  61. memcpy(csbcpb_aead->cpb.aes_gca.key, in_key, key_len);
  62. return 0;
  63. }
  64. static int gcm4106_aes_nx_set_key(struct crypto_aead *tfm,
  65. const u8 *in_key,
  66. unsigned int key_len)
  67. {
  68. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
  69. char *nonce = nx_ctx->priv.gcm.nonce;
  70. int rc;
  71. if (key_len < 4)
  72. return -EINVAL;
  73. key_len -= 4;
  74. rc = gcm_aes_nx_set_key(tfm, in_key, key_len);
  75. if (rc)
  76. goto out;
  77. memcpy(nonce, in_key + key_len, 4);
  78. out:
  79. return rc;
  80. }
  81. static int gcm_aes_nx_setauthsize(struct crypto_aead *tfm,
  82. unsigned int authsize)
  83. {
  84. if (authsize > crypto_aead_alg(tfm)->maxauthsize)
  85. return -EINVAL;
  86. crypto_aead_crt(tfm)->authsize = authsize;
  87. return 0;
  88. }
  89. static int gcm4106_aes_nx_setauthsize(struct crypto_aead *tfm,
  90. unsigned int authsize)
  91. {
  92. switch (authsize) {
  93. case 8:
  94. case 12:
  95. case 16:
  96. break;
  97. default:
  98. return -EINVAL;
  99. }
  100. crypto_aead_crt(tfm)->authsize = authsize;
  101. return 0;
  102. }
/*
 * nx_gca - process the associated data (AAD) for a GCM operation
 * @nx_ctx: driver context holding the GCA co-processor block
 * @req:    AEAD request whose req->assoc scatterlist supplies the AAD
 * @out:    AES_BLOCK_SIZE buffer receiving the result
 *
 * For AAD of at most one AES block the data is copied straight into
 * @out; otherwise the AAD is handed to the accelerator via an hcall and
 * the resulting pattern is copied from the GCA csbcpb into @out.
 *
 * Returns 0 on success, -EINVAL if the AAD exceeds the accelerator's
 * per-operation limit, or the error from nx_hcall_sync().
 */
static int nx_gca(struct nx_crypto_ctx  *nx_ctx,
		  struct aead_request   *req,
		  u8                    *out)
{
	struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
	int rc = -EINVAL;
	struct scatter_walk walk;
	struct nx_sg *nx_sg = nx_ctx->in_sg;

	/* the accelerator can only take a bounded amount of data per call */
	if (req->assoclen > nx_ctx->ap->databytelen)
		goto out;

	/* short AAD (<= one AES block) is passed along verbatim; the
	 * hardware consumes it directly from the in_pat_or_aad field */
	if (req->assoclen <= AES_BLOCK_SIZE) {
		scatterwalk_start(&walk, req->assoc);
		scatterwalk_copychunks(out, &walk, req->assoclen,
				       SCATTERWALK_FROM_SG);
		scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);

		rc = 0;
		goto out;
	}

	/* build the driver's scatter/gather list over the AAD */
	nx_sg = nx_walk_and_build(nx_sg, nx_ctx->ap->sglen, req->assoc, 0,
				  req->assoclen);
	/* NOTE(review): (start - end) is deliberately negative; a negative
	 * length tells the hypervisor the parameter is a scatter/gather
	 * list, not a linear buffer — confirm against nx_build_sg_lists()
	 * in nx.c, which documents the same convention. */
	nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) * sizeof(struct nx_sg);

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead,
			   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
	if (rc)
		goto out;

	/* bookkeeping for the driver's statistics counters */
	atomic_inc(&(nx_ctx->stats->aes_ops));
	atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes));

	/* the accelerator leaves its result in the GCA out_pat field */
	memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE);
out:
	return rc;
}
  134. static int gcm_aes_nx_crypt(struct aead_request *req, int enc)
  135. {
  136. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  137. struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
  138. struct blkcipher_desc desc;
  139. unsigned int nbytes = req->cryptlen;
  140. int rc = -EINVAL;
  141. if (nbytes > nx_ctx->ap->databytelen)
  142. goto out;
  143. desc.info = nx_ctx->priv.gcm.iv;
  144. /* initialize the counter */
  145. *(u32 *)(desc.info + NX_GCM_CTR_OFFSET) = 1;
  146. /* For scenarios where the input message is zero length, AES CTR mode
  147. * may be used. Set the source data to be a single block (16B) of all
  148. * zeros, and set the input IV value to be the same as the GMAC IV
  149. * value. - nx_wb 4.8.1.3 */
  150. if (nbytes == 0) {
  151. char src[AES_BLOCK_SIZE] = {};
  152. struct scatterlist sg;
  153. desc.tfm = crypto_alloc_blkcipher("ctr(aes)", 0, 0);
  154. if (IS_ERR(desc.tfm)) {
  155. rc = -ENOMEM;
  156. goto out;
  157. }
  158. crypto_blkcipher_setkey(desc.tfm, csbcpb->cpb.aes_gcm.key,
  159. NX_CPB_KEY_SIZE(csbcpb) == NX_KS_AES_128 ? 16 :
  160. NX_CPB_KEY_SIZE(csbcpb) == NX_KS_AES_192 ? 24 : 32);
  161. sg_init_one(&sg, src, AES_BLOCK_SIZE);
  162. if (enc)
  163. crypto_blkcipher_encrypt_iv(&desc, req->dst, &sg,
  164. AES_BLOCK_SIZE);
  165. else
  166. crypto_blkcipher_decrypt_iv(&desc, req->dst, &sg,
  167. AES_BLOCK_SIZE);
  168. crypto_free_blkcipher(desc.tfm);
  169. rc = 0;
  170. goto out;
  171. }
  172. desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
  173. csbcpb->cpb.aes_gcm.bit_length_aad = req->assoclen * 8;
  174. if (req->assoclen) {
  175. rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad);
  176. if (rc)
  177. goto out;
  178. }
  179. if (enc)
  180. NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
  181. else
  182. nbytes -= AES_BLOCK_SIZE;
  183. csbcpb->cpb.aes_gcm.bit_length_data = nbytes * 8;
  184. rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, req->src, nbytes,
  185. csbcpb->cpb.aes_gcm.iv_or_cnt);
  186. if (rc)
  187. goto out;
  188. rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
  189. req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
  190. if (rc)
  191. goto out;
  192. atomic_inc(&(nx_ctx->stats->aes_ops));
  193. atomic64_add(csbcpb->csb.processed_byte_count,
  194. &(nx_ctx->stats->aes_bytes));
  195. if (enc) {
  196. /* copy out the auth tag */
  197. scatterwalk_map_and_copy(csbcpb->cpb.aes_gcm.out_pat_or_mac,
  198. req->dst, nbytes,
  199. crypto_aead_authsize(crypto_aead_reqtfm(req)),
  200. SCATTERWALK_TO_SG);
  201. } else if (req->assoclen) {
  202. u8 *itag = nx_ctx->priv.gcm.iauth_tag;
  203. u8 *otag = csbcpb->cpb.aes_gcm.out_pat_or_mac;
  204. scatterwalk_map_and_copy(itag, req->dst, nbytes,
  205. crypto_aead_authsize(crypto_aead_reqtfm(req)),
  206. SCATTERWALK_FROM_SG);
  207. rc = memcmp(itag, otag,
  208. crypto_aead_authsize(crypto_aead_reqtfm(req))) ?
  209. -EBADMSG : 0;
  210. }
  211. out:
  212. return rc;
  213. }
  214. static int gcm_aes_nx_encrypt(struct aead_request *req)
  215. {
  216. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  217. char *iv = nx_ctx->priv.gcm.iv;
  218. memcpy(iv, req->iv, 12);
  219. return gcm_aes_nx_crypt(req, 1);
  220. }
  221. static int gcm_aes_nx_decrypt(struct aead_request *req)
  222. {
  223. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  224. char *iv = nx_ctx->priv.gcm.iv;
  225. memcpy(iv, req->iv, 12);
  226. return gcm_aes_nx_crypt(req, 0);
  227. }
  228. static int gcm4106_aes_nx_encrypt(struct aead_request *req)
  229. {
  230. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  231. char *iv = nx_ctx->priv.gcm.iv;
  232. char *nonce = nx_ctx->priv.gcm.nonce;
  233. memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
  234. memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);
  235. return gcm_aes_nx_crypt(req, 1);
  236. }
  237. static int gcm4106_aes_nx_decrypt(struct aead_request *req)
  238. {
  239. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  240. char *iv = nx_ctx->priv.gcm.iv;
  241. char *nonce = nx_ctx->priv.gcm.nonce;
  242. memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
  243. memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);
  244. return gcm_aes_nx_crypt(req, 0);
  245. }
/* tell the block cipher walk routines that this is a stream cipher by
 * setting cra_blocksize to 1. Even using blkcipher_walk_virt_block
 * during encrypt/decrypt doesn't solve this problem, because it calls
 * blkcipher_walk_done under the covers, which doesn't use walk->blocksize,
 * but instead uses this tfm->blocksize. */
/* gcm(aes) algorithm registration backed by the NX accelerator */
struct crypto_alg nx_gcm_aes_alg = {
	.cra_name        = "gcm(aes)",
	.cra_driver_name = "gcm-aes-nx",
	.cra_priority    = 300,		/* prefer hardware over software gcm */
	.cra_flags       = CRYPTO_ALG_TYPE_AEAD,
	.cra_blocksize   = 1,		/* see comment above */
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_aead_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_gcm_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_aead = {
		.ivsize      = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.setkey      = gcm_aes_nx_set_key,
		.setauthsize = gcm_aes_nx_setauthsize,
		.encrypt     = gcm_aes_nx_encrypt,
		.decrypt     = gcm_aes_nx_decrypt,
	}
};
/* rfc4106(gcm(aes)) registration: GCM with a 4-byte keyed salt and an
 * 8-byte explicit IV, IVs generated via the "seqiv" template */
struct crypto_alg nx_gcm4106_aes_alg = {
	.cra_name        = "rfc4106(gcm(aes))",
	.cra_driver_name = "rfc4106-gcm-aes-nx",
	.cra_priority    = 300,		/* prefer hardware over software gcm */
	.cra_flags       = CRYPTO_ALG_TYPE_AEAD,
	.cra_blocksize   = 1,		/* stream-cipher-style walking */
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_nivaead_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_gcm_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_aead = {
		.ivsize      = 8,	/* explicit per-request IV only */
		.maxauthsize = AES_BLOCK_SIZE,
		.geniv       = "seqiv",
		.setkey      = gcm4106_aes_nx_set_key,
		.setauthsize = gcm4106_aes_nx_setauthsize,
		.encrypt     = gcm4106_aes_nx_encrypt,
		.decrypt     = gcm4106_aes_nx_decrypt,
	}
};