nx-aes-ccm.c 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466
  1. /**
  2. * AES CCM routines supporting the Power 7+ Nest Accelerators driver
  3. *
  4. * Copyright (C) 2012 International Business Machines Inc.
  5. *
  6. * This program is free software; you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation; version 2 only.
  9. *
  10. * This program is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. * GNU General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU General Public License
  16. * along with this program; if not, write to the Free Software
  17. * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  18. *
  19. * Author: Kent Yoder <yoder1@us.ibm.com>
  20. */
  21. #include <crypto/internal/aead.h>
  22. #include <crypto/aes.h>
  23. #include <crypto/algapi.h>
  24. #include <crypto/scatterwalk.h>
  25. #include <linux/module.h>
  26. #include <linux/types.h>
  27. #include <linux/crypto.h>
  28. #include <asm/vio.h>
  29. #include "nx_csbcpb.h"
  30. #include "nx.h"
  31. static int ccm_aes_nx_set_key(struct crypto_aead *tfm,
  32. const u8 *in_key,
  33. unsigned int key_len)
  34. {
  35. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
  36. struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
  37. struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
  38. nx_ctx_init(nx_ctx, HCOP_FC_AES);
  39. switch (key_len) {
  40. case AES_KEYSIZE_128:
  41. NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
  42. NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_128);
  43. nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
  44. break;
  45. default:
  46. return -EINVAL;
  47. }
  48. csbcpb->cpb.hdr.mode = NX_MODE_AES_CCM;
  49. memcpy(csbcpb->cpb.aes_ccm.key, in_key, key_len);
  50. csbcpb_aead->cpb.hdr.mode = NX_MODE_AES_CCA;
  51. memcpy(csbcpb_aead->cpb.aes_cca.key, in_key, key_len);
  52. return 0;
  53. }
  54. static int ccm4309_aes_nx_set_key(struct crypto_aead *tfm,
  55. const u8 *in_key,
  56. unsigned int key_len)
  57. {
  58. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
  59. if (key_len < 3)
  60. return -EINVAL;
  61. key_len -= 3;
  62. memcpy(nx_ctx->priv.ccm.nonce, in_key + key_len, 3);
  63. return ccm_aes_nx_set_key(tfm, in_key, key_len);
  64. }
  65. static int ccm_aes_nx_setauthsize(struct crypto_aead *tfm,
  66. unsigned int authsize)
  67. {
  68. switch (authsize) {
  69. case 4:
  70. case 6:
  71. case 8:
  72. case 10:
  73. case 12:
  74. case 14:
  75. case 16:
  76. break;
  77. default:
  78. return -EINVAL;
  79. }
  80. crypto_aead_crt(tfm)->authsize = authsize;
  81. return 0;
  82. }
  83. static int ccm4309_aes_nx_setauthsize(struct crypto_aead *tfm,
  84. unsigned int authsize)
  85. {
  86. switch (authsize) {
  87. case 8:
  88. case 12:
  89. case 16:
  90. break;
  91. default:
  92. return -EINVAL;
  93. }
  94. crypto_aead_crt(tfm)->authsize = authsize;
  95. return 0;
  96. }
  97. /* taken from crypto/ccm.c */
  98. static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  99. {
  100. __be32 data;
  101. memset(block, 0, csize);
  102. block += csize;
  103. if (csize >= 4)
  104. csize = 4;
  105. else if (msglen > (unsigned int)(1 << (8 * csize)))
  106. return -EOVERFLOW;
  107. data = cpu_to_be32(msglen);
  108. memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  109. return 0;
  110. }
  111. /* taken from crypto/ccm.c */
  112. static inline int crypto_ccm_check_iv(const u8 *iv)
  113. {
  114. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  115. if (1 > iv[0] || iv[0] > 7)
  116. return -EINVAL;
  117. return 0;
  118. }
  119. /* based on code from crypto/ccm.c */
  120. static int generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize,
  121. unsigned int cryptlen, u8 *b0)
  122. {
  123. unsigned int l, lp, m = authsize;
  124. int rc;
  125. memcpy(b0, iv, 16);
  126. lp = b0[0];
  127. l = lp + 1;
  128. /* set m, bits 3-5 */
  129. *b0 |= (8 * ((m - 2) / 2));
  130. /* set adata, bit 6, if associated data is used */
  131. if (assoclen)
  132. *b0 |= 64;
  133. rc = set_msg_len(b0 + 16 - l, cryptlen, l);
  134. return rc;
  135. }
  136. static int generate_pat(u8 *iv,
  137. struct aead_request *req,
  138. struct nx_crypto_ctx *nx_ctx,
  139. unsigned int authsize,
  140. unsigned int nbytes,
  141. u8 *out)
  142. {
  143. struct nx_sg *nx_insg = nx_ctx->in_sg;
  144. struct nx_sg *nx_outsg = nx_ctx->out_sg;
  145. unsigned int iauth_len = 0;
  146. struct vio_pfo_op *op = NULL;
  147. u8 tmp[16], *b1 = NULL, *b0 = NULL, *result = NULL;
  148. int rc;
  149. /* zero the ctr value */
  150. memset(iv + 15 - iv[0], 0, iv[0] + 1);
  151. if (!req->assoclen) {
  152. b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;
  153. } else if (req->assoclen <= 14) {
  154. /* if associated data is 14 bytes or less, we do 1 GCM
  155. * operation on 2 AES blocks, B0 (stored in the csbcpb) and B1,
  156. * which is fed in through the source buffers here */
  157. b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;
  158. b1 = nx_ctx->priv.ccm.iauth_tag;
  159. iauth_len = req->assoclen;
  160. nx_insg = nx_build_sg_list(nx_insg, b1, 16, nx_ctx->ap->sglen);
  161. nx_outsg = nx_build_sg_list(nx_outsg, tmp, 16,
  162. nx_ctx->ap->sglen);
  163. /* inlen should be negative, indicating to phyp that its a
  164. * pointer to an sg list */
  165. nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) *
  166. sizeof(struct nx_sg);
  167. nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) *
  168. sizeof(struct nx_sg);
  169. NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_ENDE_ENCRYPT;
  170. NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_INTERMEDIATE;
  171. op = &nx_ctx->op;
  172. result = nx_ctx->csbcpb->cpb.aes_ccm.out_pat_or_mac;
  173. } else if (req->assoclen <= 65280) {
  174. /* if associated data is less than (2^16 - 2^8), we construct
  175. * B1 differently and feed in the associated data to a CCA
  176. * operation */
  177. b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0;
  178. b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1;
  179. iauth_len = 14;
  180. /* remaining assoc data must have scatterlist built for it */
  181. nx_insg = nx_walk_and_build(nx_insg, nx_ctx->ap->sglen,
  182. req->assoc, iauth_len,
  183. req->assoclen - iauth_len);
  184. nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) *
  185. sizeof(struct nx_sg);
  186. op = &nx_ctx->op_aead;
  187. result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0;
  188. } else {
  189. /* if associated data is less than (2^32), we construct B1
  190. * differently yet again and feed in the associated data to a
  191. * CCA operation */
  192. pr_err("associated data len is %u bytes (returning -EINVAL)\n",
  193. req->assoclen);
  194. rc = -EINVAL;
  195. }
  196. rc = generate_b0(iv, req->assoclen, authsize, nbytes, b0);
  197. if (rc)
  198. goto done;
  199. if (b1) {
  200. memset(b1, 0, 16);
  201. *(u16 *)b1 = (u16)req->assoclen;
  202. scatterwalk_map_and_copy(b1 + 2, req->assoc, 0,
  203. iauth_len, SCATTERWALK_FROM_SG);
  204. rc = nx_hcall_sync(nx_ctx, op,
  205. req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
  206. if (rc)
  207. goto done;
  208. atomic_inc(&(nx_ctx->stats->aes_ops));
  209. atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes));
  210. memcpy(out, result, AES_BLOCK_SIZE);
  211. }
  212. done:
  213. return rc;
  214. }
  215. static int ccm_nx_decrypt(struct aead_request *req,
  216. struct blkcipher_desc *desc)
  217. {
  218. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  219. struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
  220. unsigned int nbytes = req->cryptlen;
  221. unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));
  222. struct nx_ccm_priv *priv = &nx_ctx->priv.ccm;
  223. int rc = -1;
  224. if (nbytes > nx_ctx->ap->databytelen)
  225. return -EINVAL;
  226. nbytes -= authsize;
  227. /* copy out the auth tag to compare with later */
  228. scatterwalk_map_and_copy(priv->oauth_tag,
  229. req->src, nbytes, authsize,
  230. SCATTERWALK_FROM_SG);
  231. rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes,
  232. csbcpb->cpb.aes_ccm.in_pat_or_b0);
  233. if (rc)
  234. goto out;
  235. rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, nbytes,
  236. csbcpb->cpb.aes_ccm.iv_or_ctr);
  237. if (rc)
  238. goto out;
  239. NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
  240. NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_INTERMEDIATE;
  241. rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
  242. req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
  243. if (rc)
  244. goto out;
  245. atomic_inc(&(nx_ctx->stats->aes_ops));
  246. atomic64_add(csbcpb->csb.processed_byte_count,
  247. &(nx_ctx->stats->aes_bytes));
  248. rc = memcmp(csbcpb->cpb.aes_ccm.out_pat_or_mac, priv->oauth_tag,
  249. authsize) ? -EBADMSG : 0;
  250. out:
  251. return rc;
  252. }
/*
 * Encrypt one CCM request: generate the PAT from B0/B1, run the
 * payload through the NX unit, then append the computed MAC to the
 * destination scatterlist.
 *
 * Returns 0 on success or a negative errno.
 */
static int ccm_nx_encrypt(struct aead_request   *req,
			  struct blkcipher_desc *desc)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned int nbytes = req->cryptlen;
	unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));
	int rc = -1;

	/* the NX unit only accepts up to ap->databytelen in one request */
	if (nbytes > nx_ctx->ap->databytelen)
		return -EINVAL;

	rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes,
			  csbcpb->cpb.aes_ccm.in_pat_or_b0);
	if (rc)
		goto out;

	rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, nbytes,
			       csbcpb->cpb.aes_ccm.iv_or_ctr);
	if (rc)
		goto out;

	/* single-shot encrypt: set ENDE, clear INTERMEDIATE */
	NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
			   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
	if (rc)
		goto out;

	atomic_inc(&(nx_ctx->stats->aes_ops));
	atomic64_add(csbcpb->csb.processed_byte_count,
		     &(nx_ctx->stats->aes_bytes));

	/* copy out the auth tag */
	scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac,
				 req->dst, nbytes, authsize,
				 SCATTERWALK_TO_SG);
out:
	return rc;
}
  287. static int ccm4309_aes_nx_encrypt(struct aead_request *req)
  288. {
  289. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  290. struct blkcipher_desc desc;
  291. u8 *iv = nx_ctx->priv.ccm.iv;
  292. iv[0] = 3;
  293. memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3);
  294. memcpy(iv + 4, req->iv, 8);
  295. desc.info = iv;
  296. desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
  297. return ccm_nx_encrypt(req, &desc);
  298. }
  299. static int ccm_aes_nx_encrypt(struct aead_request *req)
  300. {
  301. struct blkcipher_desc desc;
  302. int rc;
  303. desc.info = req->iv;
  304. desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
  305. rc = crypto_ccm_check_iv(desc.info);
  306. if (rc)
  307. return rc;
  308. return ccm_nx_encrypt(req, &desc);
  309. }
  310. static int ccm4309_aes_nx_decrypt(struct aead_request *req)
  311. {
  312. struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
  313. struct blkcipher_desc desc;
  314. u8 *iv = nx_ctx->priv.ccm.iv;
  315. iv[0] = 3;
  316. memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3);
  317. memcpy(iv + 4, req->iv, 8);
  318. desc.info = iv;
  319. desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
  320. return ccm_nx_decrypt(req, &desc);
  321. }
  322. static int ccm_aes_nx_decrypt(struct aead_request *req)
  323. {
  324. struct blkcipher_desc desc;
  325. int rc;
  326. desc.info = req->iv;
  327. desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
  328. rc = crypto_ccm_check_iv(desc.info);
  329. if (rc)
  330. return rc;
  331. return ccm_nx_decrypt(req, &desc);
  332. }
/* tell the block cipher walk routines that this is a stream cipher by
 * setting cra_blocksize to 1. Even using blkcipher_walk_virt_block
 * during encrypt/decrypt doesn't solve this problem, because it calls
 * blkcipher_walk_done under the covers, which doesn't use walk->blocksize,
 * but instead uses this tfm->blocksize. */
/* registration record for the plain "ccm(aes)" AEAD transform */
struct crypto_alg nx_ccm_aes_alg = {
	.cra_name        = "ccm(aes)",
	.cra_driver_name = "ccm-aes-nx",
	.cra_priority    = 300,
	.cra_flags       = CRYPTO_ALG_TYPE_AEAD |
			   CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize   = 1,	/* stream-cipher blocksize, see above */
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_aead_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_ccm_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_aead = {
		.ivsize      = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.setkey      = ccm_aes_nx_set_key,
		.setauthsize = ccm_aes_nx_setauthsize,
		.encrypt     = ccm_aes_nx_encrypt,
		.decrypt     = ccm_aes_nx_decrypt,
	}
};
/* registration record for the rfc4309 (IPsec ESP) CCM transform; uses
 * an 8-byte per-request IV combined with the 3-byte salt from setkey,
 * and "seqiv" for IV generation */
struct crypto_alg nx_ccm4309_aes_alg = {
	.cra_name        = "rfc4309(ccm(aes))",
	.cra_driver_name = "rfc4309-ccm-aes-nx",
	.cra_priority    = 300,
	.cra_flags       = CRYPTO_ALG_TYPE_AEAD |
			   CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize   = 1,	/* stream-cipher blocksize */
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_nivaead_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_ccm_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_aead = {
		.ivsize      = 8,
		.maxauthsize = AES_BLOCK_SIZE,
		.setkey      = ccm4309_aes_nx_set_key,
		.setauthsize = ccm4309_aes_nx_setauthsize,
		.encrypt     = ccm4309_aes_nx_encrypt,
		.decrypt     = ccm4309_aes_nx_decrypt,
		.geniv       = "seqiv",
	}
};