/* gcm.c */
  1. /*
  2. * GCM: Galois/Counter Mode.
  3. *
  4. * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License version 2 as published
  8. * by the Free Software Foundation.
  9. */
  10. #include <crypto/algapi.h>
  11. #include <crypto/gf128mul.h>
  12. #include <crypto/scatterwalk.h>
  13. #include <linux/completion.h>
  14. #include <linux/err.h>
  15. #include <linux/init.h>
  16. #include <linux/kernel.h>
  17. #include <linux/module.h>
  18. #include <linux/slab.h>
  19. #include "internal.h"
/* Per-instance context: holds the spawn of the underlying ctr(cipher). */
struct gcm_instance_ctx {
	struct crypto_spawn ctr;
};
/* Per-transform context for a gcm(...) instance. */
struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;	/* the underlying CTR-mode transform */
	struct gf128mul_4k *gf128;	/* GHASH 4k table; NULL until setkey */
};
/* Running GHASH computation state. */
struct crypto_gcm_ghash_ctx {
	u32 bytes;		/* bytes still missing from the current
				 * 16-byte block; 0 == block boundary */
	u32 flags;		/* request flags, passed to crypto_yield() */
	struct gf128mul_4k *gf128;	/* multiplication table for H */
	u8 buffer[16];		/* digest / partial-block accumulator */
};
/*
 * Per-request private context, placed (alignment-adjusted) in the AEAD
 * request context area; see crypto_gcm_init_tfm() for the sizing.
 */
struct crypto_gcm_req_priv_ctx {
	u8 auth_tag[16];	/* tag computed by this request */
	u8 iauth_tag[16];	/* tag received with the ciphertext (decrypt) */
	struct scatterlist src[2];	/* auth_tag block chained to req->src */
	struct scatterlist dst[2];	/* auth_tag block chained to req->dst */
	struct crypto_gcm_ghash_ctx ghash;
	struct ablkcipher_request abreq;	/* keep last: the CTR request
						 * context is allocated right
						 * behind this struct */
};
/* Carries the outcome of the async hash-key derivation in setkey. */
struct crypto_gcm_setkey_result {
	int err;			/* final status from the callback */
	struct completion completion;	/* signalled when err is valid */
};
  45. static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
  46. struct aead_request *req)
  47. {
  48. unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  49. return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  50. }
  51. static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
  52. struct gf128mul_4k *gf128)
  53. {
  54. ctx->bytes = 0;
  55. ctx->flags = flags;
  56. ctx->gf128 = gf128;
  57. memset(ctx->buffer, 0, 16);
  58. }
/*
 * Fold srclen bytes of data into the running GHASH state.
 *
 * Data is XORed into the digest buffer; each time a full 16-byte block
 * has been absorbed the buffer is multiplied by H via the precomputed
 * 4k table.  ctx->bytes tracks how many bytes are still missing from
 * the current partial block, whose multiplication is deferred.
 */
static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
				    const u8 *src, unsigned int srclen)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		/* Complete the previously started partial block first. */
		int n = min(srclen, ctx->bytes);
		u8 *pos = dst + (16 - ctx->bytes);

		ctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		/* Block is now full: do the deferred multiply. */
		if (!ctx->bytes)
			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	/* Absorb whole 16-byte blocks. */
	while (srclen >= 16) {
		crypto_xor(dst, src, 16);
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
		src += 16;
		srclen -= 16;
	}

	/* Stash the trailing partial block; multiply is deferred until
	 * the block fills up or the state is flushed. */
	if (srclen) {
		ctx->bytes = 16 - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}
}
/*
 * Feed len bytes from a scatterlist into the running GHASH state,
 * mapping one segment at a time with the scatterwalk helpers.
 */
static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
				       struct scatterlist *sg, int len)
{
	struct scatter_walk walk;
	u8 *src;
	int n;

	if (!len)
		return;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);

		if (!n) {
			/* Current entry exhausted: advance to the next. */
			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		src = scatterwalk_map(&walk, 0);

		crypto_gcm_ghash_update(ctx, src, n);
		len -= n;

		scatterwalk_unmap(src, 0);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);

		/* Give the scheduler a chance between segments. */
		if (len)
			crypto_yield(ctx->flags);
	}
}
  110. static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
  111. {
  112. u8 *dst = ctx->buffer;
  113. if (ctx->bytes) {
  114. u8 *tmp = dst + (16 - ctx->bytes);
  115. while (ctx->bytes--)
  116. *tmp++ ^= 0;
  117. gf128mul_4k_lle((be128 *)dst, ctx->gf128);
  118. }
  119. ctx->bytes = 0;
  120. }
  121. static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
  122. unsigned int authlen,
  123. unsigned int cryptlen, u8 *dst)
  124. {
  125. u8 *buf = ctx->buffer;
  126. u128 lengths;
  127. lengths.a = cpu_to_be64(authlen * 8);
  128. lengths.b = cpu_to_be64(cryptlen * 8);
  129. crypto_gcm_ghash_flush(ctx);
  130. crypto_xor(buf, (u8 *)&lengths, 16);
  131. gf128mul_4k_lle((be128 *)buf, ctx->gf128);
  132. crypto_xor(dst, buf, 16);
  133. }
  134. static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
  135. {
  136. struct crypto_gcm_setkey_result *result = req->data;
  137. if (err == -EINPROGRESS)
  138. return;
  139. result->err = err;
  140. complete(&result->completion);
  141. }
/*
 * gcm(...) setkey: program the CTR transform with the user key, derive
 * the GHASH key H by encrypting one all-zero block, and build the 4k
 * multiplication table from it.
 */
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct {
		be128 hash;	/* zero block in, hash key H out */
		u8 iv[8];	/* zero IV for the derivation */

		struct crypto_gcm_setkey_result result;

		struct scatterlist sg[1];
		struct ablkcipher_request req;	/* keep last: the CTR request
						 * context follows in memory */
	} *data;
	int err;

	/* Forward the caller's request flags to the CTR transform and
	 * reflect its result flags back to the AEAD. */
	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);

	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);

	/* kzalloc: the hash block and IV must start out all zero. */
	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	init_completion(&data->result.completion);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	ablkcipher_request_set_tfm(&data->req, ctr);
	ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					crypto_gcm_setkey_done,
					&data->result);
	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
				     sizeof(data->hash), data->iv);

	err = crypto_ablkcipher_encrypt(&data->req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		/* Asynchronous path: wait for the callback to post the
		 * real status.  NOTE(review): an interruptible wait can
		 * abandon the request on a signal — confirm this is
		 * acceptable for all callers. */
		err = wait_for_completion_interruptible(
			&data->result.completion);
		if (!err)
			err = data->result.err;
	}

	if (err)
		goto out;

	/* Replace any table left from a previous setkey. */
	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	ctx->gf128 = gf128mul_init_4k_lle(&data->hash);

	if (ctx->gf128 == NULL)
		err = -ENOMEM;

out:
	kfree(data);
	return err;
}
/*
 * Common encrypt/decrypt setup: build the CTR request and hash the
 * associated data.
 *
 * The first scatterlist entry covers the zeroed auth_tag, so the CTR
 * pass consumes counter block 1 producing the tag keystream in
 * auth_tag; the payload follows via chaining to req->src/req->dst.
 */
static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
				  struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u32 flags = req->base.tfm->crt_flags;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	struct scatterlist *dst;
	__be32 counter = cpu_to_be32(1);

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	/* 96-bit IV || 32-bit counter starting at 1; req->iv must have
	 * room for the full 16 bytes (ivsize is 16). */
	memcpy(req->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	/* In-place operation unless the caller supplied a distinct dst. */
	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     req->iv);

	/* Hash the associated data up front; the flush zero-pads it to
	 * a block boundary before any ciphertext is folded in. */
	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);

	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
	crypto_gcm_ghash_flush(ghash);
}
  225. static int crypto_gcm_hash(struct aead_request *req)
  226. {
  227. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  228. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  229. u8 *auth_tag = pctx->auth_tag;
  230. struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
  231. crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
  232. crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
  233. auth_tag);
  234. scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
  235. crypto_aead_authsize(aead), 1);
  236. return 0;
  237. }
  238. static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
  239. {
  240. struct aead_request *req = areq->data;
  241. if (!err)
  242. err = crypto_gcm_hash(req);
  243. aead_request_complete(req, err);
  244. }
  245. static int crypto_gcm_encrypt(struct aead_request *req)
  246. {
  247. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  248. struct ablkcipher_request *abreq = &pctx->abreq;
  249. int err;
  250. crypto_gcm_init_crypt(abreq, req, req->cryptlen);
  251. ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  252. crypto_gcm_encrypt_done, req);
  253. err = crypto_ablkcipher_encrypt(abreq);
  254. if (err)
  255. return err;
  256. return crypto_gcm_hash(req);
  257. }
  258. static int crypto_gcm_verify(struct aead_request *req)
  259. {
  260. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  261. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  262. struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
  263. u8 *auth_tag = pctx->auth_tag;
  264. u8 *iauth_tag = pctx->iauth_tag;
  265. unsigned int authsize = crypto_aead_authsize(aead);
  266. unsigned int cryptlen = req->cryptlen - authsize;
  267. crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
  268. authsize = crypto_aead_authsize(aead);
  269. scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
  270. return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
  271. }
  272. static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
  273. {
  274. struct aead_request *req = areq->data;
  275. if (!err)
  276. err = crypto_gcm_verify(req);
  277. aead_request_complete(req, err);
  278. }
  279. static int crypto_gcm_decrypt(struct aead_request *req)
  280. {
  281. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  282. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  283. struct ablkcipher_request *abreq = &pctx->abreq;
  284. struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
  285. unsigned int cryptlen = req->cryptlen;
  286. unsigned int authsize = crypto_aead_authsize(aead);
  287. int err;
  288. if (cryptlen < authsize)
  289. return -EINVAL;
  290. cryptlen -= authsize;
  291. crypto_gcm_init_crypt(abreq, req, cryptlen);
  292. ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  293. crypto_gcm_decrypt_done, req);
  294. crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
  295. err = crypto_ablkcipher_decrypt(abreq);
  296. if (err)
  297. return err;
  298. return crypto_gcm_verify(req);
  299. }
/*
 * Transform init: instantiate the CTR spawn and size the per-request
 * context area (alignment slack + private ctx + CTR request context).
 */
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	ctr = crypto_spawn_ablkcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		return err;

	ctx->ctr = ctr;
	ctx->gf128 = NULL;	/* built lazily by setkey */

	/* Extra slack so crypto_gcm_reqctx() can round the context
	 * pointer up to the algorithm's alignmask. */
	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
		sizeof(struct crypto_gcm_req_priv_ctx) +
		crypto_ablkcipher_reqsize(ctr);

	return 0;
}
  321. static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
  322. {
  323. struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
  324. if (ctx->gf128 != NULL)
  325. gf128mul_free_4k(ctx->gf128);
  326. crypto_free_ablkcipher(ctx->ctr);
  327. }
  328. static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
  329. {
  330. struct crypto_instance *inst;
  331. struct crypto_alg *ctr;
  332. struct crypto_alg *cipher;
  333. struct gcm_instance_ctx *ctx;
  334. int err;
  335. char ctr_name[CRYPTO_MAX_ALG_NAME];
  336. err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD);
  337. if (err)
  338. return ERR_PTR(err);
  339. cipher = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
  340. CRYPTO_ALG_TYPE_MASK);
  341. inst = ERR_PTR(PTR_ERR(cipher));
  342. if (IS_ERR(cipher))
  343. return inst;
  344. inst = ERR_PTR(ENAMETOOLONG);
  345. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  346. cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
  347. return inst;
  348. ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
  349. CRYPTO_ALG_TYPE_MASK);
  350. if (IS_ERR(ctr))
  351. return ERR_PTR(PTR_ERR(ctr));
  352. if (cipher->cra_blocksize != 16)
  353. goto out_put_ctr;
  354. inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
  355. err = -ENOMEM;
  356. if (!inst)
  357. goto out_put_ctr;
  358. err = -ENAMETOOLONG;
  359. if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
  360. "gcm(%s)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME ||
  361. snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  362. "gcm(%s)", cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  363. goto err_free_inst;
  364. ctx = crypto_instance_ctx(inst);
  365. err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK);
  366. if (err)
  367. goto err_free_inst;
  368. inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
  369. inst->alg.cra_priority = ctr->cra_priority;
  370. inst->alg.cra_blocksize = 16;
  371. inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
  372. inst->alg.cra_type = &crypto_aead_type;
  373. inst->alg.cra_aead.ivsize = 16;
  374. inst->alg.cra_aead.maxauthsize = 16;
  375. inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
  376. inst->alg.cra_init = crypto_gcm_init_tfm;
  377. inst->alg.cra_exit = crypto_gcm_exit_tfm;
  378. inst->alg.cra_aead.setkey = crypto_gcm_setkey;
  379. inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
  380. inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
  381. out:
  382. crypto_mod_put(ctr);
  383. return inst;
  384. err_free_inst:
  385. kfree(inst);
  386. out_put_ctr:
  387. inst = ERR_PTR(err);
  388. goto out;
  389. }
  390. static void crypto_gcm_free(struct crypto_instance *inst)
  391. {
  392. struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
  393. crypto_drop_spawn(&ctx->ctr);
  394. kfree(inst);
  395. }
/* The "gcm" template: "gcm(X)" instantiates GCM over block cipher X. */
static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};
/* Register the "gcm" template with the crypto core on module load. */
static int __init crypto_gcm_module_init(void)
{
	return crypto_register_template(&crypto_gcm_tmpl);
}

/* Unregister the template on module unload. */
static void __exit crypto_gcm_module_exit(void)
{
	crypto_unregister_template(&crypto_gcm_tmpl);
}

module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");