gcm.c

/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
};

struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;
	struct gf128mul_4k *gf128;
};

struct crypto_gcm_ghash_ctx {
	u32 bytes;
	u32 flags;
	struct gf128mul_4k *gf128;
	u8 buffer[16];
};

struct crypto_gcm_req_priv_ctx {
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[2];
	struct scatterlist dst[2];
	struct crypto_gcm_ghash_ctx ghash;
	struct ablkcipher_request abreq;
};

struct crypto_gcm_setkey_result {
	int err;
	struct completion completion;
};

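/*
 * The per-request private context lives in the AEAD request's context area;
 * align it to the transform's alignment mask so the embedded ablkcipher
 * request that follows it is suitably aligned as well.
 */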
static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
				  struct gf128mul_4k *gf128)
{
	ctx->bytes = 0;
	ctx->flags = flags;
	ctx->gf128 = gf128;
	memset(ctx->buffer, 0, 16);
}

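/*
 * Fold srclen bytes into the GHASH state: XOR the input into the 16-byte
 * accumulator and multiply by H (via the precomputed 4k table) after every
 * full block.  A trailing partial block is buffered in ctx->buffer and
 * completed by a later update or by crypto_gcm_ghash_flush().
 */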
static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
				    const u8 *src, unsigned int srclen)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		int n = min(srclen, ctx->bytes);
		u8 *pos = dst + (16 - ctx->bytes);

		ctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!ctx->bytes)
			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	while (srclen >= 16) {
		crypto_xor(dst, src, 16);
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
		src += 16;
		srclen -= 16;
	}

	if (srclen) {
		ctx->bytes = 16 - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}
}

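/*
 * Feed a scatterlist into GHASH.  The walk maps each segment in turn, hashes
 * it, and yields between segments when the request flags allow sleeping.
 */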
static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
				       struct scatterlist *sg, int len)
{
	struct scatter_walk walk;
	u8 *src;
	int n;

	if (!len)
		return;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);

		if (!n) {
			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		src = scatterwalk_map(&walk, 0);

		crypto_gcm_ghash_update(ctx, src, n);
		len -= n;

		scatterwalk_unmap(src, 0);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(ctx->flags);
	}
}

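/*
 * Pad any buffered partial block with zeroes and run the final multiply so
 * the accumulator again holds a complete GHASH state.
 */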
static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		u8 *tmp = dst + (16 - ctx->bytes);

		while (ctx->bytes--)
			*tmp++ ^= 0;

		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	ctx->bytes = 0;
}

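/*
 * Finish GHASH: hash in the 128-bit length block (bit lengths of the
 * associated data and the ciphertext) and XOR the result into dst, which
 * after the CTR pass holds the first keystream block (E_K(Y0) in GCM terms).
 */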
static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
				       unsigned int authlen,
				       unsigned int cryptlen, u8 *dst)
{
	u8 *buf = ctx->buffer;
	u128 lengths;

	lengths.a = cpu_to_be64(authlen * 8);
	lengths.b = cpu_to_be64(cryptlen * 8);

	crypto_gcm_ghash_flush(ctx);
	crypto_xor(buf, (u8 *)&lengths, 16);
	gf128mul_4k_lle((be128 *)buf, ctx->gf128);
	crypto_xor(dst, buf, 16);
}

static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
{
	struct crypto_gcm_setkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;

	result->err = err;
	complete(&result->completion);
}

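/*
 * Key the underlying CTR transform and derive the GHASH key H by encrypting
 * an all-zero block under an all-zero counter, then build the 4k
 * multiplication table for H.  The encryption may complete asynchronously,
 * in which case we wait on the completion.
 */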
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];	/* full all-zero counter block for CTR */

		struct crypto_gcm_setkey_result result;

		struct scatterlist sg[1];
		struct ablkcipher_request req;
	} *data;
	int err;

	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);

	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);

	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	init_completion(&data->result.completion);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	ablkcipher_request_set_tfm(&data->req, ctr);
	ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					crypto_gcm_setkey_done,
					&data->result);
	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
				     sizeof(data->hash), data->iv);

	err = crypto_ablkcipher_encrypt(&data->req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		err = wait_for_completion_interruptible(
			&data->result.completion);
		if (!err)
			err = data->result.err;
	}

	if (err)
		goto out;

	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	ctx->gf128 = gf128mul_init_4k_lle(&data->hash);

	if (ctx->gf128 == NULL)
		err = -ENOMEM;

out:
	kfree(data);
	return err;
}

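/*
 * Prepare the CTR request shared by encrypt and decrypt: set the 32-bit
 * counter in the IV to 1, prepend the zeroed auth_tag block to the data so
 * the first keystream block lands in auth_tag, and hash the associated data
 * into the GHASH state.
 */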
static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
				  struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u32 flags = req->base.tfm->crt_flags;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	struct scatterlist *dst;
	__be32 counter = cpu_to_be32(1);

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(req->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     req->iv);

	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);

	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
	crypto_gcm_ghash_flush(ghash);
}

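/*
 * Encrypt-side tag generation: hash the ciphertext in req->dst, fold in the
 * length block, and copy the resulting tag to the end of req->dst.
 */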
static int crypto_gcm_hash(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u8 *auth_tag = pctx->auth_tag;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;

	crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
				   auth_tag);

	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_hash(req);

	aead_request_complete(req, err);
}

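/*
 * AEAD encrypt: run the CTR pass over the plaintext (plus the auth_tag block
 * prepended by crypto_gcm_init_crypt()), then compute the tag.  If the CTR
 * request completes asynchronously, the tag is computed from the completion
 * callback instead.
 */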
static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	int err;

	crypto_gcm_init_crypt(abreq, req, req->cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					crypto_gcm_encrypt_done, req);

	err = crypto_ablkcipher_encrypt(abreq);
	if (err)
		return err;

	return crypto_gcm_hash(req);
}

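/*
 * Decrypt-side tag check: finish GHASH to recover the expected tag in
 * auth_tag, copy the received tag from req->src and compare.  Note that
 * memcmp() is not a constant-time comparison.
 */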
static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);

	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
	return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}

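/*
 * AEAD decrypt: the tag occupies the last authsize bytes of req->cryptlen,
 * so hash and decrypt only the ciphertext that precedes it, then verify the
 * tag (from the completion callback if the CTR request is asynchronous).
 */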
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	unsigned int cryptlen = req->cryptlen;
	unsigned int authsize = crypto_aead_authsize(aead);
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	crypto_gcm_init_crypt(abreq, req, cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					crypto_gcm_decrypt_done, req);

	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);

	err = crypto_ablkcipher_decrypt(abreq);
	if (err)
		return err;

	return crypto_gcm_verify(req);
}

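/*
 * Instantiate the underlying CTR transform and size the per-request context:
 * alignment padding plus the private context plus the CTR request.
 */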
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		return err;

	ctx->ctr = ctr;
	ctx->gf128 = NULL;

	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
				sizeof(struct crypto_gcm_req_priv_ctx) +
				crypto_ablkcipher_reqsize(ctr);

	return 0;
}

static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	crypto_free_ablkcipher(ctx->ctr);
}

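/*
 * Build a GCM instance around a CTR-mode skcipher.  full_name is the
 * user-visible algorithm name (e.g. "gcm(aes)" or "gcm_base(ctr(aes))"),
 * ctr_name the skcipher to grab.  The CTR transform must look like a stream
 * cipher (blocksize 1) with a 16-byte IV.
 */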
static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
						       const char *full_name,
						       const char *ctr_name)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *ctr;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	ctx = crypto_instance_ctx(inst);
	crypto_set_skcipher_spawn(&ctx->ctr, inst);
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_free_inst;

	ctr = crypto_skcipher_spawn_alg(&ctx->ctr);

	/*
	 * Set err before the checks below so a failed check does not
	 * return ERR_PTR(0).
	 */
	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto out_put_ctr;

	/* Not a stream cipher? */
	if (ctr->cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s)", ctr->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = ctr->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
	inst->alg.cra_type = &crypto_aead_type;
	inst->alg.cra_aead.ivsize = 16;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.cra_init = crypto_gcm_init_tfm;
	inst->alg.cra_exit = crypto_gcm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_gcm_setkey;
	inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;

out:
	return inst;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

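/*
 * "gcm(X)" template: wrap the named block cipher in "ctr(X)" and build the
 * instance from that.
 */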
static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
{
	int err;
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(cipher_name);
	if (IS_ERR(cipher_name))
		return ERR_PTR(err);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_gcm_alloc_common(tb, full_name, ctr_name);
}

static void crypto_gcm_free(struct crypto_instance *inst)
{
	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

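/*
 * "gcm_base(Y)" template: use the named CTR-mode skcipher directly, without
 * wrapping it in "ctr(...)" first.
 */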
static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
{
	int err;
	const char *ctr_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ctr_name);
	if (IS_ERR(ctr_name))
		return ERR_PTR(err);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
		     ctr_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_gcm_alloc_common(tb, full_name, ctr_name);
}

static struct crypto_template crypto_gcm_base_tmpl = {
	.name = "gcm_base",
	.alloc = crypto_gcm_base_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

static int __init crypto_gcm_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_gcm_base_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_gcm_tmpl);
	if (err)
		goto out_undo_base;

out:
	return err;

out_undo_base:
	crypto_unregister_template(&crypto_gcm_base_tmpl);
	goto out;
}

static void __exit crypto_gcm_module_exit(void)
{
	crypto_unregister_template(&crypto_gcm_tmpl);
	crypto_unregister_template(&crypto_gcm_base_tmpl);
}

module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS("gcm_base");