/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"
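
/*
 * Context layering: gcm_instance_ctx holds the spawn for the underlying
 * "ctr(cipher)" algorithm, crypto_gcm_ctx is the per-tfm state (the CTR
 * transform plus the GHASH multiplication table derived from the key),
 * and crypto_gcm_req_priv_ctx is per-request scratch space: the computed
 * tag, the received tag (on decrypt), the counter block and a running
 * GHASH state.
 */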
struct gcm_instance_ctx {
	struct crypto_spawn ctr;
};

struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;
	struct gf128mul_4k *gf128;
};

struct crypto_gcm_ghash_ctx {
	u32 bytes;
	u32 flags;
	struct gf128mul_4k *gf128;
	u8 buffer[16];
};

struct crypto_gcm_req_priv_ctx {
	u8 auth_tag[16];
	u8 iauth_tag[16];
	u8 counter[16];
	struct crypto_gcm_ghash_ctx ghash;
	struct ablkcipher_request abreq;
};

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
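
/*
 * Software GHASH: the 128-bit hash state lives in ctx->buffer and each
 * full block is folded in as buffer = (buffer ^ block) * H in GF(2^128),
 * with the multiplication done via the precomputed gf128mul_4k table.
 */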
static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
				  struct gf128mul_4k *gf128)
{
	ctx->bytes = 0;
	ctx->flags = flags;
	ctx->gf128 = gf128;
	memset(ctx->buffer, 0, 16);
}
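
/*
 * Absorb srclen bytes into the hash.  ctx->bytes counts how much room is
 * left in a partially filled block: any such remainder is completed (and
 * multiplied) first, whole 16-byte blocks are folded in directly, and a
 * trailing fragment is XORed into the buffer to wait for more data.
 */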
static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
				    const u8 *src, unsigned int srclen)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		int n = min(srclen, ctx->bytes);
		u8 *pos = dst + (16 - ctx->bytes);

		ctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!ctx->bytes)
			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	while (srclen >= 16) {
		crypto_xor(dst, src, 16);
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
		src += 16;
		srclen -= 16;
	}

	if (srclen) {
		ctx->bytes = 16 - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}
}
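
/*
 * Feed a scatterlist to the hash one mapped chunk at a time, yielding
 * between chunks if the request flags allow it.
 */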
static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
				       struct scatterlist *sg, int len)
{
	struct scatter_walk walk;
	u8 *src;
	int n;

	if (!len)
		return;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);
		if (!n) {
			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		src = scatterwalk_map(&walk, 0);

		crypto_gcm_ghash_update(ctx, src, n);

		len -= n;
		scatterwalk_unmap(src, 0);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(ctx->flags);
	}
}

/* Zero-pad a partial final block and fold it into the hash. */
static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		u8 *tmp = dst + (16 - ctx->bytes);

		while (ctx->bytes--)
			*tmp++ ^= 0;

		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	ctx->bytes = 0;
}
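
/*
 * Finalize GHASH per the GCM spec: append the 64-bit bit lengths of the
 * associated data and the ciphertext, multiply once more, and XOR the
 * result into dst (which already holds E(K, Y0), yielding the tag).
 */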
static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
				       unsigned int authlen,
				       unsigned int cryptlen, u8 *dst)
{
	u8 *buf = ctx->buffer;
	u128 lengths;

	lengths.a = cpu_to_be64(authlen * 8);
	lengths.b = cpu_to_be64(cryptlen * 8);

	crypto_gcm_ghash_flush(ctx);
	crypto_xor(buf, (u8 *)&lengths, 16);
	gf128mul_4k_lle((be128 *)buf, ctx->gf128);
	crypto_xor(dst, buf, 16);
}
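
/*
 * With a 96-bit IV the counter block is IV || BE32(counter).  The +1
 * below pre-increments, so callers pass the value one less than the
 * counter they want: -1 yields the all-zero block used to derive H,
 * 0 yields Y0 = IV || 1.
 */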
static inline void crypto_gcm_set_counter(u8 *counterblock, u32 value)
{
	*((__be32 *)&counterblock[12]) = cpu_to_be32(value + 1);
}
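
/*
 * Encrypt a single counter block synchronously with an on-stack request.
 * block is zeroed first, so the result is E(K, counterblock) itself;
 * a NULL iv selects the all-zero IV used when deriving the hash key H.
 */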
static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block,
				      u32 value, const u8 *iv)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct ablkcipher_request req;
	struct scatterlist sg;
	u8 counterblock[16];

	if (iv == NULL)
		memset(counterblock, 0, 12);
	else
		memcpy(counterblock, iv, 12);

	crypto_gcm_set_counter(counterblock, value);

	sg_init_one(&sg, block, 16);
	ablkcipher_request_set_tfm(&req, ctr);
	ablkcipher_request_set_crypt(&req, &sg, &sg, 16, counterblock);
	ablkcipher_request_set_callback(&req, 0, NULL, NULL);

	memset(block, 0, 16);
	return crypto_ablkcipher_encrypt(&req);
}
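
/*
 * Key the underlying CTR transform, then derive the hash key
 * H = E(K, 0^128) and build the gf128mul lookup table from it.
 */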
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ablkcipher *ctr = ctx->ctr;
	int alignmask = crypto_ablkcipher_alignmask(ctr);
	u8 alignbuf[16 + alignmask];
	u8 *hash = (u8 *)ALIGN((unsigned long)alignbuf, alignmask + 1);
	int err = 0;

	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);

	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	if (err)
		goto out;

	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);

	err = crypto_gcm_encrypt_counter(aead, hash, -1, NULL);
	if (err)
		goto out;

	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	ctx->gf128 = gf128mul_init_4k_lle((be128 *)hash);
	if (ctx->gf128 == NULL)
		err = -ENOMEM;

out:
	return err;
}
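
/*
 * Common encrypt/decrypt setup: point the CTR request at the payload,
 * compute E(K, Y0) into auth_tag (the value the final GHASH result is
 * XORed with), set the counter block to Y1, and hash the associated
 * data, flushing so the payload starts on a fresh block.
 */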
static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
				 struct aead_request *req,
				 unsigned int cryptlen,
				 void (*done)(struct crypto_async_request *,
					      int))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u32 flags = req->base.tfm->crt_flags;
	u8 *auth_tag = pctx->auth_tag;
	u8 *counter = pctx->counter;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	int err = 0;

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_callback(ablk_req, aead_request_flags(req),
					done, req);
	ablkcipher_request_set_crypt(ablk_req, req->src, req->dst,
				     cryptlen, counter);

	err = crypto_gcm_encrypt_counter(aead, auth_tag, 0, req->iv);
	if (err)
		goto out;

	memcpy(counter, req->iv, 12);
	crypto_gcm_set_counter(counter, 1);
	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);

	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
	crypto_gcm_ghash_flush(ghash);

out:
	return err;
}
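
/*
 * Hash the ciphertext in req->dst, finalize with the lengths block and
 * append the resulting tag after the ciphertext.
 */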
static int crypto_gcm_hash(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u8 *auth_tag = pctx->auth_tag;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;

	crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
				   auth_tag);

	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_hash(req);

	aead_request_complete(req, err);
}

static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	int err = 0;

	err = crypto_gcm_init_crypt(abreq, req, req->cryptlen,
				    crypto_gcm_encrypt_done);
	if (err)
		return err;

	if (req->cryptlen) {
		err = crypto_ablkcipher_encrypt(abreq);
		if (err)
			return err;
	}

	return crypto_gcm_hash(req);
}

static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	aead_request_complete(areq->data, err);
}
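
/*
 * Decryption verifies the tag over the still-encrypted payload before
 * running CTR, so nothing is decrypted on authentication failure.  Note
 * that memcmp() is not a constant-time comparison.
 */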
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	unsigned int cryptlen = req->cryptlen;
	unsigned int authsize = crypto_aead_authsize(aead);
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	err = crypto_gcm_init_crypt(abreq, req, cryptlen,
				    crypto_gcm_decrypt_done);
	if (err)
		return err;

	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);

	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
	if (memcmp(iauth_tag, auth_tag, authsize))
		return -EBADMSG;

	return crypto_ablkcipher_decrypt(abreq);
}
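
/*
 * Instantiate the CTR transform for this tfm and reserve enough request
 * context for an aligned crypto_gcm_req_priv_ctx plus the CTR request.
 */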
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	ctr = crypto_spawn_ablkcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		return err;

	ctx->ctr = ctr;
	ctx->gf128 = NULL;

	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
				sizeof(struct crypto_gcm_req_priv_ctx) +
				crypto_ablkcipher_reqsize(ctr);

	return 0;
}

static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	crypto_free_ablkcipher(ctx->ctr);
}
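
/*
 * Template constructor: "gcm(X)" wraps the block cipher X by looking up
 * "ctr(X)" as the keystream generator and registering an AEAD with a
 * 96-bit IV and a tag of up to 16 bytes.
 */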
static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *ctr;
	struct crypto_alg *cipher;
	struct gcm_instance_ctx *ctx;
	int err;
	char ctr_name[CRYPTO_MAX_ALG_NAME];

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD);
	if (err)
		return ERR_PTR(err);

	cipher = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
				 CRYPTO_ALG_TYPE_MASK);

	inst = ERR_PTR(PTR_ERR(cipher));
	if (IS_ERR(cipher))
		return inst;

	inst = ERR_PTR(-ENAMETOOLONG);
	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return inst;

	ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
				    CRYPTO_ALG_TYPE_MASK);

	if (IS_ERR(ctr))
		return ERR_PTR(PTR_ERR(ctr));

	/* GCM is defined for 128-bit block ciphers only. */
	err = -EINVAL;
	if (cipher->cra_blocksize != 16)
		goto out_put_ctr;

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s)", cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	ctx = crypto_instance_ctx(inst);
	err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_free_inst;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = ctr->cra_priority;
	inst->alg.cra_blocksize = 16;
	inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
	inst->alg.cra_type = &crypto_aead_type;
	inst->alg.cra_aead.ivsize = 12;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.cra_init = crypto_gcm_init_tfm;
	inst->alg.cra_exit = crypto_gcm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_gcm_setkey;
	inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;

out:
	crypto_mod_put(ctr);
	return inst;

err_free_inst:
	kfree(inst);
out_put_ctr:
	inst = ERR_PTR(err);
	goto out;
}

static void crypto_gcm_free(struct crypto_instance *inst)
{
	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->ctr);
	kfree(inst);
}

static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

static int __init crypto_gcm_module_init(void)
{
	return crypto_register_template(&crypto_gcm_tmpl);
}

static void __exit crypto_gcm_module_exit(void)
{
	crypto_unregister_template(&crypto_gcm_tmpl);
}

module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");