/* crypto/shash.c */
  1. /*
  2. * Synchronous Cryptographic Hash operations.
  3. *
  4. * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <crypto/scatterwalk.h>
  13. #include <crypto/internal/hash.h>
  14. #include <linux/err.h>
  15. #include <linux/kernel.h>
  16. #include <linux/module.h>
  17. #include <linux/slab.h>
  18. #include <linux/seq_file.h>
  19. #include "internal.h"
  20. static const struct crypto_type crypto_shash_type;
  21. static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
  22. unsigned int keylen)
  23. {
  24. struct shash_alg *shash = crypto_shash_alg(tfm);
  25. unsigned long alignmask = crypto_shash_alignmask(tfm);
  26. unsigned long absize;
  27. u8 *buffer, *alignbuffer;
  28. int err;
  29. absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
  30. buffer = kmalloc(absize, GFP_KERNEL);
  31. if (!buffer)
  32. return -ENOMEM;
  33. alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
  34. memcpy(alignbuffer, key, keylen);
  35. err = shash->setkey(tfm, alignbuffer, keylen);
  36. memset(alignbuffer, 0, keylen);
  37. kfree(buffer);
  38. return err;
  39. }
  40. int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
  41. unsigned int keylen)
  42. {
  43. struct shash_alg *shash = crypto_shash_alg(tfm);
  44. unsigned long alignmask = crypto_shash_alignmask(tfm);
  45. if (!shash->setkey)
  46. return -ENOSYS;
  47. if ((unsigned long)key & alignmask)
  48. return shash_setkey_unaligned(tfm, key, keylen);
  49. return shash->setkey(tfm, key, keylen);
  50. }
  51. EXPORT_SYMBOL_GPL(crypto_shash_setkey);
  52. static inline unsigned int shash_align_buffer_size(unsigned len,
  53. unsigned long mask)
  54. {
  55. return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
  56. }
  57. static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
  58. unsigned int len)
  59. {
  60. struct crypto_shash *tfm = desc->tfm;
  61. struct shash_alg *shash = crypto_shash_alg(tfm);
  62. unsigned long alignmask = crypto_shash_alignmask(tfm);
  63. unsigned int unaligned_len = alignmask + 1 -
  64. ((unsigned long)data & alignmask);
  65. u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
  66. __attribute__ ((aligned));
  67. memcpy(buf, data, unaligned_len);
  68. return shash->update(desc, buf, unaligned_len) ?:
  69. shash->update(desc, data + unaligned_len, len - unaligned_len);
  70. }
  71. int crypto_shash_update(struct shash_desc *desc, const u8 *data,
  72. unsigned int len)
  73. {
  74. struct crypto_shash *tfm = desc->tfm;
  75. struct shash_alg *shash = crypto_shash_alg(tfm);
  76. unsigned long alignmask = crypto_shash_alignmask(tfm);
  77. if ((unsigned long)data & alignmask)
  78. return shash_update_unaligned(desc, data, len);
  79. return shash->update(desc, data, len);
  80. }
  81. EXPORT_SYMBOL_GPL(crypto_shash_update);
  82. static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
  83. {
  84. struct crypto_shash *tfm = desc->tfm;
  85. unsigned long alignmask = crypto_shash_alignmask(tfm);
  86. struct shash_alg *shash = crypto_shash_alg(tfm);
  87. unsigned int ds = crypto_shash_digestsize(tfm);
  88. u8 buf[shash_align_buffer_size(ds, alignmask)]
  89. __attribute__ ((aligned));
  90. int err;
  91. err = shash->final(desc, buf);
  92. memcpy(out, buf, ds);
  93. return err;
  94. }
  95. int crypto_shash_final(struct shash_desc *desc, u8 *out)
  96. {
  97. struct crypto_shash *tfm = desc->tfm;
  98. struct shash_alg *shash = crypto_shash_alg(tfm);
  99. unsigned long alignmask = crypto_shash_alignmask(tfm);
  100. if ((unsigned long)out & alignmask)
  101. return shash_final_unaligned(desc, out);
  102. return shash->final(desc, out);
  103. }
  104. EXPORT_SYMBOL_GPL(crypto_shash_final);
  105. static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
  106. unsigned int len, u8 *out)
  107. {
  108. return crypto_shash_update(desc, data, len) ?:
  109. crypto_shash_final(desc, out);
  110. }
  111. int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
  112. unsigned int len, u8 *out)
  113. {
  114. struct crypto_shash *tfm = desc->tfm;
  115. struct shash_alg *shash = crypto_shash_alg(tfm);
  116. unsigned long alignmask = crypto_shash_alignmask(tfm);
  117. if (((unsigned long)data | (unsigned long)out) & alignmask ||
  118. !shash->finup)
  119. return shash_finup_unaligned(desc, data, len, out);
  120. return shash->finup(desc, data, len, out);
  121. }
  122. EXPORT_SYMBOL_GPL(crypto_shash_finup);
  123. static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
  124. unsigned int len, u8 *out)
  125. {
  126. return crypto_shash_init(desc) ?:
  127. crypto_shash_update(desc, data, len) ?:
  128. crypto_shash_final(desc, out);
  129. }
  130. int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
  131. unsigned int len, u8 *out)
  132. {
  133. struct crypto_shash *tfm = desc->tfm;
  134. struct shash_alg *shash = crypto_shash_alg(tfm);
  135. unsigned long alignmask = crypto_shash_alignmask(tfm);
  136. if (((unsigned long)data | (unsigned long)out) & alignmask ||
  137. !shash->digest)
  138. return shash_digest_unaligned(desc, data, len, out);
  139. return shash->digest(desc, data, len, out);
  140. }
  141. EXPORT_SYMBOL_GPL(crypto_shash_digest);
  142. int crypto_shash_import(struct shash_desc *desc, const u8 *in)
  143. {
  144. struct crypto_shash *tfm = desc->tfm;
  145. struct shash_alg *alg = crypto_shash_alg(tfm);
  146. memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
  147. if (alg->reinit)
  148. alg->reinit(desc);
  149. return 0;
  150. }
  151. EXPORT_SYMBOL_GPL(crypto_shash_import);
  152. static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
  153. unsigned int keylen)
  154. {
  155. struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
  156. return crypto_shash_setkey(*ctx, key, keylen);
  157. }
  158. static int shash_async_init(struct ahash_request *req)
  159. {
  160. struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
  161. struct shash_desc *desc = ahash_request_ctx(req);
  162. desc->tfm = *ctx;
  163. desc->flags = req->base.flags;
  164. return crypto_shash_init(desc);
  165. }
/*
 * ahash ->update backed by an shash: walk the request's source
 * scatterlist and feed each mapped chunk to crypto_shash_update().
 */
static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	/*
	 * nbytes does double duty: the walk helpers yield the size of the
	 * next chunk (<= 0 terminates the loop), while the body overwrites
	 * it with the crypto_shash_update() return code, which
	 * crypto_hash_walk_done() then consumes to either continue the
	 * walk or propagate the error.
	 */
	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	/* 0 on success, negative error code otherwise. */
	return nbytes;
}
  176. static int shash_async_final(struct ahash_request *req)
  177. {
  178. return crypto_shash_final(ahash_request_ctx(req), req->result);
  179. }
/*
 * ahash ->digest backed by an shash.
 *
 * Fast path: when the whole request fits inside the first scatterlist
 * entry and does not cross a page boundary, kmap that single page and
 * digest it in one call.  Otherwise fall back to the generic
 * init/update/final sequence, which walks the full scatterlist.
 */
static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	/* Data contained in a single sg entry and a single page? */
	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		/* Bind the descriptor to the wrapped shash transform. */
		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		/* Optionally reschedule, honouring the request flags. */
		crypto_yield(desc->flags);
		goto out;
	}

	/* Slow path: full scatterlist walk. */
	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}
  210. static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
  211. {
  212. struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
  213. crypto_free_shash(*ctx);
  214. }
/*
 * Initialise an ahash-facing tfm that is implemented by wrapping an
 * shash algorithm: create the inner shash transform, stash it in the
 * tfm context, and point every ahash operation at the shash_async_*
 * adapters above.
 *
 * Returns 0 on success, -EAGAIN if the algorithm's module reference
 * could not be taken, or the crypto_create_tfm() error.
 */
static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	/* Pin the algorithm's module before creating the inner transform. */
	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		/* Drop the reference taken above on failure. */
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	/* Ensure the inner transform is freed when this tfm goes away. */
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	/* Request context holds the descriptor plus per-algorithm state. */
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
  240. static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
  241. unsigned int keylen)
  242. {
  243. struct shash_desc *desc = crypto_hash_ctx(tfm);
  244. return crypto_shash_setkey(desc->tfm, key, keylen);
  245. }
  246. static int shash_compat_init(struct hash_desc *hdesc)
  247. {
  248. struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
  249. desc->flags = hdesc->flags;
  250. return crypto_shash_init(desc);
  251. }
/*
 * Legacy hash ->update backed by an shash: walk the caller's
 * scatterlist and feed each mapped chunk to crypto_shash_update().
 */
static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	/*
	 * As in shash_async_update(): nbytes alternates between "size of
	 * the next chunk" (from the walk helpers) and "shash return code"
	 * (from the loop body), which crypto_hash_walk_done() uses to
	 * continue or abort the walk.
	 */
	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	/* 0 on success, negative error code otherwise. */
	return nbytes;
}
  263. static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
  264. {
  265. return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
  266. }
/*
 * Legacy hash ->digest backed by an shash.
 *
 * Fast path: when the data fits in the first scatterlist entry without
 * crossing a page boundary, kmap that page and digest in one call.
 * Otherwise fall back to the init/update/final sequence, which walks
 * the whole scatterlist.
 */
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	/* Data contained in a single sg entry and a single page? */
	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		/* Optionally reschedule, honouring the descriptor flags. */
		crypto_yield(desc->flags);
		goto out;
	}

	/* Slow path: full scatterlist walk. */
	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}
  292. static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
  293. {
  294. struct shash_desc *desc= crypto_tfm_ctx(tfm);
  295. crypto_free_shash(desc->tfm);
  296. }
/*
 * Initialise a legacy hash-facing tfm that is implemented by wrapping
 * an shash algorithm: create the inner shash transform, bind it to the
 * persistent descriptor in the tfm context, and point every hash
 * operation at the shash_compat_* adapters above.
 *
 * Returns 0 on success, -EAGAIN if the algorithm's module reference
 * could not be taken, or the crypto_create_tfm() error.
 */
static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	/* Pin the algorithm's module before creating the inner transform. */
	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		/* Drop the reference taken above on failure. */
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	/* Ensure the inner transform is freed when this tfm goes away. */
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}
  321. static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
  322. {
  323. switch (mask & CRYPTO_ALG_TYPE_MASK) {
  324. case CRYPTO_ALG_TYPE_HASH_MASK:
  325. return crypto_init_shash_ops_compat(tfm);
  326. case CRYPTO_ALG_TYPE_AHASH_MASK:
  327. return crypto_init_shash_ops_async(tfm);
  328. }
  329. return -EINVAL;
  330. }
  331. static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
  332. u32 mask)
  333. {
  334. struct shash_alg *salg = __crypto_shash_alg(alg);
  335. switch (mask & CRYPTO_ALG_TYPE_MASK) {
  336. case CRYPTO_ALG_TYPE_HASH_MASK:
  337. return sizeof(struct shash_desc) + salg->descsize;
  338. case CRYPTO_ALG_TYPE_AHASH_MASK:
  339. return sizeof(struct crypto_shash *);
  340. }
  341. return 0;
  342. }
/*
 * Per-transform initialisation for the native shash frontend.  There is
 * nothing to set up here — per-request state lives in the caller's
 * struct shash_desc — so this always succeeds.
 */
static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	return 0;
}
/*
 * External (frontend) size for an shash transform: just the context
 * size the algorithm declared; no extra wrapper state is needed.
 */
static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}
/*
 * Emit this algorithm's lines in /proc/crypto.  Marked unused because
 * it is only referenced when CONFIG_PROC_FS is enabled (see the
 * crypto_shash_type initializer below).
 */
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type : shash\n");
	seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize : %u\n", salg->digestsize);
	seq_printf(m, "descsize : %u\n", salg->descsize);
}
/*
 * Frontend descriptor binding shash algorithms into the generic crypto
 * core: context sizing, per-frontend initialisation and the /proc
 * display hook.
 */
static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,	/* legacy frontends */
	.extsize = crypto_shash_extsize,	/* native shash frontend */
	.init = crypto_init_shash_ops,		/* hash/ahash compat init */
	.init_tfm = crypto_shash_init_tfm,	/* native init (no-op) */
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
/*
 * crypto_alloc_shash - allocate a synchronous hash transform by name.
 * Thin wrapper around crypto_alloc_tfm() using the shash frontend;
 * returns an ERR_PTR on failure (see crypto_alloc_tfm()).
 */
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
  382. int crypto_register_shash(struct shash_alg *alg)
  383. {
  384. struct crypto_alg *base = &alg->base;
  385. if (alg->digestsize > PAGE_SIZE / 8 ||
  386. alg->descsize > PAGE_SIZE / 8)
  387. return -EINVAL;
  388. base->cra_type = &crypto_shash_type;
  389. base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
  390. base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
  391. return crypto_register_alg(base);
  392. }
  393. EXPORT_SYMBOL_GPL(crypto_register_shash);
/*
 * crypto_unregister_shash - remove a previously registered shash
 * algorithm.  Returns the crypto_unregister_alg() result.
 */
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");