/*
 * Copyright (C)2006 USAGI/WIDE Project
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 * Author:
 *      Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */
#include <crypto/scatterwalk.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/hardirq.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>

/* Constants used to derive the XCBC subkeys K1, K2 and K3 (RFC 3566). */
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
                           0x02020202, 0x02020202, 0x02020202, 0x02020202,
                           0x03030303, 0x03030303, 0x03030303, 0x03030303};
/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | crypto_xcbc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 * | key (block size)
 * +------------------------
 * | consts (block size * 3)
 * +------------------------
 */
struct crypto_xcbc_ctx {
        struct crypto_cipher *child;
        u8 *odds;
        u8 *prev;
        u8 *key;
        u8 *consts;
        void (*xor)(u8 *a, const u8 *b, unsigned int bs);
        unsigned int keylen;
        unsigned int len;
};
static void xor_128(u8 *a, const u8 *b, unsigned int bs)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
        ((u32 *)a)[2] ^= ((u32 *)b)[2];
        ((u32 *)a)[3] ^= ((u32 *)b)[3];
}

/*
 * Derive K1 by encrypting the 0x01 constant block with the user key and
 * program it into the child cipher, which then drives the CBC chain.
 */
static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
                                      struct crypto_xcbc_ctx *ctx)
{
        int bs = crypto_hash_blocksize(parent);
        int err = 0;
        u8 key1[bs];

        if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
                return err;

        crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts);

        return crypto_cipher_setkey(ctx->child, key1, bs);
}
static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
                                     const u8 *inkey, unsigned int keylen)
{
        struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);

        if (keylen != crypto_cipher_blocksize(ctx->child))
                return -EINVAL;

        ctx->keylen = keylen;
        memcpy(ctx->key, inkey, keylen);
        ctx->consts = (u8 *)ks;

        return _crypto_xcbc_digest_setkey(parent, ctx);
}

static int crypto_xcbc_digest_init(struct hash_desc *pdesc)
{
        struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
        int bs = crypto_hash_blocksize(pdesc->tfm);

        ctx->len = 0;
        memset(ctx->odds, 0, bs);
        memset(ctx->prev, 0, bs);

        return 0;
}
  96. static int crypto_xcbc_digest_update2(struct hash_desc *pdesc,
  97. struct scatterlist *sg,
  98. unsigned int nbytes)
  99. {
  100. struct crypto_hash *parent = pdesc->tfm;
  101. struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
  102. struct crypto_cipher *tfm = ctx->child;
  103. int bs = crypto_hash_blocksize(parent);
  104. unsigned int i = 0;
  105. do {
  106. struct page *pg = sg_page(&sg[i]);
  107. unsigned int offset = sg[i].offset;
  108. unsigned int slen = sg[i].length;
  109. if (unlikely(slen > nbytes))
  110. slen = nbytes;
  111. nbytes -= slen;
  112. while (slen > 0) {
  113. unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
  114. char *p = crypto_kmap(pg, 0) + offset;
  115. /* checking the data can fill the block */
  116. if ((ctx->len + len) <= bs) {
  117. memcpy(ctx->odds + ctx->len, p, len);
  118. ctx->len += len;
  119. slen -= len;
  120. /* checking the rest of the page */
  121. if (len + offset >= PAGE_SIZE) {
  122. offset = 0;
  123. pg++;
  124. } else
  125. offset += len;
  126. crypto_kunmap(p, 0);
  127. crypto_yield(pdesc->flags);
  128. continue;
  129. }
  130. /* filling odds with new data and encrypting it */
  131. memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
  132. len -= bs - ctx->len;
  133. p += bs - ctx->len;
  134. ctx->xor(ctx->prev, ctx->odds, bs);
  135. crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev);
  136. /* clearing the length */
  137. ctx->len = 0;
  138. /* encrypting the rest of data */
  139. while (len > bs) {
  140. ctx->xor(ctx->prev, p, bs);
  141. crypto_cipher_encrypt_one(tfm, ctx->prev,
  142. ctx->prev);
  143. p += bs;
  144. len -= bs;
  145. }
  146. /* keeping the surplus of blocksize */
  147. if (len) {
  148. memcpy(ctx->odds, p, len);
  149. ctx->len = len;
  150. }
  151. crypto_kunmap(p, 0);
  152. crypto_yield(pdesc->flags);
  153. slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
  154. offset = 0;
  155. pg++;
  156. }
  157. i++;
  158. } while (nbytes>0);
  159. return 0;
  160. }
static int crypto_xcbc_digest_update(struct hash_desc *pdesc,
                                     struct scatterlist *sg,
                                     unsigned int nbytes)
{
        if (WARN_ON_ONCE(in_irq()))
                return -EDEADLK;

        return crypto_xcbc_digest_update2(pdesc, sg, nbytes);
}
static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
{
        struct crypto_hash *parent = pdesc->tfm;
        struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
        struct crypto_cipher *tfm = ctx->child;
        int bs = crypto_hash_blocksize(parent);
        int err = 0;

        if (ctx->len == bs) {
                /* The final block is complete: mix in K2 (0x02 constant). */
                u8 key2[bs];

                if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
                        return err;

                crypto_cipher_encrypt_one(tfm, key2,
                                          (u8 *)(ctx->consts + bs));

                ctx->xor(ctx->prev, ctx->odds, bs);
                ctx->xor(ctx->prev, key2, bs);
                _crypto_xcbc_digest_setkey(parent, ctx);

                crypto_cipher_encrypt_one(tfm, out, ctx->prev);
        } else {
                /*
                 * The final block is partial: pad with 0x80 followed by
                 * zeroes and mix in K3 (0x03 constant).
                 */
                u8 key3[bs];
                unsigned int rlen;
                u8 *p = ctx->odds + ctx->len;

                *p = 0x80;
                p++;

                rlen = bs - ctx->len - 1;
                if (rlen)
                        memset(p, 0, rlen);

                if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
                        return err;

                crypto_cipher_encrypt_one(tfm, key3,
                                          (u8 *)(ctx->consts + bs * 2));

                ctx->xor(ctx->prev, ctx->odds, bs);
                ctx->xor(ctx->prev, key3, bs);

                _crypto_xcbc_digest_setkey(parent, ctx);

                crypto_cipher_encrypt_one(tfm, out, ctx->prev);
        }

        return 0;
}
static int crypto_xcbc_digest(struct hash_desc *pdesc,
                              struct scatterlist *sg, unsigned int nbytes,
                              u8 *out)
{
        if (WARN_ON_ONCE(in_irq()))
                return -EDEADLK;

        crypto_xcbc_digest_init(pdesc);
        crypto_xcbc_digest_update2(pdesc, sg, nbytes);
        return crypto_xcbc_digest_final(pdesc, out);
}
static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cipher *cipher;
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
        int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        switch (bs) {
        case 16:
                ctx->xor = xor_128;
                break;
        default:
                return -EINVAL;
        }

        ctx->child = cipher;
        ctx->odds = (u8 *)(ctx + 1);
        ctx->prev = ctx->odds + bs;
        ctx->key = ctx->prev + bs;

        return 0;
}

static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));

        crypto_free_cipher(ctx->child);
}
static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
{
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH);
        if (err)
                return ERR_PTR(err);

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        switch (alg->cra_blocksize) {
        case 16:
                break;
        default:
                inst = ERR_PTR(-EINVAL);
                goto out_put_alg;
        }

        inst = crypto_alloc_instance("xcbc", alg);
        if (IS_ERR(inst))
                goto out_put_alg;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
        inst->alg.cra_priority = alg->cra_priority;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;
        inst->alg.cra_type = &crypto_hash_type;

        inst->alg.cra_hash.digestsize = alg->cra_blocksize;
        inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
                                ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
        inst->alg.cra_init = xcbc_init_tfm;
        inst->alg.cra_exit = xcbc_exit_tfm;

        inst->alg.cra_hash.init = crypto_xcbc_digest_init;
        inst->alg.cra_hash.update = crypto_xcbc_digest_update;
        inst->alg.cra_hash.final = crypto_xcbc_digest_final;
        inst->alg.cra_hash.digest = crypto_xcbc_digest;
        inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;

out_put_alg:
        crypto_mod_put(alg);
        return inst;
}

static void xcbc_free(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(inst);
}

static struct crypto_template crypto_xcbc_tmpl = {
        .name = "xcbc",
        .alloc = xcbc_alloc,
        .free = xcbc_free,
        .module = THIS_MODULE,
};
static int __init crypto_xcbc_module_init(void)
{
        return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
        crypto_unregister_template(&crypto_xcbc_tmpl);
}

module_init(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");
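
/*
 * Illustrative usage (a minimal sketch, not part of the original file):
 * how a kernel-side caller might compute an AES-XCBC-MAC through the
 * legacy crypto_hash interface that this template registers as "xcbc".
 * The function name example_xcbc_digest and its buffers are hypothetical;
 * the crypto_alloc_hash/crypto_hash_setkey/crypto_hash_digest calls are the
 * standard entry points of that interface.
 *
 *      static int example_xcbc_digest(const u8 key[16], const void *data,
 *                                     unsigned int len, u8 out[16])
 *      {
 *              struct crypto_hash *tfm;
 *              struct hash_desc desc;
 *              struct scatterlist sg;
 *              int err;
 *
 *              tfm = crypto_alloc_hash("xcbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *              if (IS_ERR(tfm))
 *                      return PTR_ERR(tfm);
 *
 *              err = crypto_hash_setkey(tfm, key, 16);
 *              if (!err) {
 *                      desc.tfm = tfm;
 *                      desc.flags = 0;
 *                      sg_init_one(&sg, data, len);
 *                      err = crypto_hash_digest(&desc, &sg, len, out);
 *              }
 *              crypto_free_hash(tfm);
 *              return err;
 *      }
 */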