/* crypto/xcbc.c — XCBC keyed hash algorithm (RFC 3566) */
  1. /*
  2. * Copyright (C)2006 USAGI/WIDE Project
  3. *
  4. * This program is free software; you can redistribute it and/or modify
  5. * it under the terms of the GNU General Public License as published by
  6. * the Free Software Foundation; either version 2 of the License, or
  7. * (at your option) any later version.
  8. *
  9. * This program is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. * GNU General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU General Public License
  15. * along with this program; if not, write to the Free Software
  16. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  17. *
  18. * Author:
  19. * Kazunori Miyazawa <miyazawa@linux-ipv6.org>
  20. */
  21. #include <linux/crypto.h>
  22. #include <linux/err.h>
  23. #include <linux/kernel.h>
  24. #include <linux/mm.h>
  25. #include <linux/rtnetlink.h>
  26. #include <linux/slab.h>
  27. #include <linux/scatterlist.h>
  28. #include "internal.h"
/*
 * RFC 3566 key-derivation constants: three cipher blocks filled with
 * 0x01, 0x02 and 0x03 respectively.  Encrypting them under the user key
 * yields the internal keys K1, K2 and K3 (see _crypto_xcbc_digest_setkey()
 * and crypto_xcbc_digest_final()).  Accessed as bytes via ctx->consts.
 */
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};
  32. /*
  33. * +------------------------
  34. * | <parent tfm>
  35. * +------------------------
  36. * | crypto_xcbc_ctx
  37. * +------------------------
  38. * | odds (block size)
  39. * +------------------------
  40. * | prev (block size)
  41. * +------------------------
  42. * | key (block size)
  43. * +------------------------
  44. * | consts (block size * 3)
  45. * +------------------------
  46. */
/*
 * Per-tfm XCBC state.  The odds/prev/key buffers are carved out of the
 * memory immediately following this struct (see the layout diagram above
 * and the pointer setup in xcbc_init_tfm()).
 */
struct crypto_xcbc_ctx {
	struct crypto_tfm *child;	/* underlying block cipher transform */
	u8 *odds;			/* partial/last block accumulator (bs bytes) */
	u8 *prev;			/* CBC chaining value (bs bytes) */
	u8 *key;			/* copy of the user-supplied key (bs bytes) */
	u8 *consts;			/* points at ks[] (3 * bs bytes of constants) */
	void (*xor)(u8 *a, const u8 *b, unsigned int bs);	/* a ^= b, bs bytes */
	unsigned int keylen;		/* number of valid bytes in *key */
	unsigned int len;		/* number of buffered bytes in *odds */
};
  57. static void xor_128(u8 *a, const u8 *b, unsigned int bs)
  58. {
  59. ((u32 *)a)[0] ^= ((u32 *)b)[0];
  60. ((u32 *)a)[1] ^= ((u32 *)b)[1];
  61. ((u32 *)a)[2] ^= ((u32 *)b)[2];
  62. ((u32 *)a)[3] ^= ((u32 *)b)[3];
  63. }
  64. static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
  65. struct crypto_xcbc_ctx *ctx)
  66. {
  67. int bs = crypto_hash_blocksize(parent);
  68. int err = 0;
  69. u8 key1[bs];
  70. if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
  71. return err;
  72. ctx->child->__crt_alg->cra_cipher.cia_encrypt(ctx->child, key1,
  73. ctx->consts);
  74. return crypto_cipher_setkey(ctx->child, key1, bs);
  75. }
  76. static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
  77. const u8 *inkey, unsigned int keylen)
  78. {
  79. struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
  80. if (keylen != crypto_tfm_alg_blocksize(ctx->child))
  81. return -EINVAL;
  82. ctx->keylen = keylen;
  83. memcpy(ctx->key, inkey, keylen);
  84. ctx->consts = (u8*)ks;
  85. return _crypto_xcbc_digest_setkey(parent, ctx);
  86. }
  87. static int crypto_xcbc_digest_init(struct hash_desc *pdesc)
  88. {
  89. struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
  90. int bs = crypto_hash_blocksize(pdesc->tfm);
  91. ctx->len = 0;
  92. memset(ctx->odds, 0, bs);
  93. memset(ctx->prev, 0, bs);
  94. return 0;
  95. }
/*
 * Feed @nbytes of scatterlist data into the MAC.  Complete blocks are
 * CBC-encrypted into ctx->prev; up to one block of trailing data is kept
 * in ctx->odds so that crypto_xcbc_digest_final() can apply the RFC 3566
 * last-block treatment (which differs for full vs. partial blocks).
 */
static int crypto_xcbc_digest_update(struct hash_desc *pdesc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_tfm *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	unsigned int i = 0;

	do {
		struct page *pg = sg[i].page;
		unsigned int offset = sg[i].offset;
		unsigned int slen = sg[i].length;

		/* walk this sg entry one kmapped page at a time */
		while (slen > 0) {
			unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			char *p = crypto_kmap(pg, 0) + offset;

			/* checking the data can fill the block */
			if ((ctx->len + len) <= bs) {
				/* not enough for a full block yet: just buffer it */
				memcpy(ctx->odds + ctx->len, p, len);
				ctx->len += len;
				slen -= len;

				/* checking the rest of the page */
				if (len + offset >= PAGE_SIZE) {
					offset = 0;
					pg++;
				} else
					offset += len;

				crypto_kunmap(p, 0);
				crypto_yield(tfm->crt_flags);
				continue;
			}

			/* filling odds with new data and encrypting it */
			memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
			len -= bs - ctx->len;
			p += bs - ctx->len;

			ctx->xor(ctx->prev, ctx->odds, bs);
			tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev);

			/* clearing the length */
			ctx->len = 0;

			/*
			 * encrypting the rest of data; the strict '>' (not >=)
			 * deliberately leaves the final full block in the loop's
			 * remainder so it lands in ctx->odds for final().
			 */
			while (len > bs) {
				ctx->xor(ctx->prev, p, bs);
				tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev);
				p += bs;
				len -= bs;
			}

			/* keeping the surplus of blocksize */
			if (len) {
				memcpy(ctx->odds, p, len);
				ctx->len = len;
			}
			/*
			 * NOTE(review): p has been advanced past the address
			 * returned by crypto_kmap() at this point — confirm
			 * crypto_kunmap() tolerates a pointer inside the page.
			 */
			crypto_kunmap(p, 0);
			crypto_yield(tfm->crt_flags);
			/* consume what this page contributed (offset is still the
			 * page-entry offset here, untouched by the encrypt path) */
			slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			offset = 0;
			pg++;
		}
		nbytes-=sg[i].length;
		i++;
	} while (nbytes>0);

	return 0;
}
/*
 * Finish the MAC per RFC 3566.  Two cases for the data buffered in
 * ctx->odds: a complete final block is mixed with K2 = E(key, 0x02...02);
 * a partial (or empty) block is padded with 0x80 0x00... and mixed with
 * K3 = E(key, 0x03...03).  The result is encrypted under K1 into @out.
 */
static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_tfm *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	int err = 0;

	if (ctx->len == bs) {
		/* update() always leaves the last full block here */
		u8 key2[bs];

		/* re-key with the raw user key to derive K2 */
		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		/* K2 = E(user_key, second constant block) */
		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key2, (const u8*)(ctx->consts+bs));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key2, bs);
		/*
		 * Restore K1 on the child cipher for the closing encryption.
		 * NOTE(review): the return value is ignored here — a setkey
		 * failure would silently produce a wrong MAC.
		 */
		_crypto_xcbc_digest_setkey(parent, ctx);

		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev);
	} else {
		u8 key3[bs];
		unsigned int rlen;
		u8 *p = ctx->odds + ctx->len;
		/* RFC 3566 padding: 0x80 then zeros to the end of the block */
		*p = 0x80;
		p++;

		rlen = bs - ctx->len -1;
		if (rlen)
			memset(p, 0, rlen);

		/* re-key with the raw user key to derive K3 */
		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		/* K3 = E(user_key, third constant block) */
		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key3, (const u8*)(ctx->consts+bs*2));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key3, bs);

		/* restore K1; NOTE(review): return value ignored, as above */
		_crypto_xcbc_digest_setkey(parent, ctx);

		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev);
	}
	return 0;
}
  193. static int crypto_xcbc_digest(struct hash_desc *pdesc,
  194. struct scatterlist *sg, unsigned int nbytes, u8 *out)
  195. {
  196. crypto_xcbc_digest_init(pdesc);
  197. crypto_xcbc_digest_update(pdesc, sg, nbytes);
  198. return crypto_xcbc_digest_final(pdesc, out);
  199. }
  200. static int xcbc_init_tfm(struct crypto_tfm *tfm)
  201. {
  202. struct crypto_instance *inst = (void *)tfm->__crt_alg;
  203. struct crypto_spawn *spawn = crypto_instance_ctx(inst);
  204. struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
  205. int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));
  206. tfm = crypto_spawn_tfm(spawn);
  207. if (IS_ERR(tfm))
  208. return PTR_ERR(tfm);
  209. switch(bs) {
  210. case 16:
  211. ctx->xor = xor_128;
  212. break;
  213. default:
  214. return -EINVAL;
  215. }
  216. ctx->child = crypto_cipher_cast(tfm);
  217. ctx->odds = (u8*)(ctx+1);
  218. ctx->prev = ctx->odds + bs;
  219. ctx->key = ctx->prev + bs;
  220. return 0;
  221. };
  222. static void xcbc_exit_tfm(struct crypto_tfm *tfm)
  223. {
  224. struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
  225. crypto_free_cipher(ctx->child);
  226. }
  227. static struct crypto_instance *xcbc_alloc(void *param, unsigned int len)
  228. {
  229. struct crypto_instance *inst;
  230. struct crypto_alg *alg;
  231. alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_CIPHER,
  232. CRYPTO_ALG_TYPE_HASH_MASK | CRYPTO_ALG_ASYNC);
  233. if (IS_ERR(alg))
  234. return ERR_PTR(PTR_ERR(alg));
  235. switch(alg->cra_blocksize) {
  236. case 16:
  237. break;
  238. default:
  239. return ERR_PTR(PTR_ERR(alg));
  240. }
  241. inst = crypto_alloc_instance("xcbc", alg);
  242. if (IS_ERR(inst))
  243. goto out_put_alg;
  244. inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
  245. inst->alg.cra_priority = alg->cra_priority;
  246. inst->alg.cra_blocksize = alg->cra_blocksize;
  247. inst->alg.cra_alignmask = alg->cra_alignmask;
  248. inst->alg.cra_type = &crypto_hash_type;
  249. inst->alg.cra_hash.digestsize =
  250. (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
  251. CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
  252. alg->cra_blocksize;
  253. inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
  254. ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
  255. inst->alg.cra_init = xcbc_init_tfm;
  256. inst->alg.cra_exit = xcbc_exit_tfm;
  257. inst->alg.cra_hash.init = crypto_xcbc_digest_init;
  258. inst->alg.cra_hash.update = crypto_xcbc_digest_update;
  259. inst->alg.cra_hash.final = crypto_xcbc_digest_final;
  260. inst->alg.cra_hash.digest = crypto_xcbc_digest;
  261. inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;
  262. out_put_alg:
  263. crypto_mod_put(alg);
  264. return inst;
  265. }
  266. static void xcbc_free(struct crypto_instance *inst)
  267. {
  268. crypto_drop_spawn(crypto_instance_ctx(inst));
  269. kfree(inst);
  270. }
/* Template registration record for "xcbc(...)" instantiations. */
static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.alloc = xcbc_alloc,
	.free = xcbc_free,
	.module = THIS_MODULE,
};
/* Module entry: register the xcbc template with the crypto API. */
static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}
/* Module exit: unregister the xcbc template. */
static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}
module_init(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");