/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK 0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER 0x00000001
#define CRYPTO_ALG_TYPE_DIGEST 0x00000002
#define CRYPTO_ALG_TYPE_COMPRESS 0x00000004

#define CRYPTO_ALG_LARVAL 0x00000010
#define CRYPTO_ALG_DEAD 0x00000020
#define CRYPTO_ALG_DYING 0x00000040

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_MODE_MASK 0x000000ff
#define CRYPTO_TFM_REQ_MASK 0x000fff00
#define CRYPTO_TFM_RES_MASK 0xfff00000

#define CRYPTO_TFM_MODE_ECB 0x00000001
#define CRYPTO_TFM_MODE_CBC 0x00000002
#define CRYPTO_TFM_MODE_CFB 0x00000004
#define CRYPTO_TFM_MODE_CTR 0x00000008

#define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200
#define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_UNSPEC 0
#define CRYPTO_MAX_ALG_NAME 64

#define CRYPTO_DIR_ENCRYPT 1
#define CRYPTO_DIR_DECRYPT 0

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#endif

#ifdef CRYPTO_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
#else
#define CRYPTO_MINALIGN_ATTR
#endif
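
/*
 * Illustrative sketch (not part of this header): a typical per-transform
 * context containing 64-bit members.  Because __crt_ctx below carries
 * CRYPTO_MINALIGN_ATTR, the pointer returned by crypto_tfm_ctx() (defined
 * further down in this file) is assumed to be sufficiently aligned for the
 * u64 fields, avoiding unaligned accesses on 32-bit platforms such as arm.
 * All example_* names are placeholders.
 *
 *	struct example_ctx {
 *		u64 counter;
 *		u32 key_words[8];
 *		unsigned int key_length;
 *	};
 *
 *	static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				  unsigned int keylen, u32 *flags)
 *	{
 *		struct example_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		ctx->key_length = keylen;
 *		ctx->counter = 0;
 *		return 0;
 *	}
 */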

struct scatterlist;
struct crypto_tfm;

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen, u32 *flags);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

	unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
	unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
	unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
	unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
};

struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(struct crypto_tfm *tfm);
	void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
			   unsigned int len);
	void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
	int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen, u32 *flags);
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_cipher cra_u.cipher
#define cra_digest cra_u.digest
#define cra_compress cra_u.compress

struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	union {
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
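
/*
 * Illustrative sketch (hypothetical "example" cipher, not part of this
 * header): how a cipher module typically fills in struct crypto_alg and
 * registers it, in the style of in-tree drivers of this API generation.
 * All example_* names, sizes and key lengths are placeholders.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name	= "example",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,
 *		.cra_ctxsize	= sizeof(struct example_ctx),
 *		.cra_module	= THIS_MODULE,
 *		.cra_list	= LIST_HEAD_INIT(example_alg.cra_list),
 *		.cra_u		= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt } }
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 *
 *	module_init(example_mod_init);
 *	module_exit(example_mod_exit);
 */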

/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
int crypto_alg_available(const char *name, u32 flags);
#else
static inline int crypto_alg_available(const char *name, u32 flags)
{
	return 0;
}
#endif
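
/*
 * Illustrative sketch: probing for an algorithm before relying on it.
 * crypto_alg_available() returns non-zero when a matching algorithm can be
 * found; with CONFIG_CRYPTO disabled it is stubbed out above and always
 * reports 0.  The "aes" name is an example only.
 *
 *	if (!crypto_alg_available("aes", 0))
 *		printk(KERN_WARNING "aes cipher not available\n");
 */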

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_tfm() and
 * crypto_free_tfm(), as well as the various helpers below.
 */
struct cipher_tfm {
	void *cit_iv;
	unsigned int cit_ivsize;
	u32 cit_mode;
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	int (*cit_encrypt)(struct crypto_tfm *tfm,
			   struct scatterlist *dst,
			   struct scatterlist *src,
			   unsigned int nbytes);
	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes, u8 *iv);
	int (*cit_decrypt)(struct crypto_tfm *tfm,
			   struct scatterlist *dst,
			   struct scatterlist *src,
			   unsigned int nbytes);
	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes, u8 *iv);
	void (*cit_xor_block)(u8 *dst, const u8 *src);
};

struct digest_tfm {
	void (*dit_init)(struct crypto_tfm *tfm);
	void (*dit_update)(struct crypto_tfm *tfm,
			   struct scatterlist *sg, unsigned int nsg);
	void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
	void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
			   unsigned int nsg, u8 *out);
	int (*dit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
#ifdef CONFIG_CRYPTO_HMAC
	void *dit_hmac_block;
#endif
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

#define crt_cipher crt_u.cipher
#define crt_digest crt_u.digest
#define crt_compress crt_u.compress

struct crypto_tfm {
	u32 crt_flags;

	union {
		struct cipher_tfm cipher;
		struct digest_tfm digest;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

/*
 * Transform user interface.
 */

/*
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  A refcount is grabbed on the algorithm which is then associated
 * with the new transform.
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
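
/*
 * Illustrative sketch of the transform life cycle described above (error
 * handling trimmed).  The "aes" name, the 16-byte key and the 64-byte
 * in-place buffer are examples only, and sg_init_one() is assumed to be
 * available from <linux/scatterlist.h>:
 *
 *	struct crypto_tfm *tfm;
 *	struct scatterlist sg;
 *	u8 key[16];
 *	u8 buf[64];
 *
 *	tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_ECB);
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	if (crypto_cipher_setkey(tfm, key, sizeof(key)))
 *		goto out_free;
 *	sg_init_one(&sg, buf, sizeof(buf));
 *	crypto_cipher_encrypt(tfm, &sg, &sg, sizeof(buf));
 * out_free:
 *	crypto_free_tfm(tfm);
 */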

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_min_keysize;
}

static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_max_keysize;
}

static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_ivsize;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	return tfm->__crt_alg->cra_digest.dia_digestsize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
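
/*
 * Illustrative sketch: callers generally use the helpers above to size
 * buffers at run time rather than hard-coding algorithm parameters, since
 * block ciphers generally require the data length to be a multiple of the
 * block size and the IV buffer to be crypto_tfm_alg_ivsize() bytes.  The
 * names below are placeholders:
 *
 *	unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
 *	u8 *iv = kmalloc(crypto_tfm_alg_ivsize(tfm), GFP_KERNEL);
 *
 *	if (iv == NULL)
 *		return -ENOMEM;
 *	if (nbytes % bsize)
 *		return -EINVAL;
 */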

/*
 * API wrappers.
 */
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_init(tfm);
}

static inline void crypto_digest_update(struct crypto_tfm *tfm,
					struct scatterlist *sg,
					unsigned int nsg)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_update(tfm, sg, nsg);
}

static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_final(tfm, out);
}

static inline void crypto_digest_digest(struct crypto_tfm *tfm,
					struct scatterlist *sg,
					unsigned int nsg, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}

static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
				       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	if (tfm->crt_digest.dit_setkey == NULL)
		return -ENOSYS;
	return tfm->crt_digest.dit_setkey(tfm, key, keylen);
}
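
/*
 * Illustrative sketch: hashing one scatterlist entry with the digest
 * wrappers above.  "sha1" (20-byte digest) is an example algorithm, buf and
 * len are placeholders, and sg_init_one() is assumed to be available from
 * <linux/scatterlist.h>:
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *	struct scatterlist sg;
 *	u8 out[20];
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	sg_init_one(&sg, buf, len);
 *	crypto_digest_init(tfm);
 *	crypto_digest_update(tfm, &sg, 1);
 *	crypto_digest_final(tfm, out);
 *	crypto_free_tfm(tfm);
 *
 * The one-shot crypto_digest_digest(tfm, &sg, 1, out) is equivalent to the
 * init/update/final sequence.
 */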

static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
				       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}

static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
					const u8 *src, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(tfm->crt_cipher.cit_iv, src, len);
}

static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
					u8 *dst, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(dst, tfm->crt_cipher.cit_iv, len);
}
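
/*
 * Illustrative sketch: CBC encryption with an explicit IV using the cipher
 * wrappers above.  The tfm is assumed to have been allocated with
 * CRYPTO_TFM_MODE_CBC and its key already set; buf, len and the 16-byte IV
 * are placeholders, and sg_init_one() is assumed to be available:
 *
 *	struct scatterlist sg;
 *	u8 iv[16];
 *
 *	get_random_bytes(iv, sizeof(iv));
 *	sg_init_one(&sg, buf, len);
 *	crypto_cipher_encrypt_iv(tfm, &sg, &sg, len, iv);
 *
 * Alternatively, the per-tfm IV can be loaded with crypto_cipher_set_iv()
 * and the plain crypto_cipher_encrypt() used; crypto_tfm_alg_ivsize()
 * reports the required IV length.
 */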

static inline int crypto_comp_compress(struct crypto_tfm *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
}
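
/*
 * Illustrative sketch: one-shot compression.  *dlen carries the destination
 * capacity on entry and, on success, the number of bytes produced.
 * "deflate" is an example algorithm name; src, slen, dst and dst_capacity
 * are placeholders:
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("deflate", 0);
 *	unsigned int dlen = dst_capacity;
 *	int err;
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_tfm(tfm);
 *	return err;
 */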

/*
 * HMAC support.
 */
#ifdef CONFIG_CRYPTO_HMAC
void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
void crypto_hmac_update(struct crypto_tfm *tfm,
			struct scatterlist *sg, unsigned int nsg);
void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
		       unsigned int *keylen, u8 *out);
void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
		 struct scatterlist *sg, unsigned int nsg, u8 *out);
#endif	/* CONFIG_CRYPTO_HMAC */
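
/*
 * Illustrative sketch: HMAC-SHA1 over one scatterlist entry.  The key
 * length is passed by reference so the implementation may replace an
 * over-long key with its digest and update *keylen accordingly.  The key,
 * data and len names are placeholders, and sg_init_one() is assumed to be
 * available:
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *	struct scatterlist sg;
 *	unsigned int keylen = 20;
 *	u8 out[20];
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	sg_init_one(&sg, data, len);
 *	crypto_hmac(tfm, key, &keylen, &sg, 1, out);
 *	crypto_free_tfm(tfm);
 */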

#endif	/* _LINUX_CRYPTO_H */