padlock-sha.c

/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006  Michal Ludvig <michal@logix.cz>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include "padlock.h"

#define SHA1_DEFAULT_FALLBACK	"sha1-generic"
#define SHA1_DIGEST_SIZE	20
#define SHA1_HMAC_BLOCK_SIZE	64

#define SHA256_DEFAULT_FALLBACK	"sha256-generic"
#define SHA256_DIGEST_SIZE	32
#define SHA256_HMAC_BLOCK_SIZE	64

static char *sha1_fallback = SHA1_DEFAULT_FALLBACK;
static char *sha256_fallback = SHA256_DEFAULT_FALLBACK;

module_param(sha1_fallback, charp, 0644);
module_param(sha256_fallback, charp, 0644);

MODULE_PARM_DESC(sha1_fallback, "Fallback driver for SHA1. Default is "
		 SHA1_DEFAULT_FALLBACK);
MODULE_PARM_DESC(sha256_fallback, "Fallback driver for SHA256. Default is "
		 SHA256_DEFAULT_FALLBACK);
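
/*
 * Example (illustrative only): both fallbacks can be overridden at
 * module load time, e.g. (assuming the module is built as padlock-sha):
 *
 *	modprobe padlock-sha sha1_fallback=sha1-generic
 */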

struct padlock_sha_ctx {
	char		*data;
	size_t		used;
	int		bypass;
	void		(*f_sha_padlock)(const char *in, char *out, int count);
	struct crypto_tfm *fallback_tfm;
};

static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
{
	return (struct padlock_sha_ctx *)(crypto_tfm_ctx(tfm));
}

/* We need an aligned address on the stack. */
#define NEAREST_ALIGNED(ptr) \
	((void *)ALIGN((size_t)(ptr), PADLOCK_ALIGNMENT))
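
/*
 * Worked example: ALIGN(x, a) rounds x up to the next multiple of a.
 * Assuming PADLOCK_ALIGNMENT is 16 (which the "char buf[128+16]"
 * sizing below suggests), a stack address of 0x1009 would be rounded
 * up to 0x1010; the 16 spare bytes cover the worst-case rounding.
 */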

static struct crypto_alg sha1_alg, sha256_alg;

static void padlock_sha_bypass(struct crypto_tfm *tfm)
{
	if (ctx(tfm)->bypass)
		return;

	BUG_ON(!ctx(tfm)->fallback_tfm);

	crypto_digest_init(ctx(tfm)->fallback_tfm);
	if (ctx(tfm)->data && ctx(tfm)->used) {
		struct scatterlist sg;

		sg_set_buf(&sg, ctx(tfm)->data, ctx(tfm)->used);
		crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
	}

	ctx(tfm)->used = 0;
	ctx(tfm)->bypass = 1;
}

static void padlock_sha_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->used = 0;
	ctx(tfm)->bypass = 0;
}

static void padlock_sha_update(struct crypto_tfm *tfm,
			       const uint8_t *data, unsigned int length)
{
	/* Our buffer is always one page. */
	if (unlikely(!ctx(tfm)->bypass &&
		     (ctx(tfm)->used + length > PAGE_SIZE)))
		padlock_sha_bypass(tfm);

	if (unlikely(ctx(tfm)->bypass)) {
		struct scatterlist sg;

		BUG_ON(!ctx(tfm)->fallback_tfm);
		sg_set_buf(&sg, (uint8_t *)data, length);
		crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
		return;
	}

	memcpy(ctx(tfm)->data + ctx(tfm)->used, data, length);
	ctx(tfm)->used += length;
}
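
/*
 * Example (illustrative only): updates that fit in the page are merely
 * buffered, and the hardware sees the whole message only in _final();
 * an update that would overflow the page flips ->bypass, replays the
 * buffered bytes into the fallback tfm and streams the rest there:
 *
 *	padlock_sha_update(tfm, hdr, 64);		// buffered
 *	padlock_sha_update(tfm, body, PAGE_SIZE);	// -> bypass
 *	padlock_sha_final(tfm, digest);			// fallback finishes
 */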

static inline void padlock_output_block(uint32_t *src,
					uint32_t *dst, size_t count)
{
	while (count--)
		*dst++ = swab32(*src++);
}
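
/*
 * Worked example: the engine leaves the state words in the CPU's
 * little-endian order, while a SHA digest is defined big-endian.
 * swab32(0x67452301) == 0x01234567, which a little-endian store lays
 * out in memory as the bytes 67 45 23 01 -- the correct leading
 * digest bytes.
 */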

static void padlock_do_sha1(const char *in, char *out, int count)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* Don't reduce the buffer size below 128 bytes!
	 * The PadLock microcode needs it that big. */
	char buf[128+16];
	char *result = NEAREST_ALIGNED(buf);

	((uint32_t *)result)[0] = 0x67452301;
	((uint32_t *)result)[1] = 0xEFCDAB89;
	((uint32_t *)result)[2] = 0x98BADCFE;
	((uint32_t *)result)[3] = 0x10325476;
	((uint32_t *)result)[4] = 0xC3D2E1F0;

	asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		      : "+S"(in), "+D"(result)
		      : "c"(count), "a"(0));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);
}
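
/*
 * A minimal self-test sketch (not compiled into the driver; the helper
 * name is hypothetical). In the driver proper the input always comes
 * from the page-sized ctx buffer; a string literal is used here purely
 * for illustration.
 */
#if 0
static void padlock_sha1_example(void)
{
	char digest[SHA1_DIGEST_SIZE];

	padlock_do_sha1("abc", digest, 3);
	/* digest now holds the standard SHA1("abc") test vector:
	 * a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d */
}
#endif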

static void padlock_do_sha256(const char *in, char *out, int count)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* Don't reduce the buffer size below 128 bytes!
	 * The PadLock microcode needs it that big. */
	char buf[128+16];
	char *result = NEAREST_ALIGNED(buf);

	((uint32_t *)result)[0] = 0x6A09E667;
	((uint32_t *)result)[1] = 0xBB67AE85;
	((uint32_t *)result)[2] = 0x3C6EF372;
	((uint32_t *)result)[3] = 0xA54FF53A;
	((uint32_t *)result)[4] = 0x510E527F;
	((uint32_t *)result)[5] = 0x9B05688C;
	((uint32_t *)result)[6] = 0x1F83D9AB;
	((uint32_t *)result)[7] = 0x5BE0CD19;

	asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		      : "+S"(in), "+D"(result)
		      : "c"(count), "a"(0));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);
}
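
/*
 * The analogous sketch for SHA256 (again hypothetical, not compiled):
 */
#if 0
static void padlock_sha256_example(void)
{
	char digest[SHA256_DIGEST_SIZE];

	padlock_do_sha256("abc", digest, 3);
	/* digest now holds the standard SHA256("abc") test vector:
	 * ba7816bf 8f01cfea 414140de 5dae2223
	 * b00361a3 96177a9c b410ff61 f20015ad */
}
#endif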

static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
{
	if (unlikely(ctx(tfm)->bypass)) {
		BUG_ON(!ctx(tfm)->fallback_tfm);
		crypto_digest_final(ctx(tfm)->fallback_tfm, out);
		ctx(tfm)->bypass = 0;
		return;
	}

	/* Pass the input buffer to PadLock microcode... */
	ctx(tfm)->f_sha_padlock(ctx(tfm)->data, out, ctx(tfm)->used);

	ctx(tfm)->used = 0;
}

static int padlock_cra_init(struct crypto_tfm *tfm,
			    const char *fallback_driver_name)
{
	/* For now we'll allocate one page. This could
	 * be made configurable later. */
	ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
	if (!ctx(tfm)->data)
		return -ENOMEM;

	/* Allocate a fallback and abort if it fails. */
	ctx(tfm)->fallback_tfm = crypto_alloc_tfm(fallback_driver_name, 0);
	if (!ctx(tfm)->fallback_tfm) {
		printk(KERN_WARNING PFX
		       "Fallback driver '%s' could not be loaded!\n",
		       fallback_driver_name);
		free_page((unsigned long)(ctx(tfm)->data));
		return -ENOENT;
	}

	return 0;
}

static int padlock_sha1_cra_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->f_sha_padlock = padlock_do_sha1;
	return padlock_cra_init(tfm, sha1_fallback);
}

static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->f_sha_padlock = padlock_do_sha256;
	return padlock_cra_init(tfm, sha256_fallback);
}

static void padlock_cra_exit(struct crypto_tfm *tfm)
{
	if (ctx(tfm)->data) {
		free_page((unsigned long)(ctx(tfm)->data));
		ctx(tfm)->data = NULL;
	}

	BUG_ON(!ctx(tfm)->fallback_tfm);
	crypto_free_tfm(ctx(tfm)->fallback_tfm);
	ctx(tfm)->fallback_tfm = NULL;
}

static struct crypto_alg sha1_alg = {
	.cra_name		= "sha1",
	.cra_driver_name	= "sha1-padlock",
	.cra_priority		= PADLOCK_CRA_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize		= SHA1_HMAC_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct padlock_sha_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(sha1_alg.cra_list),
	.cra_init		= padlock_sha1_cra_init,
	.cra_exit		= padlock_cra_exit,
	.cra_u			= {
		.digest = {
			.dia_digestsize	= SHA1_DIGEST_SIZE,
			.dia_init	= padlock_sha_init,
			.dia_update	= padlock_sha_update,
			.dia_final	= padlock_sha_final,
		}
	}
};

static struct crypto_alg sha256_alg = {
	.cra_name		= "sha256",
	.cra_driver_name	= "sha256-padlock",
	.cra_priority		= PADLOCK_CRA_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize		= SHA256_HMAC_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct padlock_sha_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(sha256_alg.cra_list),
	.cra_init		= padlock_sha256_cra_init,
	.cra_exit		= padlock_cra_exit,
	.cra_u			= {
		.digest = {
			.dia_digestsize	= SHA256_DIGEST_SIZE,
			.dia_init	= padlock_sha_init,
			.dia_update	= padlock_sha_update,
			.dia_final	= padlock_sha_final,
		}
	}
};
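
/*
 * Usage sketch (hypothetical helper, not part of the driver): a
 * kernel-side consumer reaches these algorithms through the same
 * digest API the driver itself uses for its fallback path. With this
 * module loaded, "sha1" is expected to resolve to "sha1-padlock"
 * thanks to its higher cra_priority.
 */
#if 0
static int example_sha1_digest(const void *data, unsigned int len,
			       uint8_t *out)
{
	struct crypto_tfm *tfm;
	struct scatterlist sg;

	tfm = crypto_alloc_tfm("sha1", 0);
	if (!tfm)
		return -ENOENT;

	sg_set_buf(&sg, (void *)data, len);
	crypto_digest_init(tfm);
	crypto_digest_update(tfm, &sg, 1);
	crypto_digest_final(tfm, out);
	crypto_free_tfm(tfm);

	return 0;
}
#endif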

static void __init padlock_sha_check_fallbacks(void)
{
	struct crypto_tfm *tfm_sha1, *tfm_sha256;

	/* We'll try to allocate one TFM for each fallback
	 * to test that the modules are available. */
	tfm_sha1 = crypto_alloc_tfm(sha1_fallback, 0);
	if (!tfm_sha1) {
		printk(KERN_WARNING PFX
		       "Couldn't load fallback module for '%s'. Tried '%s'.\n",
		       sha1_alg.cra_name, sha1_fallback);
	} else {
		printk(KERN_NOTICE PFX
		       "Fallback for '%s' is driver '%s' (prio=%d)\n",
		       sha1_alg.cra_name,
		       crypto_tfm_alg_driver_name(tfm_sha1),
		       crypto_tfm_alg_priority(tfm_sha1));
		crypto_free_tfm(tfm_sha1);
	}

	tfm_sha256 = crypto_alloc_tfm(sha256_fallback, 0);
	if (!tfm_sha256) {
		printk(KERN_WARNING PFX
		       "Couldn't load fallback module for '%s'. Tried '%s'.\n",
		       sha256_alg.cra_name, sha256_fallback);
	} else {
		printk(KERN_NOTICE PFX
		       "Fallback for '%s' is driver '%s' (prio=%d)\n",
		       sha256_alg.cra_name,
		       crypto_tfm_alg_driver_name(tfm_sha256),
		       crypto_tfm_alg_priority(tfm_sha256));
		crypto_free_tfm(tfm_sha256);
	}
}

static int __init padlock_init(void)
{
	int rc = -ENODEV;

	if (!cpu_has_phe) {
		printk(KERN_ERR PFX "VIA PadLock Hash Engine not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_phe_enabled) {
		printk(KERN_ERR PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
		return -ENODEV;
	}

	padlock_sha_check_fallbacks();

	rc = crypto_register_alg(&sha1_alg);
	if (rc)
		goto out;

	rc = crypto_register_alg(&sha256_alg);
	if (rc)
		goto out_unreg1;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

	return 0;

out_unreg1:
	crypto_unregister_alg(&sha1_alg);
out:
	printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
	return rc;
}

static void __exit padlock_fini(void)
{
	crypto_unregister_alg(&sha1_alg);
	crypto_unregister_alg(&sha256_alg);
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");