/* sha512_glue.c */
/* Glue code for SHA512 hashing optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon crypto/sha512_generic.c
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2003 Kyle McMartin <kyle@debian.org>
 */
#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha.h>

#include <asm/pstate.h>
#include <asm/elf.h>
/* Implemented in sparc64 assembly: feeds 'rounds' consecutive
 * SHA512_BLOCK_SIZE-byte blocks of 'data' through the hardware sha512
 * opcode, updating the eight 64-bit words of 'digest' in place.
 */
asmlinkage void sha512_sparc64_transform(u64 *digest, const char *data,
					 unsigned int rounds);
  21. static int sha512_sparc64_init(struct shash_desc *desc)
  22. {
  23. struct sha512_state *sctx = shash_desc_ctx(desc);
  24. sctx->state[0] = SHA512_H0;
  25. sctx->state[1] = SHA512_H1;
  26. sctx->state[2] = SHA512_H2;
  27. sctx->state[3] = SHA512_H3;
  28. sctx->state[4] = SHA512_H4;
  29. sctx->state[5] = SHA512_H5;
  30. sctx->state[6] = SHA512_H6;
  31. sctx->state[7] = SHA512_H7;
  32. sctx->count[0] = sctx->count[1] = 0;
  33. return 0;
  34. }
  35. static int sha384_sparc64_init(struct shash_desc *desc)
  36. {
  37. struct sha512_state *sctx = shash_desc_ctx(desc);
  38. sctx->state[0] = SHA384_H0;
  39. sctx->state[1] = SHA384_H1;
  40. sctx->state[2] = SHA384_H2;
  41. sctx->state[3] = SHA384_H3;
  42. sctx->state[4] = SHA384_H4;
  43. sctx->state[5] = SHA384_H5;
  44. sctx->state[6] = SHA384_H6;
  45. sctx->state[7] = SHA384_H7;
  46. sctx->count[0] = sctx->count[1] = 0;
  47. return 0;
  48. }
  49. static void __sha512_sparc64_update(struct sha512_state *sctx, const u8 *data,
  50. unsigned int len, unsigned int partial)
  51. {
  52. unsigned int done = 0;
  53. if ((sctx->count[0] += len) < len)
  54. sctx->count[1]++;
  55. if (partial) {
  56. done = SHA512_BLOCK_SIZE - partial;
  57. memcpy(sctx->buf + partial, data, done);
  58. sha512_sparc64_transform(sctx->state, sctx->buf, 1);
  59. }
  60. if (len - done >= SHA512_BLOCK_SIZE) {
  61. const unsigned int rounds = (len - done) / SHA512_BLOCK_SIZE;
  62. sha512_sparc64_transform(sctx->state, data + done, rounds);
  63. done += rounds * SHA512_BLOCK_SIZE;
  64. }
  65. memcpy(sctx->buf, data + done, len - done);
  66. }
  67. static int sha512_sparc64_update(struct shash_desc *desc, const u8 *data,
  68. unsigned int len)
  69. {
  70. struct sha512_state *sctx = shash_desc_ctx(desc);
  71. unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
  72. /* Handle the fast case right here */
  73. if (partial + len < SHA512_BLOCK_SIZE) {
  74. if ((sctx->count[0] += len) < len)
  75. sctx->count[1]++;
  76. memcpy(sctx->buf + partial, data, len);
  77. } else
  78. __sha512_sparc64_update(sctx, data, len, partial);
  79. return 0;
  80. }
/* shash .final hook: apply SHA-512 padding, append the 128-bit bit
 * length, emit the big-endian digest into 'out', then wipe the context.
 * Always returns 0.
 */
static int sha512_sparc64_final(struct shash_desc *desc, u8 *out)
{
	struct sha512_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be64 *dst = (__be64 *)out;
	__be64 bits[2];
	static const u8 padding[SHA512_BLOCK_SIZE] = { 0x80, };

	/* Save number of bits: count[] holds bytes, so shift left by 3;
	 * bits[0] is the high 64 bits of the 128-bit big-endian length. */
	bits[1] = cpu_to_be64(sctx->count[0] << 3);
	bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);

	/* Pad out to 112 mod 128 and append length */
	index = sctx->count[0] % SHA512_BLOCK_SIZE;
	padlen = (index < 112) ? (112 - index) : ((SHA512_BLOCK_SIZE+112) - index);

	/* We need to fill a whole block for __sha512_sparc64_update():
	 * if the padding fits before offset 112 of the current block we
	 * just buffer it (count bookkeeping done by hand); otherwise the
	 * padding spills into a second block and the core update path
	 * must run the transform. */
	if (padlen <= 112) {
		if ((sctx->count[0] += padlen) < padlen)
			sctx->count[1]++;
		memcpy(sctx->buf + index, padding, padlen);
	} else {
		__sha512_sparc64_update(sctx, padding, padlen, index);
	}
	/* The 16 length bytes at offset 112 complete the final block. */
	__sha512_sparc64_update(sctx, (const u8 *)&bits, sizeof(bits), 112);

	/* Store state in digest */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be64(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}
  110. static int sha384_sparc64_final(struct shash_desc *desc, u8 *hash)
  111. {
  112. u8 D[64];
  113. sha512_sparc64_final(desc, D);
  114. memcpy(hash, D, 48);
  115. memset(D, 0, 64);
  116. return 0;
  117. }
/* SHA-512 algorithm descriptor.  Per-request context is the generic
 * sha512_state (8x64-bit state, 128-bit byte count, one-block buffer).
 */
static struct shash_alg sha512 = {
	.digestsize	=	SHA512_DIGEST_SIZE,
	.init		=	sha512_sparc64_init,
	.update		=	sha512_sparc64_update,
	.final		=	sha512_sparc64_final,
	.descsize	=	sizeof(struct sha512_state),
	.base		=	{
		.cra_name	=	"sha512",
		.cra_driver_name=	"sha512-sparc64",
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA512_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
/* SHA-384 algorithm descriptor.  Shares the SHA-512 update hook and
 * state layout; only init (different IVs) and final (truncation) differ.
 */
static struct shash_alg sha384 = {
	.digestsize	=	SHA384_DIGEST_SIZE,
	.init		=	sha384_sparc64_init,
	.update		=	sha512_sparc64_update,
	.final		=	sha384_sparc64_final,
	.descsize	=	sizeof(struct sha512_state),
	.base		=	{
		.cra_name	=	"sha384",
		.cra_driver_name=	"sha384-sparc64",
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA384_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
  146. static bool __init sparc64_has_sha512_opcode(void)
  147. {
  148. unsigned long cfr;
  149. if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
  150. return false;
  151. __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
  152. if (!(cfr & CFR_SHA512))
  153. return false;
  154. return true;
  155. }
  156. static int __init sha512_sparc64_mod_init(void)
  157. {
  158. if (sparc64_has_sha512_opcode()) {
  159. int ret = crypto_register_shash(&sha384);
  160. if (ret < 0)
  161. return ret;
  162. ret = crypto_register_shash(&sha512);
  163. if (ret < 0) {
  164. crypto_unregister_shash(&sha384);
  165. return ret;
  166. }
  167. pr_info("Using sparc64 sha512 opcode optimized SHA-512/SHA-384 implementation\n");
  168. return 0;
  169. }
  170. pr_info("sparc64 sha512 opcode not available.\n");
  171. return -ENODEV;
  172. }
/* Module unload: drop both registrations made by mod_init. */
static void __exit sha512_sparc64_mod_fini(void)
{
	crypto_unregister_shash(&sha384);
	crypto_unregister_shash(&sha512);
}
module_init(sha512_sparc64_mod_init);
module_exit(sha512_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-384 and SHA-512 Secure Hash Algorithm, sparc64 sha512 opcode accelerated");

/* NOTE(review): newer kernels require MODULE_ALIAS_CRYPTO("sha384") /
 * MODULE_ALIAS_CRYPTO("sha512") so autoloading goes through the
 * "crypto-" prefix — confirm the target kernel version before changing.
 */
MODULE_ALIAS("sha384");
MODULE_ALIAS("sha512");