camellia_glue.c

/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

#define CAMELLIA_MIN_KEY_SIZE       16
#define CAMELLIA_MAX_KEY_SIZE       32
#define CAMELLIA_BLOCK_SIZE         16
#define CAMELLIA_TABLE_BYTE_LEN    272

struct camellia_sparc64_ctx {
        u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        int key_len;
};

extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
                                        unsigned int key_len, u64 *decrypt_key);
static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
                            unsigned int key_len)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u32 *in_key = (const u32 *) _in_key;
        u32 *flags = &tfm->crt_flags;

        if (key_len != 16 && key_len != 24 && key_len != 32) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        ctx->key_len = key_len;

        camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
                                    key_len, &ctx->decrypt_key[0]);
        return 0;
}
extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
                                   u32 *output, unsigned int key_len);

static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->encrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->decrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}
extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

#define CAMELLIA_BLOCK_MASK     (~(CAMELLIA_BLOCK_SIZE - 1))
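
/*
 * Worked illustration (editor's sketch, not from the original source):
 * with CAMELLIA_BLOCK_SIZE of 16 the mask is ~15, so a 37-byte chunk from
 * the block cipher walk gives block_len = 37 & ~15 = 32 (two whole blocks
 * for the assembler routine) and leaves nbytes & 15 = 5 trailing bytes to
 * be handed back to blkcipher_walk_done().
 */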

static int __ecb_crypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes, bool encrypt)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        ecb_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_ecb_crypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_ecb_crypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        if (encrypt)
                key = &ctx->encrypt_key[0];
        else
                key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        /* The crypto opcodes run on the FPU; clear %fprs when done. */
        fprs_write(0);
        return err;
}
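
/*
 * Minimal usage sketch (editor's addition, not part of the driver): how a
 * caller of the era's legacy blkcipher API might reach ecb_encrypt() above.
 * The function name, key and buffer values are illustrative only; it assumes
 * <linux/scatterlist.h> and <linux/err.h>, hence the #if 0 guard.
 */
#if 0
static int camellia_ecb_usage_sketch(void)
{
        u8 key[16] = { 0 };                     /* 128-bit key, all zero */
        u8 buf[32] = { 0 };                     /* two 16-byte blocks    */
        struct scatterlist sg;
        struct blkcipher_desc desc;
        struct crypto_blkcipher *tfm;
        int err;

        tfm = crypto_alloc_blkcipher("ecb(camellia)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_blkcipher_setkey(tfm, key, sizeof(key));
        if (!err) {
                sg_init_one(&sg, buf, sizeof(buf));
                desc.tfm = tfm;
                desc.flags = 0;
                /* In-place encryption of both blocks via __ecb_crypt(). */
                err = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
        }

        crypto_free_blkcipher(tfm);
        return err;
}
#endif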

static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, true);
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, false);
}

typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        key = &ctx->encrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key,
                           (u64 *) walk.iv);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key,
                           (u64 *) walk.iv);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static struct crypto_alg algs[] = { {
        .cra_name               = "camellia",
        .cra_driver_name        = "camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = CAMELLIA_MIN_KEY_SIZE,
                        .cia_max_keysize        = CAMELLIA_MAX_KEY_SIZE,
                        .cia_setkey             = camellia_set_key,
                        .cia_encrypt            = camellia_encrypt,
                        .cia_decrypt            = camellia_decrypt
                }
        }
}, {
        .cra_name               = "ecb(camellia)",
        .cra_driver_name        = "ecb-camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAMELLIA_MIN_KEY_SIZE,
                        .max_keysize    = CAMELLIA_MAX_KEY_SIZE,
                        .setkey         = camellia_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(camellia)",
        .cra_driver_name        = "cbc-camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAMELLIA_MIN_KEY_SIZE,
                        .max_keysize    = CAMELLIA_MAX_KEY_SIZE,
                        .setkey         = camellia_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}
};
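
/*
 * Minimal usage sketch (editor's addition, not part of the driver): the
 * single-block "camellia" algorithm registered above is normally reached
 * through the crypto_cipher API.  The function name and key/plaintext
 * values are illustrative only; <linux/err.h> is assumed for IS_ERR(),
 * hence the #if 0 guard.
 */
#if 0
static int camellia_cipher_usage_sketch(void)
{
        u8 key[CAMELLIA_MAX_KEY_SIZE] = { 0 };  /* 256-bit key, all zero */
        u8 in[CAMELLIA_BLOCK_SIZE] = { 0 };
        u8 out[CAMELLIA_BLOCK_SIZE];
        struct crypto_cipher *tfm;
        int err;

        tfm = crypto_alloc_cipher("camellia", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_cipher_setkey(tfm, key, sizeof(key));
        if (!err)
                /* Ends up in camellia_encrypt() via .cia_encrypt. */
                crypto_cipher_encrypt_one(tfm, out, in);

        crypto_free_cipher(tfm);
        return err;
}
#endif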

static bool __init sparc64_has_camellia_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        /* %asr26 is the Crypto Function Register; check the CAMELLIA bit. */
        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_CAMELLIA))
                return false;

        return true;
}

static int __init camellia_sparc64_mod_init(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(algs); i++)
                INIT_LIST_HEAD(&algs[i].cra_list);

        if (sparc64_has_camellia_opcode()) {
                pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
                return crypto_register_algs(algs, ARRAY_SIZE(algs));
        }
        pr_info("sparc64 camellia opcodes not available.\n");
        return -ENODEV;
}

static void __exit camellia_sparc64_mod_fini(void)
{
        crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS("camellia");