/* geode-aes.c — AMD Geode LX hardware AES engine driver */
  1. /* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
  2. *
  3. * This program is free software; you can redistribute it and/or modify
  4. * it under the terms of the GNU General Public License as published by
  5. * the Free Software Foundation; either version 2 of the License, or
  6. * (at your option) any later version.
  7. */
  8. #include <linux/module.h>
  9. #include <linux/kernel.h>
  10. #include <linux/pci.h>
  11. #include <linux/pci_ids.h>
  12. #include <linux/crypto.h>
  13. #include <linux/spinlock.h>
  14. #include <crypto/algapi.h>
  15. #include <crypto/aes.h>
  16. #include <asm/io.h>
  17. #include <asm/delay.h>
  18. #include "geode-aes.h"
  19. /* Static structures */
  20. static void __iomem * _iobase;
  21. static spinlock_t lock;
  22. /* Write a 128 bit field (either a writable key or IV) */
  23. static inline void
  24. _writefield(u32 offset, void *value)
  25. {
  26. int i;
  27. for(i = 0; i < 4; i++)
  28. iowrite32(((u32 *) value)[i], _iobase + offset + (i * 4));
  29. }
  30. /* Read a 128 bit field (either a writable key or IV) */
  31. static inline void
  32. _readfield(u32 offset, void *value)
  33. {
  34. int i;
  35. for(i = 0; i < 4; i++)
  36. ((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
  37. }
/*
 * Kick off one AES operation on the engine and busy-wait for completion.
 * src/dst must be virtually-contiguous buffers (their physical addresses
 * are programmed directly into the engine).  Caller holds the global lock.
 * Returns 0 on success, 1 if the engine never signalled completion.
 */
static int
do_crypt(void *src, void *dst, int len, u32 flags)
{
	u32 status;
	u32 counter = AES_OP_TIMEOUT;

	/* Program source, destination and length registers */
	iowrite32(virt_to_phys(src), _iobase + AES_SOURCEA_REG);
	iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
	iowrite32(len, _iobase + AES_LENA_REG);

	/* Start the operation */
	iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

	/* Poll the interrupt status until the op completes or we time out */
	do {
		status = ioread32(_iobase + AES_INTR_REG);
		cpu_relax();
	} while (!(status & AES_INTRA_PENDING) && --counter);

	/* Clear the event */
	iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);

	return counter ? 0 : 1;
}
/*
 * Run the encrypt/decrypt operation described by @op on the hardware.
 * The engine has a single set of key/IV/control registers, so the whole
 * sequence runs under the global spinlock.  Returns the number of bytes
 * processed (op->len), or 0 for empty input.
 */
static unsigned int
geode_aes_crypt(struct geode_aes_op *op)
{
	u32 flags = 0;
	unsigned long iflags;
	int ret;

	if (op->len == 0)
		return 0;

	/* If the source and destination is the same, then
	 * we need to turn on the coherent flags, otherwise
	 * we don't need to worry
	 */
	flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

	if (op->dir == AES_DIR_ENCRYPT)
		flags |= AES_CTRL_ENCRYPT;

	/* Start the critical section */
	spin_lock_irqsave(&lock, iflags);

	if (op->mode == AES_MODE_CBC) {
		flags |= AES_CTRL_CBC;
		_writefield(AES_WRITEIV0_REG, op->iv);
	}

	/* Load the key unless the caller wants the hidden (EFUSE) key */
	if (!(op->flags & AES_FLAGS_HIDDENKEY)) {
		flags |= AES_CTRL_WRKEY;
		_writefield(AES_WRITEKEY0_REG, op->key);
	}

	ret = do_crypt(op->src, op->dst, op->len, flags);
	BUG_ON(ret);	/* a timeout here means the engine is wedged */

	if (op->mode == AES_MODE_CBC)
		/* Read back the chained IV for the next segment */
		_readfield(AES_WRITEIV0_REG, op->iv);

	spin_unlock_irqrestore(&lock, iflags);

	return op->len;
}
  88. /* CRYPTO-API Functions */
  89. static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
  90. unsigned int len)
  91. {
  92. struct geode_aes_op *op = crypto_tfm_ctx(tfm);
  93. unsigned int ret;
  94. op->keylen = len;
  95. if (len == AES_KEYSIZE_128) {
  96. memcpy(op->key, key, len);
  97. return 0;
  98. }
  99. if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
  100. /* not supported at all */
  101. tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
  102. return -EINVAL;
  103. }
  104. /*
  105. * The requested key size is not supported by HW, do a fallback
  106. */
  107. op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
  108. op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
  109. ret = crypto_cipher_setkey(op->fallback.cip, key, len);
  110. if (ret) {
  111. tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
  112. tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
  113. }
  114. return ret;
  115. }
  116. static int geode_setkey_blk(struct crypto_tfm *tfm, const u8 *key,
  117. unsigned int len)
  118. {
  119. struct geode_aes_op *op = crypto_tfm_ctx(tfm);
  120. unsigned int ret;
  121. op->keylen = len;
  122. if (len == AES_KEYSIZE_128) {
  123. memcpy(op->key, key, len);
  124. return 0;
  125. }
  126. if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
  127. /* not supported at all */
  128. tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
  129. return -EINVAL;
  130. }
  131. /*
  132. * The requested key size is not supported by HW, do a fallback
  133. */
  134. op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
  135. op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
  136. ret = crypto_blkcipher_setkey(op->fallback.blk, key, len);
  137. if (ret) {
  138. tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
  139. tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
  140. }
  141. return ret;
  142. }
  143. static int fallback_blk_dec(struct blkcipher_desc *desc,
  144. struct scatterlist *dst, struct scatterlist *src,
  145. unsigned int nbytes)
  146. {
  147. unsigned int ret;
  148. struct crypto_blkcipher *tfm;
  149. struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
  150. tfm = desc->tfm;
  151. desc->tfm = op->fallback.blk;
  152. ret = crypto_blkcipher_decrypt(desc, dst, src, nbytes);
  153. desc->tfm = tfm;
  154. return ret;
  155. }
  156. static int fallback_blk_enc(struct blkcipher_desc *desc,
  157. struct scatterlist *dst, struct scatterlist *src,
  158. unsigned int nbytes)
  159. {
  160. unsigned int ret;
  161. struct crypto_blkcipher *tfm;
  162. struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
  163. tfm = desc->tfm;
  164. desc->tfm = op->fallback.blk;
  165. ret = crypto_blkcipher_encrypt(desc, dst, src, nbytes);
  166. desc->tfm = tfm;
  167. return ret;
  168. }
  169. static void
  170. geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
  171. {
  172. struct geode_aes_op *op = crypto_tfm_ctx(tfm);
  173. if (unlikely(op->keylen != AES_KEYSIZE_128)) {
  174. crypto_cipher_encrypt_one(op->fallback.cip, out, in);
  175. return;
  176. }
  177. op->src = (void *) in;
  178. op->dst = (void *) out;
  179. op->mode = AES_MODE_ECB;
  180. op->flags = 0;
  181. op->len = AES_MIN_BLOCK_SIZE;
  182. op->dir = AES_DIR_ENCRYPT;
  183. geode_aes_crypt(op);
  184. }
  185. static void
  186. geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
  187. {
  188. struct geode_aes_op *op = crypto_tfm_ctx(tfm);
  189. if (unlikely(op->keylen != AES_KEYSIZE_128)) {
  190. crypto_cipher_decrypt_one(op->fallback.cip, out, in);
  191. return;
  192. }
  193. op->src = (void *) in;
  194. op->dst = (void *) out;
  195. op->mode = AES_MODE_ECB;
  196. op->flags = 0;
  197. op->len = AES_MIN_BLOCK_SIZE;
  198. op->dir = AES_DIR_DECRYPT;
  199. geode_aes_crypt(op);
  200. }
  201. static int fallback_init_cip(struct crypto_tfm *tfm)
  202. {
  203. const char *name = tfm->__crt_alg->cra_name;
  204. struct geode_aes_op *op = crypto_tfm_ctx(tfm);
  205. op->fallback.cip = crypto_alloc_cipher(name, 0,
  206. CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
  207. if (IS_ERR(op->fallback.cip)) {
  208. printk(KERN_ERR "Error allocating fallback algo %s\n", name);
  209. return PTR_ERR(op->fallback.blk);
  210. }
  211. return 0;
  212. }
/* Free the fallback cipher allocated in fallback_init_cip() */
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct geode_aes_op *op = crypto_tfm_ctx(tfm);

	crypto_free_cipher(op->fallback.cip);
	op->fallback.cip = NULL;	/* defend against use-after-free */
}
/* Single-block "aes" cipher backed by the Geode AES engine */
static struct crypto_alg geode_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "geode-aes",
	.cra_priority		= 300,	/* preferred over generic software aes */
	.cra_alignmask		= 15,	/* engine requires 16-byte alignment */
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_blocksize		= AES_MIN_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct geode_aes_op),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(geode_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= geode_setkey_cip,
			.cia_encrypt		= geode_encrypt,
			.cia_decrypt		= geode_decrypt
		}
	}
};
/*
 * CBC decrypt across a scatterlist using the blkcipher walk API.
 * Processes each virtually-mapped segment whole-blocks-at-a-time on the
 * hardware, chaining the IV through op->iv between segments.
 */
static int
geode_cbc_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err, ret;

	/* Keys the engine cannot handle go through the software fallback */
	if (unlikely(op->keylen != AES_KEYSIZE_128))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	/* Seed the op with the caller's IV */
	memcpy(op->iv, walk.iv, AES_IV_LENGTH);

	while((nbytes = walk.nbytes)) {
		op->src = walk.src.virt.addr,	/* NOTE: comma operator — acts like ';' */
		op->dst = walk.dst.virt.addr;
		op->mode = AES_MODE_CBC;
		/* Only whole blocks; the remainder stays in the walk */
		op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
		op->dir = AES_DIR_DECRYPT;

		ret = geode_aes_crypt(op);	/* bytes processed */
		nbytes -= ret;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	/* Return the updated IV to the caller */
	memcpy(walk.iv, op->iv, AES_IV_LENGTH);

	return err;
}
/*
 * CBC encrypt across a scatterlist using the blkcipher walk API.
 * Mirrors geode_cbc_decrypt() with the direction flag flipped.
 */
static int
geode_cbc_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err, ret;

	/* Keys the engine cannot handle go through the software fallback */
	if (unlikely(op->keylen != AES_KEYSIZE_128))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	/* Seed the op with the caller's IV */
	memcpy(op->iv, walk.iv, AES_IV_LENGTH);

	while((nbytes = walk.nbytes)) {
		op->src = walk.src.virt.addr,	/* NOTE: comma operator — acts like ';' */
		op->dst = walk.dst.virt.addr;
		op->mode = AES_MODE_CBC;
		/* Only whole blocks; the remainder stays in the walk */
		op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
		op->dir = AES_DIR_ENCRYPT;

		ret = geode_aes_crypt(op);	/* bytes processed */
		nbytes -= ret;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	/* Return the updated IV to the caller */
	memcpy(walk.iv, op->iv, AES_IV_LENGTH);

	return err;
}
  294. static int fallback_init_blk(struct crypto_tfm *tfm)
  295. {
  296. const char *name = tfm->__crt_alg->cra_name;
  297. struct geode_aes_op *op = crypto_tfm_ctx(tfm);
  298. op->fallback.blk = crypto_alloc_blkcipher(name, 0,
  299. CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
  300. if (IS_ERR(op->fallback.blk)) {
  301. printk(KERN_ERR "Error allocating fallback algo %s\n", name);
  302. return PTR_ERR(op->fallback.blk);
  303. }
  304. return 0;
  305. }
/* Free the fallback blkcipher allocated in fallback_init_blk() */
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct geode_aes_op *op = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(op->fallback.blk);
	op->fallback.blk = NULL;	/* defend against use-after-free */
}
/* "cbc(aes)" blkcipher backed by the Geode AES engine */
static struct crypto_alg geode_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-geode",
	.cra_priority		= 400,	/* preferred over generic cbc(aes) */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_blocksize		= AES_MIN_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct geode_aes_op),
	.cra_alignmask		= 15,	/* engine requires 16-byte alignment */
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(geode_cbc_alg.cra_list),
	.cra_u	= {
		.blkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= geode_setkey_blk,
			.encrypt	= geode_cbc_encrypt,
			.decrypt	= geode_cbc_decrypt,
			.ivsize		= AES_IV_LENGTH,
		}
	}
};
/*
 * ECB decrypt across a scatterlist using the blkcipher walk API.
 * No IV handling — each segment is processed whole-blocks-at-a-time.
 */
static int
geode_ecb_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err, ret;

	/* Keys the engine cannot handle go through the software fallback */
	if (unlikely(op->keylen != AES_KEYSIZE_128))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while((nbytes = walk.nbytes)) {
		op->src = walk.src.virt.addr,	/* NOTE: comma operator — acts like ';' */
		op->dst = walk.dst.virt.addr;
		op->mode = AES_MODE_ECB;
		/* Only whole blocks; the remainder stays in the walk */
		op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
		op->dir = AES_DIR_DECRYPT;

		ret = geode_aes_crypt(op);	/* bytes processed */
		nbytes -= ret;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}
  361. static int
  362. geode_ecb_encrypt(struct blkcipher_desc *desc,
  363. struct scatterlist *dst, struct scatterlist *src,
  364. unsigned int nbytes)
  365. {
  366. struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
  367. struct blkcipher_walk walk;
  368. int err, ret;
  369. if (unlikely(op->keylen != AES_KEYSIZE_128))
  370. return fallback_blk_enc(desc, dst, src, nbytes);
  371. blkcipher_walk_init(&walk, dst, src, nbytes);
  372. err = blkcipher_walk_virt(desc, &walk);
  373. while((nbytes = walk.nbytes)) {
  374. op->src = walk.src.virt.addr,
  375. op->dst = walk.dst.virt.addr;
  376. op->mode = AES_MODE_ECB;
  377. op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
  378. op->dir = AES_DIR_ENCRYPT;
  379. ret = geode_aes_crypt(op);
  380. nbytes -= ret;
  381. ret = blkcipher_walk_done(desc, &walk, nbytes);
  382. }
  383. return err;
  384. }
/* "ecb(aes)" blkcipher backed by the Geode AES engine */
static struct crypto_alg geode_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-geode",
	.cra_priority		= 400,	/* preferred over generic ecb(aes) */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_blocksize		= AES_MIN_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct geode_aes_op),
	.cra_alignmask		= 15,	/* engine requires 16-byte alignment */
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(geode_ecb_alg.cra_list),
	.cra_u	= {
		.blkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= geode_setkey_blk,
			.encrypt	= geode_ecb_encrypt,
			.decrypt	= geode_ecb_decrypt,
		}
	}
};
/* Tear down in reverse order of geode_aes_probe() */
static void
geode_aes_remove(struct pci_dev *dev)
{
	crypto_unregister_alg(&geode_alg);
	crypto_unregister_alg(&geode_ecb_alg);
	crypto_unregister_alg(&geode_cbc_alg);

	pci_iounmap(dev, _iobase);
	_iobase = NULL;	/* make stale-use bugs fail loudly */

	pci_release_regions(dev);
	pci_disable_device(dev);
}
  420. static int
  421. geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
  422. {
  423. int ret;
  424. if ((ret = pci_enable_device(dev)))
  425. return ret;
  426. if ((ret = pci_request_regions(dev, "geode-aes")))
  427. goto eenable;
  428. _iobase = pci_iomap(dev, 0, 0);
  429. if (_iobase == NULL) {
  430. ret = -ENOMEM;
  431. goto erequest;
  432. }
  433. spin_lock_init(&lock);
  434. /* Clear any pending activity */
  435. iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);
  436. if ((ret = crypto_register_alg(&geode_alg)))
  437. goto eiomap;
  438. if ((ret = crypto_register_alg(&geode_ecb_alg)))
  439. goto ealg;
  440. if ((ret = crypto_register_alg(&geode_cbc_alg)))
  441. goto eecb;
  442. printk(KERN_NOTICE "geode-aes: GEODE AES engine enabled.\n");
  443. return 0;
  444. eecb:
  445. crypto_unregister_alg(&geode_ecb_alg);
  446. ealg:
  447. crypto_unregister_alg(&geode_alg);
  448. eiomap:
  449. pci_iounmap(dev, _iobase);
  450. erequest:
  451. pci_release_regions(dev);
  452. eenable:
  453. pci_disable_device(dev);
  454. printk(KERN_ERR "geode-aes: GEODE AES initialization failed.\n");
  455. return ret;
  456. }
/* PCI IDs this driver binds to: the AMD Geode LX AES engine */
static struct pci_device_id geode_aes_tbl[] = {
	{ PCI_VENDOR_ID_AMD, PCI_DEVICE_ID_AMD_LX_AES, PCI_ANY_ID, PCI_ANY_ID} ,
	{ 0, }	/* terminator */
};
  461. MODULE_DEVICE_TABLE(pci, geode_aes_tbl);
/* PCI driver glue: probe/remove wired to the functions above */
static struct pci_driver geode_aes_driver = {
	.name = "Geode LX AES",
	.id_table = geode_aes_tbl,
	.probe = geode_aes_probe,
	.remove = __devexit_p(geode_aes_remove)
};
/* Module entry point: register the PCI driver */
static int __init
geode_aes_init(void)
{
	return pci_register_driver(&geode_aes_driver);
}
/* Module exit point: unregister the PCI driver */
static void __exit
geode_aes_exit(void)
{
	pci_unregister_driver(&geode_aes_driver);
}
  478. MODULE_AUTHOR("Advanced Micro Devices, Inc.");
  479. MODULE_DESCRIPTION("Geode LX Hardware AES driver");
  480. MODULE_LICENSE("GPL");
  481. module_init(geode_aes_init);
  482. module_exit(geode_aes_exit);