@@ -18,6 +18,8 @@
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
+#include <linux/rtnetlink.h>
+#include <linux/sched.h>
 #include <linux/slab.h>
 #include <linux/seq_file.h>
@@ -68,6 +70,16 @@ static unsigned int crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type,
 	return alg->cra_ctxsize;
 }
 
+int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req)
+{
+	return crypto_ablkcipher_encrypt(&req->creq);
+}
+
+int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req)
+{
+	return crypto_ablkcipher_decrypt(&req->creq);
+}
+
 static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
 				      u32 mask)
 {
@@ -80,6 +92,10 @@ static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
 	crt->setkey = setkey;
 	crt->encrypt = alg->encrypt;
 	crt->decrypt = alg->decrypt;
+	if (!alg->ivsize) {
+		crt->givencrypt = skcipher_null_givencrypt;
+		crt->givdecrypt = skcipher_null_givdecrypt;
+	}
 	crt->base = __crypto_ablkcipher_cast(tfm);
 	crt->ivsize = alg->ivsize;
 
@@ -163,6 +179,108 @@ const char *crypto_default_geniv(const struct crypto_alg *alg)
 	return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
 }
 
+static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
+{
+	struct rtattr *tb[3];
+	struct {
+		struct rtattr attr;
+		struct crypto_attr_type data;
+	} ptype;
+	struct {
+		struct rtattr attr;
+		struct crypto_attr_alg data;
+	} palg;
+	struct crypto_template *tmpl;
+	struct crypto_instance *inst;
+	struct crypto_alg *larval;
+	const char *geniv;
+	int err;
+
+	larval = crypto_larval_lookup(alg->cra_driver_name,
+				      CRYPTO_ALG_TYPE_GIVCIPHER,
+				      CRYPTO_ALG_TYPE_MASK);
+	err = PTR_ERR(larval);
+	if (IS_ERR(larval))
+		goto out;
+
+	err = -EAGAIN;
+	if (!crypto_is_larval(larval))
+		goto drop_larval;
+
+	ptype.attr.rta_len = sizeof(ptype);
+	ptype.attr.rta_type = CRYPTOA_TYPE;
+	ptype.data.type = type | CRYPTO_ALG_GENIV;
+	/* GENIV tells the template that we're making a default geniv. */
+	ptype.data.mask = mask | CRYPTO_ALG_GENIV;
+	tb[0] = &ptype.attr;
+
+	palg.attr.rta_len = sizeof(palg);
+	palg.attr.rta_type = CRYPTOA_ALG;
+	/* Must use the exact name to locate ourselves. */
+	memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
+	tb[1] = &palg.attr;
+
+	tb[2] = NULL;
+
+	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_BLKCIPHER)
+		geniv = alg->cra_blkcipher.geniv;
+	else
+		geniv = alg->cra_ablkcipher.geniv;
+
+	if (!geniv)
+		geniv = crypto_default_geniv(alg);
+
+	tmpl = crypto_lookup_template(geniv);
+	err = -ENOENT;
+	if (!tmpl)
+		goto kill_larval;
+
+	inst = tmpl->alloc(tb);
+	err = PTR_ERR(inst);
+	if (IS_ERR(inst))
+		goto put_tmpl;
+
+	if ((err = crypto_register_instance(tmpl, inst))) {
+		tmpl->free(inst);
+		goto put_tmpl;
+	}
+
+	/* Redo the lookup to use the instance we just registered. */
+	err = -EAGAIN;
+
+put_tmpl:
+	crypto_tmpl_put(tmpl);
+kill_larval:
+	crypto_larval_kill(larval);
+drop_larval:
+	crypto_mod_put(larval);
+out:
+	crypto_mod_put(alg);
+	return err;
+}
+
+static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
+						 u32 mask)
+{
+	struct crypto_alg *alg;
+
+	alg = crypto_alg_mod_lookup(name, type, mask);
+	if (IS_ERR(alg))
+		return alg;
+
+	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_GIVCIPHER)
+		return alg;
+
+	if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	      CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+					  alg->cra_ablkcipher.ivsize))
+		return alg;
+
+	return ERR_PTR(crypto_givcipher_default(alg, type, mask));
+}
+
 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
 			 u32 type, u32 mask)
 {
@@ -172,7 +290,7 @@ int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
 	type = crypto_skcipher_type(type);
 	mask = crypto_skcipher_mask(mask);
 
-	alg = crypto_alg_mod_lookup(name, type, mask);
+	alg = crypto_lookup_skcipher(name, type, mask);
 	if (IS_ERR(alg))
 		return PTR_ERR(alg);
 
@@ -182,5 +300,43 @@ int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
 }
 EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
 
+struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
+						  u32 type, u32 mask)
+{
+	struct crypto_tfm *tfm;
+	int err;
+
+	type = crypto_skcipher_type(type);
+	mask = crypto_skcipher_mask(mask);
+
+	for (;;) {
+		struct crypto_alg *alg;
+
+		alg = crypto_lookup_skcipher(alg_name, type, mask);
+		if (IS_ERR(alg)) {
+			err = PTR_ERR(alg);
+			goto err;
+		}
+
+		tfm = __crypto_alloc_tfm(alg, type, mask);
+		if (!IS_ERR(tfm))
+			return __crypto_ablkcipher_cast(tfm);
+
+		crypto_mod_put(alg);
+		err = PTR_ERR(tfm);
+
+err:
+		if (err != -EAGAIN)
+			break;
+		if (signal_pending(current)) {
+			err = -EINTR;
+			break;
+		}
+	}
+
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Asynchronous block chaining cipher type");