@@ -60,6 +60,7 @@ struct dm_crypt_io {
 };
 
 struct dm_crypt_request {
+	struct convert_context *ctx;
 	struct scatterlist sg_in;
 	struct scatterlist sg_out;
 };
@@ -335,6 +336,18 @@ static void crypt_convert_init(struct crypt_config *cc,
 	init_completion(&ctx->restart);
 }
 
+static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc,
+					     struct ablkcipher_request *req)
+{
+	return (struct dm_crypt_request *)((char *)req + cc->dmreq_start);
+}
+
+static struct ablkcipher_request *req_of_dmreq(struct crypt_config *cc,
+					       struct dm_crypt_request *dmreq)
+{
+	return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start);
+}
+
 static int crypt_convert_block(struct crypt_config *cc,
 			       struct convert_context *ctx,
 			       struct ablkcipher_request *req)
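
The two helpers above encode the layout of each cc->req_pool element: the
ablkcipher_request sits at the start, the matching dm_crypt_request lives
cc->dmreq_start bytes past it, and the IV follows the dm_crypt_request
(see the ALIGN() in crypt_convert_block below). A minimal sketch of the
invariant the pair relies on, for illustration only:

	/* The conversions must be exact inverses: the completion path
	 * frees the pool element by turning the dm_crypt_request it
	 * received back into the original ablkcipher_request.
	 */
	struct dm_crypt_request *dmreq = dmreq_of_req(cc, req);
	BUG_ON(req_of_dmreq(cc, dmreq) != req);
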
@@ -345,10 +358,11 @@ static int crypt_convert_block(struct crypt_config *cc,
 	u8 *iv;
 	int r = 0;
 
-	dmreq = (struct dm_crypt_request *)((char *)req + cc->dmreq_start);
+	dmreq = dmreq_of_req(cc, req);
 	iv = (u8 *)ALIGN((unsigned long)(dmreq + 1),
 			 crypto_ablkcipher_alignmask(cc->tfm) + 1);
 
+	dmreq->ctx = ctx;
 	sg_init_table(&dmreq->sg_in, 1);
 	sg_set_page(&dmreq->sg_in, bv_in->bv_page, 1 << SECTOR_SHIFT,
 		    bv_in->bv_offset + ctx->offset_in);
@@ -395,8 +409,9 @@ static void crypt_alloc_req(struct crypt_config *cc,
 		cc->req = mempool_alloc(cc->req_pool, GFP_NOIO);
 	ablkcipher_request_set_tfm(cc->req, cc->tfm);
 	ablkcipher_request_set_callback(cc->req, CRYPTO_TFM_REQ_MAY_BACKLOG |
-					CRYPTO_TFM_REQ_MAY_SLEEP,
-					kcryptd_async_done, ctx);
+					CRYPTO_TFM_REQ_MAY_SLEEP,
+					kcryptd_async_done,
+					dmreq_of_req(cc, cc->req));
 }
 
 /*
@@ -553,19 +568,22 @@ static void crypt_inc_pending(struct dm_crypt_io *io)
 static void crypt_dec_pending(struct dm_crypt_io *io)
 {
 	struct crypt_config *cc = io->target->private;
+	struct bio *base_bio = io->base_bio;
+	struct dm_crypt_io *base_io = io->base_io;
+	int error = io->error;
 
 	if (!atomic_dec_and_test(&io->pending))
 		return;
 
-	if (likely(!io->base_io))
-		bio_endio(io->base_bio, io->error);
+	mempool_free(io, cc->io_pool);
+
+	if (likely(!base_io))
+		bio_endio(base_bio, error);
 	else {
-		if (io->error && !io->base_io->error)
-			io->base_io->error = io->error;
-		crypt_dec_pending(io->base_io);
+		if (error && !base_io->error)
+			base_io->error = error;
+		crypt_dec_pending(base_io);
 	}
-
-	mempool_free(io, cc->io_pool);
 }
 
 /*
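
crypt_dec_pending() now copies everything it still needs (base_bio, base_io
and error) into locals and returns io to cc->io_pool before completing the
bio, so io is never touched after the free. The reordering matters because
bio_endio() can wake a waiter that proceeds to remove the device; a sketch
of the window the old ordering left open, assuming teardown races with the
completion:

	bio_endio(io->base_bio, io->error);
	/* waiter resumes and removes the target:
	 *   crypt_dtr() -> mempool_destroy(cc->io_pool)
	 */
	mempool_free(io, cc->io_pool);	/* pool may already be gone */
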
@@ -821,7 +839,8 @@ static void kcryptd_crypt_read_convert(struct dm_crypt_io *io)
 static void kcryptd_async_done(struct crypto_async_request *async_req,
 			       int error)
 {
-	struct convert_context *ctx = async_req->data;
+	struct dm_crypt_request *dmreq = async_req->data;
+	struct convert_context *ctx = dmreq->ctx;
 	struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
 	struct crypt_config *cc = io->target->private;
@@ -830,7 +849,7 @@ static void kcryptd_async_done(struct crypto_async_request *async_req,
 		return;
 	}
 
-	mempool_free(ablkcipher_request_cast(async_req), cc->req_pool);
+	mempool_free(req_of_dmreq(cc, dmreq), cc->req_pool);
 
 	if (!atomic_dec_and_test(&ctx->pending))
 		return;
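
The last two hunks close the loop with crypt_alloc_req() above: the crypto
layer is now given the per-request dm_crypt_request as callback data, and
the completion handler derives everything from that single pointer. This is
safer because, depending on the crypto driver, the crypto_async_request
passed to the callback need not be the request that was submitted; its
->data field is what can be relied on. A sketch of the recovery chain,
under that assumption:

	/* Everything kcryptd_async_done() needs comes from async_req->data. */
	struct dm_crypt_request *dmreq = async_req->data;
	struct convert_context *ctx = dmreq->ctx;
	struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
	struct crypt_config *cc = io->target->private;

	mempool_free(req_of_dmreq(cc, dmreq), cc->req_pool);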