@@ -2018,7 +2018,6 @@ static void hifn_flush(struct hifn_device *dev)
 {
 	unsigned long flags;
 	struct crypto_async_request *async_req;
-	struct hifn_context *ctx;
 	struct ablkcipher_request *req;
 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
 	int i;
@@ -2035,7 +2034,6 @@ static void hifn_flush(struct hifn_device *dev)
 
 	spin_lock_irqsave(&dev->lock, flags);
 	while ((async_req = crypto_dequeue_request(&dev->queue))) {
-		ctx = crypto_tfm_ctx(async_req->tfm);
 		req = container_of(async_req, struct ablkcipher_request, base);
 		spin_unlock_irqrestore(&dev->lock, flags);
 
@@ -2139,7 +2137,6 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
 static int hifn_process_queue(struct hifn_device *dev)
 {
 	struct crypto_async_request *async_req, *backlog;
-	struct hifn_context *ctx;
 	struct ablkcipher_request *req;
 	unsigned long flags;
 	int err = 0;
@@ -2156,7 +2153,6 @@ static int hifn_process_queue(struct hifn_device *dev)
 	if (backlog)
 		backlog->complete(backlog, -EINPROGRESS);
 
-	ctx = crypto_tfm_ctx(async_req->tfm);
 	req = container_of(async_req, struct ablkcipher_request, base);
 
 	err = hifn_handle_req(req);
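
For context, a minimal standalone sketch (simplified stand-in types, not the driver's actual definitions) of the container_of() pattern these loops rely on: the enclosing ablkcipher_request is recovered purely from the embedded base member of the dequeued crypto_async_request, so the per-tfm context fetched via crypto_tfm_ctx() is never consulted and the ctx locals removed above are dead code.

/* Illustrative only: simplified stand-ins for the kernel structures. */
#include <stddef.h>
#include <stdio.h>

struct crypto_async_request { void *data; };

struct ablkcipher_request {
	struct crypto_async_request base;	/* embedded base, as in the driver */
	unsigned int nbytes;
};

/* Same idea as the kernel's container_of(): step back from the member
 * pointer to the start of the enclosing structure. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
	struct ablkcipher_request areq = { .nbytes = 16 };
	struct crypto_async_request *async_req = &areq.base;

	/* Recover the request from the base pointer alone; no tfm
	 * context is needed, mirroring the dequeue loops in the patch. */
	struct ablkcipher_request *req =
		container_of(async_req, struct ablkcipher_request, base);

	printf("%u\n", req->nbytes);	/* prints 16 */
	return 0;
}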