@@ -17,6 +17,9 @@
  *
  */
 
+#define KMSG_COMPONENT "aes_s390"
+#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
+
 #include <crypto/aes.h>
 #include <crypto/algapi.h>
 #include <linux/err.h>
@@ -169,7 +172,8 @@ static int fallback_init_cip(struct crypto_tfm *tfm)
 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
 
 	if (IS_ERR(sctx->fallback.cip)) {
-		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+		pr_err("Allocating AES fallback algorithm %s failed\n",
+		       name);
 		return PTR_ERR(sctx->fallback.blk);
 	}
 
@@ -349,7 +353,8 @@ static int fallback_init_blk(struct crypto_tfm *tfm)
 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
 
 	if (IS_ERR(sctx->fallback.blk)) {
-		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+		pr_err("Allocating AES fallback algorithm %s failed\n",
+		       name);
 		return PTR_ERR(sctx->fallback.blk);
 	}
 
@@ -515,9 +520,8 @@ static int __init aes_s390_init(void)
 
 	/* z9 109 and z9 BC/EC only support 128 bit key length */
 	if (keylen_flag == AES_KEYLEN_128)
-		printk(KERN_INFO
-		       "aes_s390: hardware acceleration only available for "
-		       "128 bit keys\n");
+		pr_info("AES hardware acceleration is only available for"
+			" 128-bit keys\n");
 
 	ret = crypto_register_alg(&aes_alg);
 	if (ret)
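
For reference, the KMSG_COMPONENT/pr_fmt pair added in the first hunk works
purely through preprocessor string concatenation: because pr_fmt() is defined
before the headers are included, every pr_err()/pr_info() call in the file
picks up the "aes_s390: " prefix automatically. A minimal userspace sketch of
the idea (the pr_err() below is a hypothetical stand-in, not the real kernel
macro from <linux/printk.h>):

#include <stdio.h>

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

/* Stand-in for the kernel's pr_err(); adjacent string literals are
 * concatenated at compile time, so the prefix costs nothing at run time. */
#define pr_err(fmt, ...) fprintf(stderr, pr_fmt(fmt), ##__VA_ARGS__)

int main(void)
{
	/* Prints: aes_s390: Allocating AES fallback algorithm aes failed */
	pr_err("Allocating AES fallback algorithm %s failed\n", "aes");
	return 0;
}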