author    Marcelo Cerri <marcelo.cerri@canonical.com>    2017-02-27 09:38:26 -0300
committer Herbert Xu <herbert@gondor.apana.org.au>    2017-03-09 18:34:39 +0800
commit    d2c2a85cfe829f9d0736dba567edc86ba8524fb2 (patch)
tree      d6e708d6dca58c5660b67c458594fde53f35e059 /crypto
parent    e6c2e65c70a6f606ea764f301e4024c85e0cd7a8 (diff)
crypto: ctr - Propagate NEED_FALLBACK bit
When requesting a fallback algorithm, we should propagate the
NEED_FALLBACK bit when searching for the underlying algorithm.

This prevents drivers from allocating unnecessary fallbacks that are
never called. For instance, the vmx-crypto driver currently uses the
following chain of calls when invoking its fallback implementation:

p8_aes_ctr -> ctr(p8_aes) -> aes-generic

However, p8_aes always delegates its calls to aes-generic. With this
patch, p8_aes_ctr can use ctr(aes-generic) directly as its fallback.
The same applies to aes_s390.

Signed-off-by: Marcelo Henrique Cerri <marcelo.cerri@canonical.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
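
The propagation relies on the crypto_requires_off() helper added earlier
in this series. For reference, a minimal sketch of it and of the related
crypto_requires_sync() wrapper, as defined in include/crypto/algapi.h at
the time of this commit:

/* If the caller asked for the 'off' bit to be clear (bit clear in
 * 'type', bit set in 'mask'), return that bit so it can be OR'ed into
 * the lookup mask passed down to the inner algorithm; otherwise
 * return 0.
 */
static inline u32 crypto_requires_off(u32 type, u32 mask, u32 off)
{
	return (type ^ off) & mask & off;
}

static inline u32 crypto_requires_sync(u32 type, u32 mask)
{
	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}

With CRYPTO_ALG_NEED_FALLBACK as 'off', a request for a ctr() instance
that must not need a fallback now makes the template require that flag
to be clear on the inner cipher as well, which is what the hunks below
implement.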
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/ctr.c  23
1 file changed, 18 insertions(+), 5 deletions(-)
diff --git a/crypto/ctr.c b/crypto/ctr.c
index a4f4a8983169..477d9226ccaa 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -181,15 +181,24 @@ static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
 static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
 {
 	struct crypto_instance *inst;
+	struct crypto_attr_type *algt;
 	struct crypto_alg *alg;
+	u32 mask;
 	int err;
 
 	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 	if (err)
 		return ERR_PTR(err);
 
-	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
-				  CRYPTO_ALG_TYPE_MASK);
+	algt = crypto_get_attr_type(tb);
+	if (IS_ERR(algt))
+		return ERR_CAST(algt);
+
+	mask = CRYPTO_ALG_TYPE_MASK |
+		crypto_requires_off(algt->type, algt->mask,
+				    CRYPTO_ALG_NEED_FALLBACK);
+
+	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
 	if (IS_ERR(alg))
 		return ERR_CAST(alg);
 
@@ -350,6 +359,8 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
 	struct skcipher_alg *alg;
 	struct crypto_skcipher_spawn *spawn;
 	const char *cipher_name;
+	u32 mask;
+
 	int err;
 
 	algt = crypto_get_attr_type(tb);
@@ -367,12 +378,14 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
 	if (!inst)
 		return -ENOMEM;
 
+	mask = crypto_requires_sync(algt->type, algt->mask) |
+		crypto_requires_off(algt->type, algt->mask,
+				    CRYPTO_ALG_NEED_FALLBACK);
+
 	spawn = skcipher_instance_ctx(inst);
 
 	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
-	err = crypto_grab_skcipher(spawn, cipher_name, 0,
-				   crypto_requires_sync(algt->type,
-							algt->mask));
+	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
 	if (err)
 		goto err_free_inst;
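
On the driver side, the flag is used roughly as follows. This is a
hypothetical sketch (example_ctx, example_ctr_init, and the fallback
field are illustrative names, modeled loosely on the vmx-crypto driver
of this era, not verbatim kernel code):

struct example_ctx {
	struct crypto_skcipher *fallback;
};

/* Hypothetical init routine: passing CRYPTO_ALG_NEED_FALLBACK in the
 * mask excludes any algorithm that itself carries the NEED_FALLBACK
 * flag. Before this patch, the ctr() template did not forward that
 * restriction to its inner cipher, so the fallback could still
 * resolve to ctr(p8_aes); with the bit propagated, the same request
 * resolves to ctr(aes-generic) instead.
 */
static int example_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->fallback = crypto_alloc_skcipher(alg_name, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback))
		return PTR_ERR(ctx->fallback);

	return 0;
}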