mirror of https://github.com/torvalds/linux.git (synced 2025-11-04 10:40:15 +02:00)
	crypto: ctr - Propagate NEED_FALLBACK bit
When requesting a fallback algorithm, we should propagate the NEED_FALLBACK bit when searching for the underlying algorithm.

This prevents drivers from allocating unnecessary fallbacks that are never called. For instance, currently the vmx-crypto driver will use the following chain of calls when calling the fallback implementation:

    p8_aes_ctr -> ctr(p8_aes) -> aes-generic

However, p8_aes will always delegate its calls to aes-generic. With this patch, p8_aes_ctr will be able to use ctr(aes-generic) directly as its fallback. The same applies to aes_s390.

Signed-off-by: Marcelo Henrique Cerri <marcelo.cerri@canonical.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
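For context, the driver-side pattern the message refers to looks roughly like the sketch below: a hardware driver allocates a software fallback and passes CRYPTO_ALG_NEED_FALLBACK in the mask, meaning "give me an implementation that does not itself need a fallback". With this patch the ctr() template forwards that requirement to the cipher it wraps. The function and variable names here are hypothetical, not taken from the actual vmx-crypto or aes_s390 sources; only crypto_alloc_skcipher() and the flag itself are real kernel API.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>

/* Hypothetical tfm init path for a hardware ctr(aes) driver. */
static int hw_aes_ctr_init_fallback(struct crypto_skcipher **fallback)
{
	struct crypto_skcipher *fb;

	/*
	 * mask = CRYPTO_ALG_NEED_FALLBACK with the bit clear in type:
	 * only pick an implementation that does NOT set NEED_FALLBACK.
	 * After this patch the ctr() template propagates that bit to its
	 * inner cipher, so the instance ends up built on aes-generic
	 * rather than on another hardware driver such as p8_aes.
	 */
	fb = crypto_alloc_skcipher("ctr(aes)", 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fb))
		return PTR_ERR(fb);

	*fallback = fb;
	return 0;
}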
parent e6c2e65c70
commit d2c2a85cfe

1 changed file with 18 additions and 5 deletions

 crypto/ctr.c | 23 ++++++++++++++++++-----
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -181,15 +181,24 @@ static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
 static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
 {
 	struct crypto_instance *inst;
+	struct crypto_attr_type *algt;
 	struct crypto_alg *alg;
+	u32 mask;
 	int err;
 
 	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 	if (err)
 		return ERR_PTR(err);
 
-	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
-				  CRYPTO_ALG_TYPE_MASK);
+	algt = crypto_get_attr_type(tb);
+	if (IS_ERR(algt))
+		return ERR_CAST(algt);
+
+	mask = CRYPTO_ALG_TYPE_MASK |
+		crypto_requires_off(algt->type, algt->mask,
+				    CRYPTO_ALG_NEED_FALLBACK);
+
+	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
 	if (IS_ERR(alg))
 		return ERR_CAST(alg);
 
@@ -350,6 +359,8 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
 	struct skcipher_alg *alg;
 	struct crypto_skcipher_spawn *spawn;
 	const char *cipher_name;
+	u32 mask;
+
 	int err;
 
 	algt = crypto_get_attr_type(tb);
@@ -367,12 +378,14 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
 	if (!inst)
 		return -ENOMEM;
 
+	mask = crypto_requires_sync(algt->type, algt->mask) |
+		crypto_requires_off(algt->type, algt->mask,
+				    CRYPTO_ALG_NEED_FALLBACK);
+
 	spawn = skcipher_instance_ctx(inst);
 
 	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
-	err = crypto_grab_skcipher(spawn, cipher_name, 0,
-				   crypto_requires_sync(algt->type,
-							algt->mask));
+	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
 	if (err)
 		goto err_free_inst;
 
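For reference, the mask computation in both hunks leans on the crypto_requires_off() helper, with crypto_requires_sync() being its CRYPTO_ALG_ASYNC special case. Below is a minimal sketch of those helpers, assuming the include/crypto/algapi.h definitions of that era; treat it as an aid for reading the diff, not a verbatim quote of the header.

#include <linux/crypto.h>
#include <linux/types.h>

/*
 * Returns 'off' only when the caller cleared that bit in 'type' while
 * setting it in 'mask', i.e. the caller explicitly required an
 * implementation without the flag.  ctr() now forwards this result so
 * the inner cipher inherits the NEED_FALLBACK requirement.
 */
static inline u32 crypto_requires_off(u32 type, u32 mask, u32 off)
{
	return (type ^ off) & mask & off;
}

/* Pre-existing helper: the same test applied to CRYPTO_ALG_ASYNC. */
static inline u32 crypto_requires_sync(u32 type, u32 mask)
{
	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}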