mirror of
				https://github.com/torvalds/linux.git
				synced 2025-11-04 10:40:15 +02:00 
			
		
		
		
	Pull crypto updates from Herbert Xu: "Here is the crypto update for 5.3: API: - Test shash interface directly in testmgr - cra_driver_name is now mandatory Algorithms: - Replace arc4 crypto_cipher with library helper - Implement 5 way interleave for ECB, CBC and CTR on arm64 - Add xxhash - Add continuous self-test on noise source to drbg - Update jitter RNG Drivers: - Add support for SHA204A random number generator - Add support for 7211 in iproc-rng200 - Fix fuzz test failures in inside-secure - Fix fuzz test failures in talitos - Fix fuzz test failures in qat" * 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (143 commits) crypto: stm32/hash - remove interruptible condition for dma crypto: stm32/hash - Fix hmac issue more than 256 bytes crypto: stm32/crc32 - rename driver file crypto: amcc - remove memset after dma_alloc_coherent crypto: ccp - Switch to SPDX license identifiers crypto: ccp - Validate the the error value used to index error messages crypto: doc - Fix formatting of new crypto engine content crypto: doc - Add parameter documentation crypto: arm64/aes-ce - implement 5 way interleave for ECB, CBC and CTR crypto: arm64/aes-ce - add 5 way interleave routines crypto: talitos - drop icv_ool crypto: talitos - fix hash on SEC1. crypto: talitos - move struct talitos_edesc into talitos.h lib/scatterlist: Fix mapping iterator when sg->offset is greater than PAGE_SIZE crypto/NX: Set receive window credits to max number of CRBs in RxFIFO crypto: asymmetric_keys - select CRYPTO_HASH where needed crypto: serpent - mark __serpent_setkey_sbox noinline crypto: testmgr - dynamically allocate crypto_shash crypto: testmgr - dynamically allocate testvec_config crypto: talitos - eliminate unneeded 'done' functions at build time ...
		
			
				
	
	
		
			161 lines
		
	
	
	
		
			3.5 KiB
		
	
	
	
		
			C
		
	
	
	
	
	
			
		
		
	
	
			161 lines
		
	
	
	
		
			3.5 KiB
		
	
	
	
		
			C
		
	
	
	
	
	
// SPDX-License-Identifier: GPL-2.0-only
 | 
						|
/*
 | 
						|
 * GHASH: digest algorithm for GCM (Galois/Counter Mode).
 | 
						|
 *
 | 
						|
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 | 
						|
 * Copyright (c) 2009 Intel Corp.
 | 
						|
 *   Author: Huang Ying <ying.huang@intel.com>
 | 
						|
 *
 | 
						|
 * The algorithm implementation is copied from gcm.c.
 | 
						|
 */
 | 
						|
 | 
						|
#include <crypto/algapi.h>
 | 
						|
#include <crypto/gf128mul.h>
 | 
						|
#include <crypto/ghash.h>
 | 
						|
#include <crypto/internal/hash.h>
 | 
						|
#include <linux/crypto.h>
 | 
						|
#include <linux/init.h>
 | 
						|
#include <linux/kernel.h>
 | 
						|
#include <linux/module.h>
 | 
						|
 | 
						|
static int ghash_init(struct shash_desc *desc)
 | 
						|
{
 | 
						|
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
 | 
						|
 | 
						|
	memset(dctx, 0, sizeof(*dctx));
 | 
						|
 | 
						|
	return 0;
 | 
						|
}
 | 
						|
 | 
						|
static int ghash_setkey(struct crypto_shash *tfm,
 | 
						|
			const u8 *key, unsigned int keylen)
 | 
						|
{
 | 
						|
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
 | 
						|
	be128 k;
 | 
						|
 | 
						|
	if (keylen != GHASH_BLOCK_SIZE) {
 | 
						|
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 | 
						|
		return -EINVAL;
 | 
						|
	}
 | 
						|
 | 
						|
	if (ctx->gf128)
 | 
						|
		gf128mul_free_4k(ctx->gf128);
 | 
						|
 | 
						|
	BUILD_BUG_ON(sizeof(k) != GHASH_BLOCK_SIZE);
 | 
						|
	memcpy(&k, key, GHASH_BLOCK_SIZE); /* avoid violating alignment rules */
 | 
						|
	ctx->gf128 = gf128mul_init_4k_lle(&k);
 | 
						|
	memzero_explicit(&k, GHASH_BLOCK_SIZE);
 | 
						|
 | 
						|
	if (!ctx->gf128)
 | 
						|
		return -ENOMEM;
 | 
						|
 | 
						|
	return 0;
 | 
						|
}
 | 
						|
 | 
						|
static int ghash_update(struct shash_desc *desc,
 | 
						|
			 const u8 *src, unsigned int srclen)
 | 
						|
{
 | 
						|
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
 | 
						|
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
 | 
						|
	u8 *dst = dctx->buffer;
 | 
						|
 | 
						|
	if (dctx->bytes) {
 | 
						|
		int n = min(srclen, dctx->bytes);
 | 
						|
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
 | 
						|
 | 
						|
		dctx->bytes -= n;
 | 
						|
		srclen -= n;
 | 
						|
 | 
						|
		while (n--)
 | 
						|
			*pos++ ^= *src++;
 | 
						|
 | 
						|
		if (!dctx->bytes)
 | 
						|
			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
 | 
						|
	}
 | 
						|
 | 
						|
	while (srclen >= GHASH_BLOCK_SIZE) {
 | 
						|
		crypto_xor(dst, src, GHASH_BLOCK_SIZE);
 | 
						|
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
 | 
						|
		src += GHASH_BLOCK_SIZE;
 | 
						|
		srclen -= GHASH_BLOCK_SIZE;
 | 
						|
	}
 | 
						|
 | 
						|
	if (srclen) {
 | 
						|
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
 | 
						|
		while (srclen--)
 | 
						|
			*dst++ ^= *src++;
 | 
						|
	}
 | 
						|
 | 
						|
	return 0;
 | 
						|
}
 | 
						|
 | 
						|
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
 | 
						|
{
 | 
						|
	u8 *dst = dctx->buffer;
 | 
						|
 | 
						|
	if (dctx->bytes) {
 | 
						|
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
 | 
						|
 | 
						|
		while (dctx->bytes--)
 | 
						|
			*tmp++ ^= 0;
 | 
						|
 | 
						|
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
 | 
						|
	}
 | 
						|
 | 
						|
	dctx->bytes = 0;
 | 
						|
}
 | 
						|
 | 
						|
/*
 * Finish the digest: fold in any buffered partial block, then copy the
 * 16-byte accumulator out to @dst.
 */
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);

	ghash_flush(ctx, dctx);
	memcpy(dst, dctx->buffer, GHASH_BLOCK_SIZE);

	return 0;
}
 | 
						|
 | 
						|
/*
 * Transform destructor: release the 4k multiplication table allocated
 * by ghash_setkey(), if a key was ever set.
 */
static void ghash_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_ctx *ctx = crypto_tfm_ctx(tfm);
	if (ctx->gf128)
		gf128mul_free_4k(ctx->gf128);
}
 | 
						|
 | 
						|
/*
 * Generic table-driven GHASH.  The low cra_priority (100) lets
 * accelerated implementations registering under the same "ghash"
 * cra_name take precedence.
 */
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "ghash",
		.cra_driver_name	= "ghash-generic",
		.cra_priority		= 100,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
		.cra_exit		= ghash_exit_tfm,
	},
};
 | 
						|
 | 
						|
/* Register the generic GHASH algorithm with the crypto API. */
static int __init ghash_mod_init(void)
{
	return crypto_register_shash(&ghash_alg);
}
 | 
						|
 | 
						|
/* Unregister the algorithm on module unload. */
static void __exit ghash_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}
 | 
						|
 | 
						|
/* subsys_initcall (rather than module_init) so the algorithm is
 * available early for built-in users. */
subsys_initcall(ghash_mod_init);
module_exit(ghash_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm");
MODULE_ALIAS_CRYPTO("ghash");
MODULE_ALIAS_CRYPTO("ghash-generic");
 |