Switch from the old AES library functions (which use struct
crypto_aes_ctx) to the new ones (which use struct aes_enckey). This
eliminates the unnecessary computation and caching of the decryption
round keys. The new AES en/decryption functions are also much faster
and use AES instructions when supported by the CPU.

Note: aes_encrypt_new() will be renamed to aes_encrypt() once all
callers of the old aes_encrypt() have been updated.

Signed-off-by: Eric Biggers <ebiggers@kernel.org>
---
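A minimal before/after sketch of the caller-side change, for reviewer
convenience; it is inferred only from the hunks below, the variable
names are illustrative, and the surrounding code is elided:

	/*
	 * Before: aes_expandkey() computed and cached both the encryption
	 * and decryption round keys, even though AES-GCM only ever
	 * encrypts single blocks (GHASH key derivation, the tag, and the
	 * CTR keystream).
	 */
	struct crypto_aes_ctx aes_ctx;	/* illustrative local */
	u8 out[AES_BLOCK_SIZE], in[AES_BLOCK_SIZE] = {};
	int err;

	err = aes_expandkey(&aes_ctx, key, keysize);
	if (!err)
		aes_encrypt(&aes_ctx, out, in);

	/*
	 * After: aes_prepareenckey() sets up the encryption key schedule
	 * only, and aes_encrypt_new() encrypts one block with it.
	 */
	struct aes_enckey aes_key;	/* illustrative local */

	err = aes_prepareenckey(&aes_key, key, keysize);
	if (!err)
		aes_encrypt_new(&aes_key, out, in);
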
 include/crypto/gcm.h |  2 +-
 lib/crypto/aesgcm.c  | 12 ++++++------
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/include/crypto/gcm.h b/include/crypto/gcm.h
index fd9df607a836..b524e47bd4d0 100644
--- a/include/crypto/gcm.h
+++ b/include/crypto/gcm.h
@@ -64,11 +64,11 @@ static inline int crypto_ipsec_check_assoclen(unsigned int assoclen)
 	return 0;
 }
 
 struct aesgcm_ctx {
 	be128			ghash_key;
-	struct crypto_aes_ctx	aes_ctx;
+	struct aes_enckey	aes_key;
 	unsigned int		authsize;
 };
 
 int aesgcm_expandkey(struct aesgcm_ctx *ctx, const u8 *key,
 		     unsigned int keysize, unsigned int authsize);
diff --git a/lib/crypto/aesgcm.c b/lib/crypto/aesgcm.c
index ac0b2fcfd606..19106fe008fd 100644
--- a/lib/crypto/aesgcm.c
+++ b/lib/crypto/aesgcm.c
@@ -10,11 +10,11 @@
 #include <crypto/ghash.h>
 #include <linux/export.h>
 #include <linux/module.h>
 #include <asm/irqflags.h>
 
-static void aesgcm_encrypt_block(const struct crypto_aes_ctx *ctx, void *dst,
+static void aesgcm_encrypt_block(const struct aes_enckey *key, void *dst,
 				 const void *src)
 {
 	unsigned long flags;
 
 	/*
@@ -24,11 +24,11 @@ static void aesgcm_encrypt_block(const struct crypto_aes_ctx *ctx, void *dst,
 	 * mitigates this risk to some extent by pulling the entire S-box into
 	 * the caches before doing any substitutions, but this strategy is more
 	 * effective when running with interrupts disabled.
 	 */
 	local_irq_save(flags);
-	aes_encrypt(ctx, dst, src);
+	aes_encrypt_new(key, dst, src);
 	local_irq_restore(flags);
 }
 
 /**
  * aesgcm_expandkey - Expands the AES and GHASH keys for the AES-GCM key
@@ -47,16 +47,16 @@ int aesgcm_expandkey(struct aesgcm_ctx *ctx, const u8 *key,
 {
 	u8 kin[AES_BLOCK_SIZE] = {};
 	int ret;
 
 	ret = crypto_gcm_check_authsize(authsize) ?:
-	      aes_expandkey(&ctx->aes_ctx, key, keysize);
+	      aes_prepareenckey(&ctx->aes_key, key, keysize);
 	if (ret)
 		return ret;
 
 	ctx->authsize = authsize;
-	aesgcm_encrypt_block(&ctx->aes_ctx, &ctx->ghash_key, kin);
+	aesgcm_encrypt_block(&ctx->aes_key, &ctx->ghash_key, kin);
 
 	return 0;
 }
 EXPORT_SYMBOL(aesgcm_expandkey);
 
@@ -95,11 +95,11 @@ static void aesgcm_mac(const struct aesgcm_ctx *ctx, const u8 *src, int src_len,
 	aesgcm_ghash(&ghash, &ctx->ghash_key, assoc, assoc_len);
 	aesgcm_ghash(&ghash, &ctx->ghash_key, src, src_len);
 	aesgcm_ghash(&ghash, &ctx->ghash_key, &tail, sizeof(tail));
 
 	ctr[3] = cpu_to_be32(1);
-	aesgcm_encrypt_block(&ctx->aes_ctx, buf, ctr);
+	aesgcm_encrypt_block(&ctx->aes_key, buf, ctr);
 	crypto_xor_cpy(authtag, buf, (u8 *)&ghash, ctx->authsize);
 
 	memzero_explicit(&ghash, sizeof(ghash));
 	memzero_explicit(buf, sizeof(buf));
 }
@@ -117,11 +117,11 @@ static void aesgcm_crypt(const struct aesgcm_ctx *ctx, u8 *dst, const u8 *src,
 		 * inadvertent IV reuse, which must be avoided at all cost for
 		 * stream ciphers such as AES-CTR. Given the range of 'int
 		 * len', this cannot happen, so no explicit test is necessary.
 		 */
 		ctr[3] = cpu_to_be32(n++);
-		aesgcm_encrypt_block(&ctx->aes_ctx, buf, ctr);
+		aesgcm_encrypt_block(&ctx->aes_key, buf, ctr);
 		crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE));
 
 		dst += AES_BLOCK_SIZE;
 		src += AES_BLOCK_SIZE;
 		len -= AES_BLOCK_SIZE;
--
2.52.0