Switch from the old AES library functions (which use struct
crypto_aes_ctx) to the new ones (which use struct aes_enckey). This
eliminates the unnecessary computation and caching of the decryption
round keys, which this code never needs: CMAC subkey generation only
ever encrypts. The new AES en/decryption functions are also much
faster, since they use the CPU's AES instructions when available.

Note: aes_encrypt_new() will be renamed to aes_encrypt() once all
callers of the old aes_encrypt() have been updated.
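
For reference, the conversion pattern looks like this (a minimal
sketch, not code from this patch: the helper and its name are
hypothetical, and <crypto/aes.h> is assumed to declare the new API as
it did the old one; the aes_prepareenckey() and aes_encrypt_new()
calls mirror the hunks below):

	#include <crypto/aes.h>		/* assumed header, as for the old API */
	#include <linux/string.h>	/* memzero_explicit() */

	/* Hypothetical helper: expand a key, then encrypt one block in place. */
	static int encrypt_one_block(const u8 *key, unsigned int key_len,
				     u8 block[AES_BLOCK_SIZE])
	{
		struct aes_enckey aes;	/* encryption round keys only */
		int ret;

		/*
		 * Replaces aes_expandkey(), which also computed and
		 * cached the decryption round keys.
		 */
		ret = aes_prepareenckey(&aes, key, key_len);
		if (ret)
			return ret;

		/* Replaces the old aes_encrypt(); in-place operation is fine. */
		aes_encrypt_new(&aes, block, block);

		/* Wipe the round keys, as the existing code does. */
		memzero_explicit(&aes, sizeof(aes));
		return 0;
	}
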
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
---
drivers/crypto/ccp/ccp-crypto-aes-cmac.c | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/drivers/crypto/ccp/ccp-crypto-aes-cmac.c b/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
index d8426bdf3190..ed5b0f8609f1 100644
--- a/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
+++ b/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
@@ -259,11 +259,11 @@ static int ccp_aes_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
struct ccp_ctx *ctx = crypto_ahash_ctx_dma(tfm);
struct ccp_crypto_ahash_alg *alg =
ccp_crypto_ahash_alg(crypto_ahash_tfm(tfm));
u64 k0_hi, k0_lo, k1_hi, k1_lo, k2_hi, k2_lo;
u64 rb_hi = 0x00, rb_lo = 0x87;
- struct crypto_aes_ctx aes;
+ struct aes_enckey aes;
__be64 *gk;
int ret;

switch (key_len) {
case AES_KEYSIZE_128:
@@ -282,17 +282,17 @@ static int ccp_aes_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
/* Set to zero until complete */
ctx->u.aes.key_len = 0;

/* Set the key for the AES cipher used to generate the keys */
- ret = aes_expandkey(&aes, key, key_len);
+ ret = aes_prepareenckey(&aes, key, key_len);
if (ret)
return ret;

/* Encrypt a block of zeroes - use key area in context */
memset(ctx->u.aes.key, 0, sizeof(ctx->u.aes.key));
- aes_encrypt(&aes, ctx->u.aes.key, ctx->u.aes.key);
+ aes_encrypt_new(&aes, ctx->u.aes.key, ctx->u.aes.key);

memzero_explicit(&aes, sizeof(aes));

/* Generate K1 and K2 */
k0_hi = be64_to_cpu(*((__be64 *)ctx->u.aes.key));
k0_lo = be64_to_cpu(*((__be64 *)ctx->u.aes.key + 1));
--
2.52.0