author	Eric Biggers <ebiggers@kernel.org>	2026-01-12 11:20:08 -0800
committer	Eric Biggers <ebiggers@kernel.org>	2026-01-12 11:39:58 -0800
commit	fa2297750c2cc61788d1843f358dbfecaa42944f (patch)
tree	d619f66893a79c8421cb744e43af99e03229a49a
parent	a2484474272ef98d9580d8c610b0f7c6ed2f146c (diff)
lib/crypto: arm/aes: Migrate optimized code into library
Move the ARM optimized single-block AES en/decryption code into
lib/crypto/, wire it up to the AES library API, and remove the
superseded "aes-arm" crypto_cipher algorithm.  The result is that both
the AES library and crypto_cipher APIs are now optimized for ARM,
whereas previously only crypto_cipher was (and the optimizations
weren't enabled by default, which this fixes as well).

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-11-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
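For context, a minimal sketch of a caller of the AES library that this patch accelerates, using the long-standing <crypto/aes.h> entry points (aes_expandkey/aes_encrypt with struct crypto_aes_ctx). The series reworks the key-handling internals, so treat this as an illustration of the library API rather than the patch's new interface; example_aes_one_block is a hypothetical helper name.

	#include <crypto/aes.h>
	#include <linux/string.h>

	/* Encrypt one 16-byte block with a 256-bit key via the AES library API. */
	static int example_aes_one_block(const u8 key[AES_KEYSIZE_256],
					 const u8 in[AES_BLOCK_SIZE],
					 u8 out[AES_BLOCK_SIZE])
	{
		struct crypto_aes_ctx ctx;
		int err;

		err = aes_expandkey(&ctx, key, AES_KEYSIZE_256);
		if (err)
			return err;

		/* On ARM this now dispatches to the scalar assembly. */
		aes_encrypt(&ctx, out, in);
		memzero_explicit(&ctx, sizeof(ctx));
		return 0;
	}
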
-rw-r--r--  arch/arm/configs/milbeaut_m10v_defconfig   1
-rw-r--r--  arch/arm/configs/multi_v7_defconfig        2
-rw-r--r--  arch/arm/configs/omap2plus_defconfig       2
-rw-r--r--  arch/arm/configs/pxa_defconfig             2
-rw-r--r--  arch/arm/crypto/Kconfig                   18
-rw-r--r--  arch/arm/crypto/Makefile                   2
-rw-r--r--  arch/arm/crypto/aes-cipher-glue.c         77
-rw-r--r--  arch/arm/crypto/aes-cipher.h              13
-rw-r--r--  lib/crypto/Kconfig                         1
-rw-r--r--  lib/crypto/Makefile                        3
-rw-r--r--  lib/crypto/arm/aes-cipher-core.S (renamed from arch/arm/crypto/aes-cipher-core.S)  0
-rw-r--r--  lib/crypto/arm/aes.h                      56
12 files changed, 63 insertions, 114 deletions
diff --git a/arch/arm/configs/milbeaut_m10v_defconfig b/arch/arm/configs/milbeaut_m10v_defconfig
index a2995eb390c6..77b69d672d40 100644
--- a/arch/arm/configs/milbeaut_m10v_defconfig
+++ b/arch/arm/configs/milbeaut_m10v_defconfig
@@ -98,7 +98,6 @@ CONFIG_CRYPTO_SELFTESTS=y
CONFIG_CRYPTO_AES=y
CONFIG_CRYPTO_SEQIV=m
CONFIG_CRYPTO_GHASH_ARM_CE=m
-CONFIG_CRYPTO_AES_ARM=m
CONFIG_CRYPTO_AES_ARM_BS=m
CONFIG_CRYPTO_AES_ARM_CE=m
# CONFIG_CRYPTO_HW is not set
diff --git a/arch/arm/configs/multi_v7_defconfig b/arch/arm/configs/multi_v7_defconfig
index 7f1fa9dd88c9..b6d3e20926bb 100644
--- a/arch/arm/configs/multi_v7_defconfig
+++ b/arch/arm/configs/multi_v7_defconfig
@@ -1286,7 +1286,7 @@ CONFIG_CRYPTO_USER_API_SKCIPHER=m
CONFIG_CRYPTO_USER_API_RNG=m
CONFIG_CRYPTO_USER_API_AEAD=m
CONFIG_CRYPTO_GHASH_ARM_CE=m
-CONFIG_CRYPTO_AES_ARM=m
+CONFIG_CRYPTO_AES=m
CONFIG_CRYPTO_AES_ARM_BS=m
CONFIG_CRYPTO_AES_ARM_CE=m
CONFIG_CRYPTO_DEV_SUN4I_SS=m
diff --git a/arch/arm/configs/omap2plus_defconfig b/arch/arm/configs/omap2plus_defconfig
index 4e53c331cd84..0464f6552169 100644
--- a/arch/arm/configs/omap2plus_defconfig
+++ b/arch/arm/configs/omap2plus_defconfig
@@ -706,7 +706,7 @@ CONFIG_NLS_ISO8859_1=y
CONFIG_SECURITY=y
CONFIG_CRYPTO_MICHAEL_MIC=y
CONFIG_CRYPTO_GHASH_ARM_CE=m
-CONFIG_CRYPTO_AES_ARM=m
+CONFIG_CRYPTO_AES=m
CONFIG_CRYPTO_AES_ARM_BS=m
CONFIG_CRYPTO_DEV_OMAP=m
CONFIG_CRYPTO_DEV_OMAP_SHAM=m
diff --git a/arch/arm/configs/pxa_defconfig b/arch/arm/configs/pxa_defconfig
index 3ea189f1f42f..eacd08fd87ad 100644
--- a/arch/arm/configs/pxa_defconfig
+++ b/arch/arm/configs/pxa_defconfig
@@ -657,7 +657,7 @@ CONFIG_CRYPTO_ANUBIS=m
CONFIG_CRYPTO_XCBC=m
CONFIG_CRYPTO_DEFLATE=y
CONFIG_CRYPTO_LZO=y
-CONFIG_CRYPTO_AES_ARM=m
+CONFIG_CRYPTO_AES=m
CONFIG_FONTS=y
CONFIG_FONT_8x8=y
CONFIG_FONT_8x16=y
diff --git a/arch/arm/crypto/Kconfig b/arch/arm/crypto/Kconfig
index 167a648a9def..b9c28c818b7c 100644
--- a/arch/arm/crypto/Kconfig
+++ b/arch/arm/crypto/Kconfig
@@ -23,24 +23,6 @@ config CRYPTO_GHASH_ARM_CE
that is part of the ARMv8 Crypto Extensions, or a slower variant that
uses the vmull.p8 instruction that is part of the basic NEON ISA.
-config CRYPTO_AES_ARM
- tristate "Ciphers: AES"
- select CRYPTO_ALGAPI
- select CRYPTO_AES
- help
- Block ciphers: AES cipher algorithms (FIPS-197)
-
- Architecture: arm
-
- On ARM processors without the Crypto Extensions, this is the
- fastest AES implementation for single blocks. For multiple
- blocks, the NEON bit-sliced implementation is usually faster.
-
- This implementation may be vulnerable to cache timing attacks,
- since it uses lookup tables. However, as countermeasures it
- disables IRQs and preloads the tables; it is hoped this makes
- such attacks very difficult.
-
config CRYPTO_AES_ARM_BS
tristate "Ciphers: AES, modes: ECB/CBC/CTR/XTS (bit-sliced NEON)"
depends on KERNEL_MODE_NEON
diff --git a/arch/arm/crypto/Makefile b/arch/arm/crypto/Makefile
index d6683e9d4992..e73099e120b3 100644
--- a/arch/arm/crypto/Makefile
+++ b/arch/arm/crypto/Makefile
@@ -3,13 +3,11 @@
# Arch-specific CryptoAPI modules.
#
-obj-$(CONFIG_CRYPTO_AES_ARM) += aes-arm.o
obj-$(CONFIG_CRYPTO_AES_ARM_BS) += aes-arm-bs.o
obj-$(CONFIG_CRYPTO_AES_ARM_CE) += aes-arm-ce.o
obj-$(CONFIG_CRYPTO_GHASH_ARM_CE) += ghash-arm-ce.o
-aes-arm-y := aes-cipher-core.o aes-cipher-glue.o
aes-arm-bs-y := aes-neonbs-core.o aes-neonbs-glue.o
aes-arm-ce-y := aes-ce-core.o aes-ce-glue.o
ghash-arm-ce-y := ghash-ce-core.o ghash-ce-glue.o
diff --git a/arch/arm/crypto/aes-cipher-glue.c b/arch/arm/crypto/aes-cipher-glue.c
deleted file mode 100644
index f302db808cd3..000000000000
--- a/arch/arm/crypto/aes-cipher-glue.c
+++ /dev/null
@@ -1,77 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-only
-/*
- * Scalar AES core transform
- *
- * Copyright (C) 2017 Linaro Ltd.
- * Author: Ard Biesheuvel <ard.biesheuvel@linaro.org>
- */
-
-#include <crypto/aes.h>
-#include <crypto/algapi.h>
-#include <linux/module.h>
-#include "aes-cipher.h"
-
-EXPORT_SYMBOL_GPL(__aes_arm_encrypt);
-EXPORT_SYMBOL_GPL(__aes_arm_decrypt);
-
-static int aes_arm_setkey(struct crypto_tfm *tfm, const u8 *in_key,
- unsigned int key_len)
-{
- struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-
- return aes_expandkey(ctx, in_key, key_len);
-}
-
-static void aes_arm_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
-{
- struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- int rounds = 6 + ctx->key_length / 4;
-
- __aes_arm_encrypt(ctx->key_enc, rounds, in, out);
-}
-
-static void aes_arm_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
-{
- struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- int rounds = 6 + ctx->key_length / 4;
-
- __aes_arm_decrypt(ctx->key_dec, rounds, in, out);
-}
-
-static struct crypto_alg aes_alg = {
- .cra_name = "aes",
- .cra_driver_name = "aes-arm",
- .cra_priority = 200,
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct crypto_aes_ctx),
- .cra_module = THIS_MODULE,
-
- .cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE,
- .cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE,
- .cra_cipher.cia_setkey = aes_arm_setkey,
- .cra_cipher.cia_encrypt = aes_arm_encrypt,
- .cra_cipher.cia_decrypt = aes_arm_decrypt,
-
-#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
- .cra_alignmask = 3,
-#endif
-};
-
-static int __init aes_init(void)
-{
- return crypto_register_alg(&aes_alg);
-}
-
-static void __exit aes_fini(void)
-{
- crypto_unregister_alg(&aes_alg);
-}
-
-module_init(aes_init);
-module_exit(aes_fini);
-
-MODULE_DESCRIPTION("Scalar AES cipher for ARM");
-MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
-MODULE_LICENSE("GPL v2");
-MODULE_ALIAS_CRYPTO("aes");
diff --git a/arch/arm/crypto/aes-cipher.h b/arch/arm/crypto/aes-cipher.h
deleted file mode 100644
index d5db2b87eb69..000000000000
--- a/arch/arm/crypto/aes-cipher.h
+++ /dev/null
@@ -1,13 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0-only */
-#ifndef ARM_CRYPTO_AES_CIPHER_H
-#define ARM_CRYPTO_AES_CIPHER_H
-
-#include <linux/linkage.h>
-#include <linux/types.h>
-
-asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
- const u8 *in, u8 *out);
-asmlinkage void __aes_arm_decrypt(const u32 rk[], int rounds,
- const u8 *in, u8 *out);
-
-#endif /* ARM_CRYPTO_AES_CIPHER_H */
diff --git a/lib/crypto/Kconfig b/lib/crypto/Kconfig
index 4efad77daa24..60420b421e04 100644
--- a/lib/crypto/Kconfig
+++ b/lib/crypto/Kconfig
@@ -14,6 +14,7 @@ config CRYPTO_LIB_AES
config CRYPTO_LIB_AES_ARCH
bool
depends on CRYPTO_LIB_AES && !UML && !KMSAN
+ default y if ARM
config CRYPTO_LIB_AESCFB
tristate
diff --git a/lib/crypto/Makefile b/lib/crypto/Makefile
index 01193b3f47ba..2f6b0f59eb1b 100644
--- a/lib/crypto/Makefile
+++ b/lib/crypto/Makefile
@@ -21,6 +21,9 @@ obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
libaes-y := aes.o
ifeq ($(CONFIG_CRYPTO_LIB_AES_ARCH),y)
CFLAGS_aes.o += -I$(src)/$(SRCARCH)
+
+libaes-$(CONFIG_ARM) += arm/aes-cipher-core.o
+
endif # CONFIG_CRYPTO_LIB_AES_ARCH
################################################################################
diff --git a/arch/arm/crypto/aes-cipher-core.S b/lib/crypto/arm/aes-cipher-core.S
index 87567d6822ba..87567d6822ba 100644
--- a/arch/arm/crypto/aes-cipher-core.S
+++ b/lib/crypto/arm/aes-cipher-core.S
diff --git a/lib/crypto/arm/aes.h b/lib/crypto/arm/aes.h
new file mode 100644
index 000000000000..1dd7dfa657bb
--- /dev/null
+++ b/lib/crypto/arm/aes.h
@@ -0,0 +1,56 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * AES block cipher, optimized for ARM
+ *
+ * Copyright (C) 2017 Linaro Ltd.
+ * Copyright 2026 Google LLC
+ */
+
+asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
+ const u8 in[AES_BLOCK_SIZE],
+ u8 out[AES_BLOCK_SIZE]);
+asmlinkage void __aes_arm_decrypt(const u32 inv_rk[], int rounds,
+ const u8 in[AES_BLOCK_SIZE],
+ u8 out[AES_BLOCK_SIZE]);
+
+static void aes_preparekey_arch(union aes_enckey_arch *k,
+ union aes_invkey_arch *inv_k,
+ const u8 *in_key, int key_len, int nrounds)
+{
+ aes_expandkey_generic(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
+ in_key, key_len);
+}
+
+static void aes_encrypt_arch(const struct aes_enckey *key,
+ u8 out[AES_BLOCK_SIZE],
+ const u8 in[AES_BLOCK_SIZE])
+{
+ if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+ !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
+ u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);
+
+ memcpy(bounce_buf, in, AES_BLOCK_SIZE);
+ __aes_arm_encrypt(key->k.rndkeys, key->nrounds, bounce_buf,
+ bounce_buf);
+ memcpy(out, bounce_buf, AES_BLOCK_SIZE);
+ return;
+ }
+ __aes_arm_encrypt(key->k.rndkeys, key->nrounds, in, out);
+}
+
+static void aes_decrypt_arch(const struct aes_key *key,
+ u8 out[AES_BLOCK_SIZE],
+ const u8 in[AES_BLOCK_SIZE])
+{
+ if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+ !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
+ u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);
+
+ memcpy(bounce_buf, in, AES_BLOCK_SIZE);
+ __aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds,
+ bounce_buf, bounce_buf);
+ memcpy(out, bounce_buf, AES_BLOCK_SIZE);
+ return;
+ }
+ __aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds, in, out);
+}
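
A note on the bounce-buffer path in the new lib/crypto/arm/aes.h: the scalar assembly accesses the block with word loads/stores (which is why the removed crypto_cipher glue set cra_alignmask to 3 in the same configuration), so on kernels without CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS misaligned blocks are copied through a 4-byte-aligned buffer. The OR-of-addresses test is a common idiom for checking several pointers at once; a minimal stand-alone sketch, with both_4byte_aligned being a hypothetical name and not part of the patch:

	#include <linux/align.h>
	#include <linux/types.h>

	/*
	 * True iff both pointers are 4-byte aligned: OR-ing the addresses
	 * sets a low bit exactly when at least one of them is misaligned.
	 */
	static bool both_4byte_aligned(const void *a, const void *b)
	{
		return IS_ALIGNED((uintptr_t)a | (uintptr_t)b, 4);
	}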