-rw-r--r-- | arch/arm/crypto/chacha-glue.c           |   9
-rw-r--r-- | arch/arm64/crypto/chacha-neon-glue.c    |   8
-rw-r--r-- | arch/mips/crypto/chacha-glue.c          |   8
-rw-r--r-- | arch/powerpc/crypto/chacha-p10-glue.c   |   8
-rw-r--r-- | arch/riscv/crypto/chacha-riscv64-glue.c |   8
-rw-r--r-- | arch/s390/crypto/chacha-glue.c          |   8
-rw-r--r-- | arch/x86/crypto/chacha_glue.c           |   8
-rw-r--r-- | crypto/Makefile                         |   3
-rw-r--r-- | crypto/chacha.c                         | 227
-rw-r--r-- | crypto/chacha_generic.c                 | 139
-rw-r--r-- | include/crypto/chacha.h                 |   9
11 files changed, 288 insertions, 147 deletions
diff --git a/arch/arm/crypto/chacha-glue.c b/arch/arm/crypto/chacha-glue.c
index 50e635512046..e1cb34d31771 100644
--- a/arch/arm/crypto/chacha-glue.c
+++ b/arch/arm/crypto/chacha-glue.c
@@ -287,6 +287,13 @@ static struct skcipher_alg neon_algs[] = {
 	}
 };
 
+bool chacha_is_arch_optimized(void)
+{
+	/* We always can use at least the ARM scalar implementation. */
+	return true;
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init chacha_simd_mod_init(void)
 {
 	int err = 0;
@@ -333,7 +340,7 @@ static void __exit chacha_simd_mod_fini(void)
 	}
 }
 
-module_init(chacha_simd_mod_init);
+arch_initcall(chacha_simd_mod_init);
 module_exit(chacha_simd_mod_fini);
 
 MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (scalar and NEON accelerated)");
diff --git a/arch/arm64/crypto/chacha-neon-glue.c b/arch/arm64/crypto/chacha-neon-glue.c
index 229876acfc58..bb9b52321bda 100644
--- a/arch/arm64/crypto/chacha-neon-glue.c
+++ b/arch/arm64/crypto/chacha-neon-glue.c
@@ -206,6 +206,12 @@ static struct skcipher_alg algs[] = {
 	}
 };
 
+bool chacha_is_arch_optimized(void)
+{
+	return static_key_enabled(&have_neon);
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init chacha_simd_mod_init(void)
 {
 	if (!cpu_have_named_feature(ASIMD))
@@ -223,7 +229,7 @@ static void __exit chacha_simd_mod_fini(void)
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
-module_init(chacha_simd_mod_init);
+arch_initcall(chacha_simd_mod_init);
 module_exit(chacha_simd_mod_fini);
 
 MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (NEON accelerated)");
diff --git a/arch/mips/crypto/chacha-glue.c b/arch/mips/crypto/chacha-glue.c
index f6fc2e1079a1..64ccaeaeaa1e 100644
--- a/arch/mips/crypto/chacha-glue.c
+++ b/arch/mips/crypto/chacha-glue.c
@@ -120,6 +120,12 @@ static struct skcipher_alg algs[] = {
 	}
 };
 
+bool chacha_is_arch_optimized(void)
+{
+	return true;
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init chacha_simd_mod_init(void)
 {
 	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
@@ -132,7 +138,7 @@ static void __exit chacha_simd_mod_fini(void)
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
-module_init(chacha_simd_mod_init);
+arch_initcall(chacha_simd_mod_init);
 module_exit(chacha_simd_mod_fini);
 
 MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (MIPS accelerated)");
diff --git a/arch/powerpc/crypto/chacha-p10-glue.c b/arch/powerpc/crypto/chacha-p10-glue.c
index d8796decc1fb..3355305b6c7f 100644
--- a/arch/powerpc/crypto/chacha-p10-glue.c
+++ b/arch/powerpc/crypto/chacha-p10-glue.c
@@ -189,6 +189,12 @@ static struct skcipher_alg algs[] = {
 	}
 };
 
+bool chacha_is_arch_optimized(void)
+{
+	return static_key_enabled(&have_p10);
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init chacha_p10_init(void)
 {
 	if (!cpu_has_feature(CPU_FTR_ARCH_31))
@@ -207,7 +213,7 @@ static void __exit chacha_p10_exit(void)
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
-module_init(chacha_p10_init);
+arch_initcall(chacha_p10_init);
 module_exit(chacha_p10_exit);
 
 MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (P10 accelerated)");
diff --git a/arch/riscv/crypto/chacha-riscv64-glue.c b/arch/riscv/crypto/chacha-riscv64-glue.c
index 68caef7a3d50..ccaab0dea383 100644
--- a/arch/riscv/crypto/chacha-riscv64-glue.c
+++ b/arch/riscv/crypto/chacha-riscv64-glue.c
@@ -49,6 +49,12 @@ void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 }
 EXPORT_SYMBOL(chacha_crypt_arch);
 
+bool chacha_is_arch_optimized(void)
+{
+	return static_key_enabled(&use_zvkb);
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init riscv64_chacha_mod_init(void)
 {
 	if (riscv_isa_extension_available(NULL, ZVKB) &&
@@ -56,7 +62,7 @@ static int __init riscv64_chacha_mod_init(void)
 		static_branch_enable(&use_zvkb);
 	return 0;
 }
-module_init(riscv64_chacha_mod_init);
+arch_initcall(riscv64_chacha_mod_init);
 
 MODULE_DESCRIPTION("ChaCha stream cipher (RISC-V optimized)");
 MODULE_AUTHOR("Jerry Shih <jerry.shih@sifive.com>");
diff --git a/arch/s390/crypto/chacha-glue.c b/arch/s390/crypto/chacha-glue.c
index 920e9f0941e7..0c68191f2aa4 100644
--- a/arch/s390/crypto/chacha-glue.c
+++ b/arch/s390/crypto/chacha-glue.c
@@ -103,6 +103,12 @@ static struct skcipher_alg chacha_algs[] = {
 	}
 };
 
+bool chacha_is_arch_optimized(void)
+{
+	return cpu_has_vx();
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init chacha_mod_init(void)
 {
 	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
@@ -115,7 +121,7 @@ static void __exit chacha_mod_fini(void)
 		crypto_unregister_skciphers(chacha_algs, ARRAY_SIZE(chacha_algs));
 }
 
-module_cpu_feature_match(S390_CPU_FEATURE_VXRS, chacha_mod_init);
+arch_initcall(chacha_mod_init);
 module_exit(chacha_mod_fini);
 
 MODULE_DESCRIPTION("ChaCha20 stream cipher");
diff --git a/arch/x86/crypto/chacha_glue.c b/arch/x86/crypto/chacha_glue.c
index 946c306f60cd..8858de7b33e3 100644
--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -247,6 +247,12 @@ static struct skcipher_alg algs[] = {
 	},
 };
 
+bool chacha_is_arch_optimized(void)
+{
+	return static_key_enabled(&chacha_use_simd);
+}
+EXPORT_SYMBOL(chacha_is_arch_optimized);
+
 static int __init chacha_simd_mod_init(void)
 {
 	if (!boot_cpu_has(X86_FEATURE_SSSE3))
@@ -271,7 +277,7 @@ static void __exit chacha_simd_mod_fini(void)
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
-module_init(chacha_simd_mod_init);
+arch_initcall(chacha_simd_mod_init);
 module_exit(chacha_simd_mod_fini);
 
 MODULE_LICENSE("GPL");
diff --git a/crypto/Makefile b/crypto/Makefile
index 0e6ab5ffd3f7..98510a2aa0b1 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -148,7 +148,8 @@ obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
 obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
 obj-$(CONFIG_CRYPTO_SEED) += seed.o
 obj-$(CONFIG_CRYPTO_ARIA) += aria_generic.o
-obj-$(CONFIG_CRYPTO_CHACHA20) += chacha_generic.o
+obj-$(CONFIG_CRYPTO_CHACHA20) += chacha.o
+CFLAGS_chacha.o += -DARCH=$(ARCH)
 obj-$(CONFIG_CRYPTO_POLY1305) += poly1305_generic.o
 obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
 obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
diff --git a/crypto/chacha.c b/crypto/chacha.c
new file mode 100644
index 000000000000..2009038c5e56
--- /dev/null
+++ b/crypto/chacha.c
@@ -0,0 +1,227 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Crypto API wrappers for the ChaCha20, XChaCha20, and XChaCha12 stream ciphers
+ *
+ * Copyright (C) 2015 Martin Willi
+ * Copyright (C) 2018 Google LLC
+ */
+
+#include <linux/unaligned.h>
+#include <crypto/algapi.h>
+#include <crypto/internal/chacha.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/module.h>
+
+static int chacha_stream_xor(struct skcipher_request *req,
+			     const struct chacha_ctx *ctx, const u8 *iv,
+			     bool arch)
+{
+	struct skcipher_walk walk;
+	u32 state[16];
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+
+	chacha_init(state, ctx->key, iv);
+
+	while (walk.nbytes > 0) {
+		unsigned int nbytes = walk.nbytes;
+
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
+
+		if (arch)
+			chacha_crypt(state, walk.dst.virt.addr,
+				     walk.src.virt.addr, nbytes, ctx->nrounds);
+		else
+			chacha_crypt_generic(state, walk.dst.virt.addr,
+					     walk.src.virt.addr, nbytes,
+					     ctx->nrounds);
+		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+	}
+
+	return err;
+}
+
+static int crypto_chacha_crypt_generic(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	return chacha_stream_xor(req, ctx, req->iv, false);
+}
+
+static int crypto_chacha_crypt_arch(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	return chacha_stream_xor(req, ctx, req->iv, true);
+}
+
+static int crypto_xchacha_crypt(struct skcipher_request *req, bool arch)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct chacha_ctx subctx;
+	u32 state[16];
+	u8 real_iv[16];
+
+	/* Compute the subkey given the original key and first 128 nonce bits */
+	chacha_init(state, ctx->key, req->iv);
+	if (arch)
+		hchacha_block(state, subctx.key, ctx->nrounds);
+	else
+		hchacha_block_generic(state, subctx.key, ctx->nrounds);
+	subctx.nrounds = ctx->nrounds;
+
+	/* Build the real IV */
+	memcpy(&real_iv[0], req->iv + 24, 8); /* stream position */
+	memcpy(&real_iv[8], req->iv + 16, 8); /* remaining 64 nonce bits */
+
+	/* Generate the stream and XOR it with the data */
+	return chacha_stream_xor(req, &subctx, real_iv, arch);
+}
+
+static int crypto_xchacha_crypt_generic(struct skcipher_request *req)
+{
+	return crypto_xchacha_crypt(req, false);
+}
+
+static int crypto_xchacha_crypt_arch(struct skcipher_request *req)
+{
+	return crypto_xchacha_crypt(req, true);
+}
+
+static struct skcipher_alg algs[] = {
+	{
+		.base.cra_name = "chacha20",
+		.base.cra_driver_name = "chacha20-generic",
+		.base.cra_priority = 100,
+		.base.cra_blocksize = 1,
+		.base.cra_ctxsize = sizeof(struct chacha_ctx),
+		.base.cra_module = THIS_MODULE,
+
+		.min_keysize = CHACHA_KEY_SIZE,
+		.max_keysize = CHACHA_KEY_SIZE,
+		.ivsize = CHACHA_IV_SIZE,
+		.chunksize = CHACHA_BLOCK_SIZE,
+		.setkey = chacha20_setkey,
+		.encrypt = crypto_chacha_crypt_generic,
+		.decrypt = crypto_chacha_crypt_generic,
+	},
+	{
+		.base.cra_name = "xchacha20",
+		.base.cra_driver_name = "xchacha20-generic",
+		.base.cra_priority = 100,
+		.base.cra_blocksize = 1,
+		.base.cra_ctxsize = sizeof(struct chacha_ctx),
+		.base.cra_module = THIS_MODULE,
+
+		.min_keysize = CHACHA_KEY_SIZE,
+		.max_keysize = CHACHA_KEY_SIZE,
+		.ivsize = XCHACHA_IV_SIZE,
+		.chunksize = CHACHA_BLOCK_SIZE,
+		.setkey = chacha20_setkey,
+		.encrypt = crypto_xchacha_crypt_generic,
+		.decrypt = crypto_xchacha_crypt_generic,
+	},
+	{
+		.base.cra_name = "xchacha12",
+		.base.cra_driver_name = "xchacha12-generic",
+		.base.cra_priority = 100,
+		.base.cra_blocksize = 1,
+		.base.cra_ctxsize = sizeof(struct chacha_ctx),
+		.base.cra_module = THIS_MODULE,
+
+		.min_keysize = CHACHA_KEY_SIZE,
+		.max_keysize = CHACHA_KEY_SIZE,
+		.ivsize = XCHACHA_IV_SIZE,
+		.chunksize = CHACHA_BLOCK_SIZE,
+		.setkey = chacha12_setkey,
+		.encrypt = crypto_xchacha_crypt_generic,
+		.decrypt = crypto_xchacha_crypt_generic,
+	},
+	{
+		.base.cra_name = "chacha20",
+		.base.cra_driver_name = "chacha20-" __stringify(ARCH),
+		.base.cra_priority = 300,
+		.base.cra_blocksize = 1,
+		.base.cra_ctxsize = sizeof(struct chacha_ctx),
+		.base.cra_module = THIS_MODULE,
+
+		.min_keysize = CHACHA_KEY_SIZE,
+		.max_keysize = CHACHA_KEY_SIZE,
+		.ivsize = CHACHA_IV_SIZE,
+		.chunksize = CHACHA_BLOCK_SIZE,
+		.setkey = chacha20_setkey,
+		.encrypt = crypto_chacha_crypt_arch,
+		.decrypt = crypto_chacha_crypt_arch,
+	},
+	{
+		.base.cra_name = "xchacha20",
+		.base.cra_driver_name = "xchacha20-" __stringify(ARCH),
+		.base.cra_priority = 300,
+		.base.cra_blocksize = 1,
+		.base.cra_ctxsize = sizeof(struct chacha_ctx),
+		.base.cra_module = THIS_MODULE,
+
+		.min_keysize = CHACHA_KEY_SIZE,
+		.max_keysize = CHACHA_KEY_SIZE,
+		.ivsize = XCHACHA_IV_SIZE,
+		.chunksize = CHACHA_BLOCK_SIZE,
+		.setkey = chacha20_setkey,
+		.encrypt = crypto_xchacha_crypt_arch,
+		.decrypt = crypto_xchacha_crypt_arch,
+	},
+	{
+		.base.cra_name = "xchacha12",
+		.base.cra_driver_name = "xchacha12-" __stringify(ARCH),
+		.base.cra_priority = 300,
+		.base.cra_blocksize = 1,
+		.base.cra_ctxsize = sizeof(struct chacha_ctx),
+		.base.cra_module = THIS_MODULE,
+
+		.min_keysize = CHACHA_KEY_SIZE,
+		.max_keysize = CHACHA_KEY_SIZE,
+		.ivsize = XCHACHA_IV_SIZE,
+		.chunksize = CHACHA_BLOCK_SIZE,
+		.setkey = chacha12_setkey,
+		.encrypt = crypto_xchacha_crypt_arch,
+		.decrypt = crypto_xchacha_crypt_arch,
+	}
+};
+
+static unsigned int num_algs;
+
+static int __init crypto_chacha_mod_init(void)
+{
+	/* register the arch flavours only if they differ from generic */
+	num_algs = ARRAY_SIZE(algs);
+	BUILD_BUG_ON(ARRAY_SIZE(algs) % 2 != 0);
+	if (!chacha_is_arch_optimized())
+		num_algs /= 2;
+
+	return crypto_register_skciphers(algs, num_algs);
+}
+
+static void __exit crypto_chacha_mod_fini(void)
+{
+	crypto_unregister_skciphers(algs, num_algs);
+}
+
+subsys_initcall(crypto_chacha_mod_init);
+module_exit(crypto_chacha_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
+MODULE_DESCRIPTION("Crypto API wrappers for the ChaCha20, XChaCha20, and XChaCha12 stream ciphers");
+MODULE_ALIAS_CRYPTO("chacha20");
+MODULE_ALIAS_CRYPTO("chacha20-generic");
+MODULE_ALIAS_CRYPTO("chacha20-" __stringify(ARCH));
+MODULE_ALIAS_CRYPTO("xchacha20");
+MODULE_ALIAS_CRYPTO("xchacha20-generic");
+MODULE_ALIAS_CRYPTO("xchacha20-" __stringify(ARCH));
+MODULE_ALIAS_CRYPTO("xchacha12");
+MODULE_ALIAS_CRYPTO("xchacha12-generic");
+MODULE_ALIAS_CRYPTO("xchacha12-" __stringify(ARCH));
diff --git a/crypto/chacha_generic.c b/crypto/chacha_generic.c
deleted file mode 100644
index 1fb9fbd302c6..000000000000
--- a/crypto/chacha_generic.c
+++ /dev/null
@@ -1,139 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * ChaCha and XChaCha stream ciphers, including ChaCha20 (RFC7539)
- *
- * Copyright (C) 2015 Martin Willi
- * Copyright (C) 2018 Google LLC
- */
-
-#include <linux/unaligned.h>
-#include <crypto/algapi.h>
-#include <crypto/internal/chacha.h>
-#include <crypto/internal/skcipher.h>
-#include <linux/module.h>
-
-static int chacha_stream_xor(struct skcipher_request *req,
-			     const struct chacha_ctx *ctx, const u8 *iv)
-{
-	struct skcipher_walk walk;
-	u32 state[16];
-	int err;
-
-	err = skcipher_walk_virt(&walk, req, false);
-
-	chacha_init(state, ctx->key, iv);
-
-	while (walk.nbytes > 0) {
-		unsigned int nbytes = walk.nbytes;
-
-		if (nbytes < walk.total)
-			nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
-
-		chacha_crypt_generic(state, walk.dst.virt.addr,
-				     walk.src.virt.addr, nbytes, ctx->nrounds);
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-	}
-
-	return err;
-}
-
-static int crypto_chacha_crypt(struct skcipher_request *req)
-{
-	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
-
-	return chacha_stream_xor(req, ctx, req->iv);
-}
-
-static int crypto_xchacha_crypt(struct skcipher_request *req)
-{
-	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
-	struct chacha_ctx subctx;
-	u32 state[16];
-	u8 real_iv[16];
-
-	/* Compute the subkey given the original key and first 128 nonce bits */
-	chacha_init(state, ctx->key, req->iv);
-	hchacha_block_generic(state, subctx.key, ctx->nrounds);
-	subctx.nrounds = ctx->nrounds;
-
-	/* Build the real IV */
-	memcpy(&real_iv[0], req->iv + 24, 8); /* stream position */
-	memcpy(&real_iv[8], req->iv + 16, 8); /* remaining 64 nonce bits */
-
-	/* Generate the stream and XOR it with the data */
-	return chacha_stream_xor(req, &subctx, real_iv);
-}
-
-static struct skcipher_alg algs[] = {
-	{
-		.base.cra_name = "chacha20",
-		.base.cra_driver_name = "chacha20-generic",
-		.base.cra_priority = 100,
-		.base.cra_blocksize = 1,
-		.base.cra_ctxsize = sizeof(struct chacha_ctx),
-		.base.cra_module = THIS_MODULE,
-
-		.min_keysize = CHACHA_KEY_SIZE,
-		.max_keysize = CHACHA_KEY_SIZE,
-		.ivsize = CHACHA_IV_SIZE,
-		.chunksize = CHACHA_BLOCK_SIZE,
-		.setkey = chacha20_setkey,
-		.encrypt = crypto_chacha_crypt,
-		.decrypt = crypto_chacha_crypt,
-	}, {
-		.base.cra_name = "xchacha20",
-		.base.cra_driver_name = "xchacha20-generic",
-		.base.cra_priority = 100,
-		.base.cra_blocksize = 1,
-		.base.cra_ctxsize = sizeof(struct chacha_ctx),
-		.base.cra_module = THIS_MODULE,
-
-		.min_keysize = CHACHA_KEY_SIZE,
-		.max_keysize = CHACHA_KEY_SIZE,
-		.ivsize = XCHACHA_IV_SIZE,
-		.chunksize = CHACHA_BLOCK_SIZE,
-		.setkey = chacha20_setkey,
-		.encrypt = crypto_xchacha_crypt,
-		.decrypt = crypto_xchacha_crypt,
-	}, {
-		.base.cra_name = "xchacha12",
-		.base.cra_driver_name = "xchacha12-generic",
-		.base.cra_priority = 100,
-		.base.cra_blocksize = 1,
-		.base.cra_ctxsize = sizeof(struct chacha_ctx),
-		.base.cra_module = THIS_MODULE,
-
-		.min_keysize = CHACHA_KEY_SIZE,
-		.max_keysize = CHACHA_KEY_SIZE,
-		.ivsize = XCHACHA_IV_SIZE,
-		.chunksize = CHACHA_BLOCK_SIZE,
-		.setkey = chacha12_setkey,
-		.encrypt = crypto_xchacha_crypt,
-		.decrypt = crypto_xchacha_crypt,
-	}
-};
-
-static int __init chacha_generic_mod_init(void)
-{
-	return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
-}
-
-static void __exit chacha_generic_mod_fini(void)
-{
-	crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
-}
-
-subsys_initcall(chacha_generic_mod_init);
-module_exit(chacha_generic_mod_fini);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
-MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (generic)");
-MODULE_ALIAS_CRYPTO("chacha20");
-MODULE_ALIAS_CRYPTO("chacha20-generic");
-MODULE_ALIAS_CRYPTO("xchacha20");
-MODULE_ALIAS_CRYPTO("xchacha20-generic");
-MODULE_ALIAS_CRYPTO("xchacha12");
-MODULE_ALIAS_CRYPTO("xchacha12-generic");
diff --git a/include/crypto/chacha.h b/include/crypto/chacha.h
index f8cc073bba41..58129e18cc31 100644
--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -99,4 +99,13 @@ static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
 	chacha_crypt(state, dst, src, bytes, 20);
 }
 
+#if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA)
+bool chacha_is_arch_optimized(void);
+#else
+static inline bool chacha_is_arch_optimized(void)
+{
+	return false;
+}
+#endif
+
 #endif /* _CRYPTO_CHACHA_H */
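Usage note (not part of the commit above): a minimal sketch of how a hypothetical in-kernel caller might combine the new chacha_is_arch_optimized() helper with the existing ChaCha library interface from <crypto/chacha.h>. The module name and the logging are illustrative only; it assumes CONFIG_CRYPTO_LIB_CHACHA is enabled so that chacha_crypt() is available, and chacha_crypt() already dispatches to the arch code on its own, so the helper is consulted here purely for reporting.

// Hypothetical example module, not part of the patch.
#include <crypto/chacha.h>
#include <linux/module.h>
#include <linux/printk.h>

static int __init chacha_arch_probe_init(void)
{
	u32 key[CHACHA_KEY_SIZE / sizeof(u32)] = { 0 };
	u8 iv[CHACHA_IV_SIZE] = { 0 };
	u8 buf[CHACHA_BLOCK_SIZE] = { 0 };
	u32 state[16];

	/* Report whether an arch-optimized ChaCha implementation is present. */
	pr_info("chacha: arch optimized: %s\n",
		chacha_is_arch_optimized() ? "yes" : "no");

	/* Encrypt one block in place via the library interface. */
	chacha_init(state, key, iv);
	chacha_crypt(state, buf, buf, sizeof(buf), 20);
	return 0;
}
module_init(chacha_arch_probe_init);

static void __exit chacha_arch_probe_exit(void)
{
}
module_exit(chacha_arch_probe_exit);

MODULE_DESCRIPTION("Hypothetical example user of chacha_is_arch_optimized()");
MODULE_LICENSE("GPL");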