@@ -275,7 +275,8 @@ static struct simd_aead_alg *simd_alg;
static int __init crypto_aegis128_aesni_module_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
if (!boot_cpu_has(X86_FEATURE_XMM2) ||
@@ -283,8 +284,11 @@ static int __init crypto_aegis128_aesni_module_init(void)
!cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
return -ENODEV;
- return simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
+ ret = simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
&simd_alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit crypto_aegis128_aesni_module_exit(void)
@@ -1238,25 +1238,28 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init aesni_init(void)
{
int err;
+ int enabled_gcm_sse = 0;
+ int enabled_gcm_avx = 0;
+ int enabled_gcm_avx2 = 0;
+ int enabled_ctr_avx = 0;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
#ifdef CONFIG_X86_64
if (boot_cpu_has(X86_FEATURE_AVX2)) {
- pr_info("AVX2 version of gcm_enc/dec engaged.\n");
+ enabled_gcm_avx = 1;
+ enabled_gcm_avx2 = 1;
static_branch_enable(&gcm_use_avx);
static_branch_enable(&gcm_use_avx2);
- } else
- if (boot_cpu_has(X86_FEATURE_AVX)) {
- pr_info("AVX version of gcm_enc/dec engaged.\n");
+ } else if (boot_cpu_has(X86_FEATURE_AVX)) {
+ enabled_gcm_avx = 1;
static_branch_enable(&gcm_use_avx);
} else {
- pr_info("SSE version of gcm_enc/dec engaged.\n");
+ enabled_gcm_sse = 1;
}
if (boot_cpu_has(X86_FEATURE_AVX)) {
- /* optimize performance of ctr mode encryption transform */
+ enabled_ctr_avx = 1;
static_call_update(aesni_ctr_enc_tfm, aesni_ctr_enc_avx_tfm);
- pr_info("AES CTR mode by8 optimization enabled\n");
}
#endif /* CONFIG_X86_64 */
@@ -1283,6 +1286,11 @@ static int __init aesni_init(void)
goto unregister_aeads;
#endif /* CONFIG_X86_64 */
+ pr_info("CPU-optimized crypto module loaded (GCM SSE=%s, AVX=%s, AVX2=%s)(CTR AVX=%s)\n",
+ enabled_gcm_sse ? "yes" : "no",
+ enabled_gcm_avx ? "yes" : "no",
+ enabled_gcm_avx2 ? "yes" : "no",
+ enabled_ctr_avx ? "yes" : "no");
return 0;
#ifdef CONFIG_X86_64
@@ -170,6 +170,8 @@ static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];
static int __init aria_avx_init(void)
{
const char *feature_name;
+ int ret;
+ int enabled_gfni = 0;
if (!boot_cpu_has(X86_FEATURE_AVX) ||
!boot_cpu_has(X86_FEATURE_AES) ||
@@ -188,15 +190,20 @@ static int __init aria_avx_init(void)
aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
+ enabled_gfni = 1;
} else {
aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
}
- return simd_register_skciphers_compat(aria_algs,
- ARRAY_SIZE(aria_algs),
- aria_simd_algs);
+ ret = simd_register_skciphers_compat(aria_algs,
+ ARRAY_SIZE(aria_algs),
+ aria_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded (GFNI=%s)\n",
+ enabled_gfni ? "yes" : "no");
+ return ret;
}
static void __exit aria_avx_exit(void)
@@ -66,11 +66,16 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init blake2s_mod_init(void)
{
+ int enabled_ssse3 = 0;
+ int enabled_avx512 = 0;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
- if (boot_cpu_has(X86_FEATURE_SSSE3))
+ if (boot_cpu_has(X86_FEATURE_SSSE3)) {
+ enabled_ssse3 = 1;
static_branch_enable(&blake2s_use_ssse3);
+ }
if (IS_ENABLED(CONFIG_AS_AVX512) &&
boot_cpu_has(X86_FEATURE_AVX) &&
@@ -78,9 +83,14 @@ static int __init blake2s_mod_init(void)
boot_cpu_has(X86_FEATURE_AVX512F) &&
boot_cpu_has(X86_FEATURE_AVX512VL) &&
cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM |
- XFEATURE_MASK_AVX512, NULL))
+ XFEATURE_MASK_AVX512, NULL)) {
+ enabled_avx512 = 1;
static_branch_enable(&blake2s_use_avx512);
+ }
+ pr_info("CPU-optimized crypto module loaded (SSSE3=%s, AVX512=%s)\n",
+ enabled_ssse3 ? "yes" : "no",
+ enabled_avx512 ? "yes" : "no");
return 0;
}
@@ -336,6 +336,8 @@ static int __init blowfish_init(void)
if (err)
crypto_unregister_alg(&bf_cipher_alg);
+ if (!err)
+ pr_info("CPU-optimized crypto module loaded\n");
return err;
}
@@ -114,6 +114,7 @@ static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
static int __init camellia_aesni_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -132,9 +133,12 @@ static int __init camellia_aesni_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(camellia_algs,
+ ret = simd_register_skciphers_compat(camellia_algs,
ARRAY_SIZE(camellia_algs),
camellia_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit camellia_aesni_fini(void)
@@ -113,6 +113,7 @@ static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
static int __init camellia_aesni_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -130,9 +131,12 @@ static int __init camellia_aesni_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(camellia_algs,
+ ret = simd_register_skciphers_compat(camellia_algs,
ARRAY_SIZE(camellia_algs),
camellia_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit camellia_aesni_fini(void)
@@ -1410,6 +1410,9 @@ static int __init camellia_init(void)
if (err)
crypto_unregister_alg(&camellia_cipher_alg);
+ if (!err)
+ pr_info("CPU-optimized crypto module loaded\n");
+
return err;
}
@@ -107,6 +107,7 @@ static struct simd_skcipher_alg *cast5_simd_algs[ARRAY_SIZE(cast5_algs)];
static int __init cast5_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -117,9 +118,12 @@ static int __init cast5_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(cast5_algs,
+ ret = simd_register_skciphers_compat(cast5_algs,
ARRAY_SIZE(cast5_algs),
cast5_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit cast5_exit(void)
@@ -107,6 +107,7 @@ static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];
static int __init cast6_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -117,9 +118,12 @@ static int __init cast6_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(cast6_algs,
+ ret = simd_register_skciphers_compat(cast6_algs,
ARRAY_SIZE(cast6_algs),
cast6_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit cast6_exit(void)
@@ -289,6 +289,9 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init chacha_simd_mod_init(void)
{
+ int ret;
+ int enabled_avx2 = 0;
+ int enabled_avx512 = 0;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -298,15 +301,25 @@ static int __init chacha_simd_mod_init(void)
if (boot_cpu_has(X86_FEATURE_AVX) &&
boot_cpu_has(X86_FEATURE_AVX2) &&
cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
+ enabled_avx2 = 1;
static_branch_enable(&chacha_use_avx2);
if (IS_ENABLED(CONFIG_AS_AVX512) &&
boot_cpu_has(X86_FEATURE_AVX512VL) &&
- boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
+ boot_cpu_has(X86_FEATURE_AVX512BW)) { /* kmovq */
+ enabled_avx512 = 1;
static_branch_enable(&chacha_use_avx512vl);
+ }
}
- return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
+ ret = IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded (AVX2=%s, AVX512=%s)\n",
+ enabled_avx2 ? "yes" : "no",
+ enabled_avx512 ? "yes" : "no");
+ else
+ pr_info("CPU-optimized crypto module not loaded");
+ return ret;
}
static void __exit chacha_simd_mod_fini(void)
@@ -190,9 +190,15 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init crc32_pclmul_mod_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
- return crypto_register_shash(&alg);
+
+ ret = crypto_register_shash(&alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit crc32_pclmul_mod_fini(void)
@@ -242,16 +242,27 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init crc32c_intel_mod_init(void)
{
- if (!x86_match_cpu(module_cpu_ids))
+ int ret;
+ int pcl_enabled = 0;
+
+ if (!x86_match_cpu(module_cpu_ids)) {
+ pr_info("CPU-optimized crypto module not loaded, required CPU feature (SSE4.2) not supported\n");
return -ENODEV;
+ }
+
#ifdef CONFIG_X86_64
if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
+ pcl_enabled = 1;
alg.update = crc32c_pcl_intel_update;
alg.finup = crc32c_pcl_intel_finup;
alg.digest = crc32c_pcl_intel_digest;
}
#endif
- return crypto_register_shash(&alg);
+ ret = crypto_register_shash(&alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded (PCLMULQDQ=%s)\n",
+ pcl_enabled ? "yes" : "no");
+ return ret;
}
static void __exit crc32c_intel_mod_fini(void)
@@ -146,10 +146,15 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init crct10dif_intel_mod_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
- return crypto_register_shash(&alg);
+ ret = crypto_register_shash(&alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit crct10dif_intel_mod_fini(void)
@@ -1709,15 +1709,24 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init curve25519_mod_init(void)
{
+ int ret;
+ int enabled_adx = 0;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
- if (boot_cpu_has(X86_FEATURE_BMI2) && boot_cpu_has(X86_FEATURE_ADX))
+ if (boot_cpu_has(X86_FEATURE_BMI2) && boot_cpu_has(X86_FEATURE_ADX)) {
+ enabled_adx = 1;
static_branch_enable(&curve25519_use_bmi2_adx);
- else
+ } else {
return 0;
+ }
- return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
+ ret = IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
crypto_register_kpp(&curve25519_alg) : 0;
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded (ADX=%s)\n",
+ enabled_adx ? "yes" : "no");
+ return ret;
}
static void __exit curve25519_mod_exit(void)
@@ -384,6 +384,8 @@ static int __init des3_ede_x86_init(void)
if (err)
crypto_unregister_alg(&des3_ede_cipher);
+ if (!err)
+ pr_info("CPU-optimized crypto module loaded\n");
return err;
}
@@ -349,6 +349,7 @@ static int __init ghash_pclmulqdqni_mod_init(void)
if (err)
goto err_shash;
+ pr_info("CPU-optimized crypto module loaded\n");
return 0;
err_shash:
@@ -68,6 +68,8 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init nhpoly1305_mod_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -75,7 +77,10 @@ static int __init nhpoly1305_mod_init(void)
!boot_cpu_has(X86_FEATURE_OSXSAVE))
return -ENODEV;
- return crypto_register_shash(&nhpoly1305_alg);
+ ret = crypto_register_shash(&nhpoly1305_alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit nhpoly1305_mod_exit(void)
@@ -68,13 +68,18 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init nhpoly1305_mod_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
if (!boot_cpu_has(X86_FEATURE_XMM2))
return -ENODEV;
- return crypto_register_shash(&nhpoly1305_alg);
+ ret = crypto_register_shash(&nhpoly1305_alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit nhpoly1305_mod_exit(void)
@@ -273,22 +273,39 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init poly1305_simd_mod_init(void)
{
+ int ret;
+ int enabled_avx = 0;
+ int enabled_avx2 = 0;
+ int enabled_avx512 = 0;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
if (boot_cpu_has(X86_FEATURE_AVX) &&
- cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
+ cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
+ enabled_avx = 1;
static_branch_enable(&poly1305_use_avx);
+ }
if (boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_AVX2) &&
- cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
+ cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
+ enabled_avx2 = 1;
static_branch_enable(&poly1305_use_avx2);
+ }
if (IS_ENABLED(CONFIG_AS_AVX512) && boot_cpu_has(X86_FEATURE_AVX) &&
boot_cpu_has(X86_FEATURE_AVX2) && boot_cpu_has(X86_FEATURE_AVX512F) &&
cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM | XFEATURE_MASK_AVX512, NULL) &&
/* Skylake downclocks unacceptably much when using zmm, but later generations are fast. */
- boot_cpu_data.x86_model != INTEL_FAM6_SKYLAKE_X)
+ boot_cpu_data.x86_model != INTEL_FAM6_SKYLAKE_X) {
+ enabled_avx512 = 1;
static_branch_enable(&poly1305_use_avx512);
- return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
+ }
+ ret = IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded (AVX=%s, AVX2=%s, AVX512=%s)\n",
+ enabled_avx ? "yes" : "no",
+ enabled_avx2 ? "yes" : "no",
+ enabled_avx512 ? "yes" : "no");
+ return ret;
}
static void __exit poly1305_simd_mod_exit(void)
@@ -183,13 +183,18 @@ MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);
static int __init polyval_clmulni_mod_init(void)
{
+ int ret;
+
if (!x86_match_cpu(pcmul_cpu_id))
return -ENODEV;
if (!boot_cpu_has(X86_FEATURE_AVX))
return -ENODEV;
- return crypto_register_shash(&polyval_alg);
+ ret = crypto_register_shash(&polyval_alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit polyval_clmulni_mod_exit(void)
@@ -108,8 +108,8 @@ static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
static int __init serpent_avx2_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
if (!boot_cpu_has(X86_FEATURE_AVX2) || !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
@@ -122,9 +122,12 @@ static int __init serpent_avx2_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(serpent_algs,
+ ret = simd_register_skciphers_compat(serpent_algs,
ARRAY_SIZE(serpent_algs),
serpent_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit serpent_avx2_fini(void)
@@ -114,6 +114,7 @@ static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
static int __init serpent_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -124,9 +125,12 @@ static int __init serpent_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(serpent_algs,
+ ret = simd_register_skciphers_compat(serpent_algs,
ARRAY_SIZE(serpent_algs),
serpent_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit serpent_exit(void)
@@ -116,6 +116,8 @@ static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
static int __init serpent_sse2_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -124,9 +126,12 @@ static int __init serpent_sse2_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(serpent_algs,
+ ret = simd_register_skciphers_compat(serpent_algs,
ARRAY_SIZE(serpent_algs),
serpent_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit serpent_sse2_exit(void)
@@ -125,6 +125,7 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init sm3_avx_mod_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -145,7 +146,10 @@ static int __init sm3_avx_mod_init(void)
return -ENODEV;
}
- return crypto_register_shash(&sm3_avx_alg);
+ ret = crypto_register_shash(&sm3_avx_alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit sm3_avx_mod_exit(void)
@@ -143,6 +143,7 @@ simd_sm4_aesni_avx2_skciphers[ARRAY_SIZE(sm4_aesni_avx2_skciphers)];
static int __init sm4_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -161,9 +162,12 @@ static int __init sm4_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(sm4_aesni_avx2_skciphers,
+ ret = simd_register_skciphers_compat(sm4_aesni_avx2_skciphers,
ARRAY_SIZE(sm4_aesni_avx2_skciphers),
simd_sm4_aesni_avx2_skciphers);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit sm4_exit(void)
@@ -461,8 +461,8 @@ simd_sm4_aesni_avx_skciphers[ARRAY_SIZE(sm4_aesni_avx_skciphers)];
static int __init sm4_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -478,9 +478,12 @@ static int __init sm4_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(sm4_aesni_avx_skciphers,
+ ret = simd_register_skciphers_compat(sm4_aesni_avx_skciphers,
ARRAY_SIZE(sm4_aesni_avx_skciphers),
simd_sm4_aesni_avx_skciphers);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit sm4_exit(void)
@@ -117,6 +117,7 @@ static struct simd_skcipher_alg *twofish_simd_algs[ARRAY_SIZE(twofish_algs)];
static int __init twofish_init(void)
{
const char *feature_name;
+ int ret;
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -126,9 +127,12 @@ static int __init twofish_init(void)
return -ENODEV;
}
- return simd_register_skciphers_compat(twofish_algs,
- ARRAY_SIZE(twofish_algs),
- twofish_simd_algs);
+ ret = simd_register_skciphers_compat(twofish_algs,
+ ARRAY_SIZE(twofish_algs),
+ twofish_simd_algs);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit twofish_exit(void)
@@ -92,10 +92,15 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init twofish_glue_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
- return crypto_register_alg(&alg);
+ ret = crypto_register_alg(&alg);
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit twofish_glue_fini(void)
@@ -151,6 +151,8 @@ MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
static int __init twofish_3way_init(void)
{
+ int ret;
+
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
@@ -162,8 +164,11 @@ static int __init twofish_3way_init(void)
return -ENODEV;
}
- return crypto_register_skciphers(tf_skciphers,
- ARRAY_SIZE(tf_skciphers));
+ ret = crypto_register_skciphers(tf_skciphers,
+ ARRAY_SIZE(tf_skciphers));
+ if (!ret)
+ pr_info("CPU-optimized crypto module loaded\n");
+ return ret;
}
static void __exit twofish_3way_fini(void)
Print a positive message at the info level if the CPU-optimized
module is loaded, for all modules except the sha modules.

Signed-off-by: Robert Elliott <elliott@hpe.com>
---
 arch/x86/crypto/aegis128-aesni-glue.c      |  8 +++++--
 arch/x86/crypto/aesni-intel_glue.c         | 22 +++++++++++++------
 arch/x86/crypto/aria_aesni_avx_glue.c      | 13 ++++++++---
 arch/x86/crypto/blake2s-glue.c             | 14 ++++++++++--
 arch/x86/crypto/blowfish_glue.c            |  2 ++
 arch/x86/crypto/camellia_aesni_avx2_glue.c |  6 +++++-
 arch/x86/crypto/camellia_aesni_avx_glue.c  |  6 +++++-
 arch/x86/crypto/camellia_glue.c            |  3 +++
 arch/x86/crypto/cast5_avx_glue.c           |  6 +++++-
 arch/x86/crypto/cast6_avx_glue.c           |  6 +++++-
 arch/x86/crypto/chacha_glue.c              | 17 +++++++++++++--
 arch/x86/crypto/crc32-pclmul_glue.c        |  8 ++++++-
 arch/x86/crypto/crc32c-intel_glue.c        | 15 +++++++++++--
 arch/x86/crypto/crct10dif-pclmul_glue.c    |  7 +++++-
 arch/x86/crypto/curve25519-x86_64.c        | 13 +++++++++--
 arch/x86/crypto/des3_ede_glue.c            |  2 ++
 arch/x86/crypto/ghash-clmulni-intel_glue.c |  1 +
 arch/x86/crypto/nhpoly1305-avx2-glue.c     |  7 +++++-
 arch/x86/crypto/nhpoly1305-sse2-glue.c     |  7 +++++-
 arch/x86/crypto/poly1305_glue.c            | 25 ++++++++++++++++++----
 arch/x86/crypto/polyval-clmulni_glue.c     |  7 +++++-
 arch/x86/crypto/serpent_avx2_glue.c        |  7 ++++--
 arch/x86/crypto/serpent_avx_glue.c         |  6 +++++-
 arch/x86/crypto/serpent_sse2_glue.c        |  7 +++++-
 arch/x86/crypto/sm3_avx_glue.c             |  6 +++++-
 arch/x86/crypto/sm4_aesni_avx2_glue.c      |  6 +++++-
 arch/x86/crypto/sm4_aesni_avx_glue.c       |  7 ++++--
 arch/x86/crypto/twofish_avx_glue.c         | 10 ++++++---
 arch/x86/crypto/twofish_glue.c             |  7 +++++-
 arch/x86/crypto/twofish_glue_3way.c        |  9 ++++++--
 30 files changed, 213 insertions(+), 47 deletions(-)
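For reference, every hunk above follows the same shape, so here is a minimal,
self-contained sketch of that pattern. It is not taken from any of the files
in this patch: example_mod_init(), example_register(), example_mod_exit(), and
example_use_avx2 are hypothetical placeholders for a module's init/exit
functions, its real registration call (crypto_register_shash(),
simd_register_skciphers_compat(), ...), and its static key; the AVX2 feature
test is only illustrative.

/*
 * Minimal sketch of the init pattern applied throughout this patch.
 * Everything prefixed with example_ is a placeholder, not real code
 * from arch/x86/crypto.
 */
#include <linux/jump_label.h>
#include <linux/module.h>
#include <linux/printk.h>
#include <asm/cpu_device_id.h>
#include <asm/cpufeature.h>

static __ro_after_init DEFINE_STATIC_KEY_FALSE(example_use_avx2);

static const struct x86_cpu_id module_cpu_ids[] = {
        X86_MATCH_FEATURE(X86_FEATURE_XMM2, NULL),
        {}
};
MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);

/* hypothetical stand-in for the per-module registration call */
static int example_register(void)
{
        return 0;
}

static int __init example_mod_init(void)
{
        int ret;
        int enabled_avx2 = 0;

        /* bail out early if the baseline CPU feature is absent */
        if (!x86_match_cpu(module_cpu_ids))
                return -ENODEV;

        /* record which optional accelerations were switched on */
        if (boot_cpu_has(X86_FEATURE_AVX2)) {
                enabled_avx2 = 1;
                static_branch_enable(&example_use_avx2);
        }

        /* print the positive message only if registration succeeded */
        ret = example_register();
        if (!ret)
                pr_info("CPU-optimized crypto module loaded (AVX2=%s)\n",
                        enabled_avx2 ? "yes" : "no");
        return ret;
}

static void __exit example_mod_exit(void)
{
}

module_init(example_mod_init);
module_exit(example_mod_exit);
MODULE_LICENSE("GPL");

As in the blake2s, chacha, and poly1305 hunks, a local enabled_* flag mirrors
each static_branch_enable() or static_call_update() so the final message
reports exactly which accelerated paths were engaged; modules with no optional
paths (blowfish, camellia, twofish, etc.) print the plain form of the message.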