Message ID | 20190612124838.2492-3-ard.biesheuvel@linaro.org |
---|---|
State | New |
Headers | show |
Series | AES cleanup | expand |
On Wed, 12 Jun 2019 at 14:48, Ard Biesheuvel <ard.biesheuvel@linaro.org> wrote: > > Rename some local AES encrypt/decrypt routines so they don't clash with > the names we are about to introduce for the routines exposes by the > generic AES library. > > Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> I need to respin this patch - the subject line is inaccurate, and I forgot to include a similar change for the AES-NI driver. > --- > arch/arm/crypto/aes-cipher-glue.c | 8 ++++---- > arch/arm64/crypto/aes-cipher-glue.c | 8 ++++---- > crypto/aes_generic.c | 8 ++++---- > 3 files changed, 12 insertions(+), 12 deletions(-) > > diff --git a/arch/arm/crypto/aes-cipher-glue.c b/arch/arm/crypto/aes-cipher-glue.c > index c222f6e072ad..f6c07867b8ff 100644 > --- a/arch/arm/crypto/aes-cipher-glue.c > +++ b/arch/arm/crypto/aes-cipher-glue.c > @@ -19,7 +19,7 @@ EXPORT_SYMBOL(__aes_arm_encrypt); > asmlinkage void __aes_arm_decrypt(u32 *rk, int rounds, const u8 *in, u8 *out); > EXPORT_SYMBOL(__aes_arm_decrypt); > > -static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > +static void aes_arm_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > { > struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); > int rounds = 6 + ctx->key_length / 4; > @@ -27,7 +27,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > __aes_arm_encrypt(ctx->key_enc, rounds, in, out); > } > > -static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > +static void aes_arm_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > { > struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); > int rounds = 6 + ctx->key_length / 4; > @@ -47,8 +47,8 @@ static struct crypto_alg aes_alg = { > .cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE, > .cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE, > .cra_cipher.cia_setkey = crypto_aes_set_key, > - .cra_cipher.cia_encrypt = aes_encrypt, > - .cra_cipher.cia_decrypt = aes_decrypt, > + .cra_cipher.cia_encrypt = aes_arm_encrypt, 
> + .cra_cipher.cia_decrypt = aes_arm_decrypt, > > #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS > .cra_alignmask = 3, > diff --git a/arch/arm64/crypto/aes-cipher-glue.c b/arch/arm64/crypto/aes-cipher-glue.c > index 7288e7cbebff..0e90b06ebcec 100644 > --- a/arch/arm64/crypto/aes-cipher-glue.c > +++ b/arch/arm64/crypto/aes-cipher-glue.c > @@ -18,7 +18,7 @@ EXPORT_SYMBOL(__aes_arm64_encrypt); > asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds); > EXPORT_SYMBOL(__aes_arm64_decrypt); > > -static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > +static void aes_arm64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > { > struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); > int rounds = 6 + ctx->key_length / 4; > @@ -26,7 +26,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > __aes_arm64_encrypt(ctx->key_enc, out, in, rounds); > } > > -static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > +static void aes_arm64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > { > struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); > int rounds = 6 + ctx->key_length / 4; > @@ -46,8 +46,8 @@ static struct crypto_alg aes_alg = { > .cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE, > .cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE, > .cra_cipher.cia_setkey = crypto_aes_set_key, > - .cra_cipher.cia_encrypt = aes_encrypt, > - .cra_cipher.cia_decrypt = aes_decrypt > + .cra_cipher.cia_encrypt = aes_arm64_encrypt, > + .cra_cipher.cia_decrypt = aes_arm64_decrypt > }; > > static int __init aes_init(void) > diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c > index f217568917e4..3aa4a715c216 100644 > --- a/crypto/aes_generic.c > +++ b/crypto/aes_generic.c > @@ -1332,7 +1332,7 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key); > f_rl(bo, bi, 3, k); \ > } while (0) > > -static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > +static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, 
const u8 *in) > { > const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); > u32 b0[4], b1[4]; > @@ -1402,7 +1402,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > i_rl(bo, bi, 3, k); \ > } while (0) > > -static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > +static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) > { > const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); > u32 b0[4], b1[4]; > @@ -1454,8 +1454,8 @@ static struct crypto_alg aes_alg = { > .cia_min_keysize = AES_MIN_KEY_SIZE, > .cia_max_keysize = AES_MAX_KEY_SIZE, > .cia_setkey = crypto_aes_set_key, > - .cia_encrypt = aes_encrypt, > - .cia_decrypt = aes_decrypt > + .cia_encrypt = crypto_aes_encrypt, > + .cia_decrypt = crypto_aes_decrypt > } > } > }; > -- > 2.20.1 >
diff --git a/arch/arm/crypto/aes-cipher-glue.c b/arch/arm/crypto/aes-cipher-glue.c index c222f6e072ad..f6c07867b8ff 100644 --- a/arch/arm/crypto/aes-cipher-glue.c +++ b/arch/arm/crypto/aes-cipher-glue.c @@ -19,7 +19,7 @@ EXPORT_SYMBOL(__aes_arm_encrypt); asmlinkage void __aes_arm_decrypt(u32 *rk, int rounds, const u8 *in, u8 *out); EXPORT_SYMBOL(__aes_arm_decrypt); -static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +static void aes_arm_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); int rounds = 6 + ctx->key_length / 4; @@ -27,7 +27,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) __aes_arm_encrypt(ctx->key_enc, rounds, in, out); } -static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +static void aes_arm_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); int rounds = 6 + ctx->key_length / 4; @@ -47,8 +47,8 @@ static struct crypto_alg aes_alg = { .cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE, .cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE, .cra_cipher.cia_setkey = crypto_aes_set_key, - .cra_cipher.cia_encrypt = aes_encrypt, - .cra_cipher.cia_decrypt = aes_decrypt, + .cra_cipher.cia_encrypt = aes_arm_encrypt, + .cra_cipher.cia_decrypt = aes_arm_decrypt, #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS .cra_alignmask = 3, diff --git a/arch/arm64/crypto/aes-cipher-glue.c b/arch/arm64/crypto/aes-cipher-glue.c index 7288e7cbebff..0e90b06ebcec 100644 --- a/arch/arm64/crypto/aes-cipher-glue.c +++ b/arch/arm64/crypto/aes-cipher-glue.c @@ -18,7 +18,7 @@ EXPORT_SYMBOL(__aes_arm64_encrypt); asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds); EXPORT_SYMBOL(__aes_arm64_decrypt); -static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +static void aes_arm64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { struct crypto_aes_ctx *ctx = 
crypto_tfm_ctx(tfm); int rounds = 6 + ctx->key_length / 4; @@ -26,7 +26,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) __aes_arm64_encrypt(ctx->key_enc, out, in, rounds); } -static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +static void aes_arm64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); int rounds = 6 + ctx->key_length / 4; @@ -46,8 +46,8 @@ static struct crypto_alg aes_alg = { .cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE, .cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE, .cra_cipher.cia_setkey = crypto_aes_set_key, - .cra_cipher.cia_encrypt = aes_encrypt, - .cra_cipher.cia_decrypt = aes_decrypt + .cra_cipher.cia_encrypt = aes_arm64_encrypt, + .cra_cipher.cia_decrypt = aes_arm64_decrypt }; static int __init aes_init(void) diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c index f217568917e4..3aa4a715c216 100644 --- a/crypto/aes_generic.c +++ b/crypto/aes_generic.c @@ -1332,7 +1332,7 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key); f_rl(bo, bi, 3, k); \ } while (0) -static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); u32 b0[4], b1[4]; @@ -1402,7 +1402,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) i_rl(bo, bi, 3, k); \ } while (0) -static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); u32 b0[4], b1[4]; @@ -1454,8 +1454,8 @@ static struct crypto_alg aes_alg = { .cia_min_keysize = AES_MIN_KEY_SIZE, .cia_max_keysize = AES_MAX_KEY_SIZE, .cia_setkey = crypto_aes_set_key, - .cia_encrypt = aes_encrypt, - .cia_decrypt = aes_decrypt + .cia_encrypt = crypto_aes_encrypt, + .cia_decrypt = crypto_aes_decrypt } } };
Rename some local AES encrypt/decrypt routines so they don't clash with the names we are about to introduce for the routines exposed by the generic AES library. Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> --- arch/arm/crypto/aes-cipher-glue.c | 8 ++++---- arch/arm64/crypto/aes-cipher-glue.c | 8 ++++---- crypto/aes_generic.c | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) -- 2.20.1