author    Ard Biesheuvel <ard.biesheuvel@linaro.org>    2019-07-02 21:41:35 +0200
committer Herbert Xu <herbert@gondor.apana.org.au>      2019-07-26 14:58:08 +1000
commit    ff6f4115cb953c5be8d7a76b2ec1877df2f4c2c0 (patch)
tree      62090ff9537f12980ae6e7ade34aab052b1cdbd8 /arch
parent    d9ec772d9550b6e513c51bc4e6fa1e3ffb50181c (diff)
crypto: aes - move sync ctr(aes) to AES library and generic helper
In preparation for duplicating the sync ctr(aes) functionality to modules
under arch/arm, move the helper function from an inline .h file to the AES
library, which is already depended upon by the drivers that use this
fallback.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
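The generic helper that replaces the removed header is not itself part of
this diff. As a point of reference, here is a minimal sketch of such a
callback-based CTR walk, modeled on the arm64 fallback removed below; the
in-tree helper is crypto_ctr_encrypt_walk() in include/crypto/ctr.h, and
this body is illustrative rather than a copy of it:

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

static inline int ctr_encrypt_walk_sketch(struct skcipher_request *req,
					  void (*fn)(struct crypto_skcipher *,
						     const u8 *, u8 *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];	/* keystream for one counter block */
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		if (nbytes < walk.total) {
			/* defer any partial block to the next chunk */
			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
			tail = walk.nbytes - nbytes;
		}

		do {
			int bsize = min(nbytes, AES_BLOCK_SIZE);

			/* encrypt the counter, then XOR it into the data */
			fn(tfm, walk.iv, buf);
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}

Taking the single-block encrypt as a callback is what lets each
architecture keep its own scalar AES path while the walk and tail
handling live in one place.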
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm64/crypto/aes-ctr-fallback.h | 50
-rw-r--r--  arch/arm64/crypto/aes-glue.c         | 22
-rw-r--r--  arch/arm64/crypto/aes-neonbs-glue.c  | 21
3 files changed, 33 insertions(+), 60 deletions(-)
diff --git a/arch/arm64/crypto/aes-ctr-fallback.h b/arch/arm64/crypto/aes-ctr-fallback.h
deleted file mode 100644
index 3ac911990ec7..000000000000
--- a/arch/arm64/crypto/aes-ctr-fallback.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0-only */
-/*
- * Fallback for sync aes(ctr) in contexts where kernel mode NEON
- * is not allowed
- *
- * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
- */
-
-#include <crypto/aes.h>
-#include <crypto/internal/skcipher.h>
-
-asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-
-static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
- struct skcipher_request *req)
-{
- struct skcipher_walk walk;
- u8 buf[AES_BLOCK_SIZE];
- int err;
-
- err = skcipher_walk_virt(&walk, req, true);
-
- while (walk.nbytes > 0) {
- u8 *dst = walk.dst.virt.addr;
- u8 *src = walk.src.virt.addr;
- int nbytes = walk.nbytes;
- int tail = 0;
-
- if (nbytes < walk.total) {
- nbytes = round_down(nbytes, AES_BLOCK_SIZE);
- tail = walk.nbytes % AES_BLOCK_SIZE;
- }
-
- do {
- int bsize = min(nbytes, AES_BLOCK_SIZE);
-
- __aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
- 6 + ctx->key_length / 4);
- crypto_xor_cpy(dst, src, buf, bsize);
- crypto_inc(walk.iv, AES_BLOCK_SIZE);
-
- dst += AES_BLOCK_SIZE;
- src += AES_BLOCK_SIZE;
- nbytes -= AES_BLOCK_SIZE;
- } while (nbytes > 0);
-
- err = skcipher_walk_done(&walk, tail);
- }
- return err;
-}
diff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c
index 843fb27c4961..55d6d4838708 100644
--- a/arch/arm64/crypto/aes-glue.c
+++ b/arch/arm64/crypto/aes-glue.c
@@ -9,6 +9,7 @@
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
+#include <crypto/ctr.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
@@ -18,7 +19,6 @@
#include <crypto/xts.h>
#include "aes-ce-setkey.h"
-#include "aes-ctr-fallback.h"
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE "ce"
@@ -401,13 +401,25 @@ static int ctr_encrypt(struct skcipher_request *req)
return err;
}
-static int ctr_encrypt_sync(struct skcipher_request *req)
+static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
- struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
- struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+ const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+ unsigned long flags;
+
+ /*
+ * Temporarily disable interrupts to avoid races where
+ * cachelines are evicted when the CPU is interrupted
+ * to do something else.
+ */
+ local_irq_save(flags);
+ aes_encrypt(ctx, dst, src);
+ local_irq_restore(flags);
+}
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
if (!crypto_simd_usable())
- return aes_ctr_encrypt_fallback(ctx, req);
+ return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
return ctr_encrypt(req);
}
diff --git a/arch/arm64/crypto/aes-neonbs-glue.c b/arch/arm64/crypto/aes-neonbs-glue.c
index 25fe51eedc98..bafd2ebef8f1 100644
--- a/arch/arm64/crypto/aes-neonbs-glue.c
+++ b/arch/arm64/crypto/aes-neonbs-glue.c
@@ -8,13 +8,12 @@
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
+#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <linux/module.h>
-#include "aes-ctr-fallback.h"
-
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
@@ -280,13 +279,25 @@ static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
return aesbs_setkey(tfm, in_key, key_len);
}
-static int ctr_encrypt_sync(struct skcipher_request *req)
+static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
- struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+ unsigned long flags;
+
+ /*
+ * Temporarily disable interrupts to avoid races where
+ * cachelines are evicted when the CPU is interrupted
+ * to do something else.
+ */
+ local_irq_save(flags);
+ aes_encrypt(&ctx->fallback, dst, src);
+ local_irq_restore(flags);
+}
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
if (!crypto_simd_usable())
- return aes_ctr_encrypt_fallback(&ctx->fallback, req);
+ return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
return ctr_encrypt(req);
}
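To show what the commit message's stated goal (reuse under arch/arm) looks
like in practice, here is a hedged sketch of a driver-side sync fallback
built on the now-generic pieces; my_ctr_encrypt_one and my_ctr_encrypt_sync
are hypothetical names, while aes_encrypt() and crypto_ctr_encrypt_walk()
are the library entry points the patch switches to:

#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/irqflags.h>

static void my_ctr_encrypt_one(struct crypto_skcipher *tfm,
			       const u8 *src, u8 *dst)
{
	const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Mirror the callers above: mask interrupts so the AES
	 * library's table lookups are less likely to have their
	 * cachelines evicted partway through a block.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}

static int my_ctr_encrypt_sync(struct skcipher_request *req)
{
	/* The generic walk drives the per-block callback. */
	return crypto_ctr_encrypt_walk(req, my_ctr_encrypt_one);
}

The callback only ever sees one 16-byte block, so the interrupts-off
window stays short.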