mirror of https://github.com/torvalds/linux.git
crypto/arm64: aes-gcm - Switch to 'ksimd' scoped guard API
Switch to the more abstract 'scoped_ksimd()' API, which will be modified in a future patch to transparently allocate a kernel mode FP/SIMD state buffer on the stack, so that kernel mode FP/SIMD code remains preemptible in principle, but without the memory overhead that adds 528 bytes to the size of struct task_struct. Reviewed-by: Eric Biggers <ebiggers@kernel.org> Reviewed-by: Jonathan Cameron <jonathan.cameron@huawei.com> Acked-by: Catalin Marinas <catalin.marinas@arm.com> Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
This commit is contained in:
parent
ba3c1b3b5a
commit
87c9b04e71
|
|
@ -5,7 +5,6 @@
|
||||||
* Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
|
* Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <asm/neon.h>
|
|
||||||
#include <crypto/aes.h>
|
#include <crypto/aes.h>
|
||||||
#include <crypto/b128ops.h>
|
#include <crypto/b128ops.h>
|
||||||
#include <crypto/gcm.h>
|
#include <crypto/gcm.h>
|
||||||
|
|
@ -22,6 +21,8 @@
|
||||||
#include <linux/string.h>
|
#include <linux/string.h>
|
||||||
#include <linux/unaligned.h>
|
#include <linux/unaligned.h>
|
||||||
|
|
||||||
|
#include <asm/simd.h>
|
||||||
|
|
||||||
MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
|
MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
|
||||||
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
|
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
|
||||||
MODULE_LICENSE("GPL v2");
|
MODULE_LICENSE("GPL v2");
|
||||||
|
|
@ -74,9 +75,8 @@ void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
|
||||||
u64 const h[][2],
|
u64 const h[][2],
|
||||||
const char *head))
|
const char *head))
|
||||||
{
|
{
|
||||||
kernel_neon_begin();
|
scoped_ksimd()
|
||||||
simd_update(blocks, dg, src, key->h, head);
|
simd_update(blocks, dg, src, key->h, head);
|
||||||
kernel_neon_end();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* avoid hogging the CPU for too long */
|
/* avoid hogging the CPU for too long */
|
||||||
|
|
@ -329,11 +329,10 @@ static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)
|
||||||
tag = NULL;
|
tag = NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
kernel_neon_begin();
|
scoped_ksimd()
|
||||||
pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,
|
pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,
|
||||||
dg, iv, ctx->aes_key.key_enc, nrounds,
|
dg, iv, ctx->aes_key.key_enc, nrounds,
|
||||||
tag);
|
tag);
|
||||||
kernel_neon_end();
|
|
||||||
|
|
||||||
if (unlikely(!nbytes))
|
if (unlikely(!nbytes))
|
||||||
break;
|
break;
|
||||||
|
|
@ -399,11 +398,11 @@ static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)
|
||||||
tag = NULL;
|
tag = NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
kernel_neon_begin();
|
scoped_ksimd()
|
||||||
ret = pmull_gcm_decrypt(nbytes, dst, src, ctx->ghash_key.h,
|
ret = pmull_gcm_decrypt(nbytes, dst, src,
|
||||||
dg, iv, ctx->aes_key.key_enc,
|
ctx->ghash_key.h,
|
||||||
nrounds, tag, otag, authsize);
|
dg, iv, ctx->aes_key.key_enc,
|
||||||
kernel_neon_end();
|
nrounds, tag, otag, authsize);
|
||||||
|
|
||||||
if (unlikely(!nbytes))
|
if (unlikely(!nbytes))
|
||||||
break;
|
break;
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue