crypto: lib/utils - Move utilities into new header
The utilities have historically resided in algapi.h as they were first used internally before being exported. Move them into a new header file so external users don't see internal API details.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 9c19fb86a8
commit c616fb0cba
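For orientation, here is a minimal sketch of what an external user looks like after this change: it pulls in only the new <crypto/utils.h> and never sees the internal <crypto/algapi.h> details. The module, init/exit functions and buffers below are hypothetical, invented for illustration; only crypto_xor() and crypto_memneq() come from this patch.

#include <crypto/utils.h>
#include <linux/errno.h>
#include <linux/module.h>

static int __init utils_demo_init(void)
{
	u8 a[16] = { 1, 2, 3, 4 };
	u8 b[16] = { 1, 2, 3, 4 };

	/* a ^= b; with a constant, word-multiple size this may use the
	 * inline word-at-a-time loop, depending on the architecture. */
	crypto_xor(a, b, sizeof(a));

	/* a is now all-zero while b is not, so crypto_memneq() must return 1. */
	return crypto_memneq(a, b, sizeof(b)) == 1 ? 0 : -EINVAL;
}

static void __exit utils_demo_exit(void)
{
}

module_init(utils_demo_init);
module_exit(utils_demo_exit);
MODULE_LICENSE("GPL");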
@@ -7,15 +7,12 @@
 #ifndef _CRYPTO_ALGAPI_H
 #define _CRYPTO_ALGAPI_H

+#include <crypto/utils.h>
 #include <linux/align.h>
 #include <linux/cache.h>
 #include <linux/crypto.h>
 #include <linux/kconfig.h>
 #include <linux/list.h>
 #include <linux/types.h>

-#include <asm/unaligned.h>
-
 /*
  * Maximum values for blocksize and alignmask, used to allocate
  * static buffers that are big enough for any combination of
@@ -172,47 +169,6 @@ static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
 }

 void crypto_inc(u8 *a, unsigned int size);
-void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);
-
-static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
-{
-	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
-	    __builtin_constant_p(size) &&
-	    (size % sizeof(unsigned long)) == 0) {
-		unsigned long *d = (unsigned long *)dst;
-		unsigned long *s = (unsigned long *)src;
-		unsigned long l;
-
-		while (size > 0) {
-			l = get_unaligned(d) ^ get_unaligned(s++);
-			put_unaligned(l, d++);
-			size -= sizeof(unsigned long);
-		}
-	} else {
-		__crypto_xor(dst, dst, src, size);
-	}
-}
-
-static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
-				  unsigned int size)
-{
-	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
-	    __builtin_constant_p(size) &&
-	    (size % sizeof(unsigned long)) == 0) {
-		unsigned long *d = (unsigned long *)dst;
-		unsigned long *s1 = (unsigned long *)src1;
-		unsigned long *s2 = (unsigned long *)src2;
-		unsigned long l;
-
-		while (size > 0) {
-			l = get_unaligned(s1++) ^ get_unaligned(s2++);
-			put_unaligned(l, d++);
-			size -= sizeof(unsigned long);
-		}
-	} else {
-		__crypto_xor(dst, src1, src2, size);
-	}
-}
-
 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
 {
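The crypto_xor()/crypto_xor_cpy() wrappers removed here (and re-added verbatim in the new header below) only take the open-coded word-at-a-time path when the length is a compile-time constant multiple of sizeof(unsigned long) on an architecture with efficient unaligned access; everything else goes through the out-of-line __crypto_xor(). A hedged illustration of the two cases, with hypothetical helper names:

#include <crypto/utils.h>

static void xor_block(u8 *dst, const u8 *src)
{
	/* Constant 16-byte length: eligible for the inline word loop. */
	crypto_xor(dst, src, 16);
}

static void xor_tail(u8 *dst, const u8 *src, unsigned int len)
{
	/* Runtime length: always falls back to __crypto_xor(). */
	crypto_xor(dst, src, len);
}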
@@ -291,23 +247,6 @@ static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
 	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
 }

-noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);
-
-/**
- * crypto_memneq - Compare two areas of memory without leaking
- *		   timing information.
- *
- * @a: One area of memory
- * @b: Another area of memory
- * @size: The size of the area.
- *
- * Returns 0 when data is equal, 1 otherwise.
- */
-static inline int crypto_memneq(const void *a, const void *b, size_t size)
-{
-	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
-}
-
 int crypto_register_notifier(struct notifier_block *nb);
 int crypto_unregister_notifier(struct notifier_block *nb);

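crypto_memneq() above exists so callers can compare secrets, such as authentication tags, without the data-dependent early exit that a plain memcmp() may take. A hedged sketch of such a caller; the function and parameter names are made up for illustration:

#include <crypto/utils.h>
#include <linux/errno.h>

static int verify_tag(const u8 *computed, const u8 *received, unsigned int len)
{
	/* crypto_memneq() returns 0 when equal, 1 when different,
	 * without leaking where the first mismatch occurred. */
	return crypto_memneq(computed, received, len) ? -EBADMSG : 0;
}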
@@ -0,0 +1,73 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Cryptographic utilities
+ *
+ * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au>
+ */
+#ifndef _CRYPTO_UTILS_H
+#define _CRYPTO_UTILS_H
+
+#include <asm/unaligned.h>
+#include <linux/compiler_attributes.h>
+#include <linux/types.h>
+
+void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);
+
+static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
+{
+	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+	    __builtin_constant_p(size) &&
+	    (size % sizeof(unsigned long)) == 0) {
+		unsigned long *d = (unsigned long *)dst;
+		unsigned long *s = (unsigned long *)src;
+		unsigned long l;
+
+		while (size > 0) {
+			l = get_unaligned(d) ^ get_unaligned(s++);
+			put_unaligned(l, d++);
+			size -= sizeof(unsigned long);
+		}
+	} else {
+		__crypto_xor(dst, dst, src, size);
+	}
+}
+
+static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
+				  unsigned int size)
+{
+	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+	    __builtin_constant_p(size) &&
+	    (size % sizeof(unsigned long)) == 0) {
+		unsigned long *d = (unsigned long *)dst;
+		unsigned long *s1 = (unsigned long *)src1;
+		unsigned long *s2 = (unsigned long *)src2;
+		unsigned long l;
+
+		while (size > 0) {
+			l = get_unaligned(s1++) ^ get_unaligned(s2++);
+			put_unaligned(l, d++);
+			size -= sizeof(unsigned long);
+		}
+	} else {
+		__crypto_xor(dst, src1, src2, size);
+	}
+}
+
+noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);
+
+/**
+ * crypto_memneq - Compare two areas of memory without leaking
+ *		   timing information.
+ *
+ * @a: One area of memory
+ * @b: Another area of memory
+ * @size: The size of the area.
+ *
+ * Returns 0 when data is equal, 1 otherwise.
+ */
+static inline int crypto_memneq(const void *a, const void *b, size_t size)
+{
+	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
+}
+
+#endif /* _CRYPTO_UTILS_H */
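Of the two XOR helpers in the new header, crypto_xor() works in place (dst ^= src) while crypto_xor_cpy() writes src1 ^ src2 into a separate destination, leaving both inputs untouched. A minimal hedged usage sketch; the function and buffer names are illustrative only:

#include <crypto/utils.h>

static void encrypt_block(u8 *out, const u8 *keystream, const u8 *in)
{
	/* out = keystream ^ in; constant 16-byte length. */
	crypto_xor_cpy(out, keystream, in, 16);
}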
@@ -6,7 +6,7 @@
  */

 #include <asm/unaligned.h>
-#include <crypto/algapi.h>
+#include <crypto/utils.h>
 #include <linux/module.h>

 /*