curve25519: squelch warnings on clang

These are generic helper functions we don't want to move into the actual
implementations, so that it's easy to keep parity with the kernel code.

Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Jason A. Donenfeld 2020-02-07 15:46:59 +01:00
parent e5b08c2849
commit d68b8b189c
1 changed file with 21 additions and 17 deletions

@@ -39,23 +39,9 @@ typedef int64_t s64;
 #define le32_to_cpup(a) (*(a))
 #define cpu_to_le64(a) (a)
 #endif
-static inline __le32 get_unaligned_le32(const u8 *a)
-{
-	__le32 l;
-	__builtin_memcpy(&l, a, sizeof(l));
-	return le32_to_cpup(&l);
-}
-static inline __le64 get_unaligned_le64(const u8 *a)
-{
-	__le64 l;
-	__builtin_memcpy(&l, a, sizeof(l));
-	return le64_to_cpup(&l);
-}
-static inline void put_unaligned_le64(u64 s, u8 *d)
-{
-	__le64 l = cpu_to_le64(s);
-	__builtin_memcpy(d, &l, sizeof(l));
-}
+#ifndef __unused
+#define __unused __attribute__((unused))
+#endif
 #ifndef __always_inline
 #define __always_inline __inline __attribute__((__always_inline__))
 #endif
@@ -69,6 +55,24 @@ static inline void put_unaligned_le64(u64 s, u8 *d)
 #define __force
 #endif
+static __always_inline __unused __le32 get_unaligned_le32(const u8 *a)
+{
+	__le32 l;
+	__builtin_memcpy(&l, a, sizeof(l));
+	return le32_to_cpup(&l);
+}
+static __always_inline __unused __le64 get_unaligned_le64(const u8 *a)
+{
+	__le64 l;
+	__builtin_memcpy(&l, a, sizeof(l));
+	return le64_to_cpup(&l);
+}
+static __always_inline __unused void put_unaligned_le64(u64 s, u8 *d)
+{
+	__le64 l = cpu_to_le64(s);
+	__builtin_memcpy(d, &l, sizeof(l));
+}
 static noinline void memzero_explicit(void *s, size_t count)
 {
 	memset(s, 0, count);
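
For reference, here is a minimal, self-contained sketch (not part of the commit) of how these helpers end up being used and why the __unused attribute matters: clang can warn under -Wunused-function about static inline helpers defined in a .c file that a particular build never calls, which is the warning this commit squelches. The load_limbs() caller and main() harness below are hypothetical, added purely for illustration, and the sketch assumes a little-endian host (the real file also maps le32_to_cpup()/cpu_to_le64() so the same code stays correct on big-endian builds).

/* demo.c -- illustration only, not WireGuard code.
 * Build with e.g.: clang -Wall -o demo demo.c
 * Without __unused on the helpers that load_limbs() does not call,
 * clang can emit "unused function" warnings for them. */
#include <stdint.h>
#include <stdio.h>

typedef uint8_t  u8;
typedef uint32_t u32;
typedef uint64_t u64;

#ifndef __unused
#define __unused __attribute__((unused))
#endif
#ifndef __always_inline
#define __always_inline inline __attribute__((__always_inline__))
#endif

/* __builtin_memcpy makes the unaligned read well defined; compilers lower
 * it to a single load on targets that permit unaligned access. */
static __always_inline __unused u32 get_unaligned_le32(const u8 *a)
{
	u32 l;
	__builtin_memcpy(&l, a, sizeof(l));
	return l;	/* little-endian host assumed in this sketch */
}

static __always_inline __unused u64 get_unaligned_le64(const u8 *a)
{
	u64 l;
	__builtin_memcpy(&l, a, sizeof(l));
	return l;
}

static __always_inline __unused void put_unaligned_le64(u64 s, u8 *d)
{
	__builtin_memcpy(d, &s, sizeof(s));
}

/* Hypothetical caller: unpack a 32-byte little-endian value into four
 * 64-bit limbs, the way a curve25519 implementation typically ingests a
 * scalar or field element. Only get_unaligned_le64() is used, so the other
 * two helpers stay unused -- hence the attribute. */
static void load_limbs(u64 limbs[4], const u8 src[32])
{
	for (int i = 0; i < 4; ++i)
		limbs[i] = get_unaligned_le64(src + 8 * i);
}

int main(void)
{
	u8 in[32] = { 0x09 };	/* first byte of the curve25519 base point */
	u64 limbs[4];

	load_limbs(limbs, in);
	printf("limb0 = 0x%016llx\n", (unsigned long long)limbs[0]);
	return 0;
}

Going through __builtin_memcpy rather than casting the byte pointer to a wider type avoids undefined behaviour from misaligned loads, while still compiling down to a plain move on targets that allow unaligned access.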