From 66a5c1cd7e557187bf3a1913e836d0c9be8adbb7 Mon Sep 17 00:00:00 2001
From: zhudacai 00228490
Date: Wed, 4 Sep 2019 12:14:25 +0000
Subject: [PATCH] The aarch64 architecture supports unaligned accesses to
 normal memory, so define UNALIGNED_LE_CPU for aarch64 as well.

---
 src/siphash.c | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/siphash.c b/src/siphash.c
index 6b9419031..357741132 100644
--- a/src/siphash.c
+++ b/src/siphash.c
@@ -58,7 +58,8 @@ int siptlw(int c) {
 /* Test of the CPU is Little Endian and supports not aligned accesses.
  * Two interesting conditions to speedup the function that happen to be
  * in most of x86 servers. */
-#if defined(__X86_64__) || defined(__x86_64__) || defined (__i386__)
+#if defined(__X86_64__) || defined(__x86_64__) || defined (__i386__) \
+    || defined (__aarch64__) || defined (__arm64__)
 #define UNALIGNED_LE_CPU
 #endif
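
Note (not part of the patch): UNALIGNED_LE_CPU typically gates a fast path that
loads a 64-bit little-endian word with a single unaligned read instead of
assembling it byte by byte. The sketch below is an illustrative paraphrase of
that kind of macro; the name U8TO64_LE and its exact layout are taken from
common SipHash reference code and may differ from what src/siphash.c contains.

/* Illustrative sketch only (assumed macro name U8TO64_LE). */
#include <stdint.h>

#ifdef UNALIGNED_LE_CPU
/* Little-endian CPU that tolerates unaligned loads (x86, aarch64 on normal
 * memory): read the 8 bytes directly as one 64-bit word. */
#define U8TO64_LE(p) (*((uint64_t*)(p)))
#else
/* Portable fallback: assemble the little-endian 64-bit word byte by byte. */
#define U8TO64_LE(p)                                            \
    (((uint64_t)((p)[0]))       | ((uint64_t)((p)[1]) << 8)  |  \
     ((uint64_t)((p)[2]) << 16) | ((uint64_t)((p)[3]) << 24) |  \
     ((uint64_t)((p)[4]) << 32) | ((uint64_t)((p)[5]) << 40) |  \
     ((uint64_t)((p)[6]) << 48) | ((uint64_t)((p)[7]) << 56))
#endif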