244#if defined (__cplusplus) 
  268#  define XXH_STATIC_LINKING_ONLY 
  281#  define XXH_IMPLEMENTATION 
  302#  define XXH_INLINE_ALL 
  303#  undef XXH_INLINE_ALL 
  307#  define XXH_PRIVATE_API 
  308#  undef XXH_PRIVATE_API 
  322#  define XXH_NAMESPACE  
  326#if (defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)) \ 
  327    && !defined(XXH_INLINE_ALL_31684351384) 
  329#  define XXH_INLINE_ALL_31684351384 
  331#  undef XXH_STATIC_LINKING_ONLY    
  332#  define XXH_STATIC_LINKING_ONLY 
  334#  undef XXH_PUBLIC_API 
  335#  if defined(__GNUC__) 
  336#    define XXH_PUBLIC_API static __inline __attribute__((__unused__)) 
  337#  elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) 
  338#    define XXH_PUBLIC_API static inline 
  339#  elif defined(_MSC_VER) 
  340#    define XXH_PUBLIC_API static __inline 
  343#    define XXH_PUBLIC_API static 
  359#  undef XXH_versionNumber 
  362#  undef XXH32_createState 
  363#  undef XXH32_freeState 
  367#  undef XXH32_copyState 
  368#  undef XXH32_canonicalFromHash 
  369#  undef XXH32_hashFromCanonical 
  372#  undef XXH64_createState 
  373#  undef XXH64_freeState 
  377#  undef XXH64_copyState 
  378#  undef XXH64_canonicalFromHash 
  379#  undef XXH64_hashFromCanonical 
  382#  undef XXH3_64bits_withSecret 
  383#  undef XXH3_64bits_withSeed 
  384#  undef XXH3_64bits_withSecretandSeed 
  385#  undef XXH3_createState 
  386#  undef XXH3_freeState 
  387#  undef XXH3_copyState 
  388#  undef XXH3_64bits_reset 
  389#  undef XXH3_64bits_reset_withSeed 
  390#  undef XXH3_64bits_reset_withSecret 
  391#  undef XXH3_64bits_update 
  392#  undef XXH3_64bits_digest 
  393#  undef XXH3_generateSecret 
  397#  undef XXH3_128bits_withSeed 
  398#  undef XXH3_128bits_withSecret 
  399#  undef XXH3_128bits_reset 
  400#  undef XXH3_128bits_reset_withSeed 
  401#  undef XXH3_128bits_reset_withSecret 
  402#  undef XXH3_128bits_reset_withSecretandSeed 
  403#  undef XXH3_128bits_update 
  404#  undef XXH3_128bits_digest 
  405#  undef XXH128_isEqual 
  407#  undef XXH128_canonicalFromHash 
  408#  undef XXH128_hashFromCanonical 
  413#  define XXH_NAMESPACE XXH_INLINE_ 
  421#  define XXH_IPREF(Id)   XXH_NAMESPACE ## Id 
  422#  define XXH_OK XXH_IPREF(XXH_OK) 
  423#  define XXH_ERROR XXH_IPREF(XXH_ERROR) 
  424#  define XXH_errorcode XXH_IPREF(XXH_errorcode) 
  425#  define XXH32_canonical_t  XXH_IPREF(XXH32_canonical_t) 
  426#  define XXH64_canonical_t  XXH_IPREF(XXH64_canonical_t) 
  427#  define XXH128_canonical_t XXH_IPREF(XXH128_canonical_t) 
  428#  define XXH32_state_s XXH_IPREF(XXH32_state_s) 
  429#  define XXH32_state_t XXH_IPREF(XXH32_state_t) 
  430#  define XXH64_state_s XXH_IPREF(XXH64_state_s) 
  431#  define XXH64_state_t XXH_IPREF(XXH64_state_t) 
  432#  define XXH3_state_s  XXH_IPREF(XXH3_state_s) 
  433#  define XXH3_state_t  XXH_IPREF(XXH3_state_t) 
  434#  define XXH128_hash_t XXH_IPREF(XXH128_hash_t) 
  436#  undef XXHASH_H_5627135585666179 
  437#  undef XXHASH_H_STATIC_13879238742 
  443#ifndef XXHASH_H_5627135585666179 
  444#define XXHASH_H_5627135585666179 1 
  447#if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API) 
  448#  if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT)) 
  450#      define XXH_PUBLIC_API __declspec(dllexport) 
  452#      define XXH_PUBLIC_API __declspec(dllimport) 
  455#    define XXH_PUBLIC_API    
  460#  define XXH_CAT(A,B) A##B 
  461#  define XXH_NAME2(A,B) XXH_CAT(A,B) 
  462#  define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber) 
  464#  define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32) 
  465#  define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState) 
  466#  define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState) 
  467#  define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset) 
  468#  define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update) 
  469#  define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest) 
  470#  define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState) 
  471#  define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash) 
  472#  define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical) 
  474#  define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64) 
  475#  define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState) 
  476#  define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState) 
  477#  define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset) 
  478#  define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update) 
  479#  define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest) 
  480#  define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState) 
  481#  define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash) 
  482#  define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical) 
  484#  define XXH3_64bits XXH_NAME2(XXH_NAMESPACE, XXH3_64bits) 
  485#  define XXH3_64bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecret) 
  486#  define XXH3_64bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSeed) 
  487#  define XXH3_64bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecretandSeed) 
  488#  define XXH3_createState XXH_NAME2(XXH_NAMESPACE, XXH3_createState) 
  489#  define XXH3_freeState XXH_NAME2(XXH_NAMESPACE, XXH3_freeState) 
  490#  define XXH3_copyState XXH_NAME2(XXH_NAMESPACE, XXH3_copyState) 
  491#  define XXH3_64bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset) 
  492#  define XXH3_64bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSeed) 
  493#  define XXH3_64bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecret) 
  494#  define XXH3_64bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecretandSeed) 
  495#  define XXH3_64bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_update) 
  496#  define XXH3_64bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_digest) 
  497#  define XXH3_generateSecret XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret) 
  498#  define XXH3_generateSecret_fromSeed XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret_fromSeed) 
  500#  define XXH128 XXH_NAME2(XXH_NAMESPACE, XXH128) 
  501#  define XXH3_128bits XXH_NAME2(XXH_NAMESPACE, XXH3_128bits) 
  502#  define XXH3_128bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSeed) 
  503#  define XXH3_128bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecret) 
  504#  define XXH3_128bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecretandSeed) 
  505#  define XXH3_128bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset) 
  506#  define XXH3_128bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSeed) 
  507#  define XXH3_128bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecret) 
  508#  define XXH3_128bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecretandSeed) 
  509#  define XXH3_128bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_update) 
  510#  define XXH3_128bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_digest) 
  511#  define XXH128_isEqual XXH_NAME2(XXH_NAMESPACE, XXH128_isEqual) 
  512#  define XXH128_cmp     XXH_NAME2(XXH_NAMESPACE, XXH128_cmp) 
  513#  define XXH128_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH128_canonicalFromHash) 
  514#  define XXH128_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH128_hashFromCanonical) 
  523#if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API) 
  524#  if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT)) 
  526#      define XXH_PUBLIC_API __declspec(dllexport) 
  528#      define XXH_PUBLIC_API __declspec(dllimport) 
  531#    define XXH_PUBLIC_API    
  535#if defined (__GNUC__) 
  536# define XXH_CONSTF  __attribute__((__const__)) 
  537# define XXH_PUREF   __attribute__((__pure__)) 
  538# define XXH_MALLOCF __attribute__((__malloc__)) 
  548#define XXH_VERSION_MAJOR    0 
  549#define XXH_VERSION_MINOR    8 
  550#define XXH_VERSION_RELEASE  3 
  552#define XXH_VERSION_NUMBER  (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE) 
  581#if defined(XXH_DOXYGEN)  
  589#elif !defined (__VMS) \ 
  590  && (defined (__cplusplus) \ 
  591  || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) ) 
  593#     include <inttypes.h> 
  601#   if UINT_MAX == 0xFFFFFFFFUL 
  603#   elif ULONG_MAX == 0xFFFFFFFFUL 
  606#     error "unsupported platform: need a 32-bit type" 
  787#ifdef __has_attribute 
  788# define XXH_HAS_ATTRIBUTE(x) __has_attribute(x) 
  790# define XXH_HAS_ATTRIBUTE(x) 0 
  800#define XXH_C23_VN 201711L 
  805#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= XXH_C23_VN) && defined(__has_c_attribute) 
  806# define XXH_HAS_C_ATTRIBUTE(x) __has_c_attribute(x) 
  808# define XXH_HAS_C_ATTRIBUTE(x) 0 
  813#if defined(__cplusplus) && defined(__has_cpp_attribute) 
  814# define XXH_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x) 
  816# define XXH_HAS_CPP_ATTRIBUTE(x) 0 
  827#if XXH_HAS_C_ATTRIBUTE(fallthrough) || XXH_HAS_CPP_ATTRIBUTE(fallthrough) 
  828# define XXH_FALLTHROUGH [[fallthrough]] 
  829#elif XXH_HAS_ATTRIBUTE(__fallthrough__) 
  830# define XXH_FALLTHROUGH __attribute__ ((__fallthrough__)) 
  832# define XXH_FALLTHROUGH  
  842#if XXH_HAS_ATTRIBUTE(noescape) 
  843# define XXH_NOESCAPE __attribute__((__noescape__)) 
  856#ifndef XXH_NO_LONG_LONG 
  860#if defined(XXH_DOXYGEN)  
  867#elif !defined (__VMS) \ 
  868  && (defined (__cplusplus) \ 
  869  || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) ) 
  871#     include <inttypes.h> 
  878#  if defined(__LP64__) && ULONG_MAX == 0xFFFFFFFFFFFFFFFFULL 
 1120#  define XXH_SCALAR 0  
 1123#  define XXH_AVX512 3  
 1192#define XXH3_SECRET_SIZE_MIN 136 
 1230#ifndef XXH_NO_STREAM 
 1457#ifndef XXH_NO_STREAM 
 1645#if defined(XXH_STATIC_LINKING_ONLY) && !defined(XXHASH_H_STATIC_13879238742) 
 1646#define XXHASH_H_STATIC_13879238742 
 1683#ifndef XXH_NO_LONG_LONG   
 1708#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)  
 1709#  define XXH_ALIGN(n)      _Alignas(n) 
 1710#elif defined(__cplusplus) && (__cplusplus >= 201103L)  
 1712#  define XXH_ALIGN(n)      alignas(n) 
 1713#elif defined(__GNUC__) 
 1714#  define XXH_ALIGN(n)      __attribute__ ((aligned(n))) 
 1715#elif defined(_MSC_VER) 
 1716#  define XXH_ALIGN(n)      __declspec(align(n)) 
 1718#  define XXH_ALIGN(n)    
 1722#if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L))    \ 
 1723    && ! (defined(__cplusplus) && (__cplusplus >= 201103L))  \ 
 1724    && defined(__GNUC__) 
 1725#   define XXH_ALIGN_MEMBER(align, type) type XXH_ALIGN(align) 
 1727#   define XXH_ALIGN_MEMBER(align, type) XXH_ALIGN(align) type 
 1738#define XXH3_INTERNALBUFFER_SIZE 256 
 1749#define XXH3_SECRET_DEFAULT_SIZE 192 
 1778   XXH_ALIGN_MEMBER(64, 
unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]);
 
 
 1802#undef XXH_ALIGN_MEMBER 
 1815#define XXH3_INITSTATE(XXH3_state_ptr)                       \ 
 1817        XXH3_state_t* tmp_xxh3_state_ptr = (XXH3_state_ptr); \ 
 1818        tmp_xxh3_state_ptr->seed = 0;                        \ 
 1819        tmp_xxh3_state_ptr->extSecret = NULL;                \ 
 
 1948#define XXH3_MIDSIZE_MAX 240 
 1986                              XXH_NOESCAPE 
const void* secret, 
size_t secretSize,
 
 2005                               XXH_NOESCAPE 
const void* secret, 
size_t secretSize,
 
 2008#ifndef XXH_NO_STREAM 
 2024                                    XXH_NOESCAPE 
const void* secret, 
size_t secretSize,
 
 2050                                     XXH_NOESCAPE 
const void* secret, 
size_t secretSize,
 
 2057#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) 
 2058#  define XXH_IMPLEMENTATION 
 2091#if ( defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) \ 
 2092   || defined(XXH_IMPLEMENTATION) ) && !defined(XXH_IMPLEM_13a8737387) 
 2093#  define XXH_IMPLEM_13a8737387 
 2111#  define XXH_NO_LONG_LONG 
 2112#  undef XXH_NO_LONG_LONG  
 2163#  define XXH_FORCE_MEMORY_ACCESS 0 
 2191#  define XXH_SIZE_OPT 0 
 2221#  define XXH_FORCE_ALIGN_CHECK 0 
 2243#  define XXH_NO_INLINE_HINTS 0 
 2260#  define XXH3_INLINE_SECRET 0 
 2272#  define XXH32_ENDJMP 0 
 2281#  define XXH_OLD_NAMES 
 2282#  undef XXH_OLD_NAMES  
 2292#  define XXH_NO_STREAM 
 2293#  undef XXH_NO_STREAM  
 2299#ifndef XXH_FORCE_MEMORY_ACCESS    
 2303#  if defined(__GNUC__) && !(defined(__ARM_ARCH) && __ARM_ARCH < 7 && defined(__ARM_FEATURE_UNALIGNED)) 
 2304#    define XXH_FORCE_MEMORY_ACCESS 1 
 2310#  if (defined(__GNUC__) || defined(__clang__)) && defined(__OPTIMIZE_SIZE__) 
 2311#    define XXH_SIZE_OPT 1 
 2313#    define XXH_SIZE_OPT 0 
 2317#ifndef XXH_FORCE_ALIGN_CHECK   
 2319#  if XXH_SIZE_OPT >= 1 || \ 
 2320      defined(__i386)  || defined(__x86_64__) || defined(__aarch64__) || defined(__ARM_FEATURE_UNALIGNED) \ 
 2321   || defined(_M_IX86) || defined(_M_X64)     || defined(_M_ARM64)    || defined(_M_ARM)  
 2322#    define XXH_FORCE_ALIGN_CHECK 0 
 2324#    define XXH_FORCE_ALIGN_CHECK 1 
 2328#ifndef XXH_NO_INLINE_HINTS 
 2329#  if XXH_SIZE_OPT >= 1 || defined(__NO_INLINE__)   
 2330#    define XXH_NO_INLINE_HINTS 1 
 2332#    define XXH_NO_INLINE_HINTS 0 
 2336#ifndef XXH3_INLINE_SECRET 
 2337#  if (defined(__GNUC__) && !defined(__clang__) && __GNUC__ >= 12) \ 
 2338     || !defined(XXH_INLINE_ALL) 
 2339#    define XXH3_INLINE_SECRET 0 
 2341#    define XXH3_INLINE_SECRET 1 
 2347#  define XXH32_ENDJMP 0 
 2359#if defined(XXH_NO_STREAM) 
 2361#elif defined(XXH_NO_STDLIB) 
 2372static XXH_CONSTF 
void* XXH_malloc(
size_t s) { (void)s; 
return NULL; }
 
 2373static void XXH_free(
void* p) { (void)p; }
 
 2387static XXH_MALLOCF 
void* XXH_malloc(
size_t s) { 
return malloc(s); }
 
 2393static void XXH_free(
void* p) { free(p); }
 
 2403static void* XXH_memcpy(
void* dest, 
const void* src, 
size_t size)
 
 2405    return memcpy(dest,src,size);
 
 2415#  pragma warning(disable : 4127)  
 2418#if XXH_NO_INLINE_HINTS   
 2419#  if defined(__GNUC__) || defined(__clang__) 
 2420#    define XXH_FORCE_INLINE static __attribute__((__unused__)) 
 2422#    define XXH_FORCE_INLINE static 
 2424#  define XXH_NO_INLINE static 
 2426#elif defined(__GNUC__) || defined(__clang__) 
 2427#  define XXH_FORCE_INLINE static __inline__ __attribute__((__always_inline__, __unused__)) 
 2428#  define XXH_NO_INLINE static __attribute__((__noinline__)) 
 2429#elif defined(_MSC_VER)   
 2430#  define XXH_FORCE_INLINE static __forceinline 
 2431#  define XXH_NO_INLINE static __declspec(noinline) 
 2432#elif defined (__cplusplus) \ 
 2433  || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L))    
 2434#  define XXH_FORCE_INLINE static inline 
 2435#  define XXH_NO_INLINE static 
 2437#  define XXH_FORCE_INLINE static 
 2438#  define XXH_NO_INLINE static 
 2441#if defined(XXH_INLINE_ALL) 
 2442#  define XXH_STATIC XXH_FORCE_INLINE 
 2444#  define XXH_STATIC static 
 2447#if XXH3_INLINE_SECRET 
 2448#  define XXH3_WITH_SECRET_INLINE XXH_FORCE_INLINE 
 2450#  define XXH3_WITH_SECRET_INLINE XXH_NO_INLINE 
 2453#if ((defined(sun) || defined(__sun)) && __cplusplus)  
 2454#  define XXH_RESTRICT    
 2455#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L    
 2456#  define XXH_RESTRICT   restrict 
 2457#elif (defined (__GNUC__) && ((__GNUC__ > 3) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))) \ 
 2458   || (defined (__clang__)) \ 
 2459   || (defined (_MSC_VER) && (_MSC_VER >= 1400)) \ 
 2460   || (defined (__INTEL_COMPILER) && (__INTEL_COMPILER >= 1300)) 
 2465#  define XXH_RESTRICT   __restrict 
 2467#  define XXH_RESTRICT    
 2481#ifndef XXH_DEBUGLEVEL 
 2483#    define XXH_DEBUGLEVEL DEBUGLEVEL 
 2485#    define XXH_DEBUGLEVEL 0 
 2489#if (XXH_DEBUGLEVEL>=1) 
 2491#  define XXH_ASSERT(c)   assert(c) 
 2493#  if defined(__INTEL_COMPILER) 
 2494#    define XXH_ASSERT(c)   XXH_ASSUME((unsigned char) (c)) 
 2496#    define XXH_ASSERT(c)   XXH_ASSUME(c) 
 2501#ifndef XXH_STATIC_ASSERT 
 2502#  if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)     
 2503#    define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { _Static_assert((c),m); } while(0) 
 2504#  elif defined(__cplusplus) && (__cplusplus >= 201103L)             
 2505#    define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0) 
 2507#    define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { struct xxh_sa { char x[(c) ? 1 : -1]; }; } while(0) 
 2509#  define XXH_STATIC_ASSERT(c) XXH_STATIC_ASSERT_WITH_MESSAGE((c),#c) 
 2528#if defined(__GNUC__) || defined(__clang__) 
 2529#  define XXH_COMPILER_GUARD(var) __asm__("" : "+r" (var)) 
 2531#  define XXH_COMPILER_GUARD(var) ((void)0) 
 2536#if defined(__clang__) && defined(__ARM_ARCH) && !defined(__wasm__) 
 2537#  define XXH_COMPILER_GUARD_CLANG_NEON(var) __asm__("" : "+w" (var)) 
 2539#  define XXH_COMPILER_GUARD_CLANG_NEON(var) ((void)0) 
 2545#if !defined (__VMS) \ 
 2546 && (defined (__cplusplus) \ 
 2547 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) ) 
 2549#     include <inttypes.h> 
 2553    typedef uint8_t xxh_u8;
 
 2555    typedef unsigned char xxh_u8;
 
 2560#  warning "XXH_OLD_NAMES is planned to be removed starting v0.9. If the program depends on it, consider moving away from it by employing newer type names directly" 
 2618#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3)) 
 2623#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2)) 
 2629static xxh_u32 XXH_read32(
const void* memPtr) { 
return *(
const xxh_u32*) memPtr; }
 
 2631#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1)) 
 2641typedef union { xxh_u32 u32; } __attribute__((__packed__)) unalign;
 
 2643static xxh_u32 XXH_read32(
const void* ptr)
 
 2645    typedef __attribute__((__aligned__(1))) xxh_u32 xxh_unalign32;
 
 2646    return *((const xxh_unalign32*)ptr);
 
 2655static xxh_u32 XXH_read32(
const void* memPtr)
 
 2658    XXH_memcpy(&val, memPtr, 
sizeof(val));
 
 2683#ifndef XXH_CPU_LITTLE_ENDIAN 
 2688#  if defined(_WIN32)  \ 
 2689     || defined(__LITTLE_ENDIAN__) \ 
 2690     || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) 
 2691#    define XXH_CPU_LITTLE_ENDIAN 1 
 2692#  elif defined(__BIG_ENDIAN__) \ 
 2693     || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__) 
 2694#    define XXH_CPU_LITTLE_ENDIAN 0 
 2702static int XXH_isLittleEndian(
void)
 
 2708    const union { xxh_u32 u; xxh_u8 c[4]; } one = { 1 };
 
 2711#   define XXH_CPU_LITTLE_ENDIAN   XXH_isLittleEndian() 
 2721#define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__) 
 2724#  define XXH_HAS_BUILTIN(x) __has_builtin(x) 
 2726#  define XXH_HAS_BUILTIN(x) 0 
 2758#if XXH_HAS_BUILTIN(__builtin_unreachable) 
 2759#  define XXH_UNREACHABLE() __builtin_unreachable() 
 2761#elif defined(_MSC_VER) 
 2762#  define XXH_UNREACHABLE() __assume(0) 
 2765#  define XXH_UNREACHABLE() 
 2768#if XXH_HAS_BUILTIN(__builtin_assume) 
 2769#  define XXH_ASSUME(c) __builtin_assume(c) 
 2771#  define XXH_ASSUME(c) if (!(c)) { XXH_UNREACHABLE(); } 
 2787#if !defined(NO_CLANG_BUILTIN) && XXH_HAS_BUILTIN(__builtin_rotateleft32) \ 
 2788                               && XXH_HAS_BUILTIN(__builtin_rotateleft64) 
 2789#  define XXH_rotl32 __builtin_rotateleft32 
 2790#  define XXH_rotl64 __builtin_rotateleft64 
 2791#elif XXH_HAS_BUILTIN(__builtin_stdc_rotate_left) 
 2792#  define XXH_rotl32 __builtin_stdc_rotate_left 
 2793#  define XXH_rotl64 __builtin_stdc_rotate_left 
 2795#elif defined(_MSC_VER) 
 2796#  define XXH_rotl32(x,r) _rotl(x,r) 
 2797#  define XXH_rotl64(x,r) _rotl64(x,r) 
 2799#  define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r)))) 
 2800#  define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r)))) 
 2811#if defined(_MSC_VER)      
 2812#  define XXH_swap32 _byteswap_ulong 
 2813#elif XXH_GCC_VERSION >= 403 
 2814#  define XXH_swap32 __builtin_bswap32 
 2816static xxh_u32 XXH_swap32 (xxh_u32 x)
 
 2818    return  ((x << 24) & 0xff000000 ) |
 
 2819            ((x <<  8) & 0x00ff0000 ) |
 
 2820            ((x >>  8) & 0x0000ff00 ) |
 
 2821            ((x >> 24) & 0x000000ff );
 
 2844#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3)) 
 2846XXH_FORCE_INLINE xxh_u32 XXH_readLE32(
const void* memPtr)
 
 2848    const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
 
 2850         | ((xxh_u32)bytePtr[1] << 8)
 
 2851         | ((xxh_u32)bytePtr[2] << 16)
 
 2852         | ((xxh_u32)bytePtr[3] << 24);
 
 2855XXH_FORCE_INLINE xxh_u32 XXH_readBE32(
const void* memPtr)
 
 2857    const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
 
 2859         | ((xxh_u32)bytePtr[2] << 8)
 
 2860         | ((xxh_u32)bytePtr[1] << 16)
 
 2861         | ((xxh_u32)bytePtr[0] << 24);
 
 2865XXH_FORCE_INLINE xxh_u32 XXH_readLE32(
const void* ptr)
 
 2870static xxh_u32 XXH_readBE32(
const void* ptr)
 
 2876XXH_FORCE_INLINE xxh_u32
 
 2880        return XXH_readLE32(ptr);
 
 2906#define XXH_PRIME32_1  0x9E3779B1U   
 2907#define XXH_PRIME32_2  0x85EBCA77U   
 2908#define XXH_PRIME32_3  0xC2B2AE3DU   
 2909#define XXH_PRIME32_4  0x27D4EB2FU   
 2910#define XXH_PRIME32_5  0x165667B1U   
 2913#  define PRIME32_1 XXH_PRIME32_1 
 2914#  define PRIME32_2 XXH_PRIME32_2 
 2915#  define PRIME32_3 XXH_PRIME32_3 
 2916#  define PRIME32_4 XXH_PRIME32_4 
 2917#  define PRIME32_5 XXH_PRIME32_5 
 2931static xxh_u32 XXH32_round(xxh_u32 acc, xxh_u32 input)
 
 2934    acc  = XXH_rotl32(acc, 13);
 
 2936#if (defined(__SSE4_1__) || defined(__aarch64__) || defined(__wasm_simd128__)) && !defined(XXH_ENABLE_AUTOVECTORIZE) 
 2973    XXH_COMPILER_GUARD(acc);
 
 2988static xxh_u32 XXH32_avalanche(xxh_u32 hash)
 
 2998#define XXH_get32bits(p) XXH_readLE32_align(p, align) 
 3004XXH_FORCE_INLINE 
void 
 3005XXH32_initAccs(xxh_u32 *acc, xxh_u32 seed)
 
 3007    XXH_ASSERT(acc != NULL);
 
 3020XXH_FORCE_INLINE 
const xxh_u8 *
 
 3022    xxh_u32 *XXH_RESTRICT acc,
 
 3023    xxh_u8 
const *XXH_RESTRICT input,
 
 3028    const xxh_u8* 
const bEnd = input + len;
 
 3029    const xxh_u8* 
const limit = bEnd - 15;
 
 3030    XXH_ASSERT(acc != NULL);
 
 3031    XXH_ASSERT(input != NULL);
 
 3032    XXH_ASSERT(len >= 16);
 
 3034        acc[0] = XXH32_round(acc[0], XXH_get32bits(input)); input += 4;
 
 3035        acc[1] = XXH32_round(acc[1], XXH_get32bits(input)); input += 4;
 
 3036        acc[2] = XXH32_round(acc[2], XXH_get32bits(input)); input += 4;
 
 3037        acc[3] = XXH32_round(acc[3], XXH_get32bits(input)); input += 4;
 
 3038    } 
while (input < limit);
 
 3047XXH_FORCE_INLINE XXH_PUREF xxh_u32
 
 3048XXH32_mergeAccs(
const xxh_u32 *acc)
 
 3050    XXH_ASSERT(acc != NULL);
 
 3051    return XXH_rotl32(acc[0], 1)  + XXH_rotl32(acc[1], 7)
 
 3052         + XXH_rotl32(acc[2], 12) + XXH_rotl32(acc[3], 18);
 
 3070static XXH_PUREF xxh_u32
 
 3071XXH32_finalize(xxh_u32 hash, 
const xxh_u8* ptr, 
size_t len, 
XXH_alignment align)
 
 3073#define XXH_PROCESS1 do {                             \ 
 3074    hash += (*ptr++) * XXH_PRIME32_5;                 \ 
 3075    hash = XXH_rotl32(hash, 11) * XXH_PRIME32_1;      \ 
 3078#define XXH_PROCESS4 do {                             \ 
 3079    hash += XXH_get32bits(ptr) * XXH_PRIME32_3;       \ 
 3081    hash  = XXH_rotl32(hash, 17) * XXH_PRIME32_4;     \ 
 3084    if (ptr==NULL) XXH_ASSERT(len == 0);
 
 3097        return XXH32_avalanche(hash);
 
 3100           case 12:      XXH_PROCESS4;
 
 3102           case 8:       XXH_PROCESS4;
 
 3104           case 4:       XXH_PROCESS4;
 
 3105                         return XXH32_avalanche(hash);
 
 3107           case 13:      XXH_PROCESS4;
 
 3109           case 9:       XXH_PROCESS4;
 
 3111           case 5:       XXH_PROCESS4;
 
 3113                         return XXH32_avalanche(hash);
 
 3115           case 14:      XXH_PROCESS4;
 
 3117           case 10:      XXH_PROCESS4;
 
 3119           case 6:       XXH_PROCESS4;
 
 3122                         return XXH32_avalanche(hash);
 
 3124           case 15:      XXH_PROCESS4;
 
 3126           case 11:      XXH_PROCESS4;
 
 3128           case 7:       XXH_PROCESS4;
 
 3130           case 3:       XXH_PROCESS1;
 
 3132           case 2:       XXH_PROCESS1;
 
 3134           case 1:       XXH_PROCESS1;
 
 3136           case 0:       
return XXH32_avalanche(hash);
 
 3144#  define PROCESS1 XXH_PROCESS1 
 3145#  define PROCESS4 XXH_PROCESS4 
 3159XXH_FORCE_INLINE XXH_PUREF xxh_u32
 
 3160XXH32_endian_align(
const xxh_u8* input, 
size_t len, xxh_u32 seed, 
XXH_alignment align)
 
 3164    if (input==NULL) XXH_ASSERT(len == 0);
 
 3168        XXH32_initAccs(acc, seed);
 
 3170        input = XXH32_consumeLong(acc, input, len, align);
 
 3172        h32 = XXH32_mergeAccs(acc);
 
 3177    h32 += (xxh_u32)len;
 
 3179    return XXH32_finalize(h32, input, len&15, align);
 
 3185#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2 
 3193        if ((((
size_t)input) & 3) == 0) {   
 
 3194            return XXH32_endian_align((
const xxh_u8*)input, len, seed, 
XXH_aligned);
 
 3197    return XXH32_endian_align((
const xxh_u8*)input, len, seed, 
XXH_unaligned);
 
 
 3204#ifndef XXH_NO_STREAM 
 3220    XXH_memcpy(dstState, srcState, 
sizeof(*dstState));
 
 
 3226    XXH_ASSERT(statePtr != NULL);
 
 3227    memset(statePtr, 0, 
sizeof(*statePtr));
 
 3228    XXH32_initAccs(statePtr->
acc, seed);
 
 
 3238        XXH_ASSERT(len == 0);
 
 3252    {   
const xxh_u8* xinput = (
const xxh_u8*)input;
 
 3253        const xxh_u8* 
const bEnd = xinput + len;
 
 3263        XXH_ASSERT(xinput <= bEnd);
 
 3264        if ((
size_t)(bEnd - xinput) >= 
sizeof(state->
buffer)) {
 
 3266            xinput = XXH32_consumeLong(state->
acc, xinput, (
size_t)(bEnd - xinput), 
XXH_unaligned);
 
 3269        if (xinput < bEnd) {
 
 3271            XXH_memcpy(state->
buffer, xinput, (
size_t)(bEnd-xinput));
 
 
 3286        h32 = XXH32_mergeAccs(state->
acc);
 
 
 3304    XXH_memcpy(dst, &hash, 
sizeof(*dst));
 
 
 3309    return XXH_readBE32(src);
 
 
 3313#ifndef XXH_NO_LONG_LONG 
 3331#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3)) 
 3336#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2)) 
 3339static xxh_u64 XXH_read64(
const void* memPtr)
 
 3341    return *(
const xxh_u64*) memPtr;
 
 3344#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1)) 
 3354typedef union { xxh_u32 u32; xxh_u64 u64; } __attribute__((__packed__)) unalign64;
 
 3356static xxh_u64 XXH_read64(
const void* ptr)
 
 3358    typedef __attribute__((__aligned__(1))) xxh_u64 xxh_unalign64;
 
 3359    return *((const xxh_unalign64*)ptr);
 
 3368static xxh_u64 XXH_read64(
const void* memPtr)
 
 3371    XXH_memcpy(&val, memPtr, 
sizeof(val));
 
 3377#if defined(_MSC_VER)      
 3378#  define XXH_swap64 _byteswap_uint64 
 3379#elif XXH_GCC_VERSION >= 403 
 3380#  define XXH_swap64 __builtin_bswap64 
 3382static xxh_u64 XXH_swap64(xxh_u64 x)
 
 3384    return  ((x << 56) & 0xff00000000000000ULL) |
 
 3385            ((x << 40) & 0x00ff000000000000ULL) |
 
 3386            ((x << 24) & 0x0000ff0000000000ULL) |
 
 3387            ((x << 8)  & 0x000000ff00000000ULL) |
 
 3388            ((x >> 8)  & 0x00000000ff000000ULL) |
 
 3389            ((x >> 24) & 0x0000000000ff0000ULL) |
 
 3390            ((x >> 40) & 0x000000000000ff00ULL) |
 
 3391            ((x >> 56) & 0x00000000000000ffULL);
 
 3397#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3)) 
 3399XXH_FORCE_INLINE xxh_u64 XXH_readLE64(
const void* memPtr)
 
 3401    const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
 
 3403         | ((xxh_u64)bytePtr[1] << 8)
 
 3404         | ((xxh_u64)bytePtr[2] << 16)
 
 3405         | ((xxh_u64)bytePtr[3] << 24)
 
 3406         | ((xxh_u64)bytePtr[4] << 32)
 
 3407         | ((xxh_u64)bytePtr[5] << 40)
 
 3408         | ((xxh_u64)bytePtr[6] << 48)
 
 3409         | ((xxh_u64)bytePtr[7] << 56);
 
 3412XXH_FORCE_INLINE xxh_u64 XXH_readBE64(
const void* memPtr)
 
 3414    const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
 
 3416         | ((xxh_u64)bytePtr[6] << 8)
 
 3417         | ((xxh_u64)bytePtr[5] << 16)
 
 3418         | ((xxh_u64)bytePtr[4] << 24)
 
 3419         | ((xxh_u64)bytePtr[3] << 32)
 
 3420         | ((xxh_u64)bytePtr[2] << 40)
 
 3421         | ((xxh_u64)bytePtr[1] << 48)
 
 3422         | ((xxh_u64)bytePtr[0] << 56);
 
 3426XXH_FORCE_INLINE xxh_u64 XXH_readLE64(
const void* ptr)
 
 3431static xxh_u64 XXH_readBE64(
const void* ptr)
 
 3437XXH_FORCE_INLINE xxh_u64
 
 3441        return XXH_readLE64(ptr);
 
 3457#define XXH_PRIME64_1  0x9E3779B185EBCA87ULL   
 3458#define XXH_PRIME64_2  0xC2B2AE3D27D4EB4FULL   
 3459#define XXH_PRIME64_3  0x165667B19E3779F9ULL   
 3460#define XXH_PRIME64_4  0x85EBCA77C2B2AE63ULL   
 3461#define XXH_PRIME64_5  0x27D4EB2F165667C5ULL   
 3464#  define PRIME64_1 XXH_PRIME64_1 
 3465#  define PRIME64_2 XXH_PRIME64_2 
 3466#  define PRIME64_3 XXH_PRIME64_3 
 3467#  define PRIME64_4 XXH_PRIME64_4 
 3468#  define PRIME64_5 XXH_PRIME64_5 
 3472static xxh_u64 XXH64_round(xxh_u64 acc, xxh_u64 input)
 
 3475    acc  = XXH_rotl64(acc, 31);
 
 3477#if (defined(__AVX512F__)) && !defined(XXH_ENABLE_AUTOVECTORIZE) 
 3492    XXH_COMPILER_GUARD(acc);
 
 3497static xxh_u64 XXH64_mergeRound(xxh_u64 acc, xxh_u64 val)
 
 3499    val  = XXH64_round(0, val);
 
 3506static xxh_u64 XXH64_avalanche(xxh_u64 hash)
 
 3517#define XXH_get64bits(p) XXH_readLE64_align(p, align) 
 3523XXH_FORCE_INLINE 
void 
 3524XXH64_initAccs(xxh_u64 *acc, xxh_u64 seed)
 
 3526    XXH_ASSERT(acc != NULL);
 
 3539XXH_FORCE_INLINE 
const xxh_u8 *
 
 3541    xxh_u64 *XXH_RESTRICT acc,
 
 3542    xxh_u8 
const *XXH_RESTRICT input,
 
 3547    const xxh_u8* 
const bEnd = input + len;
 
 3548    const xxh_u8* 
const limit = bEnd - 31;
 
 3549    XXH_ASSERT(acc != NULL);
 
 3550    XXH_ASSERT(input != NULL);
 
 3551    XXH_ASSERT(len >= 32);
 
 3554        if (
sizeof(
void *) < 
sizeof(xxh_u64)) {
 
 3556            for (i = 0; i < 4; i++) {
 
 3557                acc[i] = XXH64_round(acc[i], XXH_get64bits(input));
 
 3561            acc[0] = XXH64_round(acc[0], XXH_get64bits(input)); input += 8;
 
 3562            acc[1] = XXH64_round(acc[1], XXH_get64bits(input)); input += 8;
 
 3563            acc[2] = XXH64_round(acc[2], XXH_get64bits(input)); input += 8;
 
 3564            acc[3] = XXH64_round(acc[3], XXH_get64bits(input)); input += 8;
 
 3566    } 
while (input < limit);
 
 3575XXH_FORCE_INLINE XXH_PUREF xxh_u64
 
 3576XXH64_mergeAccs(
const xxh_u64 *acc)
 
 3578    XXH_ASSERT(acc != NULL);
 
 3580        xxh_u64 h64 = XXH_rotl64(acc[0], 1) + XXH_rotl64(acc[1], 7)
 
 3581                    + XXH_rotl64(acc[2], 12) + XXH_rotl64(acc[3], 18);
 
 3583        if (
sizeof(
void *) < 
sizeof(xxh_u64)) {
 
 3585            for (i = 0; i < 4; i++) {
 
 3586                h64 = XXH64_mergeRound(h64, acc[i]);
 
 3589            h64 = XXH64_mergeRound(h64, acc[0]);
 
 3590            h64 = XXH64_mergeRound(h64, acc[1]);
 
 3591            h64 = XXH64_mergeRound(h64, acc[2]);
 
 3592            h64 = XXH64_mergeRound(h64, acc[3]);
 
 3613XXH_STATIC XXH_PUREF xxh_u64
 
 3614XXH64_finalize(xxh_u64 hash, 
const xxh_u8* ptr, 
size_t len, 
XXH_alignment align)
 
 3616    if (ptr==NULL) XXH_ASSERT(len == 0);
 
 3619        xxh_u64 
const k1 = XXH64_round(0, XXH_get64bits(ptr));
 
 3636    return  XXH64_avalanche(hash);
 
 3640#  define PROCESS1_64 XXH_PROCESS1_64 
 3641#  define PROCESS4_64 XXH_PROCESS4_64 
 3642#  define PROCESS8_64 XXH_PROCESS8_64 
 3644#  undef XXH_PROCESS1_64 
 3645#  undef XXH_PROCESS4_64 
 3646#  undef XXH_PROCESS8_64 
/*
 * Single-shot XXH64 implementation for a fully available buffer.
 *
 * @param input  Input buffer; may be NULL only when len==0.
 * @param len    Number of bytes to hash.
 * @param seed   64-bit seed.
 * @param align  Alignment hint for the read path (XXH_aligned/XXH_unaligned).
 * @return       The XXH64 hash of the buffer.
 */
XXH_FORCE_INLINE XXH_PUREF xxh_u64
XXH64_endian_align(const xxh_u8* input, size_t len, xxh_u64 seed, XXH_alignment align)
{
    xxh_u64 h64;
    if (input==NULL) XXH_ASSERT(len == 0);

    if (len >= 32) {
        /* Long inputs: run the 4-lane accumulator over full 32-byte stripes,
         * then merge the lanes. */
        xxh_u64 acc[4];
        XXH64_initAccs(acc, seed);
        input = XXH64_consumeLong(acc, input, len, align);
        h64 = XXH64_mergeAccs(acc);
    } else {
        /* Short inputs skip the accumulator entirely. */
        h64  = seed + XXH_PRIME64_5;
    }

    h64 += (xxh_u64) len;   /* length participates in the hash */

    /* Fold in the last len%32 bytes and avalanche. */
    return XXH64_finalize(h64, input, len, align);
}
 
 3683#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2 
 3691        if ((((
size_t)input) & 7)==0) {  
 
 3692            return XXH64_endian_align((
const xxh_u8*)input, len, seed, 
XXH_aligned);
 
 3695    return XXH64_endian_align((
const xxh_u8*)input, len, seed, 
XXH_unaligned);
 
 
 3701#ifndef XXH_NO_STREAM 
 3717    XXH_memcpy(dstState, srcState, 
sizeof(*dstState));
 
 
 3723    XXH_ASSERT(statePtr != NULL);
 
 3724    memset(statePtr, 0, 
sizeof(*statePtr));
 
 3725    XXH64_initAccs(statePtr->acc, seed);
 
 
 3734        XXH_ASSERT(len == 0);
 
 3738    state->total_len += len;
 
 3740    XXH_ASSERT(state->bufferedSize <= 
sizeof(state->buffer));
 
 3741    if (len < 
sizeof(state->buffer) - state->bufferedSize)  {   
 
 3742        XXH_memcpy(state->buffer + state->bufferedSize, input, len);
 
 3747    {   
const xxh_u8* xinput = (
const xxh_u8*)input;
 
 3748        const xxh_u8* 
const bEnd = xinput + len;
 
 3750        if (state->bufferedSize) {   
 
 3751            XXH_memcpy(state->buffer + state->bufferedSize, xinput, 
sizeof(state->buffer) - state->bufferedSize);
 
 3752            xinput += 
sizeof(state->buffer) - state->bufferedSize;
 
 3754            (void)XXH64_consumeLong(state->acc, state->buffer, 
sizeof(state->buffer), 
XXH_aligned);
 
 3755            state->bufferedSize = 0;
 
 3758        XXH_ASSERT(xinput <= bEnd);
 
 3759        if ((
size_t)(bEnd - xinput) >= 
sizeof(state->buffer)) {
 
 3761            xinput = XXH64_consumeLong(state->acc, xinput, (
size_t)(bEnd - xinput), 
XXH_unaligned);
 
 3764        if (xinput < bEnd) {
 
 3766            XXH_memcpy(state->buffer, xinput, (
size_t)(bEnd-xinput));
 
 3767            state->bufferedSize = (unsigned)(bEnd-xinput);
 
 
 3780    if (state->total_len >= 32) {
 
 3781        h64 = XXH64_mergeAccs(state->acc);
 
 3786    h64 += (xxh_u64) state->total_len;
 
 3788    return XXH64_finalize(h64, state->buffer, (
size_t)state->total_len, 
XXH_aligned);
 
 
 3799    XXH_memcpy(dst, &hash, 
sizeof(*dst));
 
 
 3805    return XXH_readBE64(src);
 
 
 3824#if (defined(__GNUC__) && (__GNUC__ >= 3))  \ 
 3825  || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) \ 
 3826  || defined(__clang__) 
 3827#    define XXH_likely(x) __builtin_expect(x, 1) 
 3828#    define XXH_unlikely(x) __builtin_expect(x, 0) 
 3830#    define XXH_likely(x) (x) 
 3831#    define XXH_unlikely(x) (x) 
 3834#ifndef XXH_HAS_INCLUDE 
 3835#  ifdef __has_include 
 3840#    define XXH_HAS_INCLUDE __has_include 
 3842#    define XXH_HAS_INCLUDE(x) 0 
 3846#if defined(__GNUC__) || defined(__clang__) 
 3847#  if defined(__ARM_FEATURE_SVE) 
 3848#    include <arm_sve.h> 
 3850#  if defined(__ARM_NEON__) || defined(__ARM_NEON) \ 
 3851   || (defined(_M_ARM) && _M_ARM >= 7) \ 
 3852   || defined(_M_ARM64) || defined(_M_ARM64EC) \ 
 3853   || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>))  
 3854#    define inline __inline__   
 3855#    include <arm_neon.h> 
 3857#  elif defined(__AVX2__) 
 3858#    include <immintrin.h> 
 3859#  elif defined(__SSE2__) 
 3860#    include <emmintrin.h> 
 3861#  elif defined(__loongarch_asx) 
 3862#    include <lasxintrin.h> 
 3863#    include <lsxintrin.h> 
 3864#  elif defined(__loongarch_sx) 
 3865#    include <lsxintrin.h> 
 3869#if defined(_MSC_VER) 
 3942#if defined(__thumb__) && !defined(__thumb2__) && defined(__ARM_ARCH_ISA_ARM) 
 3943#   warning "XXH3 is highly inefficient without ARM or Thumb-2." 
 3961#  define XXH_VECTOR XXH_SCALAR 
 3971#  define XXH_ACC_ALIGN 8 
 3979#  if defined(__ARM_FEATURE_SVE) 
 3980#    define XXH_VECTOR XXH_SVE 
 3982        defined(__ARM_NEON__) || defined(__ARM_NEON)  \ 
 3983     || defined(_M_ARM) || defined(_M_ARM64) || defined(_M_ARM64EC)  \ 
 3984     || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>))  \ 
 3986        defined(_WIN32) || defined(__LITTLE_ENDIAN__)  \ 
 3987    || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) \ 
 3989#    define XXH_VECTOR XXH_NEON 
 3990#  elif defined(__AVX512F__) 
 3991#    define XXH_VECTOR XXH_AVX512 
 3992#  elif defined(__AVX2__) 
 3993#    define XXH_VECTOR XXH_AVX2 
 3994#  elif defined(__SSE2__) || defined(_M_AMD64) || defined(_M_X64) || (defined(_M_IX86_FP) && (_M_IX86_FP == 2)) 
 3995#    define XXH_VECTOR XXH_SSE2 
 3996#  elif (defined(__PPC64__) && defined(__POWER8_VECTOR__)) \ 
 3997     || (defined(__s390x__) && defined(__VEC__)) \ 
 3998     && defined(__GNUC__)  
 3999#    define XXH_VECTOR XXH_VSX 
 4000#  elif defined(__loongarch_asx) 
 4001#    define XXH_VECTOR XXH_LASX 
 4002#  elif defined(__loongarch_sx) 
 4003#    define XXH_VECTOR XXH_LSX 
 4005#    define XXH_VECTOR XXH_SCALAR 
 4010#if (XXH_VECTOR == XXH_SVE) && !defined(__ARM_FEATURE_SVE) 
 4012#    pragma warning(once : 4606) 
 4014#    warning "__ARM_FEATURE_SVE isn't supported. Use SCALAR instead." 
 4017#  define XXH_VECTOR XXH_SCALAR 
 4024#ifndef XXH_ACC_ALIGN 
 4025#  if defined(XXH_X86DISPATCH) 
 4026#     define XXH_ACC_ALIGN 64   
 4027#  elif XXH_VECTOR == XXH_SCALAR   
 4028#     define XXH_ACC_ALIGN 8 
 4029#  elif XXH_VECTOR == XXH_SSE2   
 4030#     define XXH_ACC_ALIGN 16 
 4031#  elif XXH_VECTOR == XXH_AVX2   
 4032#     define XXH_ACC_ALIGN 32 
 4033#  elif XXH_VECTOR == XXH_NEON   
 4034#     define XXH_ACC_ALIGN 16 
 4035#  elif XXH_VECTOR == XXH_VSX    
 4036#     define XXH_ACC_ALIGN 16 
 4037#  elif XXH_VECTOR == XXH_AVX512   
 4038#     define XXH_ACC_ALIGN 64 
 4039#  elif XXH_VECTOR == XXH_SVE    
 4040#     define XXH_ACC_ALIGN 64 
 4041#  elif XXH_VECTOR == XXH_LASX    
 4042#     define XXH_ACC_ALIGN 64 
 4043#  elif XXH_VECTOR == XXH_LSX    
 4044#     define XXH_ACC_ALIGN 64 
 4048#if defined(XXH_X86DISPATCH) || XXH_VECTOR == XXH_SSE2 \ 
 4049    || XXH_VECTOR == XXH_AVX2 || XXH_VECTOR == XXH_AVX512 
 4050#  define XXH_SEC_ALIGN XXH_ACC_ALIGN 
 4051#elif XXH_VECTOR == XXH_SVE 
 4052#  define XXH_SEC_ALIGN XXH_ACC_ALIGN 
 4054#  define XXH_SEC_ALIGN 8 
 4057#if defined(__GNUC__) || defined(__clang__) 
 4058#  define XXH_ALIASING __attribute__((__may_alias__)) 
 4060#  define XXH_ALIASING  
 4084#if XXH_VECTOR == XXH_AVX2  \ 
 4085  && defined(__GNUC__) && !defined(__clang__)  \ 
 4086  && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0  
 4087#  pragma GCC push_options 
 4088#  pragma GCC optimize("-O2") 
 4091#if XXH_VECTOR == XXH_NEON 
 4100typedef uint64x2_t xxh_aliasing_uint64x2_t XXH_ALIASING;
 
 4115#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__) 
 4116XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(
void const* ptr) 
 
 4118    return *(xxh_aliasing_uint64x2_t 
const *)ptr;
 
 4121XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(
void const* ptr)
 
 4123    return vreinterpretq_u64_u8(vld1q_u8((uint8_t 
const*)ptr));
 
 4135#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 11 
 4136XXH_FORCE_INLINE uint64x2_t
 
 4137XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
 
 4140    __asm__(
"umlal   %0.2d, %1.2s, %2.2s" : 
"+w" (acc) : 
"w" (lhs), 
"w" (rhs));
 
 4143XXH_FORCE_INLINE uint64x2_t
 
 4144XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
 
 4147    return vmlal_high_u32(acc, lhs, rhs);
 
 4151XXH_FORCE_INLINE uint64x2_t
 
 4152XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
 
 4154    return vmlal_u32(acc, vget_low_u32(lhs), vget_low_u32(rhs));
 
 4158XXH_FORCE_INLINE uint64x2_t
 
 4159XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
 
 4161    return vmlal_u32(acc, vget_high_u32(lhs), vget_high_u32(rhs));
 
 4203# ifndef XXH3_NEON_LANES 
 4204#  if (defined(__aarch64__) || defined(__arm64__) || defined(_M_ARM64) || defined(_M_ARM64EC)) \ 
 4205   && !defined(__APPLE__) && XXH_SIZE_OPT <= 0 
 4206#   define XXH3_NEON_LANES 6 
 4208#   define XXH3_NEON_LANES XXH_ACC_NB 
 4221#if XXH_VECTOR == XXH_VSX 
 4230#  pragma push_macro("bool") 
 4231#  pragma push_macro("vector") 
 4232#  pragma push_macro("pixel") 
 4238#  if defined(__s390x__) 
 4239#    include <s390intrin.h> 
 4241#    include <altivec.h> 
 4245#  pragma pop_macro("pixel") 
 4246#  pragma pop_macro("vector") 
 4247#  pragma pop_macro("bool") 
 4249typedef __vector 
unsigned long long xxh_u64x2;
 
 4250typedef __vector 
unsigned char xxh_u8x16;
 
 4251typedef __vector 
unsigned xxh_u32x4;
 
 4256typedef xxh_u64x2 xxh_aliasing_u64x2 XXH_ALIASING;
 
 4259#  if defined(__BIG_ENDIAN__) \ 
 4260  || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__) 
 4261#    define XXH_VSX_BE 1 
 4262#  elif defined(__VEC_ELEMENT_REG_ORDER__) && __VEC_ELEMENT_REG_ORDER__ == __ORDER_BIG_ENDIAN__ 
 4263#    warning "-maltivec=be is not recommended. Please use native endianness." 
 4264#    define XXH_VSX_BE 1 
 4266#    define XXH_VSX_BE 0 
 4271#  if defined(__POWER9_VECTOR__) || (defined(__clang__) && defined(__s390x__)) 
 4272#    define XXH_vec_revb vec_revb 
/*
 * Fallback 64-bit byteswap for each lane of a POWER/VSX vector, used when
 * vec_revb is not available (pre-POWER9, non-clang s390x).
 */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_revb(xxh_u64x2 val)
{
    /* Permute map that reverses the bytes of each 64-bit lane. */
    xxh_u8x16 const vByteSwap = { 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00,
                                  0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09, 0x08 };
    return vec_perm(val, val, vByteSwap);
}
 
/*
 * Performs an unaligned vector load, byteswapping on big-endian targets so
 * that lane contents match the little-endian wire format.
 */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(const void *ptr)
{
    xxh_u64x2 ret;
    /* memcpy is the portable unaligned load; compilers lower it to a
     * single vector load instruction. */
    XXH_memcpy(&ret, ptr, sizeof(xxh_u64x2));
#if XXH_VSX_BE
    ret = XXH_vec_revb(ret);
#endif
    return ret;
}
 
 4305# if defined(__s390x__) 
 4307#  define XXH_vec_mulo vec_mulo 
 4308#  define XXH_vec_mule vec_mule 
 4309# elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw) && !defined(__ibmxl__) 
 4312#  define XXH_vec_mulo __builtin_altivec_vmulouw 
 4313#  define XXH_vec_mule __builtin_altivec_vmuleuw 
 4317XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mulo(xxh_u32x4 a, xxh_u32x4 b)
 
 4320    __asm__(
"vmulouw %0, %1, %2" : 
"=v" (result) : 
"v" (a), 
"v" (b));
 
 4323XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mule(xxh_u32x4 a, xxh_u32x4 b)
 
 4326    __asm__(
"vmuleuw %0, %1, %2" : 
"=v" (result) : 
"v" (a), 
"v" (b));
 
 4332#if XXH_VECTOR == XXH_SVE 
 4333#define ACCRND(acc, offset) \ 
 4335    svuint64_t input_vec = svld1_u64(mask, xinput + offset);         \ 
 4336    svuint64_t secret_vec = svld1_u64(mask, xsecret + offset);       \ 
 4337    svuint64_t mixed = sveor_u64_x(mask, secret_vec, input_vec);     \ 
 4338    svuint64_t swapped = svtbl_u64(input_vec, kSwap);                \ 
 4339    svuint64_t mixed_lo = svextw_u64_x(mask, mixed);                 \ 
 4340    svuint64_t mixed_hi = svlsr_n_u64_x(mask, mixed, 32);            \ 
 4341    svuint64_t mul = svmad_u64_x(mask, mixed_lo, mixed_hi, swapped); \ 
 4342    acc = svadd_u64_x(mask, acc, mul);                               \ 
 4348#if defined(XXH_NO_PREFETCH) 
 4349#  define XXH_PREFETCH(ptr)  (void)(ptr)   
 4351#  if XXH_SIZE_OPT >= 1 
 4352#    define XXH_PREFETCH(ptr) (void)(ptr) 
 4353#  elif defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))   
 4354#    include <mmintrin.h>    
 4355#    define XXH_PREFETCH(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T0) 
 4356#  elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) ) 
 4357#    define XXH_PREFETCH(ptr)  __builtin_prefetch((ptr), 0 , 3 ) 
 4359#    define XXH_PREFETCH(ptr) (void)(ptr)   
 4368#define XXH_SECRET_DEFAULT_SIZE 192    
 4370#if (XXH_SECRET_DEFAULT_SIZE < XXH3_SECRET_SIZE_MIN) 
 4371#  error "default keyset is not large enough" 
/*
 * The default XXH3 "secret": 192 bytes of fixed pseudorandom data.
 * These exact bytes are part of the XXH3 specification — changing any of
 * them changes every XXH3 hash value. Aligned to 64 bytes so SIMD kernels
 * can use aligned loads.
 */
XXH_ALIGN(64) static const xxh_u8 XXH3_kSecret[XXH_SECRET_DEFAULT_SIZE] = {
    0xb8, 0xfe, 0x6c, 0x39, 0x23, 0xa4, 0x4b, 0xbe, 0x7c, 0x01, 0x81, 0x2c, 0xf7, 0x21, 0xad, 0x1c,
    0xde, 0xd4, 0x6d, 0xe9, 0x83, 0x90, 0x97, 0xdb, 0x72, 0x40, 0xa4, 0xa4, 0xb7, 0xb3, 0x67, 0x1f,
    0xcb, 0x79, 0xe6, 0x4e, 0xcc, 0xc0, 0xe5, 0x78, 0x82, 0x5a, 0xd0, 0x7d, 0xcc, 0xff, 0x72, 0x21,
    0xb8, 0x08, 0x46, 0x74, 0xf7, 0x43, 0x24, 0x8e, 0xe0, 0x35, 0x90, 0xe6, 0x81, 0x3a, 0x26, 0x4c,
    0x3c, 0x28, 0x52, 0xbb, 0x91, 0xc3, 0x00, 0xcb, 0x88, 0xd0, 0x65, 0x8b, 0x1b, 0x53, 0x2e, 0xa3,
    0x71, 0x64, 0x48, 0x97, 0xa2, 0x0d, 0xf9, 0x4e, 0x38, 0x19, 0xef, 0x46, 0xa9, 0xde, 0xac, 0xd8,
    0xa8, 0xfa, 0x76, 0x3f, 0xe3, 0x9c, 0x34, 0x3f, 0xf9, 0xdc, 0xbb, 0xc7, 0xc7, 0x0b, 0x4f, 0x1d,
    0x8a, 0x51, 0xe0, 0x4b, 0xcd, 0xb4, 0x59, 0x31, 0xc8, 0x9f, 0x7e, 0xc9, 0xd9, 0x78, 0x73, 0x64,
    0xea, 0xc5, 0xac, 0x83, 0x34, 0xd3, 0xeb, 0xc3, 0xc5, 0x81, 0xa0, 0xff, 0xfa, 0x13, 0x63, 0xeb,
    0x17, 0x0d, 0xdd, 0x51, 0xb7, 0xf0, 0xda, 0x49, 0xd3, 0x16, 0x55, 0x26, 0x29, 0xd4, 0x68, 0x9e,
    0x2b, 0x16, 0xbe, 0x58, 0x7d, 0x47, 0xa1, 0xfc, 0x8f, 0xf8, 0xb8, 0xd1, 0x7a, 0xd0, 0x31, 0xce,
    0x45, 0xcb, 0x3a, 0x8f, 0x95, 0x16, 0x04, 0x28, 0xaf, 0xd7, 0xfb, 0xca, 0xbb, 0x4b, 0x40, 0x7e,
 
/* Mixing multipliers used by XXH3's avalanche steps (not the classic
 * XXH32/XXH64 primes). The exact values are part of the XXH3 spec. */
static const xxh_u64 PRIME_MX1 = 0x165667919E3779F9ULL;  /* used in XXH3_avalanche */
static const xxh_u64 PRIME_MX2 = 0x9FB21C651E98DF25ULL;  /* used in XXH3_rrmxmx */
 
 4397#  define kSecret XXH3_kSecret 
 4417XXH_FORCE_INLINE xxh_u64
 
 4418XXH_mult32to64(xxh_u64 x, xxh_u64 y)
 
 4420   return (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF);
 
 4422#elif defined(_MSC_VER) && defined(_M_IX86) 
 4423#    define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y)) 
 4432#    define XXH_mult32to64(x, y) ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y)) 
 4445XXH_mult64to128(xxh_u64 lhs, xxh_u64 rhs)
 
 4462#if (defined(__GNUC__) || defined(__clang__)) && !defined(__wasm__) \ 
 4463    && defined(__SIZEOF_INT128__) \ 
 4464    || (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128) 
 4466    __uint128_t 
const product = (__uint128_t)lhs * (__uint128_t)rhs;
 
 4468    r128.
low64  = (xxh_u64)(product);
 
 4469    r128.
high64 = (xxh_u64)(product >> 64);
 
 4479#elif (defined(_M_X64) || defined(_M_IA64)) && !defined(_M_ARM64EC) 
 4482#   pragma intrinsic(_umul128) 
 4484    xxh_u64 product_high;
 
 4485    xxh_u64 
const product_low = _umul128(lhs, rhs, &product_high);
 
 4487    r128.
low64  = product_low;
 
 4488    r128.
high64 = product_high;
 
 4496#elif defined(_M_ARM64) || defined(_M_ARM64EC) 
 4499#   pragma intrinsic(__umulh) 
 4502    r128.
low64  = lhs * rhs;
 
 4503    r128.
high64 = __umulh(lhs, rhs);
 
 4551    xxh_u64 
const lo_lo = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs & 0xFFFFFFFF);
 
 4552    xxh_u64 
const hi_lo = XXH_mult32to64(lhs >> 32,        rhs & 0xFFFFFFFF);
 
 4553    xxh_u64 
const lo_hi = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs >> 32);
 
 4554    xxh_u64 
const hi_hi = XXH_mult32to64(lhs >> 32,        rhs >> 32);
 
 4557    xxh_u64 
const cross = (lo_lo >> 32) + (hi_lo & 0xFFFFFFFF) + lo_hi;
 
 4558    xxh_u64 
const upper = (hi_lo >> 32) + (cross >> 32)        + hi_hi;
 
 4559    xxh_u64 
const lower = (cross << 32) | (lo_lo & 0xFFFFFFFF);
 
 4579XXH3_mul128_fold64(xxh_u64 lhs, xxh_u64 rhs)
 
/*
 * XORs a value with a right-shifted copy of itself — the basic diffusion
 * primitive used by the XXH3 avalanche functions.
 *
 * @param v64    Value to mix.
 * @param shift  Shift amount; must be in [0, 64) (shifting by >= width is UB).
 * @return       v64 ^ (v64 >> shift)
 */
XXH_FORCE_INLINE XXH_CONSTF xxh_u64 XXH_xorshift64(xxh_u64 v64, int shift)
{
    XXH_ASSERT(0 <= shift && shift < 64);
    return v64 ^ (v64 >> shift);
}
 
 4598    h64 = XXH_xorshift64(h64, 37);
 
 4600    h64 = XXH_xorshift64(h64, 32);
 
/*
 * Finalization mix inspired by Pelle Evensen's rrmxmx; stronger than the
 * plain xorshift avalanche, used for 4-8 byte inputs.
 *
 * @param h64  Keyed input value to scramble.
 * @param len  Input length, folded into the mix.
 * @return     Fully avalanched hash.
 */
static XXH64_hash_t XXH3_rrmxmx(xxh_u64 h64, xxh_u64 len)
{
    /* rotate-rotate-xor */
    h64 ^= XXH_rotl64(h64, 49) ^ XXH_rotl64(h64, 24);
    h64 *= PRIME_MX2;
    h64 ^= (h64 >> 35) + len ;
    h64 *= PRIME_MX2;
    return XXH_xorshift64(h64, 28);
}
 
/*
 * XXH3-64 for inputs of exactly 1 to 3 bytes.
 * Packs the first, middle, and last bytes plus the length into one 32-bit
 * word, so all three lengths exercise distinct bit positions.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_1to3_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(1 <= len && len <= 3);
    XXH_ASSERT(secret != NULL);
    /*
     * len = 1: c1 = c2 = c3 = input[0]
     * len = 2: c1 = input[0], c2 = c3 = input[1]
     * len = 3: c1 = input[0], c2 = input[1], c3 = input[2]
     */
    {   xxh_u8  const c1 = input[0];
        xxh_u8  const c2 = input[len >> 1];
        xxh_u8  const c3 = input[len - 1];
        xxh_u32 const combined = ((xxh_u32)c1 << 16) | ((xxh_u32)c2  << 24)
                               | ((xxh_u32)c3 <<  0) | ((xxh_u32)len << 8);
        /* Key the packed word with the first 8 secret bytes plus the seed. */
        xxh_u64 const bitflip = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
        xxh_u64 const keyed = (xxh_u64)combined ^ bitflip;
        return XXH64_avalanche(keyed);
    }
}
 
/*
 * XXH3-64 for inputs of exactly 4 to 8 bytes.
 * Reads the first and last 4 bytes (overlapping for len < 8) and mixes
 * them with secret bytes 8-23.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_4to8_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(4 <= len && len <= 8);
    /* Fold a byteswapped copy of the low seed bits into the high half, so
     * the whole 64-bit seed influences the mix. */
    seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
    {   xxh_u32 const input1 = XXH_readLE32(input);
        xxh_u32 const input2 = XXH_readLE32(input + len - 4);
        xxh_u64 const bitflip = (XXH_readLE64(secret+8) ^ XXH_readLE64(secret+16)) - seed;
        xxh_u64 const input64 = input2 + (((xxh_u64)input1) << 32);
        xxh_u64 const keyed = input64 ^ bitflip;
        return XXH3_rrmxmx(keyed, len);
    }
}
 
/*
 * XXH3-64 for inputs of exactly 9 to 16 bytes.
 * Reads the first and last 8 bytes (overlapping for len < 16), keys each
 * with different secret material, and folds them through a 128-bit multiply.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_9to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(9 <= len && len <= 16);
    {   /* Note: seed is added on one side and subtracted on the other, so
         * the two keys stay independent. */
        xxh_u64 const bitflip1 = (XXH_readLE64(secret+24) ^ XXH_readLE64(secret+32)) + seed;
        xxh_u64 const bitflip2 = (XXH_readLE64(secret+40) ^ XXH_readLE64(secret+48)) - seed;
        xxh_u64 const input_lo = XXH_readLE64(input)           ^ bitflip1;
        xxh_u64 const input_hi = XXH_readLE64(input + len - 8) ^ bitflip2;
        xxh_u64 const acc = len
                          + XXH_swap64(input_lo) + input_hi
                          + XXH3_mul128_fold64(input_lo, input_hi);
        return XXH3_avalanche(acc);
    }
}
 
/*
 * XXH3-64 dispatcher for inputs of 0 to 16 bytes.
 * Branches are ordered by decreasing likelihood for typical workloads.
 * Zero-length input hashes the seed keyed with secret bytes 56-71.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_0to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(len <= 16);
    {   if (XXH_likely(len >  8)) return XXH3_len_9to16_64b(input, len, secret, seed);
        if (XXH_likely(len >= 4)) return XXH3_len_4to8_64b(input, len, secret, seed);
        if (len) return XXH3_len_1to3_64b(input, len, secret, seed);
        /* len == 0: result still depends on seed and secret */
        return XXH64_avalanche(seed ^ (XXH_readLE64(secret+56) ^ XXH_readLE64(secret+64)));
    }
}
 
/*
 * Mixes 16 input bytes with 16 secret bytes into a single 64-bit word via
 * a folded 128-bit multiply. Core primitive of the 17-240 byte paths.
 *
 * @param input   16 readable bytes.
 * @param secret  16 readable secret bytes.
 * @param seed64  Seed; added to one secret word, subtracted from the other.
 */
XXH_FORCE_INLINE xxh_u64 XXH3_mix16B(const xxh_u8* XXH_RESTRICT input,
                                     const xxh_u8* XXH_RESTRICT secret, xxh_u64 seed64)
{
#if defined(__GNUC__) && !defined(__clang__)  \
  && defined(__i386__) && defined(__SSE2__)   \
  && !defined(XXH_ENABLE_AUTOVECTORIZE)
    /* GCC/i386/SSE2: an opaque guard on seed64 blocks a harmful
     * autovectorization of this hot path. */
    XXH_COMPILER_GUARD(seed64);
#endif
    {   xxh_u64 const input_lo = XXH_readLE64(input);
        xxh_u64 const input_hi = XXH_readLE64(input+8);
        return XXH3_mul128_fold64(
            input_lo ^ (XXH_readLE64(secret)   + seed64),
            input_hi ^ (XXH_readLE64(secret+8) - seed64)
        );
    }
}
/*
 * XXH3-64 for inputs of 17 to 128 bytes.
 * Mixes pairs of 16-byte blocks taken symmetrically from the front and the
 * back of the input, each pair against fresh secret bytes.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_17to128_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
                     const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                     XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(16 < len && len <= 128);

    {   xxh_u64 acc = len * XXH_PRIME64_1;
#if XXH_SIZE_OPT >= 1
        /* Size-optimized build: same mixes, expressed as a loop. */
        unsigned int i = (unsigned int)(len - 1) / 32;
        do {
            acc += XXH3_mix16B(input+16 * i, secret+32*i, seed);
            acc += XXH3_mix16B(input+len-16*(i+1), secret+32*i+16, seed);
        } while (i-- != 0);
#else
        /* Speed build: unrolled, processing outer pairs first. */
        if (len > 32) {
            if (len > 64) {
                if (len > 96) {
                    acc += XXH3_mix16B(input+48, secret+96, seed);
                    acc += XXH3_mix16B(input+len-64, secret+112, seed);
                }
                acc += XXH3_mix16B(input+32, secret+64, seed);
                acc += XXH3_mix16B(input+len-48, secret+80, seed);
            }
            acc += XXH3_mix16B(input+16, secret+32, seed);
            acc += XXH3_mix16B(input+len-32, secret+48, seed);
        }
        acc += XXH3_mix16B(input+0, secret+0, seed);
        acc += XXH3_mix16B(input+len-16, secret+16, seed);
#endif
        return XXH3_avalanche(acc);
    }
}
 
/*
 * XXH3-64 for inputs of 129 to 240 bytes ("midsize").
 * First eight 16-byte blocks use secret offsets 0..127; later blocks reuse
 * the secret starting at XXH3_MIDSIZE_STARTOFFSET; the final block uses a
 * dedicated offset near the end of the minimum secret.
 */
XXH_NO_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_129to240_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                      XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);

    #define XXH3_MIDSIZE_STARTOFFSET 3
    #define XXH3_MIDSIZE_LASTOFFSET  17

    {   xxh_u64 acc = len * XXH_PRIME64_1;
        xxh_u64 acc_end;
        unsigned int const nbRounds = (unsigned int)len / 16;
        unsigned int i;
        for (i=0; i<8; i++) {
            acc += XXH3_mix16B(input+(16*i), secret+(16*i), seed);
        }
        /* Last 16 bytes always get mixed, with their own secret offset. */
        acc_end = XXH3_mix16B(input + len - 16, secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET, seed);
        XXH_ASSERT(nbRounds >= 8);
        acc = XXH3_avalanche(acc);
#if defined(__clang__)                                 \
    && (defined(__ARM_NEON) || defined(__ARM_NEON__))  \
    && !defined(XXH_ENABLE_AUTOVECTORIZE)
        /* clang/NEON: vectorizing this loop is a pessimization. */
        #pragma clang loop vectorize(disable)
#endif
        for (i=8 ; i < nbRounds; i++) {
            /* Opaque guard keeps the compiler from reordering across
             * iterations. */
            XXH_COMPILER_GUARD(acc);
            acc_end += XXH3_mix16B(input+(16*i), secret+(16*(i-8)) + XXH3_MIDSIZE_STARTOFFSET, seed);
        }
        return XXH3_avalanche(acc + acc_end);
    }
}
 
 4876#define XXH_STRIPE_LEN 64 
 4877#define XXH_SECRET_CONSUME_RATE 8    
 4878#define XXH_ACC_NB (XXH_STRIPE_LEN / sizeof(xxh_u64)) 
 4881#  define STRIPE_LEN XXH_STRIPE_LEN 
 4882#  define ACC_NB XXH_ACC_NB 
 4885#ifndef XXH_PREFETCH_DIST 
 4887#    define XXH_PREFETCH_DIST 320 
 4889#    if (XXH_VECTOR == XXH_AVX512) 
 4890#      define XXH_PREFETCH_DIST 512 
 4892#      define XXH_PREFETCH_DIST 384 
 4907#define XXH3_ACCUMULATE_TEMPLATE(name)                      \ 
 4909XXH3_accumulate_##name(xxh_u64* XXH_RESTRICT acc,           \ 
 4910                       const xxh_u8* XXH_RESTRICT input,    \ 
 4911                       const xxh_u8* XXH_RESTRICT secret,   \ 
 4915    for (n = 0; n < nbStripes; n++ ) {                      \ 
 4916        const xxh_u8* const in = input + n*XXH_STRIPE_LEN;  \ 
 4917        XXH_PREFETCH(in + XXH_PREFETCH_DIST);               \ 
 4918        XXH3_accumulate_512_##name(                         \ 
 4921                 secret + n*XXH_SECRET_CONSUME_RATE);       \ 
/*
 * Stores a 64-bit value to dst in little-endian byte order, regardless of
 * host endianness. dst may be unaligned (memcpy handles it portably).
 */
XXH_FORCE_INLINE void XXH_writeLE64(void* dst, xxh_u64 v64)
{
    if (!XXH_CPU_LITTLE_ENDIAN) v64 = XXH_swap64(v64);
    XXH_memcpy(dst, &v64, sizeof(v64));
}
 
 4937#if !defined (__VMS) \ 
 4938  && (defined (__cplusplus) \ 
 4939  || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) ) 
 4940    typedef int64_t xxh_i64;
 
 4943    typedef long long xxh_i64;
 
 4970#if (XXH_VECTOR == XXH_AVX512) \ 
 4971     || (defined(XXH_DISPATCH_AVX512) && XXH_DISPATCH_AVX512 != 0) 
 4973#ifndef XXH_TARGET_AVX512 
 4974# define XXH_TARGET_AVX512   
/*
 * AVX512 stripe accumulator: folds one 64-byte stripe of input into the
 * accumulator using the XXH3 round (xor with secret, 32x32->64 multiply,
 * add swapped input).
 *
 * @param acc     64-byte-aligned accumulator (one __m512i).
 * @param input   One 64-byte stripe (may be unaligned).
 * @param secret  64 secret bytes (may be unaligned).
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_accumulate_512_avx512(void* XXH_RESTRICT acc,
                     const void* XXH_RESTRICT input,
                     const void* XXH_RESTRICT secret)
{
    __m512i* const xacc = (__m512i *) acc;
    XXH_ASSERT((((size_t)acc) & 63) == 0);
    XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));

    {
        /* data_vec    = input[0]; */
        __m512i const data_vec    = _mm512_loadu_si512   (input);
        /* key_vec     = secret[0]; */
        __m512i const key_vec     = _mm512_loadu_si512   (secret);
        /* data_key    = data_vec ^ key_vec; */
        __m512i const data_key    = _mm512_xor_si512     (data_vec, key_vec);
        /* data_key_lo = data_key >> 32; */
        __m512i const data_key_lo = _mm512_srli_epi64 (data_key, 32);
        /* product     = (data_key & 0xffffffff) * (data_key_lo & 0xffffffff); */
        __m512i const product     = _mm512_mul_epu32     (data_key, data_key_lo);
        /* xacc[0] += swap(data_vec); — keeps full input entropy in acc */
        __m512i const data_swap = _mm512_shuffle_epi32(data_vec, (_MM_PERM_ENUM)_MM_SHUFFLE(1, 0, 3, 2));
        __m512i const sum       = _mm512_add_epi64(*xacc, data_swap);
        /* xacc[0] += product; */
        *xacc = _mm512_add_epi64(product, sum);
    }
}
 
/*
 * AVX512 accumulator scramble: applied once per block to prevent the
 * accumulator from saturating (xorshift 47, xor with secret, multiply by
 * XXH_PRIME32_1).
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_scrambleAcc_avx512(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 63) == 0);
    XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));
    {   __m512i* const xacc = (__m512i*) acc;
        const __m512i prime32 = _mm512_set1_epi32((int)XXH_PRIME32_1);

        /* xacc[0] ^= (xacc[0] >> 47) */
        __m512i const acc_vec     = *xacc;
        __m512i const shifted     = _mm512_srli_epi64    (acc_vec, 47);
        /* xacc[0] ^= secret — ternarylogic 0x96 computes a^b^c in one op */
        __m512i const key_vec     = _mm512_loadu_si512   (secret);
        __m512i const data_key    = _mm512_ternarylogic_epi32(key_vec, acc_vec, shifted, 0x96 /* key_vec ^ acc_vec ^ shifted */);

        /* xacc[0] *= XXH_PRIME32_1 — full 64x32 multiply built from two
         * 32x32->64 partial products */
        __m512i const data_key_hi = _mm512_srli_epi64 (data_key, 32);
        __m512i const prod_lo     = _mm512_mul_epu32     (data_key, prime32);
        __m512i const prod_hi     = _mm512_mul_epu32     (data_key_hi, prime32);
        *xacc = _mm512_add_epi64(prod_lo, _mm512_slli_epi64(prod_hi, 32));
    }
}
 
/*
 * AVX512 custom-secret generation: derives a seeded secret by adding
 * (+seed64, -seed64) alternately to each 64-bit lane of XXH3_kSecret.
 *
 * @param customSecret  64-byte-aligned output buffer of
 *                      XXH_SECRET_DEFAULT_SIZE bytes.
 * @param seed64        The seed to blend in.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_initCustomSecret_avx512(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 63) == 0);
    XXH_STATIC_ASSERT(XXH_SEC_ALIGN == 64);
    XXH_ASSERT(((size_t)customSecret & 63) == 0);
    (void)(&XXH_writeLE64);   /* silence unused-function warnings */
    {   int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m512i);
        __m512i const seed_pos = _mm512_set1_epi64((xxh_i64)seed64);
        /* mask 0xAA negates the odd lanes: lanes alternate +seed / -seed */
        __m512i const seed     = _mm512_mask_sub_epi64(seed_pos, 0xAA, _mm512_set1_epi8(0), seed_pos);

        const __m512i* const src  = (const __m512i*) ((const void*) XXH3_kSecret);
              __m512i* const dest = (      __m512i*) customSecret;
        int i;
        XXH_ASSERT(((size_t)src & 63) == 0); /* control alignment */
        XXH_ASSERT(((size_t)dest & 63) == 0);
        for (i=0; i < nbRounds; ++i) {
            dest[i] = _mm512_add_epi64(_mm512_load_si512(src + i), seed);
        }
    }
}
 
 5073#if (XXH_VECTOR == XXH_AVX2) \ 
 5074    || (defined(XXH_DISPATCH_AVX2) && XXH_DISPATCH_AVX2 != 0) 
 5076#ifndef XXH_TARGET_AVX2 
 5077# define XXH_TARGET_AVX2   
/*
 * AVX2 stripe accumulator: same round as the AVX512 kernel, processed as
 * two 32-byte vectors per 64-byte stripe.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void
XXH3_accumulate_512_avx2( void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 31) == 0);
    {   __m256i* const xacc    =       (__m256i *) acc;
        /* Unaligned. This is mandatory as input/secret are not guaranteed
         * to be 32-byte aligned. */
        const         __m256i* const xinput  = (const __m256i *) input;
        const         __m256i* const xsecret = (const __m256i *) secret;

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
            /* data_vec    = xinput[i]; */
            __m256i const data_vec    = _mm256_loadu_si256    (xinput+i);
            /* key_vec     = xsecret[i]; */
            __m256i const key_vec     = _mm256_loadu_si256   (xsecret+i);
            /* data_key    = data_vec ^ key_vec; */
            __m256i const data_key    = _mm256_xor_si256     (data_vec, key_vec);
            /* data_key_lo = data_key >> 32; */
            __m256i const data_key_lo = _mm256_srli_epi64 (data_key, 32);
            /* product     = (data_key & 0xffffffff) * (data_key_lo & 0xffffffff); */
            __m256i const product     = _mm256_mul_epu32     (data_key, data_key_lo);
            /* xacc[i] += swap(data_vec); */
            __m256i const data_swap = _mm256_shuffle_epi32(data_vec, _MM_SHUFFLE(1, 0, 3, 2));
            __m256i const sum       = _mm256_add_epi64(xacc[i], data_swap);
            /* xacc[i] += product; */
            xacc[i] = _mm256_add_epi64(product, sum);
        }
    }
}
 
/*
 * AVX2 accumulator scramble: xorshift 47, xor with secret, multiply by
 * XXH_PRIME32_1 — two 32-byte vectors per 64-byte accumulator.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void
XXH3_scrambleAcc_avx2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 31) == 0);
    {   __m256i* const xacc = (__m256i*) acc;
        /* Unaligned: secret is not guaranteed to be 32-byte aligned. */
        const         __m256i* const xsecret = (const __m256i *) secret;
        const __m256i prime32 = _mm256_set1_epi32((int)XXH_PRIME32_1);

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
            /* xacc[i] ^= (xacc[i] >> 47) */
            __m256i const acc_vec     = xacc[i];
            __m256i const shifted     = _mm256_srli_epi64    (acc_vec, 47);
            __m256i const data_vec    = _mm256_xor_si256     (acc_vec, shifted);
            /* xacc[i] ^= xsecret; */
            __m256i const key_vec     = _mm256_loadu_si256   (xsecret+i);
            __m256i const data_key    = _mm256_xor_si256     (data_vec, key_vec);

            /* xacc[i] *= XXH_PRIME32_1 — via two 32x32->64 partials */
            __m256i const data_key_hi = _mm256_srli_epi64 (data_key, 32);
            __m256i const prod_lo     = _mm256_mul_epu32     (data_key, prime32);
            __m256i const prod_hi     = _mm256_mul_epu32     (data_key_hi, prime32);
            xacc[i] = _mm256_add_epi64(prod_lo, _mm256_slli_epi64(prod_hi, 32));
        }
    }
}
 
/*
 * AVX2 custom-secret generation: adds (+seed64, -seed64) alternately to
 * each 64-bit lane of XXH3_kSecret. The loop is manually unrolled — GCC at
 * -O2 otherwise generates poor code here.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void XXH3_initCustomSecret_avx2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 31) == 0);
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE / sizeof(__m256i)) == 6);
    XXH_STATIC_ASSERT(XXH_SEC_ALIGN <= 64);
    (void)(&XXH_writeLE64);   /* silence unused-function warnings */
    XXH_PREFETCH(customSecret);
    {   __m256i const seed = _mm256_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64, (xxh_i64)(0U - seed64), (xxh_i64)seed64);

        const __m256i* const src  = (const __m256i*) ((const void*) XXH3_kSecret);
              __m256i*       dest = (      __m256i*) customSecret;

#       if defined(__GNUC__) || defined(__clang__)
        /* Opaque guard stops the compiler from merging/reordering the
         * unrolled stores below. */
        XXH_COMPILER_GUARD(dest);
#       endif
        XXH_ASSERT(((size_t)src & 31) == 0); /* control alignment */
        XXH_ASSERT(((size_t)dest & 31) == 0);

        dest[0] = _mm256_add_epi64(_mm256_load_si256(src+0), seed);
        dest[1] = _mm256_add_epi64(_mm256_load_si256(src+1), seed);
        dest[2] = _mm256_add_epi64(_mm256_load_si256(src+2), seed);
        dest[3] = _mm256_add_epi64(_mm256_load_si256(src+3), seed);
        dest[4] = _mm256_add_epi64(_mm256_load_si256(src+4), seed);
        dest[5] = _mm256_add_epi64(_mm256_load_si256(src+5), seed);
    }
}
 
 5180#if (XXH_VECTOR == XXH_SSE2) || defined(XXH_X86DISPATCH) 
 5182#ifndef XXH_TARGET_SSE2 
 5183# define XXH_TARGET_SSE2   
/*
 * SSE2 stripe accumulator: same round as the wider kernels, processed as
 * four 16-byte vectors per 64-byte stripe. SSE2 lacks a 64-bit shift-based
 * lane swap usable here, so the high half is obtained with a shuffle.
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void
XXH3_accumulate_512_sse2( void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    /* SSE2 is just a half-scale AVX2 version. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {   __m128i* const xacc    =       (__m128i *) acc;
        /* Unaligned: input/secret are not guaranteed to be 16-byte aligned. */
        const         __m128i* const xinput  = (const __m128i *) input;
        const         __m128i* const xsecret = (const __m128i *) secret;

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
            /* data_vec    = xinput[i]; */
            __m128i const data_vec    = _mm_loadu_si128   (xinput+i);
            /* key_vec     = xsecret[i]; */
            __m128i const key_vec     = _mm_loadu_si128   (xsecret+i);
            /* data_key    = data_vec ^ key_vec; */
            __m128i const data_key    = _mm_xor_si128     (data_vec, key_vec);
            /* data_key_lo = data_key >> 32 (via shuffle, no 64-bit srl
             * needed); */
            __m128i const data_key_lo = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
            /* product     = (data_key & 0xffffffff) * (data_key_lo & 0xffffffff); */
            __m128i const product     = _mm_mul_epu32     (data_key, data_key_lo);
            /* xacc[i] += swap(data_vec); */
            __m128i const data_swap = _mm_shuffle_epi32(data_vec, _MM_SHUFFLE(1,0,3,2));
            __m128i const sum       = _mm_add_epi64(xacc[i], data_swap);
            /* xacc[i] += product; */
            xacc[i] = _mm_add_epi64(product, sum);
        }
    }
}
 
/*
 * SSE2 accumulator scramble: xorshift 47, xor with secret, multiply by
 * XXH_PRIME32_1 — four 16-byte vectors per 64-byte accumulator.
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void
XXH3_scrambleAcc_sse2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {   __m128i* const xacc = (__m128i*) acc;
        /* Unaligned: secret is not guaranteed to be 16-byte aligned. */
        const         __m128i* const xsecret = (const __m128i *) secret;
        const __m128i prime32 = _mm_set1_epi32((int)XXH_PRIME32_1);

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
            /* xacc[i] ^= (xacc[i] >> 47) */
            __m128i const acc_vec     = xacc[i];
            __m128i const shifted     = _mm_srli_epi64    (acc_vec, 47);
            __m128i const data_vec    = _mm_xor_si128     (acc_vec, shifted);
            /* xacc[i] ^= xsecret[i]; */
            __m128i const key_vec     = _mm_loadu_si128   (xsecret+i);
            __m128i const data_key    = _mm_xor_si128     (data_vec, key_vec);

            /* xacc[i] *= XXH_PRIME32_1 — high half extracted via shuffle */
            __m128i const data_key_hi = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
            __m128i const prod_lo     = _mm_mul_epu32     (data_key, prime32);
            __m128i const prod_hi     = _mm_mul_epu32     (data_key_hi, prime32);
            xacc[i] = _mm_add_epi64(prod_lo, _mm_slli_epi64(prod_hi, 32));
        }
    }
}
 
 5251XXH_FORCE_INLINE 
XXH_TARGET_SSE2 void XXH3_initCustomSecret_sse2(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
 
 5253    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
 
 5254    (void)(&XXH_writeLE64);
 
 5255    {   
int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 
sizeof(__m128i);
 
 5257#       if defined(_MSC_VER) && defined(_M_IX86) && _MSC_VER < 1900 
 5259        XXH_ALIGN(16) const xxh_i64 seed64x2[2] = { (xxh_i64)seed64, (xxh_i64)(0U - seed64) };
 
 5260        __m128i 
const seed = _mm_load_si128((__m128i 
const*)seed64x2);
 
 5262        __m128i 
const seed = _mm_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64);
 
 5266        const void* 
const src16 = XXH3_kSecret;
 
 5267        __m128i* dst16 = (__m128i*) customSecret;
 
 5268#       if defined(__GNUC__) || defined(__clang__) 
 5274        XXH_COMPILER_GUARD(dst16);
 
 5276        XXH_ASSERT(((
size_t)src16 & 15) == 0); 
 
 5277        XXH_ASSERT(((
size_t)dst16 & 15) == 0);
 
 5279        for (i=0; i < nbRounds; ++i) {
 
 5280            dst16[i] = _mm_add_epi64(_mm_load_si128((
const __m128i *)src16+i), seed);
 
 /* NOTE(review): lossy extraction -- loop headers, #else/#endif branches,
  * closing braces and several declarations (i, kPrimeLo) are missing in
  * this chunk; verify against upstream xxhash.h. */
 5286#if (XXH_VECTOR == XXH_NEON) 
 /* Forward declarations: the NEON path falls back to the scalar round
  * for lanes beyond XXH3_NEON_LANES. */
 5289XXH_FORCE_INLINE 
void 
 5290XXH3_scalarRound(
void* XXH_RESTRICT acc, 
void const* XXH_RESTRICT input,

 5291                 void const* XXH_RESTRICT secret, 
size_t lane);

 5293XXH_FORCE_INLINE 
void 
 5294XXH3_scalarScrambleRound(
void* XXH_RESTRICT acc,

 5295                         void const* XXH_RESTRICT secret, 
size_t lane);

 /* NEON accumulate step: for each 16-byte stripe segment,
  * acc += swap(data) and acc += (u32)(data^key) * (u32)((data^key)>>32).
  * Two unrolled variants appear below: a paired 2-vector loop (uses
  * vuzpq_u32 + vmlal low/high) and a single-vector tail loop. */
 5321XXH_FORCE_INLINE 
void 
 5322XXH3_accumulate_512_neon( 
void* XXH_RESTRICT acc,

 5323                    const void* XXH_RESTRICT input,

 5324                    const void* XXH_RESTRICT secret)

 5326    XXH_ASSERT((((
size_t)acc) & 15) == 0);

 /* aliasing-safe vector type: the accumulator is also read as xxh_u64[] */
 5329        xxh_aliasing_uint64x2_t* 
const xacc = (xxh_aliasing_uint64x2_t*) acc;

 5331        uint8_t 
const* xinput = (
const uint8_t *) input;

 5332        uint8_t 
const* xsecret  = (
const uint8_t *) secret;

 5335#ifdef __wasm_simd128__ 
 5353        XXH_COMPILER_GUARD(xsecret);

 /* lanes not covered by NEON vectors are handled by the scalar round */
 5357            XXH3_scalarRound(acc, input, secret, i);

 /* paired (2x unrolled) NEON body: process stripes i and i+1 together */
 5363            uint64x2_t data_vec_1 = XXH_vld1q_u64(xinput  + (i * 16));

 5364            uint64x2_t data_vec_2 = XXH_vld1q_u64(xinput  + ((i+1) * 16));

 5366            uint64x2_t key_vec_1  = XXH_vld1q_u64(xsecret + (i * 16));

 5367            uint64x2_t key_vec_2  = XXH_vld1q_u64(xsecret + ((i+1) * 16));

 /* vext by 1 lane == swap the two 64-bit halves */
 5369            uint64x2_t data_swap_1 = vextq_u64(data_vec_1, data_vec_1, 1);

 5370            uint64x2_t data_swap_2 = vextq_u64(data_vec_2, data_vec_2, 1);

 5372            uint64x2_t data_key_1 = veorq_u64(data_vec_1, key_vec_1);

 5373            uint64x2_t data_key_2 = veorq_u64(data_vec_2, key_vec_2);

 /* de-interleave so low 32-bit halves and high 32-bit halves end up in
  * separate vectors, feeding the widening multiply-accumulate */
 5388            uint32x4x2_t unzipped = vuzpq_u32(

 5389                vreinterpretq_u32_u64(data_key_1),

 5390                vreinterpretq_u32_u64(data_key_2)

 5393            uint32x4_t data_key_lo = unzipped.val[0];

 5395            uint32x4_t data_key_hi = unzipped.val[1];

 /* sum = data_swap + (u64)lo * (u64)hi, per stripe */
 5403            uint64x2_t sum_1 = XXH_vmlal_low_u32(data_swap_1, data_key_lo, data_key_hi);

 5404            uint64x2_t sum_2 = XXH_vmlal_high_u32(data_swap_2, data_key_lo, data_key_hi);

 /* guards against a clang reassociation pessimization on NEON */
 5417            XXH_COMPILER_GUARD_CLANG_NEON(sum_1);

 5418            XXH_COMPILER_GUARD_CLANG_NEON(sum_2);

 5420            xacc[i]   = vaddq_u64(xacc[i], sum_1);

 5421            xacc[i+1] = vaddq_u64(xacc[i+1], sum_2);

 /* single-vector tail: same math for one remaining stripe */
 5426            uint64x2_t data_vec = XXH_vld1q_u64(xinput  + (i * 16));

 5428            uint64x2_t key_vec  = XXH_vld1q_u64(xsecret + (i * 16));

 5430            uint64x2_t data_swap = vextq_u64(data_vec, data_vec, 1);

 5432            uint64x2_t data_key = veorq_u64(data_vec, key_vec);

 /* narrow to low halves / shift-narrow to high halves */
 5435            uint32x2_t data_key_lo = vmovn_u64(data_key);

 5437            uint32x2_t data_key_hi = vshrn_n_u64(data_key, 32);

 5439            uint64x2_t sum = vmlal_u32(data_swap, data_key_lo, data_key_hi);

 5441            XXH_COMPILER_GUARD_CLANG_NEON(sum);

 5443            xacc[i] = vaddq_u64 (xacc[i], sum);

 /* stamp out the nbStripes-loop wrapper for the NEON kernel */
 5447XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(neon)

 /* NEON scramble: acc = (acc ^ (acc>>47) ^ secret) * PRIME32_1,
  * with the multiply split into hi/lo 32-bit partial products. */
 5449XXH_FORCE_INLINE 
void 
 5450XXH3_scrambleAcc_neon(
void* XXH_RESTRICT acc, 
const void* XXH_RESTRICT secret)

 5452    XXH_ASSERT((((
size_t)acc) & 15) == 0);

 5454    {   xxh_aliasing_uint64x2_t* xacc       = (xxh_aliasing_uint64x2_t*) acc;

 5455        uint8_t 
const* xsecret = (uint8_t 
const*) secret;

 5459#ifndef __wasm_simd128__ 
 /* PRIME32_1 broadcast into the high 32 bits of each 64-bit lane */
 5463        uint32x4_t 
const kPrimeHi = vreinterpretq_u32_u64(vdupq_n_u64((xxh_u64)
XXH_PRIME32_1 << 32));

 /* lanes beyond the NEON-covered range use the scalar scramble */
 5468            XXH3_scalarScrambleRound(acc, secret, i);

 5472            uint64x2_t acc_vec  = xacc[i];

 5473            uint64x2_t shifted  = vshrq_n_u64(acc_vec, 47);

 5474            uint64x2_t data_vec = veorq_u64(acc_vec, shifted);

 5477            uint64x2_t key_vec  = XXH_vld1q_u64(xsecret + (i * 16));

 5478            uint64x2_t data_key = veorq_u64(data_vec, key_vec);

 5480#ifdef __wasm_simd128__ 
 /* hi product lands pre-shifted in the top 32 bits; then accumulate the
  * widening lo product on top (NOTE(review): kPrimeLo declaration was
  * dropped by the extraction) */
 5495            uint32x4_t prod_hi = vmulq_u32 (vreinterpretq_u32_u64(data_key), kPrimeHi);

 5497            uint32x2_t data_key_lo = vmovn_u64(data_key);

 5499            xacc[i] = vmlal_u32(vreinterpretq_u64_u32(prod_hi), data_key_lo, kPrimeLo);
 
 /* NOTE(review): lossy extraction -- declarations (i, prime, acc writes),
  * #if branches and closing braces are missing; verify against upstream. */
 5506#if (XXH_VECTOR == XXH_VSX) 
 /* POWER VSX accumulate: acc += swap(data) + mulo((u32)(data^key), rot32). */
 5508XXH_FORCE_INLINE 
void 
 5509XXH3_accumulate_512_vsx(  
void* XXH_RESTRICT acc,

 5510                    const void* XXH_RESTRICT input,

 5511                    const void* XXH_RESTRICT secret)

 /* aliasing-safe vector view of the accumulator */
 5514    xxh_aliasing_u64x2* 
const xacc = (xxh_aliasing_u64x2*) acc;

 5515    xxh_u8 
const* 
const xinput   = (xxh_u8 
const*) input;   

 5516    xxh_u8 
const* 
const xsecret  = (xxh_u8 
const*) secret;    

 /* rotate count used to swap 32-bit halves via vec_rl */
 5517    xxh_u64x2 
const v32 = { 32, 32 };

 5519    for (i = 0; i < XXH_STRIPE_LEN / 
sizeof(xxh_u64x2); i++) {

 5521        xxh_u64x2 
const data_vec = XXH_vec_loadu(xinput + 16*i);

 5523        xxh_u64x2 
const key_vec  = XXH_vec_loadu(xsecret + 16*i);

 5524        xxh_u64x2 
const data_key = data_vec ^ key_vec;

 /* rotate-left by 32 swaps each lane's 32-bit halves */
 5526        xxh_u32x4 
const shuffled = (xxh_u32x4)vec_rl(data_key, v32);

 /* widening 32x32->64 multiply of odd elements */
 5528        xxh_u64x2 
const product  = XXH_vec_mulo((xxh_u32x4)data_key, shuffled);

 5530        xxh_u64x2 acc_vec        = xacc[i];

 /* swap the 64-bit halves of data_vec and add; the two intrinsics below
  * are the same operation under different compiler spellings
  * (NOTE(review): the #if/#else selecting between them was dropped) */
 5535        acc_vec += vec_permi(data_vec, data_vec, 2);

 5537        acc_vec += vec_xxpermdi(data_vec, data_vec, 2);

 5542XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(vsx)

 /* VSX scramble: acc = (acc ^ (acc>>47) ^ secret) * PRIME32_1,
  * assembled from even/odd 32-bit partial products. */
 5544XXH_FORCE_INLINE 
void 
 5545XXH3_scrambleAcc_vsx(
void* XXH_RESTRICT acc, 
const void* XXH_RESTRICT secret)

 5547    XXH_ASSERT((((
size_t)acc) & 15) == 0);

 5549    {   xxh_aliasing_u64x2* 
const xacc = (xxh_aliasing_u64x2*) acc;

 5550        const xxh_u8* 
const xsecret = (
const xxh_u8*) secret;

 5552        xxh_u64x2 
const v32  = { 32, 32 };

 5553        xxh_u64x2 
const v47 = { 47, 47 };

 5556        for (i = 0; i < XXH_STRIPE_LEN / 
sizeof(xxh_u64x2); i++) {

 /* xorshift by 47 */
 5558            xxh_u64x2 
const acc_vec  = xacc[i];

 5559            xxh_u64x2 
const data_vec = acc_vec ^ (acc_vec >> v47);

 5562            xxh_u64x2 
const key_vec  = XXH_vec_loadu(xsecret + 16*i);

 5563            xxh_u64x2 
const data_key = data_vec ^ key_vec;

 /* even/odd 32x32->64 products against the broadcast prime
  * (NOTE(review): `prime` declaration missing from this extraction) */
 5567            xxh_u64x2 
const prod_even  = XXH_vec_mule((xxh_u32x4)data_key, prime);

 5569            xxh_u64x2 
const prod_odd  = XXH_vec_mulo((xxh_u32x4)data_key, prime);

 5570            xacc[i] = prod_odd + (prod_even << v32);
 
 /* NOTE(review): lossy extraction -- the ACCRND()-style accumulate bodies,
  * do{ openers, and closing braces are missing from the SVE section;
  * verify against upstream xxhash.h. */
 5576#if (XXH_VECTOR == XXH_SVE) 
 /* SVE accumulate, specialized per hardware vector length:
  * >=8 x 64-bit lanes -> one full-width pass; 2 lanes -> four partial
  * vectors; otherwise 4 lanes -> two partial vectors. */
 5578XXH_FORCE_INLINE 
void 
 5579XXH3_accumulate_512_sve( 
void* XXH_RESTRICT acc,

 5580                   const void* XXH_RESTRICT input,

 5581                   const void* XXH_RESTRICT secret)

 5583    uint64_t *xacc = (uint64_t *)acc;

 /* double cast through void* silences alignment-change warnings */
 5584    const uint64_t *xinput = (
const uint64_t *)(
const void *)input;

 5585    const uint64_t *xsecret = (
const uint64_t *)(
const void *)secret;

 /* index vector 0,1,2,... XOR 1 -> 1,0,3,2,... : pairwise lane swap */
 5586    svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);

 5587    uint64_t element_count = svcntd();

 5588    if (element_count >= 8) {

 5589        svbool_t mask = svptrue_pat_b64(SV_VL8);

 5590        svuint64_t vacc = svld1_u64(mask, xacc);

 5592        svst1_u64(mask, xacc, vacc);

 5593    } 
else if (element_count == 2) {   

 5594        svbool_t mask = svptrue_pat_b64(SV_VL2);

 5595        svuint64_t acc0 = svld1_u64(mask, xacc + 0);

 5596        svuint64_t acc1 = svld1_u64(mask, xacc + 2);

 5597        svuint64_t acc2 = svld1_u64(mask, xacc + 4);

 5598        svuint64_t acc3 = svld1_u64(mask, xacc + 6);

 5603        svst1_u64(mask, xacc + 0, acc0);

 5604        svst1_u64(mask, xacc + 2, acc1);

 5605        svst1_u64(mask, xacc + 4, acc2);

 5606        svst1_u64(mask, xacc + 6, acc3);

 /* fallthrough case: 4-lane vectors, two halves of the accumulator */
 5608        svbool_t mask = svptrue_pat_b64(SV_VL4);

 5609        svuint64_t acc0 = svld1_u64(mask, xacc + 0);

 5610        svuint64_t acc1 = svld1_u64(mask, xacc + 4);

 5613        svst1_u64(mask, xacc + 0, acc0);

 5614        svst1_u64(mask, xacc + 4, acc1);

 /* Multi-stripe SVE accumulate: keeps accumulators in registers across
  * the whole nbStripes loop and prefetches the next input block. Same
  * three vector-length specializations as above. */
 5618XXH_FORCE_INLINE 
void 
 5619XXH3_accumulate_sve(xxh_u64* XXH_RESTRICT acc,

 5620               const xxh_u8* XXH_RESTRICT input,

 5621               const xxh_u8* XXH_RESTRICT secret,

 5624    if (nbStripes != 0) {

 5625        uint64_t *xacc = (uint64_t *)acc;

 5626        const uint64_t *xinput = (
const uint64_t *)(
const void *)input;

 5627        const uint64_t *xsecret = (
const uint64_t *)(
const void *)secret;

 5628        svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);

 5629        uint64_t element_count = svcntd();

 5630        if (element_count >= 8) {

 5631            svbool_t mask = svptrue_pat_b64(SV_VL8);

 5632            svuint64_t vacc = svld1_u64(mask, xacc + 0);

 /* prefetch 128 64-bit words (one stripe block) ahead, streaming */
 5635                svprfd(mask, xinput + 128, SV_PLDL1STRM);

 5640           } 
while (nbStripes != 0);

 5642           svst1_u64(mask, xacc + 0, vacc);

 5643        } 
else if (element_count == 2) { 

 5644            svbool_t mask = svptrue_pat_b64(SV_VL2);

 5645            svuint64_t acc0 = svld1_u64(mask, xacc + 0);

 5646            svuint64_t acc1 = svld1_u64(mask, xacc + 2);

 5647            svuint64_t acc2 = svld1_u64(mask, xacc + 4);

 5648            svuint64_t acc3 = svld1_u64(mask, xacc + 6);

 5650                svprfd(mask, xinput + 128, SV_PLDL1STRM);

 5658           } 
while (nbStripes != 0);

 5660           svst1_u64(mask, xacc + 0, acc0);

 5661           svst1_u64(mask, xacc + 2, acc1);

 5662           svst1_u64(mask, xacc + 4, acc2);

 5663           svst1_u64(mask, xacc + 6, acc3);

 5665            svbool_t mask = svptrue_pat_b64(SV_VL4);

 5666            svuint64_t acc0 = svld1_u64(mask, xacc + 0);

 5667            svuint64_t acc1 = svld1_u64(mask, xacc + 4);

 5669                svprfd(mask, xinput + 128, SV_PLDL1STRM);

 5675           } 
while (nbStripes != 0);

 5677           svst1_u64(mask, xacc + 0, acc0);

 5678           svst1_u64(mask, xacc + 4, acc1);
 
 /* NOTE(review): lossy extraction -- prime32 declarations, closing braces
  * and #endif lines are missing in the LSX/LASX sections; verify against
  * upstream xxhash.h. */
 5685#if (XXH_VECTOR == XXH_LSX) 
 /* pack a 4x2-bit shuffle selector, same layout as x86 _MM_SHUFFLE */
 5686#define _LSX_SHUFFLE(z, y, x, w) (((z) << 6) | ((y) << 4) | ((x) << 2) | (w)) 
 /* LoongArch LSX (128-bit) accumulate: mirrors the SSE2 kernel:
  * acc += swap64(data) + (u32)(data^key) * (u32)((data^key)>>32). */
 5688XXH_FORCE_INLINE 
void 
 5689XXH3_accumulate_512_lsx( 
void* XXH_RESTRICT acc,

 5690                    const void* XXH_RESTRICT input,

 5691                    const void* XXH_RESTRICT secret)

 5693    XXH_ASSERT((((
size_t)acc) & 15) == 0);

 5695        __m128i* 
const xacc    =       (__m128i *) acc;

 5696        const __m128i* 
const xinput  = (
const __m128i *) input;

 5697        const __m128i* 
const xsecret = (
const __m128i *) secret;

 5699        for (
size_t i = 0; i < XXH_STRIPE_LEN / 
sizeof(__m128i); i++) {

 5701            __m128i 
const data_vec = __lsx_vld(xinput + i, 0);

 5703            __m128i 
const key_vec = __lsx_vld(xsecret + i, 0);

 5705            __m128i 
const data_key = __lsx_vxor_v(data_vec, key_vec);

 /* high 32 bits brought down for the widening even-lane multiply */
 5707            __m128i 
const data_key_lo = __lsx_vsrli_d(data_key, 32);

 5710            __m128i 
const product = __lsx_vmulwev_d_wu(data_key, data_key_lo);

 /* swap the two 64-bit halves of data_vec */
 5712            __m128i 
const data_swap = __lsx_vshuf4i_w(data_vec, _LSX_SHUFFLE(1, 0, 3, 2));

 5713            __m128i 
const sum = __lsx_vadd_d(xacc[i], data_swap);

 5715            xacc[i] = __lsx_vadd_d(product, sum);

 5719XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(lsx)

 /* LSX scramble: acc = (acc ^ (acc>>47) ^ secret) * PRIME32_1, using the
  * native 64-bit multiply (NOTE(review): prime32 declaration missing). */
 5721XXH_FORCE_INLINE 
void 
 5722XXH3_scrambleAcc_lsx(
void* XXH_RESTRICT acc, 
const void* XXH_RESTRICT secret)

 5724    XXH_ASSERT((((
size_t)acc) & 15) == 0);

 5726        __m128i* 
const xacc = (__m128i*) acc;

 5727        const __m128i* 
const xsecret = (
const __m128i *) secret;

 5730        for (
size_t i = 0; i < XXH_STRIPE_LEN / 
sizeof(__m128i); i++) {

 5732            __m128i 
const acc_vec = xacc[i];

 5733            __m128i 
const shifted = __lsx_vsrli_d(acc_vec, 47);

 5734            __m128i 
const data_vec = __lsx_vxor_v(acc_vec, shifted);

 5736            __m128i 
const key_vec = __lsx_vld(xsecret + i, 0);

 5737            __m128i 
const data_key = __lsx_vxor_v(data_vec, key_vec);

 5740            xacc[i] = __lsx_vmul_d(data_key, prime32);

 5747#if (XXH_VECTOR == XXH_LASX) 
 5748#define _LASX_SHUFFLE(z, y, x, w) (((z) << 6) | ((y) << 4) | ((x) << 2) | (w)) 
 /* LoongArch LASX (256-bit) accumulate: same math as LSX at twice the
  * vector width, analogous to the AVX2 kernel. */
 5750XXH_FORCE_INLINE 
void 
 5751XXH3_accumulate_512_lasx( 
void* XXH_RESTRICT acc,

 5752                    const void* XXH_RESTRICT input,

 5753                    const void* XXH_RESTRICT secret)

 /* 256-bit vectors require 32-byte alignment */
 5755    XXH_ASSERT((((
size_t)acc) & 31) == 0);

 5757        __m256i* 
const xacc    =       (__m256i *) acc;

 5758        const __m256i* 
const xinput  = (
const __m256i *) input;

 5759        const __m256i* 
const xsecret = (
const __m256i *) secret;

 5761        for (
size_t i = 0; i < XXH_STRIPE_LEN / 
sizeof(__m256i); i++) {

 5763            __m256i 
const data_vec = __lasx_xvld(xinput + i, 0);

 5765            __m256i 
const key_vec = __lasx_xvld(xsecret + i, 0);

 5767            __m256i 
const data_key = __lasx_xvxor_v(data_vec, key_vec);

 5769            __m256i 
const data_key_lo = __lasx_xvsrli_d(data_key, 32);

 5772            __m256i 
const product = __lasx_xvmulwev_d_wu(data_key, data_key_lo);

 5774            __m256i 
const data_swap = __lasx_xvshuf4i_w(data_vec, _LASX_SHUFFLE(1, 0, 3, 2));

 5775            __m256i 
const sum = __lasx_xvadd_d(xacc[i], data_swap);

 5777            xacc[i] = __lasx_xvadd_d(product, sum);

 5781XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(lasx)

 /* LASX scramble: same xorshift-xor-multiply as the LSX variant */
 5783XXH_FORCE_INLINE 
void 
 5784XXH3_scrambleAcc_lasx(
void* XXH_RESTRICT acc, 
const void* XXH_RESTRICT secret)

 5786    XXH_ASSERT((((
size_t)acc) & 31) == 0);

 5788        __m256i* 
const xacc = (__m256i*) acc;

 5789        const __m256i* 
const xsecret = (
const __m256i *) secret;

 /* broadcast PRIME32_1 to every 64-bit lane */
 5790        const __m256i prime32 = __lasx_xvreplgr2vr_d(
XXH_PRIME32_1);

 5792        for (
size_t i = 0; i < XXH_STRIPE_LEN / 
sizeof(__m256i); i++) {

 5794            __m256i 
const acc_vec = xacc[i];

 5795            __m256i 
const shifted = __lasx_xvsrli_d(acc_vec, 47);

 5796            __m256i 
const data_vec = __lasx_xvxor_v(acc_vec, shifted);

 5798            __m256i 
const key_vec = __lasx_xvld(xsecret + i, 0);

 5799            __m256i 
const data_key = __lasx_xvxor_v(data_vec, key_vec);

 5802            xacc[i] = __lasx_xvmul_d(data_key, prime32);
 
 /* NOTE(review): lossy extraction -- #else/#endif pairs, the `ret`
  * declaration, `lane` parameters in some signatures, loop braces and
  * function closers are missing; verify against upstream xxhash.h. */
 5811#if defined(__aarch64__) && (defined(__GNUC__) || defined(__clang__)) 
 /* 32x32->64 multiply-accumulate. On aarch64 GCC/clang, force the single
  * UMADDL instruction via inline asm (compilers don't always emit it). */
 5826XXH_FORCE_INLINE xxh_u64

 5827XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)

 5831    __asm__(
"umaddl %x0, %w1, %w2, %x3" : 
"=r" (ret) : 
"r" (lhs), 
"r" (rhs), 
"r" (acc));

 /* portable fallback: truncate both operands to 32 bits, widen-multiply,
  * then add the accumulator */
 5835XXH_FORCE_INLINE xxh_u64

 5836XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)

 5838    return XXH_mult32to64((xxh_u32)lhs, (xxh_u32)rhs) + acc;

 /* Scalar accumulate for one lane:
  * acc[lane^1] += data;  acc[lane] += (u32)(data^key) * (u32)((data^key)>>32) */
 5849XXH_FORCE_INLINE 
void 
 5850XXH3_scalarRound(
void* XXH_RESTRICT acc,

 5851                 void const* XXH_RESTRICT input,

 5852                 void const* XXH_RESTRICT secret,

 5855    xxh_u64* xacc = (xxh_u64*) acc;

 5856    xxh_u8 
const* xinput  = (xxh_u8 
const*) input;

 5857    xxh_u8 
const* xsecret = (xxh_u8 
const*) secret;

 5858    XXH_ASSERT(lane < XXH_ACC_NB);

 5861        xxh_u64 
const data_val = XXH_readLE64(xinput + lane * 8);

 5862        xxh_u64 
const data_key = data_val ^ XXH_readLE64(xsecret + lane * 8);

 /* cross-lane swap: mirrors the vector kernels' 64-bit half swap */
 5863        xacc[lane ^ 1] += data_val; 

 5864        xacc[lane] = XXH_mult32to64_add64(data_key , data_key >> 32, xacc[lane]);

 /* Scalar full-stripe accumulate: one scalarRound per lane.
  * The pragma unrolls on 32-bit ARM GCC, where it measurably helps. */
 5872XXH_FORCE_INLINE 
void 
 5873XXH3_accumulate_512_scalar(
void* XXH_RESTRICT acc,

 5874                     const void* XXH_RESTRICT input,

 5875                     const void* XXH_RESTRICT secret)

 5879#if defined(__GNUC__) && !defined(__clang__) \ 
 5880  && (defined(__arm__) || defined(__thumb2__)) \ 
 5881  && defined(__ARM_FEATURE_UNALIGNED)  \ 
 5882  && XXH_SIZE_OPT <= 0 
 5883#  pragma GCC unroll 8 
 5885    for (i=0; i < XXH_ACC_NB; i++) {

 5886        XXH3_scalarRound(acc, input, secret, i);

 5889XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(scalar)

 /* Scalar scramble for one lane:
  * acc = (acc ^ (acc>>47) ^ key) * PRIME32_1
  * (NOTE(review): the xor-with-key and multiply lines were dropped here) */
 5898XXH_FORCE_INLINE 
void 
 5899XXH3_scalarScrambleRound(
void* XXH_RESTRICT acc,

 5900                         void const* XXH_RESTRICT secret,

 5903    xxh_u64* 
const xacc = (xxh_u64*) acc;   

 5904    const xxh_u8* 
const xsecret = (
const xxh_u8*) secret;   

 5906    XXH_ASSERT(lane < XXH_ACC_NB);

 5908        xxh_u64 
const key64 = XXH_readLE64(xsecret + lane * 8);

 5909        xxh_u64 acc64 = xacc[lane];

 5910        acc64 = XXH_xorshift64(acc64, 47);

 /* scalar scramble over all lanes */
 5921XXH_FORCE_INLINE 
void 
 5922XXH3_scrambleAcc_scalar(
void* XXH_RESTRICT acc, 
const void* XXH_RESTRICT secret)

 5925    for (i=0; i < XXH_ACC_NB; i++) {

 5926        XXH3_scalarScrambleRound(acc, secret, i);

 /* Scalar custom-secret derivation: per 16-byte pair,
  * lo = kSecret+seed64, hi = kSecret-seed64, written little-endian. */
 5930XXH_FORCE_INLINE 
void 
 5931XXH3_initCustomSecret_scalar(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)

 5938    const xxh_u8* kSecretPtr = XXH3_kSecret;

 5939    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);

 /* guard: stops aarch64 GCC from constant-propagating kSecretPtr, which
  * pessimizes codegen here */
 5941#if defined(__GNUC__) && defined(__aarch64__) 
 5974    XXH_COMPILER_GUARD(kSecretPtr);

 5976    {   
int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 16;

 5978        for (i=0; i < nbRounds; i++) {

 5985            xxh_u64 lo = XXH_readLE64(kSecretPtr + 16*i)     + seed64;

 5986            xxh_u64 hi = XXH_readLE64(kSecretPtr + 16*i + 8) - seed64;

 5987            XXH_writeLE64((xxh_u8*)customSecret + 16*i,     lo);

 5988            XXH_writeLE64((xxh_u8*)customSecret + 16*i + 8, hi);
 
 /* Function-pointer types for the long-hash kernels, letting the dispatch
  * below and seeded variants select accumulate/scramble/initSecret
  * implementations at compile time. */
 5993typedef void (*XXH3_f_accumulate)(xxh_u64* XXH_RESTRICT, 
const xxh_u8* XXH_RESTRICT, 
const xxh_u8* XXH_RESTRICT, size_t);

 5994typedef void (*XXH3_f_scrambleAcc)(
void* XXH_RESTRICT, 
const void*);

 5995typedef void (*XXH3_f_initCustomSecret)(
void* XXH_RESTRICT, xxh_u64);

 /* Static dispatch: bind the generic kernel names to the best
  * implementation for the configured XXH_VECTOR target. Targets without
  * a vector initCustomSecret fall back to the scalar one. */
 5998#if (XXH_VECTOR == XXH_AVX512) 
 6000#define XXH3_accumulate_512 XXH3_accumulate_512_avx512 
 6001#define XXH3_accumulate     XXH3_accumulate_avx512 
 6002#define XXH3_scrambleAcc    XXH3_scrambleAcc_avx512 
 6003#define XXH3_initCustomSecret XXH3_initCustomSecret_avx512 
 6005#elif (XXH_VECTOR == XXH_AVX2) 
 6007#define XXH3_accumulate_512 XXH3_accumulate_512_avx2 
 6008#define XXH3_accumulate     XXH3_accumulate_avx2 
 6009#define XXH3_scrambleAcc    XXH3_scrambleAcc_avx2 
 6010#define XXH3_initCustomSecret XXH3_initCustomSecret_avx2 
 6012#elif (XXH_VECTOR == XXH_SSE2) 
 6014#define XXH3_accumulate_512 XXH3_accumulate_512_sse2 
 6015#define XXH3_accumulate     XXH3_accumulate_sse2 
 6016#define XXH3_scrambleAcc    XXH3_scrambleAcc_sse2 
 6017#define XXH3_initCustomSecret XXH3_initCustomSecret_sse2 
 6019#elif (XXH_VECTOR == XXH_NEON) 
 6021#define XXH3_accumulate_512 XXH3_accumulate_512_neon 
 6022#define XXH3_accumulate     XXH3_accumulate_neon 
 6023#define XXH3_scrambleAcc    XXH3_scrambleAcc_neon 
 6024#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 6026#elif (XXH_VECTOR == XXH_VSX) 
 6028#define XXH3_accumulate_512 XXH3_accumulate_512_vsx 
 6029#define XXH3_accumulate     XXH3_accumulate_vsx 
 6030#define XXH3_scrambleAcc    XXH3_scrambleAcc_vsx 
 6031#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 6033#elif (XXH_VECTOR == XXH_SVE) 
 6034#define XXH3_accumulate_512 XXH3_accumulate_512_sve 
 6035#define XXH3_accumulate     XXH3_accumulate_sve 
 6036#define XXH3_scrambleAcc    XXH3_scrambleAcc_scalar 
 6037#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 6039#elif (XXH_VECTOR == XXH_LASX) 
 6040#define XXH3_accumulate_512 XXH3_accumulate_512_lasx 
 6041#define XXH3_accumulate     XXH3_accumulate_lasx 
 6042#define XXH3_scrambleAcc    XXH3_scrambleAcc_lasx 
 6043#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 6045#elif (XXH_VECTOR == XXH_LSX) 
 6046#define XXH3_accumulate_512 XXH3_accumulate_512_lsx 
 6047#define XXH3_accumulate     XXH3_accumulate_lsx 
 6048#define XXH3_scrambleAcc    XXH3_scrambleAcc_lsx 
 6049#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 /* scalar fallback (NOTE(review): the #else line was dropped by the
  * extraction) */
 6053#define XXH3_accumulate_512 XXH3_accumulate_512_scalar 
 6054#define XXH3_accumulate     XXH3_accumulate_scalar 
 6055#define XXH3_scrambleAcc    XXH3_scrambleAcc_scalar 
 6056#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 /* size-optimized builds always take the small scalar secret init */
 6060#if XXH_SIZE_OPT >= 1  
 6061#  undef XXH3_initCustomSecret 
 6062#  define XXH3_initCustomSecret XXH3_initCustomSecret_scalar 
 /* Long-input core loop: consume full blocks (accumulate nbStripesPerBlock
  * stripes, then scramble), then the partial last block, then one final
  * overlapping stripe ending exactly at input+len. */
 6065XXH_FORCE_INLINE 
void 
 6066XXH3_hashLong_internal_loop(xxh_u64* XXH_RESTRICT acc,

 6067                      const xxh_u8* XXH_RESTRICT input, 
size_t len,

 6068                      const xxh_u8* XXH_RESTRICT secret, 
size_t secretSize,

 6069                            XXH3_f_accumulate f_acc,

 6070                            XXH3_f_scrambleAcc f_scramble)

 /* each stripe advances 8 bytes into the secret; a block ends when the
  * secret would run out */
 6072    size_t const nbStripesPerBlock = (secretSize - XXH_STRIPE_LEN) / XXH_SECRET_CONSUME_RATE;

 6073    size_t const block_len = XXH_STRIPE_LEN * nbStripesPerBlock;

 /* len-1 so an exact multiple of block_len doesn't count a full block */
 6074    size_t const nb_blocks = (len - 1) / block_len;

 6080    for (n = 0; n < nb_blocks; n++) {

 6081        f_acc(acc, input + n*block_len, secret, nbStripesPerBlock);

 6082        f_scramble(acc, secret + secretSize - XXH_STRIPE_LEN);

 /* partial last block */
 6086    XXH_ASSERT(len > XXH_STRIPE_LEN);

 6087    {   
size_t const nbStripes = ((len - 1) - (block_len * nb_blocks)) / XXH_STRIPE_LEN;

 6088        XXH_ASSERT(nbStripes <= (secretSize / XXH_SECRET_CONSUME_RATE));

 6089        f_acc(acc, input + nb_blocks*block_len, secret, nbStripes);

 /* last stripe is aligned to the end of input (may overlap the previous
  * one) and uses a secret offset 7 bytes before the scramble position */
 6092        {   
const xxh_u8* 
const p = input + len - XXH_STRIPE_LEN;

 6093#define XXH_SECRET_LASTACC_START 7   
 6094            XXH3_accumulate_512(acc, p, secret + secretSize - XXH_STRIPE_LEN - XXH_SECRET_LASTACC_START);
 
 /* NOTE(review): lossy extraction -- return types, some signatures, `if`
  * headers and closing braces are missing below; verify against upstream
  * xxhash.h. */
 /* Fold two accumulator lanes with 16 bytes of secret into one 64-bit
  * value via a 128-bit multiply fold. */
 6098XXH_FORCE_INLINE xxh_u64

 6099XXH3_mix2Accs(
const xxh_u64* XXH_RESTRICT acc, 
const xxh_u8* XXH_RESTRICT secret)

 6101    return XXH3_mul128_fold64(

 6102               acc[0] ^ XXH_readLE64(secret),

 6103               acc[1] ^ XXH_readLE64(secret+8) );

 /* Merge all 8 accumulator lanes (4 mix2Accs pairs) into one avalanched
  * 64-bit result, starting from `start`. */
 6107XXH3_mergeAccs(
const xxh_u64* XXH_RESTRICT acc, 
const xxh_u8* XXH_RESTRICT secret, xxh_u64 start)

 6109    xxh_u64 result64 = start;

 6112    for (i = 0; i < 4; i++) {

 6113        result64 += XXH3_mix2Accs(acc+2*i, secret + 16*i);

 /* guard: clang on 32-bit ARM NEON auto-vectorizes this reduction badly;
  * the barrier keeps it scalar */
 6114#if defined(__clang__)                                 \ 
 6115    && (defined(__arm__) || defined(__thumb__))        \ 
 6116    && (defined(__ARM_NEON) || defined(__ARM_NEON__))   \ 
 6117    && !defined(XXH_ENABLE_AUTOVECTORIZE)              
 6126        XXH_COMPILER_GUARD(result64);

 6130    return XXH3_avalanche(result64);

 /* finalization: merge accumulators with a secret offset of 11 and a
  * start value of len * PRIME64_1 */
 6134#define XXH_SECRET_MERGEACCS_START 11 
 6137XXH3_finalizeLong_64b(
const xxh_u64* XXH_RESTRICT acc, 
const xxh_u8* XXH_RESTRICT secret, xxh_u64 len)

 6139    return XXH3_mergeAccs(acc, secret + XXH_SECRET_MERGEACCS_START, len * 
XXH_PRIME64_1);

 /* initial accumulator values: a fixed mix of 32- and 64-bit primes */
 6142#define XXH3_INIT_ACC { XXH_PRIME32_3, XXH_PRIME64_1, XXH_PRIME64_2, XXH_PRIME64_3, \ 
 6143                        XXH_PRIME64_4, XXH_PRIME32_2, XXH_PRIME64_5, XXH_PRIME32_1 } 
 /* Long-input (>240 bytes) 64-bit hash: init accumulators, run the core
  * loop, then finalize. */
 6146XXH3_hashLong_64b_internal(
const void* XXH_RESTRICT input, 
size_t len,

 6147                           const void* XXH_RESTRICT secret, 
size_t secretSize,

 6148                           XXH3_f_accumulate f_acc,

 6149                           XXH3_f_scrambleAcc f_scramble)

 6151    XXH_ALIGN(
XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;

 6153    XXH3_hashLong_internal_loop(acc, (
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretSize, f_acc, f_scramble);

 /* the merge reads sizeof(acc) bytes of secret at offset 11 */
 6156    XXH_STATIC_ASSERT(
sizeof(acc) == 64);

 6157    XXH_ASSERT(secretSize >= 
sizeof(acc) + XXH_SECRET_MERGEACCS_START);

 6158    return XXH3_finalizeLong_64b(acc, (
const xxh_u8*)secret, (xxh_u64)len);

 /* long-hash entry using a caller-provided secret; seed64 is unused here */
 6169XXH3_hashLong_64b_withSecret(
const void* XXH_RESTRICT input, 
size_t len,

 6170                             XXH64_hash_t seed64, 
const xxh_u8* XXH_RESTRICT secret, 
size_t secretLen)

 6173    return XXH3_hashLong_64b_internal(input, len, secret, secretLen, XXH3_accumulate, XXH3_scrambleAcc);

 /* long-hash entry with the built-in default secret, unseeded */
 6183XXH3_hashLong_64b_default(
const void* XXH_RESTRICT input, 
size_t len,

 6184                          XXH64_hash_t seed64, 
const xxh_u8* XXH_RESTRICT secret, 
size_t secretLen)

 6186    (void)seed64; (void)secret; (void)secretLen;

 6187    return XXH3_hashLong_64b_internal(input, len, XXH3_kSecret, 
sizeof(XXH3_kSecret), XXH3_accumulate, XXH3_scrambleAcc);

 /* seeded long hash: derive a custom secret on the stack, unless seed==0
  * (the XXH_SIZE_OPT<=0 fast path uses kSecret directly in that case) */
 6202XXH3_hashLong_64b_withSeed_internal(
const void* input, 
size_t len,

 6204                                    XXH3_f_accumulate f_acc,

 6205                                    XXH3_f_scrambleAcc f_scramble,

 6206                                    XXH3_f_initCustomSecret f_initSec)

 6208#if XXH_SIZE_OPT <= 0 
 6210        return XXH3_hashLong_64b_internal(input, len,

 6211                                          XXH3_kSecret, 
sizeof(XXH3_kSecret),

 6214    {   XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];

 6215        f_initSec(secret, seed);

 6216        return XXH3_hashLong_64b_internal(input, len, secret, 
sizeof(secret),

 /* public-shape seeded long-hash wrapper bound to the dispatched kernels */
 6225XXH3_hashLong_64b_withSeed(
const void* XXH_RESTRICT input, 
size_t len,

 6226                           XXH64_hash_t seed, 
const xxh_u8* XXH_RESTRICT secret, 
size_t secretLen)

 6228    (void)secret; (void)secretLen;

 6229    return XXH3_hashLong_64b_withSeed_internal(input, len, seed,

 6230                XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);

 6234typedef XXH64_hash_t (*XXH3_hashLong64_f)(
const void* XXH_RESTRICT, size_t,

 /* Single dispatcher for all 64-bit variants: short paths (<=16, <=128,
  * <=240 bytes) are inlined; longer inputs go through f_hashLong. */
 6238XXH3_64bits_internal(
const void* XXH_RESTRICT input, 
size_t len,

 6239                     XXH64_hash_t seed64, 
const void* XXH_RESTRICT secret, 
size_t secretLen,

 6240                     XXH3_hashLong64_f f_hashLong)

 6251        return XXH3_len_0to16_64b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, seed64);

 6253        return XXH3_len_17to128_64b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);

 6255        return XXH3_len_129to240_64b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);

 6256    return f_hashLong(input, len, seed64, (
const xxh_u8*)secret, secretLen);

 /* public one-shot entry points (signatures live outside this chunk):
  * default secret / user secret / seed / secret+seed combinations */
 6265    return XXH3_64bits_internal(input, length, 0, XXH3_kSecret, 
sizeof(XXH3_kSecret), XXH3_hashLong_64b_default);


 6272    return XXH3_64bits_internal(input, length, 0, secret, secretSize, XXH3_hashLong_64b_withSecret);


 6279    return XXH3_64bits_internal(input, length, seed, XXH3_kSecret, 
sizeof(XXH3_kSecret), XXH3_hashLong_64b_withSeed);


 /* withSecretandSeed: short inputs ignore the secret (NULL hashLong is
  * safe because len<=240 never reaches it); long inputs use the secret */
 6286        return XXH3_64bits_internal(input, length, seed, XXH3_kSecret, 
sizeof(XXH3_kSecret), NULL);

 6287    return XXH3_hashLong_64b_withSecret(input, length, seed, (
const xxh_u8*)secret, secretSize);
 
 
 /* NOTE(review): lossy extraction -- allocation-failure branches, the
  * free()/return statements, several field assignments and closing braces
  * are missing below; verify against upstream xxhash.h. */
 6292#ifndef XXH_NO_STREAM 
 /* Over-allocate by `align` bytes and store the alignment offset in the
  * byte just before the returned pointer, so XXH_alignedFree can recover
  * the base pointer. */
 6316static XXH_MALLOCF 
void* XXH_alignedMalloc(
size_t s, 
size_t align)

 6318    XXH_ASSERT(align <= 128 && align >= 8); 

 /* power-of-two alignment only */
 6319    XXH_ASSERT((align & (align-1)) == 0);   

 /* guard against size_t overflow of s + align */
 6320    XXH_ASSERT(s != 0 && s < (s + align));  

 6322        xxh_u8* base = (xxh_u8*)XXH_malloc(s + align);

 /* offset is in [1, align]: always at least one spare byte before ptr */
 6330            size_t offset = align - ((size_t)base & (align - 1)); 

 6332            xxh_u8* ptr = base + offset;

 6334            XXH_ASSERT((
size_t)ptr % align == 0);

 /* stash the offset for XXH_alignedFree */
 6337            ptr[-1] = (xxh_u8)offset;

 /* Free a pointer returned by XXH_alignedMalloc: read back the offset
  * byte and free the original base pointer. */
 6347static void XXH_alignedFree(
void* p)

 6350        xxh_u8* ptr = (xxh_u8*)p;

 6352        xxh_u8 offset = ptr[-1];

 6354        xxh_u8* base = ptr - offset;

 /* createState: propagate allocation failure to the caller */
 6372    if (state==NULL) 
return NULL;


 /* freeState releases memory obtained via XXH_alignedMalloc */
 6391    XXH_alignedFree(statePtr);


 /* copyState: states are plain data, a memcpy is a full copy */
 6399    XXH_memcpy(dst_state, src_state, 
sizeof(*dst_state));


 /* Reset the streaming state for a new hash: zero only the middle
  * region between bufferedSize and nbStripesPerBlock (acc and buffer
  * keep their bytes; the custom secret may be reused). */
 6405                    const void* secret, 
size_t secretSize)

 6407    size_t const initStart = offsetof(
XXH3_state_t, bufferedSize);

 6408    size_t const initLength = offsetof(
XXH3_state_t, nbStripesPerBlock) - initStart;

 6409    XXH_ASSERT(offsetof(
XXH3_state_t, nbStripesPerBlock) > initStart);

 6410    XXH_ASSERT(statePtr != NULL);

 6412    memset((
char*)statePtr + initStart, 0, initLength);

 6421    statePtr->
seed = seed;

 6422    statePtr->
useSeed = (seed != 0);

 /* NULL extSecret means "use the state's customSecret" downstream */
 6423    statePtr->
extSecret = (
const unsigned char*)secret;

 6425    statePtr->
secretLimit = secretSize - XXH_STRIPE_LEN;

 /* reset (unseeded, default secret) */
 6434    XXH3_reset_internal(statePtr, 0, XXH3_kSecret, XXH_SECRET_DEFAULT_SIZE);


 /* reset with a caller-provided secret */
 6443    XXH3_reset_internal(statePtr, 0, secret, secretSize);


 /* reset with seed: regenerate the custom secret only when the seed
  * changed or an external secret was previously installed */
 6455    if ((seed != statePtr->seed) || (statePtr->extSecret != NULL))

 6456        XXH3_initCustomSecret(statePtr->customSecret, seed);

 6457    XXH3_reset_internal(statePtr, seed, NULL, XXH_SECRET_DEFAULT_SIZE);


 /* reset with secret AND seed: force useSeed so short inputs still see
  * the seed */
 6468    XXH3_reset_internal(statePtr, seed64, secret, secretSize);

 6469    statePtr->useSeed = 1; 
 
 
 /* NOTE(review): lossy extraction -- do{ openers, early-return branches,
  * some call arguments and closing braces are missing below; verify
  * against upstream xxhash.h. */
 /* Consume whole stripes from `input`, tracking the secret offset across
  * calls via *nbStripesSoFarPtr; scrambles whenever a full block of the
  * secret has been consumed. Returns a pointer into the consumed input
  * (per the caller's use at line 6590). */
 6490XXH_FORCE_INLINE 
const xxh_u8 *

 6491XXH3_consumeStripes(xxh_u64* XXH_RESTRICT acc,

 6492                    size_t* XXH_RESTRICT nbStripesSoFarPtr, 
size_t nbStripesPerBlock,

 6493                    const xxh_u8* XXH_RESTRICT input, 
size_t nbStripes,

 6494                    const xxh_u8* XXH_RESTRICT secret, 
size_t secretLimit,

 6495                    XXH3_f_accumulate f_acc,

 6496                    XXH3_f_scrambleAcc f_scramble)

 /* resume inside the secret where the previous call stopped */
 6498    const xxh_u8* initialSecret = secret + *nbStripesSoFarPtr * XXH_SECRET_CONSUME_RATE;

 6500    if (nbStripes >= (nbStripesPerBlock - *nbStripesSoFarPtr)) {

 /* finish the current (possibly partial) block, then whole blocks */
 6502        size_t nbStripesThisIter = nbStripesPerBlock - *nbStripesSoFarPtr;

 6506            f_acc(acc, input, initialSecret, nbStripesThisIter);

 6507            f_scramble(acc, secret + secretLimit);

 6508            input += nbStripesThisIter * XXH_STRIPE_LEN;

 6509            nbStripes -= nbStripesThisIter;

 /* subsequent iterations start at the beginning of the secret */
 6511            nbStripesThisIter = nbStripesPerBlock;

 6512            initialSecret = secret;

 6513        } 
while (nbStripes >= nbStripesPerBlock);

 6514        *nbStripesSoFarPtr = 0;

 /* leftover stripes that don't complete a block */
 6517    if (nbStripes > 0) {

 6518        f_acc(acc, input, initialSecret, nbStripes);

 6519        input += nbStripes * XXH_STRIPE_LEN;

 6520        *nbStripesSoFarPtr += nbStripes;

 /* keep hot accumulators on the stack (not on clang, where it regresses) */
 6526#ifndef XXH3_STREAM_USE_STACK 
 6527# if XXH_SIZE_OPT <= 0 && !defined(__clang__)  
 6528#   define XXH3_STREAM_USE_STACK 1 
 /* Streaming update: buffer small inputs; flush the internal buffer when
  * full; hash large middles directly from `input`; always keep the last
  * stripe's bytes available for the final overlapping read. */
 6536            const xxh_u8* XXH_RESTRICT input, 
size_t len,

 6537            XXH3_f_accumulate f_acc,

 6538            XXH3_f_scrambleAcc f_scramble)

 /* NULL input is only legal with len == 0 */
 6541        XXH_ASSERT(len == 0);

 6545    XXH_ASSERT(state != NULL);

 6546    {   
const xxh_u8* 
const bEnd = input + len;

 6547        const unsigned char* 
const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;

 /* local stack copy of the accumulators (copied back at the end) */
 6548#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1 
 6554        XXH_memcpy(acc, state->acc, 
sizeof(acc));

 6556        xxh_u64* XXH_RESTRICT 
const acc = state->acc;

 6558        state->totalLen += len;

 6559        XXH_ASSERT(state->bufferedSize <= XXH3_INTERNALBUFFER_SIZE);

 /* small input: fits entirely in the internal buffer */
 6562        if (len <= XXH3_INTERNALBUFFER_SIZE - state->bufferedSize) {

 6563            XXH_memcpy(state->buffer + state->bufferedSize, input, len);

 6569        #define XXH3_INTERNALBUFFER_STRIPES (XXH3_INTERNALBUFFER_SIZE / XXH_STRIPE_LEN) 
 6570        XXH_STATIC_ASSERT(XXH3_INTERNALBUFFER_SIZE % XXH_STRIPE_LEN == 0);   

 /* some data left from a previous update: top the buffer up and flush */
 6576        if (state->bufferedSize) {

 6577            size_t const loadSize = XXH3_INTERNALBUFFER_SIZE - state->bufferedSize;

 6578            XXH_memcpy(state->buffer + state->bufferedSize, input, loadSize);

 6580            XXH3_consumeStripes(acc,

 6581                               &state->nbStripesSoFar, state->nbStripesPerBlock,

 6582                                state->buffer, XXH3_INTERNALBUFFER_STRIPES,

 6583                                secret, state->secretLimit,

 6585            state->bufferedSize = 0;

 6587        XXH_ASSERT(input < bEnd);

 /* large middle: consume directly from input, bypassing the buffer;
  * bEnd-1 keeps at least 1 byte for the tail buffer */
 6588        if (bEnd - input > XXH3_INTERNALBUFFER_SIZE) {

 6589            size_t nbStripes = (size_t)(bEnd - 1 - input) / XXH_STRIPE_LEN;

 6590            input = XXH3_consumeStripes(acc,

 6591                                       &state->nbStripesSoFar, state->nbStripesPerBlock,

 6593                                       secret, state->secretLimit,

 /* preserve the previous stripe at the end of the buffer for the
  * digest's final overlapping read */
 6595            XXH_memcpy(state->buffer + 
sizeof(state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);

 /* buffer the remaining tail (always at least 1 byte) */
 6599        XXH_ASSERT(input < bEnd);

 6600        XXH_ASSERT(bEnd - input <= XXH3_INTERNALBUFFER_SIZE);

 6601        XXH_ASSERT(state->bufferedSize == 0);

 6602        XXH_memcpy(state->buffer, input, (
size_t)(bEnd-input));

 /* write the stack accumulators back into the state */
 6604#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1 
 6606        XXH_memcpy(state->acc, acc, 
sizeof(acc));
 
 6617    return XXH3_update(state, (
const xxh_u8*)input, len,
 
 6618                       XXH3_accumulate, XXH3_scrambleAcc);
 
 
 6622XXH_FORCE_INLINE 
void 
 6625                  const unsigned char* secret)
 
 6627    xxh_u8 lastStripe[XXH_STRIPE_LEN];
 
 6628    const xxh_u8* lastStripePtr;
 
 6634    XXH_memcpy(acc, state->
acc, 
sizeof(state->
acc));
 
 6637        size_t const nbStripes = (state->
bufferedSize - 1) / XXH_STRIPE_LEN;
 
 6639        XXH3_consumeStripes(acc,
 
 6641                            state->
buffer, nbStripes,
 
 6643                            XXH3_accumulate, XXH3_scrambleAcc);
 
 6647        size_t const catchupSize = XXH_STRIPE_LEN - state->
bufferedSize;
 
 6649        XXH_memcpy(lastStripe, state->
buffer + 
sizeof(state->
buffer) - catchupSize, catchupSize);
 
 6651        lastStripePtr = lastStripe;
 
 6654    XXH3_accumulate_512(acc,
 
 6656                        secret + state->
secretLimit - XXH_SECRET_LASTACC_START);
 
 6662    const unsigned char* 
const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
 
 6665        XXH3_digest_long(acc, state, secret);
 
 6666        return XXH3_finalizeLong_64b(acc, secret, (xxh_u64)state->totalLen);
 
 6672                                  secret, state->secretLimit + XXH_STRIPE_LEN);
 
 
 6695XXH3_len_1to3_128b(
const xxh_u8* input, 
size_t len, 
const xxh_u8* secret, 
XXH64_hash_t seed)
 
 6698    XXH_ASSERT(input != NULL);
 
 6699    XXH_ASSERT(1 <= len && len <= 3);
 
 6700    XXH_ASSERT(secret != NULL);
 
 6706    {   xxh_u8 
const c1 = input[0];
 
 6707        xxh_u8 
const c2 = input[len >> 1];
 
 6708        xxh_u8 
const c3 = input[len - 1];
 
 6709        xxh_u32 
const combinedl = ((xxh_u32)c1 <<16) | ((xxh_u32)c2 << 24)
 
 6710                                | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
 
 6711        xxh_u32 
const combinedh = XXH_rotl32(XXH_swap32(combinedl), 13);
 
 6712        xxh_u64 
const bitflipl = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
 
 6713        xxh_u64 
const bitfliph = (XXH_readLE32(secret+8) ^ XXH_readLE32(secret+12)) - seed;
 
 6714        xxh_u64 
const keyed_lo = (xxh_u64)combinedl ^ bitflipl;
 
 6715        xxh_u64 
const keyed_hi = (xxh_u64)combinedh ^ bitfliph;
 
 6717        h128.
low64  = XXH64_avalanche(keyed_lo);
 
 6718        h128.
high64 = XXH64_avalanche(keyed_hi);
 
 6724XXH3_len_4to8_128b(
const xxh_u8* input, 
size_t len, 
const xxh_u8* secret, 
XXH64_hash_t seed)
 
 6726    XXH_ASSERT(input != NULL);
 
 6727    XXH_ASSERT(secret != NULL);
 
 6728    XXH_ASSERT(4 <= len && len <= 8);
 
 6729    seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
 
 6730    {   xxh_u32 
const input_lo = XXH_readLE32(input);
 
 6731        xxh_u32 
const input_hi = XXH_readLE32(input + len - 4);
 
 6732        xxh_u64 
const input_64 = input_lo + ((xxh_u64)input_hi << 32);
 
 6733        xxh_u64 
const bitflip = (XXH_readLE64(secret+16) ^ XXH_readLE64(secret+24)) + seed;
 
 6734        xxh_u64 
const keyed = input_64 ^ bitflip;
 
 6743        m128.
low64  *= PRIME_MX2;
 
 6751XXH3_len_9to16_128b(
const xxh_u8* input, 
size_t len, 
const xxh_u8* secret, 
XXH64_hash_t seed)
 
 6753    XXH_ASSERT(input != NULL);
 
 6754    XXH_ASSERT(secret != NULL);
 
 6755    XXH_ASSERT(9 <= len && len <= 16);
 
 6756    {   xxh_u64 
const bitflipl = (XXH_readLE64(secret+32) ^ XXH_readLE64(secret+40)) - seed;
 
 6757        xxh_u64 
const bitfliph = (XXH_readLE64(secret+48) ^ XXH_readLE64(secret+56)) + seed;
 
 6758        xxh_u64 
const input_lo = XXH_readLE64(input);
 
 6759        xxh_u64       input_hi = XXH_readLE64(input + len - 8);
 
 6765        m128.
low64 += (xxh_u64)(len - 1) << 54;
 
 6766        input_hi   ^= bitfliph;
 
 6774        if (
sizeof(
void *) < 
sizeof(xxh_u64)) { 
 
 6781            m128.
high64 += (input_hi & 0xFFFFFFFF00000000ULL) + XXH_mult32to64((xxh_u32)input_hi, 
XXH_PRIME32_2);
 
 6826XXH3_len_0to16_128b(
const xxh_u8* input, 
size_t len, 
const xxh_u8* secret, 
XXH64_hash_t seed)
 
 6828    XXH_ASSERT(len <= 16);
 
 6829    {   
if (len > 8) 
return XXH3_len_9to16_128b(input, len, secret, seed);
 
 6830        if (len >= 4) 
return XXH3_len_4to8_128b(input, len, secret, seed);
 
 6831        if (len) 
return XXH3_len_1to3_128b(input, len, secret, seed);
 
 6833            xxh_u64 
const bitflipl = XXH_readLE64(secret+64) ^ XXH_readLE64(secret+72);
 
 6834            xxh_u64 
const bitfliph = XXH_readLE64(secret+80) ^ XXH_readLE64(secret+88);
 
 6835            h128.
low64 = XXH64_avalanche(seed ^ bitflipl);
 
 6836            h128.
high64 = XXH64_avalanche( seed ^ bitfliph);
 
 6845XXH128_mix32B(
XXH128_hash_t acc, 
const xxh_u8* input_1, 
const xxh_u8* input_2,
 
 6848    acc.
low64  += XXH3_mix16B (input_1, secret+0, seed);
 
 6849    acc.
low64  ^= XXH_readLE64(input_2) + XXH_readLE64(input_2 + 8);
 
 6850    acc.
high64 += XXH3_mix16B (input_2, secret+16, seed);
 
 6851    acc.
high64 ^= XXH_readLE64(input_1) + XXH_readLE64(input_1 + 8);
 
 6857XXH3_len_17to128_128b(
const xxh_u8* XXH_RESTRICT input, 
size_t len,
 
 6858                      const xxh_u8* XXH_RESTRICT secret, 
size_t secretSize,
 
 6862    XXH_ASSERT(16 < len && len <= 128);
 
 6868#if XXH_SIZE_OPT >= 1 
 6871            unsigned int i = (
unsigned int)(len - 1) / 32;
 
 6873                acc = XXH128_mix32B(acc, input+16*i, input+len-16*(i+1), secret+32*i, seed);
 
 6880                    acc = XXH128_mix32B(acc, input+48, input+len-64, secret+96, seed);
 
 6882                acc = XXH128_mix32B(acc, input+32, input+len-48, secret+64, seed);
 
 6884            acc = XXH128_mix32B(acc, input+16, input+len-32, secret+32, seed);
 
 6886        acc = XXH128_mix32B(acc, input, input+len-16, secret, seed);
 
 6901XXH3_len_129to240_128b(
const xxh_u8* XXH_RESTRICT input, 
size_t len,
 
 6902                       const xxh_u8* XXH_RESTRICT secret, 
size_t secretSize,
 
 6918        for (i = 32; i < 160; i += 32) {
 
 6919            acc = XXH128_mix32B(acc,
 
 6932        for (i=160; i <= len; i += 32) {
 
 6933            acc = XXH128_mix32B(acc,
 
 6936                                secret + XXH3_MIDSIZE_STARTOFFSET + i - 160,
 
 6940        acc = XXH128_mix32B(acc,
 
 6959XXH3_finalizeLong_128b(
const xxh_u64* XXH_RESTRICT acc, 
const xxh_u8* XXH_RESTRICT secret, 
size_t secretSize, xxh_u64 len)
 
 6962    h128.
low64 = XXH3_finalizeLong_64b(acc, secret, len);
 
 6963    h128.
high64 = XXH3_mergeAccs(acc, secret + secretSize
 
 6964                                             - XXH_STRIPE_LEN - XXH_SECRET_MERGEACCS_START,
 
 6970XXH3_hashLong_128b_internal(
const void* XXH_RESTRICT input, 
size_t len,
 
 6971                            const xxh_u8* XXH_RESTRICT secret, 
size_t secretSize,
 
 6972                            XXH3_f_accumulate f_acc,
 
 6973                            XXH3_f_scrambleAcc f_scramble)
 
 6975    XXH_ALIGN(
XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
 
 6977    XXH3_hashLong_internal_loop(acc, (
const xxh_u8*)input, len, secret, secretSize, f_acc, f_scramble);
 
 6980    XXH_STATIC_ASSERT(
sizeof(acc) == 64);
 
 6981    XXH_ASSERT(secretSize >= 
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
 
 6982    return XXH3_finalizeLong_128b(acc, secret, secretSize, (xxh_u64)len);
 
 6989XXH3_hashLong_128b_default(
const void* XXH_RESTRICT input, 
size_t len,
 
 6991                           const void* XXH_RESTRICT secret, 
size_t secretLen)
 
 6993    (void)seed64; (void)secret; (void)secretLen;
 
 6994    return XXH3_hashLong_128b_internal(input, len, XXH3_kSecret, 
sizeof(XXH3_kSecret),
 
 6995                                       XXH3_accumulate, XXH3_scrambleAcc);
 
 7006XXH3_hashLong_128b_withSecret(
const void* XXH_RESTRICT input, 
size_t len,
 
 7008                              const void* XXH_RESTRICT secret, 
size_t secretLen)
 
 7011    return XXH3_hashLong_128b_internal(input, len, (
const xxh_u8*)secret, secretLen,
 
 7012                                       XXH3_accumulate, XXH3_scrambleAcc);
 
 7016XXH3_hashLong_128b_withSeed_internal(
const void* XXH_RESTRICT input, 
size_t len,
 
 7018                                XXH3_f_accumulate f_acc,
 
 7019                                XXH3_f_scrambleAcc f_scramble,
 
 7020                                XXH3_f_initCustomSecret f_initSec)
 
 7023        return XXH3_hashLong_128b_internal(input, len,
 
 7024                                           XXH3_kSecret, 
sizeof(XXH3_kSecret),
 
 7026    {   XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
 
 7027        f_initSec(secret, seed64);
 
 7028        return XXH3_hashLong_128b_internal(input, len, (
const xxh_u8*)secret, 
sizeof(secret),
 
 7037XXH3_hashLong_128b_withSeed(
const void* input, 
size_t len,
 
 7038                            XXH64_hash_t seed64, 
const void* XXH_RESTRICT secret, 
size_t secretLen)
 
 7040    (void)secret; (void)secretLen;
 
 7041    return XXH3_hashLong_128b_withSeed_internal(input, len, seed64,
 
 7042                XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
 
 7045typedef XXH128_hash_t (*XXH3_hashLong128_f)(
const void* XXH_RESTRICT, size_t,
 
 7049XXH3_128bits_internal(
const void* input, 
size_t len,
 
 7050                      XXH64_hash_t seed64, 
const void* XXH_RESTRICT secret, 
size_t secretLen,
 
 7051                      XXH3_hashLong128_f f_hl128)
 
 7061        return XXH3_len_0to16_128b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, seed64);
 
 7063        return XXH3_len_17to128_128b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);
 
 7065        return XXH3_len_129to240_128b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);
 
 7066    return f_hl128(input, len, seed64, secret, secretLen);
 
 7075    return XXH3_128bits_internal(input, len, 0,
 
 7076                                 XXH3_kSecret, 
sizeof(XXH3_kSecret),
 
 7077                                 XXH3_hashLong_128b_default);
 
 
 7084    return XXH3_128bits_internal(input, len, 0,
 
 7085                                 (
const xxh_u8*)secret, secretSize,
 
 7086                                 XXH3_hashLong_128b_withSecret);
 
 
 7093    return XXH3_128bits_internal(input, len, seed,
 
 7094                                 XXH3_kSecret, 
sizeof(XXH3_kSecret),
 
 7095                                 XXH3_hashLong_128b_withSeed);
 
 
 7103        return XXH3_128bits_internal(input, len, seed, XXH3_kSecret, 
sizeof(XXH3_kSecret), NULL);
 
 7104    return XXH3_hashLong_128b_withSecret(input, len, seed, secret, secretSize);
 
 
 7116#ifndef XXH_NO_STREAM 
 7160    const unsigned char* 
const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
 
 7163        XXH3_digest_long(acc, state, secret);
 
 7164        XXH_ASSERT(state->secretLimit + XXH_STRIPE_LEN >= 
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
 
 7165        return XXH3_finalizeLong_128b(acc, secret, state->secretLimit + XXH_STRIPE_LEN,  (xxh_u64)state->totalLen);
 
 7171                                   secret, state->secretLimit + XXH_STRIPE_LEN);
 
 
 7183    return !(memcmp(&h1, &h2, 
sizeof(h1)));
 
 
 7197    if (hcmp) 
return hcmp;
 
 
 7213    XXH_memcpy((
char*)dst + 
sizeof(hash.
high64), &hash.
low64, 
sizeof(hash.
low64));
 
 
 7221    h.
high64 = XXH_readBE64(src);
 
 7222    h.
low64  = XXH_readBE64(src->digest + 8);
 
 
 7232#define XXH_MIN(x, y) (((x) > (y)) ? (y) : (x)) 
 7234XXH_FORCE_INLINE 
void XXH3_combine16(
void* dst, 
XXH128_hash_t h128)
 
 7236    XXH_writeLE64( dst, XXH_readLE64(dst) ^ h128.
low64 );
 
 7237    XXH_writeLE64( (
char*)dst+8, XXH_readLE64((
char*)dst+8) ^ h128.
high64 );
 
 7242XXH3_generateSecret(XXH_NOESCAPE 
void* secretBuffer, 
size_t secretSize, XXH_NOESCAPE 
const void* customSeed, 
size_t customSeedSize)
 
 7244#if (XXH_DEBUGLEVEL >= 1) 
 7245    XXH_ASSERT(secretBuffer != NULL);
 
 7249    if (secretBuffer == NULL) 
return XXH_ERROR;
 
 7253    if (customSeedSize == 0) {
 
 7254        customSeed = XXH3_kSecret;
 
 7255        customSeedSize = XXH_SECRET_DEFAULT_SIZE;
 
 7257#if (XXH_DEBUGLEVEL >= 1) 
 7258    XXH_ASSERT(customSeed != NULL);
 
 7260    if (customSeed == NULL) 
return XXH_ERROR;
 
 7265        while (pos < secretSize) {
 
 7266            size_t const toCopy = XXH_MIN((secretSize - pos), customSeedSize);
 
 7267            memcpy((
char*)secretBuffer + pos, customSeed, toCopy);
 
 7271    {   
size_t const nbSeg16 = secretSize / 16;
 
 7275        for (n=0; n<nbSeg16; n++) {
 
 7277            XXH3_combine16((
char*)secretBuffer + n*16, h128);
 
 
 7289    XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
 
 7290    XXH3_initCustomSecret(secret, seed);
 
 7291    XXH_ASSERT(secretBuffer != NULL);
 
 7292    memcpy(secretBuffer, secret, XXH_SECRET_DEFAULT_SIZE);
 
 
 7298#if XXH_VECTOR == XXH_AVX2  \ 
 7299  && defined(__GNUC__) && !defined(__clang__)  \ 
 7300  && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0  
 7301#  pragma GCC pop_options 
 7314#if defined (__cplusplus) 
struct XXH32_state_s XXH32_state_t
The opaque state struct for the XXH32 streaming API.
Definition xxhash.h:653
 
XXH_errorcode XXH32_reset(XXH32_state_t *statePtr, XXH32_hash_t seed)
Resets an XXH32_state_t to begin a new hash.
Definition xxhash.h:3224
 
XXH32_hash_t XXH32(const void *input, size_t length, XXH32_hash_t seed)
Calculates the 32-bit hash of input using xxHash32.
Definition xxhash.h:3183
 
XXH_errorcode XXH32_update(XXH32_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH32_state_t.
Definition xxhash.h:3235
 
XXH32_state_t * XXH32_createState(void)
Allocates an XXH32_state_t.
Definition xxhash.h:3206
 
XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr)
Frees an XXH32_state_t.
Definition xxhash.h:3211
 
void XXH32_canonicalFromHash(XXH32_canonical_t *dst, XXH32_hash_t hash)
Converts an XXH32_hash_t to a big endian XXH32_canonical_t.
Definition xxhash.h:3300
 
XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t *src)
Converts an XXH32_canonical_t to a native XXH32_hash_t.
Definition xxhash.h:3307
 
XXH32_hash_t XXH32_digest(const XXH32_state_t *statePtr)
Returns the calculated hash value from an XXH32_state_t.
Definition xxhash.h:3281
 
void XXH32_copyState(XXH32_state_t *dst_state, const XXH32_state_t *src_state)
Copies one XXH32_state_t to another.
Definition xxhash.h:3218
 
#define XXH_PRIME32_2
Definition xxhash.h:2907
 
#define XXH_PRIME32_1
Definition xxhash.h:2906
 
#define XXH_PRIME32_5
Definition xxhash.h:2910
 
#define XXH_PRIME32_3
Definition xxhash.h:2908
 
XXH64_hash_t XXH3_64bits_digest(const XXH3_state_t *statePtr)
Returns the calculated XXH3 64-bit hash value from an XXH3_state_t.
Definition xxhash.h:6660
 
XXH_errorcode XXH3_128bits_update(XXH3_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH3_state_t.
Definition xxhash.h:7152
 
void XXH3_generateSecret_fromSeed(void *secretBuffer, XXH64_hash_t seed)
Generate the same secret as the _withSeed() variants.
Definition xxhash.h:7287
 
void XXH128_canonicalFromHash(XXH128_canonical_t *dst, XXH128_hash_t hash)
Converts an XXH128_hash_t to a big endian XXH128_canonical_t.
Definition xxhash.h:7205
 
XXH64_hash_t XXH3_64bits_withSeed(const void *input, size_t length, XXH64_hash_t seed)
Calculates 64-bit seeded variant of XXH3 hash of input.
Definition xxhash.h:6277
 
int XXH128_cmp(const void *h128_1, const void *h128_2)
Compares two XXH128_hash_t.
Definition xxhash.h:7191
 
XXH128_hash_t XXH3_128bits_withSeed(const void *data, size_t len, XXH64_hash_t seed)
Calculates 128-bit seeded variant of XXH3 hash of data.
Definition xxhash.h:7091
 
XXH128_hash_t XXH128(const void *data, size_t len, XXH64_hash_t seed)
Calculates the 128-bit hash of data using XXH3.
Definition xxhash.h:7109
 
XXH_errorcode XXH3_generateSecret(void *secretBuffer, size_t secretSize, const void *customSeed, size_t customSeedSize)
Derive a high-entropy secret from any user-defined content, named customSeed.
Definition xxhash.h:7242
 
XXH_errorcode XXH3_64bits_reset_withSecretandSeed(XXH3_state_t *statePtr, const void *secret, size_t secretSize, XXH64_hash_t seed64)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:6463
 
XXH_errorcode XXH3_64bits_reset_withSeed(XXH3_state_t *statePtr, XXH64_hash_t seed)
Resets an XXH3_state_t with 64-bit seed to begin a new hash.
Definition xxhash.h:6451
 
struct XXH3_state_s XXH3_state_t
The opaque state struct for the XXH3 streaming API.
Definition xxhash.h:1244
 
XXH128_hash_t XXH3_128bits_digest(const XXH3_state_t *statePtr)
Returns the calculated XXH3 128-bit hash value from an XXH3_state_t.
Definition xxhash.h:7158
 
XXH_errorcode XXH3_64bits_reset_withSecret(XXH3_state_t *statePtr, const void *secret, size_t secretSize)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:6440
 
XXH3_state_t * XXH3_createState(void)
Allocate an XXH3_state_t.
Definition xxhash.h:6369
 
XXH_errorcode XXH3_128bits_reset_withSeed(XXH3_state_t *statePtr, XXH64_hash_t seed)
Resets an XXH3_state_t with 64-bit seed to begin a new hash.
Definition xxhash.h:7138
 
XXH128_hash_t XXH3_128bits(const void *data, size_t len)
Calculates 128-bit unseeded variant of XXH3 of data.
Definition xxhash.h:7073
 
XXH128_hash_t XXH3_128bits_withSecret(const void *data, size_t len, const void *secret, size_t secretSize)
Calculates 128-bit variant of XXH3 with a custom "secret".
Definition xxhash.h:7082
 
XXH_errorcode XXH3_128bits_reset_withSecretandSeed(XXH3_state_t *statePtr, const void *secret, size_t secretSize, XXH64_hash_t seed64)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:7145
 
XXH_errorcode XXH3_128bits_reset(XXH3_state_t *statePtr)
Resets an XXH3_state_t to begin a new hash.
Definition xxhash.h:7124
 
XXH128_hash_t XXH128_hashFromCanonical(const XXH128_canonical_t *src)
Converts an XXH128_canonical_t to a native XXH128_hash_t.
Definition xxhash.h:7218
 
void XXH3_copyState(XXH3_state_t *dst_state, const XXH3_state_t *src_state)
Copies one XXH3_state_t to another.
Definition xxhash.h:6397
 
XXH64_hash_t XXH3_64bits_withSecret(const void *data, size_t len, const void *secret, size_t secretSize)
Calculates 64-bit variant of XXH3 with a custom "secret".
Definition xxhash.h:6270
 
XXH64_hash_t XXH3_64bits(const void *input, size_t length)
Calculates 64-bit unseeded variant of XXH3 hash of input.
Definition xxhash.h:6263
 
XXH_errorcode XXH3_64bits_update(XXH3_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH3_state_t.
Definition xxhash.h:6615
 
XXH128_hash_t XXH3_128bits_withSecretandSeed(const void *input, size_t length, const void *secret, size_t secretSize, XXH64_hash_t seed64)
Calculates 128-bit seeded variant of XXH3 hash of data.
Definition xxhash.h:7100
 
XXH_errorcode XXH3_64bits_reset(XXH3_state_t *statePtr)
Resets an XXH3_state_t to begin a new hash.
Definition xxhash.h:6431
 
int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2)
Check equality of two XXH128_hash_t values.
Definition xxhash.h:7180
 
#define XXH3_SECRET_SIZE_MIN
Definition xxhash.h:1192
 
XXH_errorcode XXH3_freeState(XXH3_state_t *statePtr)
Frees an XXH3_state_t.
Definition xxhash.h:6389
 
XXH_errorcode XXH3_128bits_reset_withSecret(XXH3_state_t *statePtr, const void *secret, size_t secretSize)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:7131
 
XXH64_hash_t XXH3_64bits_withSecretandSeed(const void *data, size_t len, const void *secret, size_t secretSize, XXH64_hash_t seed)
Calculates 64/128-bit seeded variant of XXH3 hash of data.
Definition xxhash.h:6283
 
struct XXH64_state_s XXH64_state_t
The opaque state struct for the XXH64 streaming API.
Definition xxhash.h:927
 
void XXH64_canonicalFromHash(XXH64_canonical_t *dst, XXH64_hash_t hash)
Converts an XXH64_hash_t to a big endian XXH64_canonical_t.
Definition xxhash.h:3795
 
void XXH64_copyState(XXH64_state_t *dst_state, const XXH64_state_t *src_state)
Copies one XXH64_state_t to another.
Definition xxhash.h:3715
 
XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t *src)
Converts an XXH64_canonical_t to a native XXH64_hash_t.
Definition xxhash.h:3803
 
XXH64_hash_t XXH64(const void *input, size_t length, XXH64_hash_t seed)
Calculates the 64-bit hash of input using xxHash64.
Definition xxhash.h:3681
 
XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr)
Frees an XXH64_state_t.
Definition xxhash.h:3708
 
XXH64_state_t * XXH64_createState(void)
Allocates an XXH64_state_t.
Definition xxhash.h:3703
 
XXH_errorcode XXH64_update(XXH64_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH64_state_t.
Definition xxhash.h:3731
 
XXH64_hash_t XXH64_digest(const XXH64_state_t *statePtr)
Returns the calculated hash value from an XXH64_state_t.
Definition xxhash.h:3776
 
XXH_errorcode XXH64_reset(XXH64_state_t *statePtr, XXH64_hash_t seed)
Resets an XXH64_state_t to begin a new hash.
Definition xxhash.h:3721
 
#define XXH_PRIME64_1
Definition xxhash.h:3457
 
#define XXH_PRIME64_2
Definition xxhash.h:3458
 
#define XXH_PRIME64_4
Definition xxhash.h:3460
 
#define XXH_PRIME64_3
Definition xxhash.h:3459
 
#define XXH_PRIME64_5
Definition xxhash.h:3461
 
#define XXH_TARGET_SSE2
Allows a function to be compiled with SSE2 intrinsics.
Definition xxhash.h:5183
 
#define XXH_TARGET_AVX512
Like XXH_TARGET_SSE2, but for AVX512.
Definition xxhash.h:4974
 
#define XXH_TARGET_AVX2
Like XXH_TARGET_SSE2, but for AVX2.
Definition xxhash.h:5077
 
XXH_alignment
Definition xxhash.h:2834
 
@ XXH_aligned
Definition xxhash.h:2835
 
@ XXH_unaligned
Definition xxhash.h:2836
 
uint32_t XXH32_hash_t
An unsigned 32-bit integer.
Definition xxhash.h:587
 
XXH_errorcode
Exit code for the streaming API.
Definition xxhash.h:572
 
uint64_t XXH64_hash_t
An unsigned 64-bit integer.
Definition xxhash.h:866
 
#define XXH_PUBLIC_API
Marks a global symbol.
Definition xxhash.h:455
 
unsigned XXH_versionNumber(void)
Obtains the xxHash version.
Definition xxhash.h:2891
 
#define XXH_VERSION_NUMBER
Version number, encoded as two digits each.
Definition xxhash.h:552
 
@ XXH_ERROR
Definition xxhash.h:574
 
@ XXH_OK
Definition xxhash.h:573
 
#define XXH_ACC_ALIGN
Selects the minimum alignment for XXH3's accumulators.
Definition xxhash.h:3971
 
#define XXH_CPU_LITTLE_ENDIAN
Whether the target is little endian.
Definition xxhash.h:2711
 
#define XXH3_NEON_LANES
Controls the NEON to scalar ratio for XXH3.
Definition xxhash.h:4208
 
#define XXH32_ENDJMP
Whether to use a jump for XXH32_finalize.
Definition xxhash.h:2272
 
#define XXH_FORCE_ALIGN_CHECK
If defined to non-zero, adds a special path for aligned inputs (XXH32() and XXH64() only).
Definition xxhash.h:2221
 
The return value from 128-bit hashes.
Definition xxhash.h:1383
 
XXH64_hash_t low64
Definition xxhash.h:1384
 
XXH64_hash_t high64
Definition xxhash.h:1385
 
Canonical (big endian) representation of XXH32_hash_t.
Definition xxhash.h:754
 
unsigned char digest[4]
Definition xxhash.h:755
 
XXH32_hash_t bufferedSize
Definition xxhash.h:1678
 
XXH32_hash_t total_len_32
Definition xxhash.h:1674
 
XXH32_hash_t large_len
Definition xxhash.h:1675
 
XXH32_hash_t reserved
Definition xxhash.h:1679
 
XXH32_hash_t acc[4]
Definition xxhash.h:1676
 
unsigned char buffer[16]
Definition xxhash.h:1677
 
const unsigned char * extSecret
Definition xxhash.h:1796
 
XXH32_hash_t bufferedSize
Definition xxhash.h:1780
 
XXH64_hash_t reserved64
Definition xxhash.h:1794
 
XXH64_hash_t totalLen
Definition xxhash.h:1786
 
size_t nbStripesSoFar
Definition xxhash.h:1784
 
XXH32_hash_t useSeed
Definition xxhash.h:1782
 
size_t secretLimit
Definition xxhash.h:1790
 
size_t nbStripesPerBlock
Definition xxhash.h:1788
 
XXH64_hash_t seed
Definition xxhash.h:1792
 
unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]
Definition xxhash.h:1778
 
unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]
Definition xxhash.h:1776
 
XXH64_hash_t acc[8]
Definition xxhash.h:1774
 
Canonical (big endian) representation of XXH64_hash_t.
Definition xxhash.h:1028
 
unsigned char buffer[32]
Definition xxhash.h:1700
 
XXH32_hash_t reserved32
Definition xxhash.h:1702
 
XXH64_hash_t acc[4]
Definition xxhash.h:1699
 
XXH64_hash_t reserved64
Definition xxhash.h:1703
 
XXH32_hash_t bufferedSize
Definition xxhash.h:1701
 
XXH64_hash_t total_len
Definition xxhash.h:1698
 
#define XXH3_MIDSIZE_MAX
Maximum size of "short" key in bytes.
Definition xxhash.h:1948
 
#define XXH3_SECRET_DEFAULT_SIZE
Default Secret's size.
Definition xxhash.h:1749
 
#define XXH3_INITSTATE(XXH3_state_ptr)
Initializes a stack-allocated XXH3_state_s.
Definition xxhash.h:1815