Crypto++  8.7
Free C++ class library of cryptographic schemes
cpu.cpp
1 // cpu.cpp - originally written and placed in the public domain by Wei Dai
2 // modified by Jeffrey Walton and the community over the years.
3 
4 #include "pch.h"
5 #include "config.h"
6 
7 #ifndef EXCEPTION_EXECUTE_HANDLER
8 # define EXCEPTION_EXECUTE_HANDLER 1
9 #endif
10 
11 #ifndef CRYPTOPP_IMPORTS
12 
13 #include "cpu.h"
14 #include "misc.h"
15 #include "stdcpp.h"
16 
17 // For _xgetbv on Microsoft 32-bit and 64-bit Intel platforms
18 // https://github.com/weidai11/cryptopp/issues/972
19 #if _MSC_VER >= 1600 && (defined(_M_IX86) || defined(_M_X64))
20 # include <immintrin.h>
21 #endif
22 
23 // For IsProcessorFeaturePresent on Microsoft Arm64 platforms,
24 // https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-isprocessorfeaturepresent
25 #if defined(_WIN32) && defined(_M_ARM64)
26 # include <Windows.h>
27 # include <processthreadsapi.h>
28 #endif
29 
30 #ifdef _AIX
31 # include <sys/systemcfg.h>
32 #endif
33 
34 #ifdef __linux__
35 # include <unistd.h>
36 #endif
37 
38 // Capability queries require Glibc 2.16, http://lwn.net/Articles/519085/
39 // CRYPTOPP_GLIBC_VERSION not used because config.h is missing <features.h>
40 #if (((__GLIBC__ * 100) + __GLIBC_MINOR__) >= 216)
41 # define CRYPTOPP_GETAUXV_AVAILABLE 1
42 #endif
43 
44 #if CRYPTOPP_GETAUXV_AVAILABLE
45 # include <sys/auxv.h>
46 #else
47 #ifndef AT_HWCAP
48 # define AT_HWCAP 16
49 #endif
50 #ifndef AT_HWCAP2
51 # define AT_HWCAP2 26
52 #endif
53 unsigned long int getauxval(unsigned long int) { return 0; }
54 #endif
55 
56 #if defined(__APPLE__)
57 # include <sys/utsname.h>
58 # include <sys/sysctl.h>
59 #endif
60 
61 // FreeBSD headers are giving us trouble...
62 // https://github.com/weidai11/cryptopp/pull/1029
63 #if defined(__FreeBSD__)
64 # include <sys/auxv.h>
65 # include <sys/elf_common.h>
66 #endif
67 
68 // The cpu-features header and source file are located in
69 // "$ANDROID_NDK_ROOT/sources/android/cpufeatures".
70 // setenv-android.sh will copy the header and source file
71 // into PWD and the makefile will build it in place.
72 #if defined(__ANDROID__)
73 # include "cpu-features.h"
74 #endif
75 
76 #ifdef CRYPTOPP_GNU_STYLE_INLINE_ASSEMBLY
77 # include <signal.h>
78 # include <setjmp.h>
79 #endif
80 
81 // Visual Studio 2008 and below are missing _xgetbv and _cpuidex.
82 // The 32-bit versions use inline ASM below. The 64-bit versions are in x64dll.asm.
83 #if defined(_MSC_VER) && defined(_M_X64)
84 extern "C" unsigned long long __fastcall XGETBV64(unsigned int);
85 extern "C" unsigned long long __fastcall CPUID64(unsigned int, unsigned int, unsigned int*);
86 #endif
87 
88 #ifdef CRYPTOPP_GNU_STYLE_INLINE_ASSEMBLY
89 extern "C" {
90  typedef void (*SigHandler)(int);
91 }
92 
93 extern "C"
94 {
95  static jmp_buf s_jmpNoCPUID;
96  static void SigIllHandler(int)
97  {
98  longjmp(s_jmpNoCPUID, 1);
99  }
100 }
101 #endif // CRYPTOPP_GNU_STYLE_INLINE_ASSEMBLY
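The SIGILL handler above underpins the probe path in CpuId() further down (and the CPU_ProbeXXX() routines in other translation units). A condensed sketch of that pattern follows; it is illustrative only, not the library's exact code.

// Sketch of the SIGILL probe pattern used below (not verbatim library code).
static bool ProbeSomeInstruction()
{
    volatile bool result = true;

    SigHandler oldHandler = signal(SIGILL, SigIllHandler);
    if (oldHandler == SIG_ERR)
        return false;

    if (setjmp(s_jmpNoCPUID))
        result = false;             // the instruction faulted; the handler longjmp'd back
    else
        __asm__ volatile ("nop");   // stand-in for the instruction being probed

    signal(SIGILL, oldHandler);     // restore the previous handler
    return result;
}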
102 
103 ANONYMOUS_NAMESPACE_BEGIN
104 
105 #if (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64)
106 
107 using CryptoPP::word32;
108 
109 inline bool IsIntel(const word32 output[4])
110 {
111  // This is the "GenuineIntel" string
112  return (output[1] /*EBX*/ == 0x756e6547) &&
113  (output[2] /*ECX*/ == 0x6c65746e) &&
114  (output[3] /*EDX*/ == 0x49656e69);
115 }
116 
117 inline bool IsAMD(const word32 output[4])
118 {
119  // This is the "AuthenticAMD" string.
120  return ((output[1] /*EBX*/ == 0x68747541) &&
121  (output[2] /*ECX*/ == 0x444D4163) &&
122  (output[3] /*EDX*/ == 0x69746E65)) ||
123  // Early K5's can return "AMDisbetter!"
124  ((output[1] /*EBX*/ == 0x69444d41) &&
125  (output[2] /*ECX*/ == 0x74656273) &&
126  (output[3] /*EDX*/ == 0x21726574));
127 }
128 
129 inline bool IsHygon(const word32 output[4])
130 {
131  // This is the "HygonGenuine" string.
132  return (output[1] /*EBX*/ == 0x6f677948) &&
133  (output[2] /*ECX*/ == 0x656e6975) &&
134  (output[3] /*EDX*/ == 0x6e65476e);
135 }
136 
137 inline bool IsVIA(const word32 output[4])
138 {
139  // This is the "CentaurHauls" string.
140  return ((output[1] /*EBX*/ == 0x746e6543) &&
141  (output[2] /*ECX*/ == 0x736c7561) &&
142  (output[3] /*EDX*/ == 0x48727561)) ||
143  // Some non-PadLock's return "VIA VIA VIA "
144  ((output[1] /*EBX*/ == 0x32414956) &&
145  (output[2] /*ECX*/ == 0x32414956) &&
146  (output[3] /*EDX*/ == 0x32414956));
147 }
148 
149 #endif // X86, X32 and X64
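A short standalone sketch (not part of cpu.cpp) of where the constants in IsIntel() and friends come from: CPUID leaf 0 returns the 12-byte vendor string in EBX, EDX, ECX order, four ASCII bytes per register. It assumes a little-endian host, which any x86 machine is.

#include <cstdint>
#include <cstring>
#include <cstdio>

// Pack four ASCII characters the way an x86 register holds them.
static uint32_t PackLE(const char* s)
{
    uint32_t w = 0;
    std::memcpy(&w, s, 4);   // little-endian load
    return w;
}

int main()
{
    // Prints 756e6547 6c65746e 49656e69, matching the EBX/ECX/EDX
    // comparisons in IsIntel() above ("Genu", "ntel", "ineI").
    std::printf("%08x %08x %08x\n", PackLE("Genu"), PackLE("ntel"), PackLE("ineI"));
    return 0;
}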
150 
151 #if defined(__APPLE__)
152 
153 // http://stackoverflow.com/questions/45637888/how-to-determine-armv8-features-at-runtime-on-ios
154 class AppleMachineInfo
155 {
156 public:
157  enum { PowerMac=1, Mac, iPhone, iPod, iPad, AppleTV, AppleWatch };
158  enum { PowerPC=1, I386, I686, X86_64, ARM32, ARMV8, ARMV82, ARMV83 };
159 
160  AppleMachineInfo() : m_device(0), m_version(0), m_arch(0)
161  {
162  struct utsname systemInfo;
163  systemInfo.machine[0] = '\0';
164  uname(&systemInfo);
165 
166  std::string machine(systemInfo.machine);
167 
168  std::string::size_type pos = machine.find_first_of("0123456789");
169  if (pos != std::string::npos)
170  m_version = std::atoi(machine.substr(pos).c_str());
171 
172  if (machine.find("iPhone") != std::string::npos)
173  {
174  m_device = iPhone;
175  if (m_version >= 6) { m_arch = ARMV8; }
176  else { m_arch = ARM32; }
177  }
178  else if (machine.find("iPod") != std::string::npos)
179  {
180  m_device = iPod;
181  if (m_version >= 6) { m_arch = ARMV8; }
182  else { m_arch = ARM32; }
183  }
184  else if (machine.find("iPad") != std::string::npos)
185  {
186  m_device = iPad;
187  if (m_version >= 5) { m_arch = ARMV8; }
188  else { m_arch = ARM32; }
189  }
190  else if (machine.find("PowerMac") != std::string::npos ||
191  machine.find("Power Macintosh") != std::string::npos)
192  {
193  m_device = PowerMac;
194  m_arch = PowerPC;
195  }
196  else if (machine.find("Mac") != std::string::npos ||
197  machine.find("Macintosh") != std::string::npos)
198  {
199 #if defined(__x86_64) || defined(__amd64)
200  m_device = Mac;
201  m_arch = X86_64;
202 #elif defined(__i386)
203  m_device = Mac;
204  m_arch = I386;
205 #elif defined(__i686)
206  m_device = Mac;
207  m_arch = I686;
208 #else
209  // Should never get here
210  m_device = Mac;
211  m_arch = 0;
212 #endif
213  }
214  else if (machine.find("AppleTV") != std::string::npos)
215  {
216  m_device = AppleTV;
217  if (m_version >= 4) { m_arch = ARMV8; }
218  else { m_arch = ARM32; }
219  }
220  else if (machine.find("AppleWatch") != std::string::npos)
221  {
222  m_device = AppleWatch;
223  if (m_version >= 4) { m_arch = ARMV8; }
224  else { m_arch = ARM32; }
225  }
226  else if (machine.find("arm64") != std::string::npos)
227  {
228  // M1 machine?
229  std::string brand;
230  size_t size = 32;
231 
232  // Supply an oversized buffer, and avoid
233  // an extra call to sysctlbyname.
234  brand.resize(size);
235  if (sysctlbyname("machdep.cpu.brand_string", &brand[0], &size, NULL, 0) == 0 && size > 0)
236  {
237  if (brand[size-1] == '\0')
238  size--;
239  brand.resize(size);
240  }
241 
242  if (brand == "Apple M1")
243  {
244  m_device = Mac;
245  m_arch = ARMV82;
246  }
247  else
248  {
249  // ???
250  m_device = 0;
251  m_arch = ARMV8;
252  }
253  }
254  else
255  {
256  CRYPTOPP_ASSERT(0);
257  }
258  }
259 
260  unsigned int Device() const {
261  return m_device;
262  }
263 
264  unsigned int Version() const {
265  return m_version;
266  }
267 
268  unsigned int Arch() const {
269  return m_arch;
270  }
271 
272  bool IsARM32() const {
273  return m_arch == ARM32;
274  }
275 
276  bool IsARMv8() const {
277  return m_arch >= ARMV8;
278  }
279 
280  bool IsARMv82() const {
281  return m_arch >= ARMV82;
282  }
283 
284  bool IsARMv83() const {
285  return m_arch >= ARMV83;
286  }
287 
288 private:
289  unsigned int m_device, m_version, m_arch;
290 };
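A worked example of the parsing in the constructor above, using a hypothetical device string of the form uname() reports:

// Hypothetical machine string; real devices report strings like "iPhoneN,M".
std::string machine = "iPhone10,1";
std::string::size_type pos = machine.find_first_of("0123456789");   // pos == 6
int version = std::atoi(machine.substr(pos).c_str());               // version == 10
// machine.find("iPhone") succeeds and version >= 6, so m_arch becomes ARMV8.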
291 
292 void GetAppleMachineInfo(unsigned int& device, unsigned int& version, unsigned int& arch)
293 {
294 #if CRYPTOPP_CXX11_STATIC_INIT
295  static const AppleMachineInfo info;
296 #else
297  using CryptoPP::Singleton;
298  const AppleMachineInfo& info = Singleton<AppleMachineInfo>().Ref();
299 #endif
300 
301  device = info.Device();
302  version = info.Version();
303  arch = info.Arch();
304 }
305 
306 inline bool IsAppleMachineARM32()
307 {
308  static unsigned int arch;
309  if (arch == 0)
310  {
311  unsigned int unused;
312  GetAppleMachineInfo(unused, unused, arch);
313  }
314  return arch == AppleMachineInfo::ARM32;
315 }
316 
317 inline bool IsAppleMachineARMv8()
318 {
319  static unsigned int arch;
320  if (arch == 0)
321  {
322  unsigned int unused;
323  GetAppleMachineInfo(unused, unused, arch);
324  }
325  return arch >= AppleMachineInfo::ARMV8;
326 }
327 
328 inline bool IsAppleMachineARMv82()
329 {
330  static unsigned int arch;
331  if (arch == 0)
332  {
333  unsigned int unused;
334  GetAppleMachineInfo(unused, unused, arch);
335  }
336  return arch >= AppleMachineInfo::ARMV82;
337 }
338 
339 inline bool IsAppleMachineARMv83()
340 {
341  static unsigned int arch;
342  if (arch == 0)
343  {
344  unsigned int unused;
345  GetAppleMachineInfo(unused, unused, arch);
346  }
347  return arch >= AppleMachineInfo::ARMV83;
348 }
349 
350 #endif // __APPLE__
351 
352 ANONYMOUS_NAMESPACE_END
353 
354 NAMESPACE_BEGIN(CryptoPP)
355 
356 // *************************** IA-32 CPUs ***************************
357 
358 #if (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64)
359 
360 bool CRYPTOPP_SECTION_INIT g_x86DetectionDone = false;
361 bool CRYPTOPP_SECTION_INIT g_hasSSE2 = false;
362 bool CRYPTOPP_SECTION_INIT g_hasSSSE3 = false;
363 bool CRYPTOPP_SECTION_INIT g_hasSSE41 = false;
364 bool CRYPTOPP_SECTION_INIT g_hasSSE42 = false;
365 bool CRYPTOPP_SECTION_INIT g_hasAESNI = false;
366 bool CRYPTOPP_SECTION_INIT g_hasCLMUL = false;
367 bool CRYPTOPP_SECTION_INIT g_hasMOVBE = false;
368 bool CRYPTOPP_SECTION_INIT g_hasAVX = false;
369 bool CRYPTOPP_SECTION_INIT g_hasAVX2 = false;
370 bool CRYPTOPP_SECTION_INIT g_hasADX = false;
371 bool CRYPTOPP_SECTION_INIT g_hasSHA = false;
372 bool CRYPTOPP_SECTION_INIT g_hasRDRAND = false;
373 bool CRYPTOPP_SECTION_INIT g_hasRDSEED = false;
374 bool CRYPTOPP_SECTION_INIT g_isP4 = false;
375 bool CRYPTOPP_SECTION_INIT g_hasPadlockRNG = false;
376 bool CRYPTOPP_SECTION_INIT g_hasPadlockACE = false;
377 bool CRYPTOPP_SECTION_INIT g_hasPadlockACE2 = false;
378 bool CRYPTOPP_SECTION_INIT g_hasPadlockPHE = false;
379 bool CRYPTOPP_SECTION_INIT g_hasPadlockPMM = false;
380 word32 CRYPTOPP_SECTION_INIT g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE;
381
382 // For Solaris 11
383 extern bool CPU_ProbeSSE2();
384 
385 // xcr0 is available when xgetbv is present.
386 // The intrinsic is broken on GCC 8.1 and earlier. Also see
387 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85684.
388 word64 XGetBV(word32 num)
389 {
390 // Visual Studio 2010 SP1 and above, 32 and 64-bit
391 // https://github.com/weidai11/cryptopp/issues/972
392 #if defined(_MSC_VER) && (_MSC_FULL_VER >= 160040219)
393 
394  return _xgetbv(num);
395 
396 // Visual Studio 2008 and below, 64-bit
397 #elif defined(_MSC_VER) && defined(_M_X64)
398 
399  return XGETBV64(num);
400 
401 // Visual Studio 2008 and below, 32-bit
402 #elif defined(_MSC_VER) && defined(_M_IX86)
403 
404  word32 a=0, d=0;
405  __asm {
406  push eax
407  push edx
408  push ecx
409  mov ecx, num
410  _emit 0x0f
411  _emit 0x01
412  _emit 0xd0
413  mov a, eax
414  mov d, edx
415  pop ecx
416  pop edx
417  pop eax
418  }
419  return (static_cast<word64>(d) << 32) | a;
420 
421 // GCC 4.4 and above
422 #elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4))
423 
424  word32 a=0, d=0;
425  __asm__
426  (
427  "xgetbv" : "=a"(a), "=d"(d) : "c"(num) : "cc"
428  );
429  return (static_cast<word64>(d) << 32) | a;
430 
431 // Remainder of GCC and compatibles.
432 #elif defined(__GNUC__) || defined(__clang__) || defined(__SUNPRO_CC)
433 
434  // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=71659 and
435  // http://www.agner.org/optimize/vectorclass/read.php?i=65
436  word32 a=0, d=0;
437  __asm__
438  (
439  ".byte 0x0f, 0x01, 0xd0" "\n\t"
440  : "=a"(a), "=d"(d) : "c"(num) : "cc"
441  );
442  return (static_cast<word64>(d) << 32) | a;
443 #else
444  # error "Need an xgetbv function"
445 #endif
446 }
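For reference, a hedged sketch of how DetectX86Features() below interprets the value XGetBV(0) returns: XCR0 bit 1 covers XMM (SSE) state and bit 2 covers YMM (AVX) state, which is why YMM_FLAG is defined as (3 << 1).

// Sketch only: the OS saves/restores the AVX registers when both bits are set.
inline bool OsSavesYmmState(word64 xcr0)
{
    const word64 YMM = 3ull << 1;   // XMM state (bit 1) + YMM state (bit 2)
    return (xcr0 & YMM) == YMM;
}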
447 
448 // No inline due to Borland/Embarcadero and Issue 498
449 // cpu.cpp (131): E2211 Inline assembly not allowed in inline and template functions
450 bool CpuId(word32 func, word32 subfunc, word32 output[4])
451 {
452 // Visual Studio 2010 and above, 32 and 64-bit
453 #if defined(_MSC_VER) && (_MSC_VER >= 1600)
454 
455  __cpuidex((int *)output, func, subfunc);
456  return true;
457 
458 // Visual Studio 2008 and below, 64-bit
459 #elif defined(_MSC_VER) && defined(_M_X64)
460 
461  CPUID64(func, subfunc, output);
462  return true;
463 
464 // Visual Studio 2008 and below, 32-bit
465 #elif (defined(_MSC_VER) && defined(_M_IX86)) || defined(__BORLANDC__)
466 
467  __try
468  {
469  // Borland/Embarcadero and Issue 500
470  // Local variables for cpuid output
471  word32 a, b, c, d;
472  __asm
473  {
474  push ebx
475  mov eax, func
476  mov ecx, subfunc
477  cpuid
478  mov [a], eax
479  mov [b], ebx
480  mov [c], ecx
481  mov [d], edx
482  pop ebx
483  }
484  output[0] = a;
485  output[1] = b;
486  output[2] = c;
487  output[3] = d;
488  }
489  __except (EXCEPTION_EXECUTE_HANDLER)
490  {
491  return false;
492  }
493 
494  return true;
495 
496 // Linux, Unix, OS X, Solaris, Cygwin, MinGW
497 #else
498 
499  // longjmp and clobber warnings. Volatile is required.
500  // http://github.com/weidai11/cryptopp/issues/24 and http://stackoverflow.com/q/7721854
501  volatile bool result = true;
502 
503  volatile SigHandler oldHandler = signal(SIGILL, SigIllHandler);
504  if (oldHandler == SIG_ERR)
505  return false;
506 
507 # ifndef __MINGW32__
508  volatile sigset_t oldMask;
509  if (sigprocmask(0, NULLPTR, (sigset_t*)&oldMask) != 0)
510  {
511  signal(SIGILL, oldHandler);
512  return false;
513  }
514 # endif
515 
516  if (setjmp(s_jmpNoCPUID))
517  result = false;
518  else
519  {
520  asm volatile
521  (
522  // save ebx in case -fPIC is being used
523 # if CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64
524  "pushq %%rbx; cpuid; mov %%ebx, %%edi; popq %%rbx"
525 # else
526  "push %%ebx; cpuid; mov %%ebx, %%edi; pop %%ebx"
527 # endif
528  : "=a" (output[0]), "=D" (output[1]), "=c" (output[2]), "=d" (output[3])
529  : "a" (func), "c" (subfunc)
530  : "cc"
531  );
532  }
533 
534 # ifndef __MINGW32__
535  sigprocmask(SIG_SETMASK, (sigset_t*)&oldMask, NULLPTR);
536 # endif
537 
538  signal(SIGILL, oldHandler);
539  return result;
540 #endif
541 }
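A brief usage sketch of CpuId() (illustrative; assumes an x86 build with CRYPTOPP_DISABLE_ASM not defined), reading the maximum standard leaf and the vendor string that the IsIntel()/IsAMD() helpers above compare against:

word32 regs[4] = {0};
if (CpuId(0, 0, regs))
{
    word32 maxLeaf = regs[0];               // EAX: highest standard CPUID leaf
    char vendor[13] = {0};
    std::memcpy(vendor + 0, &regs[1], 4);   // EBX   (needs <cstring>)
    std::memcpy(vendor + 4, &regs[3], 4);   // EDX
    std::memcpy(vendor + 8, &regs[2], 4);   // ECX
    // vendor now reads "GenuineIntel", "AuthenticAMD", "HygonGenuine", ...
}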
542 
543 void DetectX86Features()
544 {
545  // Coverity finding CID 171239. Initialize arrays.
546  // Indexes: EAX=0, EBX=1, ECX=2, EDX=3
547  word32 cpuid0[4]={0}, cpuid1[4]={0}, cpuid2[4]={0};
548 
549 #if defined(CRYPTOPP_DISABLE_ASM)
550  // Not available
551  goto done;
552 #else
553  if (!CpuId(0, 0, cpuid0))
554  goto done;
555  if (!CpuId(1, 0, cpuid1))
556  goto done;
557 #endif
558 
559  CRYPTOPP_CONSTANT(EAX_REG = 0);
560  CRYPTOPP_CONSTANT(EBX_REG = 1);
561  CRYPTOPP_CONSTANT(ECX_REG = 2);
562  CRYPTOPP_CONSTANT(EDX_REG = 3);
563 
564  CRYPTOPP_CONSTANT(MMX_FLAG = (1 << 24)); // EDX
565  CRYPTOPP_CONSTANT(SSE_FLAG = (1 << 25)); // EDX
566  CRYPTOPP_CONSTANT(SSE2_FLAG = (1 << 26)); // EDX
567 
568  CRYPTOPP_CONSTANT(SSE3_FLAG = (1 << 0)); // ECX
569  CRYPTOPP_CONSTANT(SSSE3_FLAG = (1 << 9)); // ECX
570  CRYPTOPP_CONSTANT(SSE41_FLAG = (1 << 19)); // ECX
571  CRYPTOPP_CONSTANT(SSE42_FLAG = (1 << 20)); // ECX
572  CRYPTOPP_CONSTANT(MOVBE_FLAG = (1 << 22)); // ECX
573  CRYPTOPP_CONSTANT(AESNI_FLAG = (1 << 25)); // ECX
574  CRYPTOPP_CONSTANT(CLMUL_FLAG = (1 << 1)); // ECX
575 
576  CRYPTOPP_CONSTANT(XSAVE_FLAG = (1 << 26)); // ECX
577  CRYPTOPP_CONSTANT(OSXSAVE_FLAG = (1 << 27)); // ECX
578 
579  CRYPTOPP_CONSTANT(AVX_FLAG = (3 << 27)); // ECX
580  CRYPTOPP_CONSTANT(YMM_FLAG = (3 << 1)); // CR0
581 
582  // x86_64 machines don't check some flags because SSE2
583  // is part of the core instruction set architecture
584  CRYPTOPP_UNUSED(MMX_FLAG); CRYPTOPP_UNUSED(SSE_FLAG);
585  CRYPTOPP_UNUSED(SSE2_FLAG); CRYPTOPP_UNUSED(SSE3_FLAG);
586  CRYPTOPP_UNUSED(XSAVE_FLAG);
587 
588 #if (CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64)
589  // 64-bit core instruction set includes SSE2. Just check
590  // the OS enabled SSE2 support using OSXSAVE.
591  g_hasSSE2 = (cpuid1[ECX_REG] & OSXSAVE_FLAG) != 0;
592 #else
593  // Check the processor supports SSE2. Then use OSXSAVE to
594  // signal OS support for SSE2 to avoid probes.
595  // Also see http://stackoverflow.com/a/22521619/608639
596  // and http://github.com/weidai11/cryptopp/issues/511.
597  if ((cpuid1[EDX_REG] & SSE2_FLAG) == SSE2_FLAG)
598  g_hasSSE2 = (cpuid1[ECX_REG] & XSAVE_FLAG) != 0 &&
599  (cpuid1[ECX_REG] & OSXSAVE_FLAG) != 0;
600 #endif
601 
602  // Solaris 11 i86pc does not signal SSE support using
603  // OSXSAVE. We need to probe for SSE support.
604  if (g_hasSSE2 == false)
605  g_hasSSE2 = CPU_ProbeSSE2();
606 
607  if (g_hasSSE2 == false)
608  goto done;
609 
610  g_hasSSSE3 = (cpuid1[ECX_REG] & SSSE3_FLAG) != 0;
611  g_hasSSE41 = (cpuid1[ECX_REG] & SSE41_FLAG) != 0;
612  g_hasSSE42 = (cpuid1[ECX_REG] & SSE42_FLAG) != 0;
613  g_hasMOVBE = (cpuid1[ECX_REG] & MOVBE_FLAG) != 0;
614  g_hasAESNI = (cpuid1[ECX_REG] & AESNI_FLAG) != 0;
615  g_hasCLMUL = (cpuid1[ECX_REG] & CLMUL_FLAG) != 0;
616 
617  // AVX is similar to SSE. Check if AVX is available on the cpu, then
618  // check if the OS enabled XSAVE/XRESTORE for the extended registers.
619  // https://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled
620  if ((cpuid1[ECX_REG] & AVX_FLAG) == AVX_FLAG)
621  {
622  word64 xcr0 = XGetBV(0);
623  g_hasAVX = (xcr0 & YMM_FLAG) == YMM_FLAG;
624  }
625 
626  if (IsIntel(cpuid0))
627  {
628  CRYPTOPP_CONSTANT(RDRAND_FLAG = (1 << 30));
629  CRYPTOPP_CONSTANT(RDSEED_FLAG = (1 << 18));
630  CRYPTOPP_CONSTANT( ADX_FLAG = (1 << 19));
631  CRYPTOPP_CONSTANT( SHA_FLAG = (1 << 29));
632  CRYPTOPP_CONSTANT( AVX2_FLAG = (1 << 5));
633 
634  g_isP4 = ((cpuid1[0] >> 8) & 0xf) == 0xf;
635  g_cacheLineSize = 8 * GETBYTE(cpuid1[1], 1);
636  g_hasRDRAND = (cpuid1[ECX_REG] & RDRAND_FLAG) != 0;
637 
638  if (cpuid0[EAX_REG] >= 7)
639  {
640  if (CpuId(7, 0, cpuid2))
641  {
642  g_hasRDSEED = (cpuid2[EBX_REG] & RDSEED_FLAG) != 0;
643  g_hasADX = (cpuid2[EBX_REG] & ADX_FLAG) != 0;
644  g_hasSHA = (cpuid2[EBX_REG] & SHA_FLAG) != 0;
645  g_hasAVX2 = (cpuid2[EBX_REG] & AVX2_FLAG) != 0;
646  }
647  }
648  }
649  else if (IsAMD(cpuid0) || IsHygon(cpuid0))
650  {
651  CRYPTOPP_CONSTANT(RDRAND_FLAG = (1 << 30));
652  CRYPTOPP_CONSTANT(RDSEED_FLAG = (1 << 18));
653  CRYPTOPP_CONSTANT( ADX_FLAG = (1 << 19));
654  CRYPTOPP_CONSTANT( SHA_FLAG = (1 << 29));
655  CRYPTOPP_CONSTANT( AVX2_FLAG = (1 << 5));
656 
657  CpuId(0x80000005, 0, cpuid2);
658  g_cacheLineSize = GETBYTE(cpuid2[ECX_REG], 0);
659  g_hasRDRAND = (cpuid1[ECX_REG] & RDRAND_FLAG) != 0;
660 
661  if (cpuid0[EAX_REG] >= 7)
662  {
663  if (CpuId(7, 0, cpuid2))
664  {
665  g_hasRDSEED = (cpuid2[EBX_REG] & RDSEED_FLAG) != 0;
666  g_hasADX = (cpuid2[EBX_REG] & ADX_FLAG) != 0;
667  g_hasSHA = (cpuid2[EBX_REG] & SHA_FLAG) != 0;
668  g_hasAVX2 = (cpuid2[EBX_REG] & AVX2_FLAG) != 0;
669  }
670  }
671 
672  // Unconditionally disable RDRAND and RDSEED on AMD cpu's with family 15h or 16h.
673  // See Crypto++ Issue 924, https://github.com/weidai11/cryptopp/issues/924,
674  // Clear RDRAND CPUID bit on AMD family 15h/16h, https://lore.kernel.org/patchwork/patch/1115413/,
675  // and AMD CPUID Specification, https://www.amd.com/system/files/TechDocs/25481.pdf
676  {
677  CRYPTOPP_CONSTANT(FAMILY_BASE_FLAG = (0x0f << 8));
678  CRYPTOPP_CONSTANT(FAMILY_EXT_FLAG = (0xff << 20));
679 
680  word32 family = (cpuid1[0] & FAMILY_BASE_FLAG) >> 8;
681  if (family == 0xf)
682  family += (cpuid1[0] & FAMILY_EXT_FLAG) >> 20;
683  if (family == 0x15 || family == 0x16)
684  {
685  g_hasRDRAND = false;
686  g_hasRDSEED = false;
687  }
688  }
689  }
690  else if (IsVIA(cpuid0))
691  {
692  // Two bits: available and enabled
693  CRYPTOPP_CONSTANT( RNG_FLAGS = (0x3 << 2));
694  CRYPTOPP_CONSTANT( ACE_FLAGS = (0x3 << 6));
695  CRYPTOPP_CONSTANT(ACE2_FLAGS = (0x3 << 8));
696  CRYPTOPP_CONSTANT( PHE_FLAGS = (0x3 << 10));
697  CRYPTOPP_CONSTANT( PMM_FLAGS = (0x3 << 12));
698 
699  CpuId(0xC0000000, 0, cpuid2);
700  word32 extendedFeatures = cpuid2[0];
701 
702  if (extendedFeatures >= 0xC0000001)
703  {
704  CpuId(0xC0000001, 0, cpuid2);
705  g_hasPadlockRNG = (cpuid2[EDX_REG] & RNG_FLAGS) != 0;
706  g_hasPadlockACE = (cpuid2[EDX_REG] & ACE_FLAGS) != 0;
707  g_hasPadlockACE2 = (cpuid2[EDX_REG] & ACE2_FLAGS) != 0;
708  g_hasPadlockPHE = (cpuid2[EDX_REG] & PHE_FLAGS) != 0;
709  g_hasPadlockPMM = (cpuid2[EDX_REG] & PMM_FLAGS) != 0;
710  }
711 
712  if (extendedFeatures >= 0xC0000005)
713  {
714  CpuId(0xC0000005, 0, cpuid2);
715  g_cacheLineSize = GETBYTE(cpuid2[ECX_REG], 0);
716  }
717  }
718 
719  // Keep AVX2 in sync with OS support for AVX. AVX tests both
720  // cpu support and OS support, while AVX2 only tests cpu support.
721  g_hasAVX2 &= g_hasAVX;
722 
723 done:
724 
725 #if defined(_SC_LEVEL1_DCACHE_LINESIZE)
726 // Glibc does not implement it on some platforms. The runtime returns 0 instead of an error.
727  // https://sourceware.org/git/?p=glibc.git;a=blob;f=sysdeps/posix/sysconf.c
728  int cacheLineSize = (int)sysconf(_SC_LEVEL1_DCACHE_LINESIZE);
729  if (g_cacheLineSize == 0 && cacheLineSize > 0)
730  g_cacheLineSize = cacheLineSize;
731 #endif
732 
733  if (g_cacheLineSize == 0)
734  g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE;
735 
736  *const_cast<volatile bool*>(&g_x86DetectionDone) = true;
737 }
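A worked example of the AMD family computation above, using a representative family 15h CPUID value (0x00600F20, typical of Bulldozer-class parts; treat the exact value as illustrative):

// cpuid1[0] (EAX of leaf 1) = 0x00600F20 for this example.
word32 eax = 0x00600F20;
word32 family = (eax & (0x0f << 8)) >> 8;        // base family  = 0xf
if (family == 0xf)
    family += (eax & (0xff << 20)) >> 20;        // + extended family 0x6
// family == 0x15, so g_hasRDRAND and g_hasRDSEED are forced to false above.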
738 
739 // *************************** ARM-32, Aarch32 and Aarch64 ***************************
740 
741 #elif (CRYPTOPP_BOOL_ARM32 || CRYPTOPP_BOOL_ARMV8)
742 
743 bool CRYPTOPP_SECTION_INIT g_ArmDetectionDone = false;
744 bool CRYPTOPP_SECTION_INIT g_hasARMv7 = false;
745 bool CRYPTOPP_SECTION_INIT g_hasNEON = false;
746 bool CRYPTOPP_SECTION_INIT g_hasPMULL = false;
747 bool CRYPTOPP_SECTION_INIT g_hasCRC32 = false;
748 bool CRYPTOPP_SECTION_INIT g_hasAES = false;
749 bool CRYPTOPP_SECTION_INIT g_hasSHA1 = false;
750 bool CRYPTOPP_SECTION_INIT g_hasSHA2 = false;
751 bool CRYPTOPP_SECTION_INIT g_hasSHA512 = false;
752 bool CRYPTOPP_SECTION_INIT g_hasSHA3 = false;
753 bool CRYPTOPP_SECTION_INIT g_hasSM3 = false;
754 bool CRYPTOPP_SECTION_INIT g_hasSM4 = false;
755 word32 CRYPTOPP_SECTION_INIT g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE;
756
757 // ARM does not have an unprivileged equivalent to CPUID on IA-32. We have to
758 // jump through some hoops to detect features on a wide array of platforms.
759 // Our strategy has two parts. First, attempt to *Query* the OS for a feature,
760 // for example using getauxval on Linux. If that fails, then *Probe* the cpu by
761 // executing an instruction and observing a SIGILL if unsupported. The probes
762 // are in source files where compilation options like -march=armv8-a+crc make
763 // intrinsics available. They are expensive when compared to a standard OS
764 // feature query. Always perform the feature query first. For Linux see
765 // http://sourceware.org/ml/libc-help/2017-08/msg00012.html
766 // Avoid probes on Apple platforms because Apple's signal handling for SIGILLs
767 // appears broken. We are trying to figure out a way to feature test without
768 // probes. Also see http://stackoverflow.com/a/11197770/608639 and
769 // http://gist.github.com/erkanyildiz/390a480f27e86f8cd6ba.
770 
771 extern bool CPU_ProbeARMv7();
772 extern bool CPU_ProbeNEON();
773 extern bool CPU_ProbeCRC32();
774 extern bool CPU_ProbeAES();
775 extern bool CPU_ProbeSHA1();
776 extern bool CPU_ProbeSHA256();
777 extern bool CPU_ProbeSHA512();
778 extern bool CPU_ProbeSHA3();
779 extern bool CPU_ProbeSM3();
780 extern bool CPU_ProbeSM4();
781 extern bool CPU_ProbePMULL();
782 
783 // https://github.com/torvalds/linux/blob/master/arch/arm/include/uapi/asm/hwcap.h
784 // https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
785 #ifndef HWCAP_ARMv7
786 # define HWCAP_ARMv7 (1 << 29)
787 #endif
788 #ifndef HWCAP_ASIMD
789 # define HWCAP_ASIMD (1 << 1)
790 #endif
791 #ifndef HWCAP_NEON
792 # define HWCAP_NEON (1 << 12)
793 #endif
794 #ifndef HWCAP_CRC32
795 # define HWCAP_CRC32 (1 << 7)
796 #endif
797 #ifndef HWCAP2_CRC32
798 # define HWCAP2_CRC32 (1 << 4)
799 #endif
800 #ifndef HWCAP_PMULL
801 # define HWCAP_PMULL (1 << 4)
802 #endif
803 #ifndef HWCAP2_PMULL
804 # define HWCAP2_PMULL (1 << 1)
805 #endif
806 #ifndef HWCAP_AES
807 # define HWCAP_AES (1 << 3)
808 #endif
809 #ifndef HWCAP2_AES
810 # define HWCAP2_AES (1 << 0)
811 #endif
812 #ifndef HWCAP_SHA1
813 # define HWCAP_SHA1 (1 << 5)
814 #endif
815 #ifndef HWCAP_SHA2
816 # define HWCAP_SHA2 (1 << 6)
817 #endif
818 #ifndef HWCAP2_SHA1
819 # define HWCAP2_SHA1 (1 << 2)
820 #endif
821 #ifndef HWCAP2_SHA2
822 # define HWCAP2_SHA2 (1 << 3)
823 #endif
824 #ifndef HWCAP_SHA3
825 # define HWCAP_SHA3 (1 << 17)
826 #endif
827 #ifndef HWCAP_SM3
828 # define HWCAP_SM3 (1 << 18)
829 #endif
830 #ifndef HWCAP_SM4
831 # define HWCAP_SM4 (1 << 19)
832 #endif
833 #ifndef HWCAP_SHA512
834 # define HWCAP_SHA512 (1 << 21)
835 #endif
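For illustration, the kind of query the CPU_QueryXXX() functions below perform on Linux, using the HWCAP bits just defined (AArch64 assumed):

#if defined(__linux__) && defined(__aarch64__)
// Sketch: ask the kernel, via the auxiliary vector, whether the core
// advertises the AES and PMULL instructions.
bool kernelSaysAES   = (getauxval(AT_HWCAP) & HWCAP_AES)   != 0;
bool kernelSaysPMULL = (getauxval(AT_HWCAP) & HWCAP_PMULL) != 0;
#endif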
836 
837 inline bool CPU_QueryARMv7()
838 {
839 #if defined(__ANDROID__) && defined(__arm__)
840  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
841  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_ARMv7) != 0))
842  return true;
843 #elif defined(__linux__) && defined(__arm__)
844  if ((getauxval(AT_HWCAP) & HWCAP_ARMv7) != 0 ||
845  (getauxval(AT_HWCAP) & HWCAP_NEON) != 0)
846  return true;
847 #elif defined(__APPLE__) && defined(__arm__)
848  // Apple hardware is ARMv7 or above.
849  return true;
850 #elif defined(_WIN32) && defined(_M_ARM64)
851  // Windows 10 ARM64 is only supported on Armv8a and above
852  return true;
853 #endif
854  return false;
855 }
856 
857 inline bool CPU_QueryNEON()
858 {
859 #if defined(__ANDROID__) && defined(__aarch64__)
860  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
861  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_ASIMD) != 0))
862  return true;
863 #elif defined(__ANDROID__) && defined(__arm__)
864  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
865  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_NEON) != 0))
866  return true;
867 #elif defined(__linux__) && defined(__aarch64__)
868  if ((getauxval(AT_HWCAP) & HWCAP_ASIMD) != 0)
869  return true;
870 #elif defined(__linux__) && defined(__aarch32__)
871  if ((getauxval(AT_HWCAP2) & HWCAP2_ASIMD) != 0)
872  return true;
873 #elif defined(__linux__) && defined(__arm__)
874  if ((getauxval(AT_HWCAP) & HWCAP_NEON) != 0)
875  return true;
876 #elif defined(__APPLE__) && defined(__aarch64__)
877  // Core feature set for Aarch32 and Aarch64.
878  if (IsAppleMachineARMv8())
879  return true;
880 #elif defined(_WIN32) && defined(_M_ARM64)
881  // Windows 10 ARM64 is only supported on Armv8a and above
882  if (IsProcessorFeaturePresent(PF_ARM_V8_INSTRUCTIONS_AVAILABLE) != 0)
883  return true;
884 #endif
885  return false;
886 }
887 
888 inline bool CPU_QueryCRC32()
889 {
890 #if defined(__ANDROID__) && defined(__aarch64__)
891  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
892  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_CRC32) != 0))
893  return true;
894 #elif defined(__ANDROID__) && defined(__aarch32__)
895  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
896  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_CRC32) != 0))
897  return true;
898 #elif defined(__linux__) && defined(__aarch64__)
899  if ((getauxval(AT_HWCAP) & HWCAP_CRC32) != 0)
900  return true;
901 #elif defined(__linux__) && defined(__aarch32__)
902  if ((getauxval(AT_HWCAP2) & HWCAP2_CRC32) != 0)
903  return true;
904 #elif defined(__APPLE__) && defined(__aarch64__)
905  // M1 processor
906  if (IsAppleMachineARMv82())
907  return true;
908 #elif defined(_WIN32) && defined(_M_ARM64)
909  if (IsProcessorFeaturePresent(PF_ARM_V8_CRC32_INSTRUCTIONS_AVAILABLE) != 0)
910  return true;
911 #endif
912  return false;
913 }
914 
915 inline bool CPU_QueryPMULL()
916 {
917 #if defined(__ANDROID__) && defined(__aarch64__)
918  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
919  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_PMULL) != 0))
920  return true;
921 #elif defined(__ANDROID__) && defined(__aarch32__)
922  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
923  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_PMULL) != 0))
924  return true;
925 #elif defined(__linux__) && defined(__aarch64__)
926  if ((getauxval(AT_HWCAP) & HWCAP_PMULL) != 0)
927  return true;
928 #elif defined(__linux__) && defined(__aarch32__)
929  if ((getauxval(AT_HWCAP2) & HWCAP2_PMULL) != 0)
930  return true;
931 #elif defined(__APPLE__) && defined(__aarch64__)
932  // M1 processor
933  if (IsAppleMachineARMv82())
934  return true;
935 #elif defined(_WIN32) && defined(_M_ARM64)
936  if (IsProcessorFeaturePresent(PF_ARM_V8_CRYPTO_INSTRUCTIONS_AVAILABLE) != 0)
937  return true;
938 #endif
939  return false;
940 }
941 
942 inline bool CPU_QueryAES()
943 {
944 #if defined(__ANDROID__) && defined(__aarch64__)
945  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
946  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_AES) != 0))
947  return true;
948 #elif defined(__ANDROID__) && defined(__aarch32__)
949  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
950  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_AES) != 0))
951  return true;
952 #elif defined(__linux__) && defined(__aarch64__)
953  if ((getauxval(AT_HWCAP) & HWCAP_AES) != 0)
954  return true;
955 #elif defined(__linux__) && defined(__aarch32__)
956  if ((getauxval(AT_HWCAP2) & HWCAP2_AES) != 0)
957  return true;
958 #elif defined(__APPLE__) && defined(__aarch64__)
959  // M1 processor
960  if (IsAppleMachineARMv82())
961  return true;
962 #elif defined(_WIN32) && defined(_M_ARM64)
963  if (IsProcessorFeaturePresent(PF_ARM_V8_CRYPTO_INSTRUCTIONS_AVAILABLE) != 0)
964  return true;
965 #endif
966  return false;
967 }
968 
969 inline bool CPU_QuerySHA1()
970 {
971 #if defined(__ANDROID__) && defined(__aarch64__)
972  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
973  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_SHA1) != 0))
974  return true;
975 #elif defined(__ANDROID__) && defined(__aarch32__)
976  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
977  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_SHA1) != 0))
978  return true;
979 #elif defined(__linux__) && defined(__aarch64__)
980  if ((getauxval(AT_HWCAP) & HWCAP_SHA1) != 0)
981  return true;
982 #elif defined(__linux__) && defined(__aarch32__)
983  if ((getauxval(AT_HWCAP2) & HWCAP2_SHA1) != 0)
984  return true;
985 #elif defined(__APPLE__) && defined(__aarch64__)
986  // M1 processor
987  if (IsAppleMachineARMv82())
988  return true;
989 #elif defined(_WIN32) && defined(_M_ARM64)
990  if (IsProcessorFeaturePresent(PF_ARM_V8_CRYPTO_INSTRUCTIONS_AVAILABLE) != 0)
991  return true;
992 #endif
993  return false;
994 }
995 
996 inline bool CPU_QuerySHA256()
997 {
998 #if defined(__ANDROID__) && defined(__aarch64__)
999  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
1000  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_SHA2) != 0))
1001  return true;
1002 #elif defined(__ANDROID__) && defined(__aarch32__)
1003  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
1004  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_SHA2) != 0))
1005  return true;
1006 #elif defined(__linux__) && defined(__aarch64__)
1007  if ((getauxval(AT_HWCAP) & HWCAP_SHA2) != 0)
1008  return true;
1009 #elif defined(__linux__) && defined(__aarch32__)
1010  if ((getauxval(AT_HWCAP2) & HWCAP2_SHA2) != 0)
1011  return true;
1012 #elif defined(__APPLE__) && defined(__aarch64__)
1013  // M1 processor
1014  if (IsAppleMachineARMv82())
1015  return true;
1016 #elif defined(_WIN32) && defined(_M_ARM64)
1017  if (IsProcessorFeaturePresent(PF_ARM_V8_CRYPTO_INSTRUCTIONS_AVAILABLE) != 0)
1018  return true;
1019 #endif
1020  return false;
1021 }
1022 
1023 // Some ARMv8.2 features are disabled at the moment
1024 inline bool CPU_QuerySHA3()
1025 {
1026  // According to the ARM manual, SHA3 depends upon SHA1 and SHA2.
1027  // If SHA1 and SHA2 are not present, then SHA3 and SHA512 are
1028  // not present. Also see Arm A64 Instruction Set Architecture,
1029  // https://developer.arm.com/documentation/ddi0596/2020-12/
1030  if (!g_hasSHA1 || !g_hasSHA2) { return false; }
1031 
1032 #if defined(__ANDROID__) && defined(__aarch64__) && 0
1033  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
1034  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_SHA3) != 0))
1035  return true;
1036 #elif defined(__ANDROID__) && defined(__aarch32__) && 0
1037  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
1038  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_SHA3) != 0))
1039  return true;
1040 #elif defined(__linux__) && defined(__aarch64__)
1041  if ((getauxval(AT_HWCAP) & HWCAP_SHA3) != 0)
1042  return true;
1043 #elif defined(__linux__) && defined(__aarch32__)
1044  if ((getauxval(AT_HWCAP2) & HWCAP2_SHA3) != 0)
1045  return true;
1046 #elif defined(__APPLE__) && defined(__aarch64__)
1047  // M1 processor
1048  if (IsAppleMachineARMv82())
1049  return true;
1050 #endif
1051  return false;
1052 }
1053 
1054 // Some ARMv8.2 features are disabled at the moment
1055 inline bool CPU_QuerySHA512()
1056 {
1057  // According to the ARM manual, SHA512 depends upon SHA1 and SHA2.
1058  // If SHA1 and SHA2 are not present, then SHA3 and SHA512 are
1059  // not present. Also see Arm A64 Instruction Set Architecture,
1060  // https://developer.arm.com/documentation/ddi0596/2020-12/
1061  if (!g_hasSHA1 || !g_hasSHA2) { return false; }
1062 
1063 #if defined(__ANDROID__) && defined(__aarch64__) && 0
1064  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
1065  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_SHA512) != 0))
1066  return true;
1067 #elif defined(__ANDROID__) && defined(__aarch32__) && 0
1068  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
1069  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_SHA512) != 0))
1070  return true;
1071 #elif defined(__linux__) && defined(__aarch64__)
1072  if ((getauxval(AT_HWCAP) & HWCAP_SHA512) != 0)
1073  return true;
1074 #elif defined(__linux__) && defined(__aarch32__)
1075  if ((getauxval(AT_HWCAP2) & HWCAP2_SHA512) != 0)
1076  return true;
1077 #elif defined(__APPLE__) && defined(__aarch64__)
1078  // M1 processor
1079  if (IsAppleMachineARMv82())
1080  return true;
1081 #endif
1082  return false;
1083 }
1084 
1085 // Some ARMv8.2 features are disabled at the moment
1086 inline bool CPU_QuerySM3()
1087 {
1088 #if defined(__ANDROID__) && defined(__aarch64__) && 0
1089  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
1090  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_SM3) != 0))
1091  return true;
1092 #elif defined(__ANDROID__) && defined(__aarch32__) && 0
1093  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
1094  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_SM3) != 0))
1095  return true;
1096 #elif defined(__linux__) && defined(__aarch64__)
1097  if ((getauxval(AT_HWCAP) & HWCAP_SM3) != 0)
1098  return true;
1099 #elif defined(__linux__) && defined(__aarch32__)
1100  if ((getauxval(AT_HWCAP2) & HWCAP2_SM3) != 0)
1101  return true;
1102 #elif defined(__APPLE__) && defined(__aarch64__) && 0
1103  // No Apple support yet.
1104 #endif
1105  return false;
1106 }
1107 
1108 // Some ARMv8.2 features are disabled at the moment
1109 inline bool CPU_QuerySM4()
1110 {
1111 #if defined(__ANDROID__) && defined(__aarch64__) && 0
1112  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM64) != 0) &&
1113  ((android_getCpuFeatures() & ANDROID_CPU_ARM64_FEATURE_SM4) != 0))
1114  return true;
1115 #elif defined(__ANDROID__) && defined(__aarch32__) && 0
1116  if (((android_getCpuFamily() & ANDROID_CPU_FAMILY_ARM) != 0) &&
1117  ((android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_SM4) != 0))
1118  return true;
1119 #elif defined(__linux__) && defined(__aarch64__)
1120  if ((getauxval(AT_HWCAP) & HWCAP_SM4) != 0)
1121  return true;
1122 #elif defined(__linux__) && defined(__aarch32__)
1123  if ((getauxval(AT_HWCAP2) & HWCAP2_SM4) != 0)
1124  return true;
1125 #elif defined(__APPLE__) && defined(__aarch64__) && 0
1126  // No Apple support yet.
1127 #endif
1128  return false;
1129 }
1130 
1131 void DetectArmFeatures()
1132 {
1133 #ifndef CRYPTOPP_DISABLE_ASM
1134 
1135  // The CPU_ProbeXXX's return false for OSes which
1136  // can't tolerate SIGILL-based probes
1137  g_hasARMv7 = CPU_QueryARMv7() || CPU_ProbeARMv7();
1138  g_hasNEON = CPU_QueryNEON() || CPU_ProbeNEON();
1139  g_hasCRC32 = CPU_QueryCRC32() || CPU_ProbeCRC32();
1140  g_hasPMULL = CPU_QueryPMULL() || CPU_ProbePMULL();
1141  g_hasAES = CPU_QueryAES() || CPU_ProbeAES();
1142  g_hasSHA1 = CPU_QuerySHA1() || CPU_ProbeSHA1();
1143  g_hasSHA2 = CPU_QuerySHA256() || CPU_ProbeSHA256();
1144  g_hasSHA512 = CPU_QuerySHA512(); // || CPU_ProbeSHA512();
1145  g_hasSHA3 = CPU_QuerySHA3(); // || CPU_ProbeSHA3();
1146  g_hasSM3 = CPU_QuerySM3(); // || CPU_ProbeSM3();
1147  g_hasSM4 = CPU_QuerySM4(); // || CPU_ProbeSM4();
1148 
1149 #if defined(_SC_LEVEL1_DCACHE_LINESIZE)
1150 // Glibc does not implement it on some platforms. The runtime returns 0 instead of an error.
1151  // https://sourceware.org/git/?p=glibc.git;a=blob;f=sysdeps/posix/sysconf.c
1152  int cacheLineSize = (int)sysconf(_SC_LEVEL1_DCACHE_LINESIZE);
1153  if (cacheLineSize > 0)
1154  g_cacheLineSize = cacheLineSize;
1155 #endif
1156 
1157  if (g_cacheLineSize == 0)
1158  g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE;
1159 
1160 #endif // CRYPTOPP_DISABLE_ASM
1161 
1162  *const_cast<volatile bool*>(&g_ArmDetectionDone) = true;
1163 }
1164 
1165 // *************************** PowerPC and PowerPC64 ***************************
1166 
1167 #elif (CRYPTOPP_BOOL_PPC32 || CRYPTOPP_BOOL_PPC64)
1168 
1169 bool CRYPTOPP_SECTION_INIT g_PowerPcDetectionDone = false;
1170 bool CRYPTOPP_SECTION_INIT g_hasAltivec = false;
1171 bool CRYPTOPP_SECTION_INIT g_hasPower7 = false;
1172 bool CRYPTOPP_SECTION_INIT g_hasPower8 = false;
1173 bool CRYPTOPP_SECTION_INIT g_hasPower9 = false;
1174 bool CRYPTOPP_SECTION_INIT g_hasAES = false;
1175 bool CRYPTOPP_SECTION_INIT g_hasPMULL = false;
1176 bool CRYPTOPP_SECTION_INIT g_hasSHA256 = false;
1177 bool CRYPTOPP_SECTION_INIT g_hasSHA512 = false;
1178 bool CRYPTOPP_SECTION_INIT g_hasDARN = false;
1179 word32 CRYPTOPP_SECTION_INIT g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE;
1180
1181 extern bool CPU_ProbeAltivec();
1182 extern bool CPU_ProbePower7();
1183 extern bool CPU_ProbePower8();
1184 extern bool CPU_ProbePower9();
1185 extern bool CPU_ProbeAES();
1186 extern bool CPU_ProbePMULL();
1187 extern bool CPU_ProbeSHA256();
1188 extern bool CPU_ProbeSHA512();
1189 extern bool CPU_ProbeDARN();
1190 
1191 // AIX defines. We used to just call __power_7_andup()
1192 // and friends but at Power9, too many compilers were
1193 // missing __power_9_andup(). Instead we switched to
1194 // a pattern similar to OpenSSL caps testing.
1195 #ifndef __power_6_andup
1196 # define __power_6_andup() __power_set(0xffffffffU<<14)
1197 #endif
1198 #ifndef __power_7_andup
1199 # define __power_7_andup() __power_set(0xffffffffU<<15)
1200 #endif
1201 #ifndef __power_8_andup
1202 # define __power_8_andup() __power_set(0xffffffffU<<16)
1203 #endif
1204 #ifndef __power_9_andup
1205 # define __power_9_andup() __power_set(0xffffffffU<<17)
1206 #endif
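A note on the masks above, inferred from the shifts themselves rather than quoted from AIX documentation: each Power generation occupies one bit (Power6 at bit 14 through Power9 at bit 17), so a mask of the form 0xffffffffU<<N answers "this generation or newer".

// Sketch of the idea; bit positions are an assumption based on the #defines above.
const unsigned int POWER8_BIT   = 1u << 16;
const unsigned int MASK_8_ANDUP = 0xffffffffU << 16;   // bits 16, 17, 18, ...
// POWER8_BIT and any later generation's bit survive the mask,
// while Power7's bit (1u << 15) does not.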
1207 
1208 // AIX first supported Altivec at Power6, though it
1209 // was available much earlier for other vendors.
1210 inline bool CPU_QueryAltivec()
1211 {
1212 #if defined(__linux__) && defined(PPC_FEATURE_HAS_ALTIVEC)
1213  if ((getauxval(AT_HWCAP) & PPC_FEATURE_HAS_ALTIVEC) != 0)
1214  return true;
1215 #elif defined(_AIX)
1216  if (__power_6_andup() != 0)
1217  return true;
1218 #elif defined(__APPLE__) && defined(__POWERPC__)
1219  unsigned int unused, arch;
1220  GetAppleMachineInfo(unused, unused, arch);
1221  return arch == AppleMachineInfo::PowerPC;
1222 #elif defined(__FreeBSD__) && defined(PPC_FEATURE_HAS_ALTIVEC)
1223  unsigned long cpufeatures;
1224  if (elf_aux_info(AT_HWCAP, &cpufeatures, sizeof(cpufeatures)) == 0)
1225  if ((cpufeatures & PPC_FEATURE_HAS_ALTIVEC) != 0)
1226  return true;
1227 #endif
1228  return false;
1229 }
1230 
1231 inline bool CPU_QueryPower7()
1232 {
1233  // Power7 and ISA 2.06
1234 #if defined(__linux__) && defined(PPC_FEATURE_ARCH_2_06)
1235  if ((getauxval(AT_HWCAP) & PPC_FEATURE_ARCH_2_06) != 0)
1236  return true;
1237 #elif defined(_AIX)
1238  if (__power_7_andup() != 0)
1239  return true;
1240 #elif defined(__FreeBSD__) && defined(PPC_FEATURE_ARCH_2_06)
1241  unsigned long cpufeatures;
1242  if (elf_aux_info(AT_HWCAP, &cpufeatures, sizeof(cpufeatures)) == 0)
1243  if ((cpufeatures & PPC_FEATURE_ARCH_2_06) != 0)
1244  return true;
1245 #endif
1246  return false;
1247 }
1248 
1249 inline bool CPU_QueryPower8()
1250 {
1251  // Power8 and ISA 2.07 provide in-core crypto.
1252 #if defined(__linux__) && defined(PPC_FEATURE2_ARCH_2_07)
1253  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_ARCH_2_07) != 0)
1254  return true;
1255 #elif defined(_AIX)
1256  if (__power_8_andup() != 0)
1257  return true;
1258 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_ARCH_2_07)
1259  unsigned long cpufeatures;
1260  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1261  if ((cpufeatures & PPC_FEATURE2_ARCH_2_07) != 0)
1262  return true;
1263 #endif
1264  return false;
1265 }
1266 
1267 inline bool CPU_QueryPower9()
1268 {
1269  // Power9 and ISA 3.0.
1270 #if defined(__linux__) && defined(PPC_FEATURE2_ARCH_3_00)
1271  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_ARCH_3_00) != 0)
1272  return true;
1273 #elif defined(_AIX)
1274  if (__power_9_andup() != 0)
1275  return true;
1276 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_ARCH_3_00)
1277  unsigned long cpufeatures;
1278  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1279  if ((cpufeatures & PPC_FEATURE2_ARCH_3_00) != 0)
1280  return true;
1281 #endif
1282  return false;
1283 }
1284 
1285 inline bool CPU_QueryAES()
1286 {
1287  // Power8 and ISA 2.07 provide in-core crypto. Glibc
1288  // 2.24 or higher is required for PPC_FEATURE2_VEC_CRYPTO.
1289 #if defined(__linux__) && defined(PPC_FEATURE2_VEC_CRYPTO)
1290  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_VEC_CRYPTO) != 0)
1291  return true;
1292 #elif defined(_AIX)
1293  if (__power_8_andup() != 0)
1294  return true;
1295 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_HAS_VEC_CRYPTO)
1296  unsigned long cpufeatures;
1297  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1298  if ((cpufeatures & PPC_FEATURE2_HAS_VEC_CRYPTO) != 0)
1299  return true;
1300 #endif
1301  return false;
1302 }
1303 
1304 inline bool CPU_QueryPMULL()
1305 {
1306  // Power8 and ISA 2.07 provide in-core crypto. Glibc
1307  // 2.24 or higher is required for PPC_FEATURE2_VEC_CRYPTO.
1308 #if defined(__linux__) && defined(PPC_FEATURE2_VEC_CRYPTO)
1309  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_VEC_CRYPTO) != 0)
1310  return true;
1311 #elif defined(_AIX)
1312  if (__power_8_andup() != 0)
1313  return true;
1314 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_HAS_VEC_CRYPTO)
1315  unsigned long cpufeatures;
1316  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1317  if ((cpufeatures & PPC_FEATURE2_HAS_VEC_CRYPTO) != 0)
1318  return true;
1319 #endif
1320  return false;
1321 }
1322 
1323 inline bool CPU_QuerySHA256()
1324 {
1325  // Power8 and ISA 2.07 provide in-core crypto. Glibc
1326  // 2.24 or higher is required for PPC_FEATURE2_VEC_CRYPTO.
1327 #if defined(__linux__) && defined(PPC_FEATURE2_VEC_CRYPTO)
1328  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_VEC_CRYPTO) != 0)
1329  return true;
1330 #elif defined(_AIX)
1331  if (__power_8_andup() != 0)
1332  return true;
1333 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_HAS_VEC_CRYPTO)
1334  unsigned long cpufeatures;
1335  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1336  if ((cpufeatures & PPC_FEATURE2_HAS_VEC_CRYPTO) != 0)
1337  return true;
1338 #endif
1339  return false;
1340 }
1341 inline bool CPU_QuerySHA512()
1342 {
1343  // Power8 and ISA 2.07 provide in-core crypto. Glibc
1344  // 2.24 or higher is required for PPC_FEATURE2_VEC_CRYPTO.
1345 #if defined(__linux__) && defined(PPC_FEATURE2_VEC_CRYPTO)
1346  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_VEC_CRYPTO) != 0)
1347  return true;
1348 #elif defined(_AIX)
1349  if (__power_8_andup() != 0)
1350  return true;
1351 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_HAS_VEC_CRYPTO)
1352  unsigned long cpufeatures;
1353  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1354  if ((cpufeatures & PPC_FEATURE2_HAS_VEC_CRYPTO) != 0)
1355  return true;
1356 #endif
1357  return false;
1358 }
1359 
1360 // Power9 random number generator
1361 inline bool CPU_QueryDARN()
1362 {
1363  // Power9 and ISA 3.0 provide DARN. It looks like
1364  // Glibc offers PPC_FEATURE2_DARN.
1365 #if defined(__linux__) && defined(PPC_FEATURE2_ARCH_3_00)
1366  if ((getauxval(AT_HWCAP2) & PPC_FEATURE2_ARCH_3_00) != 0)
1367  return true;
1368 #elif defined(_AIX)
1369  if (__power_9_andup() != 0)
1370  return true;
1371 #elif defined(__FreeBSD__) && defined(PPC_FEATURE2_ARCH_3_00)
1372  unsigned long cpufeatures;
1373  if (elf_aux_info(AT_HWCAP2, &cpufeatures, sizeof(cpufeatures)) == 0)
1374  if ((cpufeatures & PPC_FEATURE2_ARCH_3_00) != 0)
1375  return true;
1376 #endif
1377  return false;
1378 }
1379 
1380 void DetectPowerPcFeatures()
1381 {
1382  // GCC 10 is giving us trouble in CPU_ProbePower9() and CPU_ProbeDARN().
1383  // GCC is generating POWER9 instructions on POWER8 for ppc_power9.cpp.
1384  // The compiler idiots did not think through the consequences of
1385  // requiring us to use -mcpu=power9 to unlock the ISA. Epic fail.
1386  // https://github.com/weidai11/cryptopp/issues/986
1387 
1388 #ifndef CRYPTOPP_DISABLE_ASM
1389 
1390  // The CPU_ProbeXXX's return false for OSes which
1391  // can't tolerate SIGILL-based probes, like Apple
1392  g_hasAltivec = CPU_QueryAltivec() || CPU_ProbeAltivec();
1393  g_hasPower7 = CPU_QueryPower7() || CPU_ProbePower7();
1394  g_hasPower8 = CPU_QueryPower8() || CPU_ProbePower8();
1395  g_hasPower9 = CPU_QueryPower9() || CPU_ProbePower9();
1396  g_hasPMULL = CPU_QueryPMULL() || CPU_ProbePMULL();
1397  g_hasAES = CPU_QueryAES() || CPU_ProbeAES();
1398  g_hasSHA256 = CPU_QuerySHA256() || CPU_ProbeSHA256();
1399  g_hasSHA512 = CPU_QuerySHA512() || CPU_ProbeSHA512();
1400  g_hasDARN = CPU_QueryDARN() || CPU_ProbeDARN();
1401 
1402 #if defined(_AIX) && defined(SC_L1C_DLS)
1403  // /usr/include/sys/systemcfg.h
1404  int cacheLineSize = getsystemcfg(SC_L1C_DLS);
1405  if (cacheLineSize > 0)
1406  g_cacheLineSize = cacheLineSize;
1407 #elif defined(_SC_LEVEL1_DCACHE_LINESIZE)
1408 // Glibc does not implement it on some platforms. The runtime returns 0 instead of an error.
1409  // https://sourceware.org/git/?p=glibc.git;a=blob;f=sysdeps/posix/sysconf.c
1410  int cacheLineSize = (int)sysconf(_SC_LEVEL1_DCACHE_LINESIZE);
1411  if (cacheLineSize > 0)
1412  g_cacheLineSize = cacheLineSize;
1413 #endif
1414 
1415  if (g_cacheLineSize == 0)
1416  g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE;
1417 
1418 #endif // CRYPTOPP_DISABLE_ASM
1419 
1420  *const_cast<volatile bool*>(&g_PowerPcDetectionDone) = true;
1421 }
1422 
1423 #endif
1424 NAMESPACE_END
1425 
1426 // *************************** C++ Static Initialization ***************************
1427 
1428 ANONYMOUS_NAMESPACE_BEGIN
1429 
1430 class InitCpu
1431 {
1432 public:
1433  InitCpu()
1434  {
1435 #if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64
1436  CryptoPP::DetectX86Features();
1437 #elif CRYPTOPP_BOOL_ARM32 || CRYPTOPP_BOOL_ARMV8
1438  CryptoPP::DetectArmFeatures();
1439 #elif CRYPTOPP_BOOL_PPC32 || CRYPTOPP_BOOL_PPC64
1440  CryptoPP::DetectPowerPcFeatures();
1441 #endif
1442  }
1443 };
1444 
1445 // This is not really needed because HasSSE2() and friends can dynamically initialize.
1446 // Everything depends on CPU features so we initialize it once at load time.
1447 // Dynamic initialization will be used if init priorities are not available.
1448 
1449 #if HAVE_GCC_INIT_PRIORITY
1450  const InitCpu s_init __attribute__ ((init_priority (CRYPTOPP_INIT_PRIORITY + 10))) = InitCpu();
1451 #elif HAVE_MSC_INIT_PRIORITY
1452  #pragma warning(disable: 4075)
1453  #pragma init_seg(".CRT$XCU")
1454  const InitCpu s_init;
1455  #pragma warning(default: 4075)
1456 #elif HAVE_XLC_INIT_PRIORITY
1457  // XLC needs constant, not a define
1458  #pragma priority(270)
1459  const InitCpu s_init;
1460 #else
1461  const InitCpu s_init;
1462 #endif
1463 
1464 ANONYMOUS_NAMESPACE_END
1465 
1466 #endif // CRYPTOPP_IMPORTS
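Finally, a short usage sketch showing how applications normally consume the detection performed in this file: through the accessors declared in cpu.h (HasAESNI(), HasNEON(), GetCacheLineSize(), and friends), which read the g_hasXXX flags set above. Treat the sketch as illustrative rather than canonical.

#include "cpu.h"
#include <iostream>

int main()
{
    using namespace CryptoPP;
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64
    std::cout << "AES-NI: " << HasAESNI() << "\n";
    std::cout << "AVX2:   " << HasAVX2()  << "\n";
#elif CRYPTOPP_BOOL_ARM32 || CRYPTOPP_BOOL_ARMV8
    std::cout << "NEON:    " << HasNEON() << "\n";
    std::cout << "ARM AES: " << HasAES()  << "\n";
#endif
    std::cout << "L1 cache line: " << GetCacheLineSize() << "\n";
    return 0;
}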