
src/hotspot/cpu/aarch64/vm_version_aarch64.cpp

rev 60737 : 8252204: AArch64: Implement SHA3 accelerator/intrinsic
Reviewed-by: duke
Contributed-by: dongbo4@huawei.com
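For context: the auxv value tested throughout this change is read from the Linux auxiliary vector via getauxval(AT_HWCAP) elsewhere in this file; the #ifndef fallbacks below only supply HWCAP bit positions that older libc headers may not define. A minimal stand-alone sketch (not part of the patch; the file name and output format are illustrative) of probing the new SHA3 capability bit, which this patch defines as (1 << 17):

    // sha3_probe.cpp -- illustrative sketch only, not part of this webrev
    #include <cstdio>
    #include <sys/auxv.h>

    #ifndef HWCAP_SHA3
    #define HWCAP_SHA3 (1 << 17)   // same fallback value the patch adds
    #endif

    int main() {
      // getauxval(AT_HWCAP) returns the kernel-reported CPU feature bits,
      // the same word this file stores in auxv.
      unsigned long hwcaps = getauxval(AT_HWCAP);
      std::printf("sha3: %s\n", (hwcaps & HWCAP_SHA3) ? "present" : "absent");
      return 0;
    }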


  50 #ifndef HWCAP_SHA1
  51 #define HWCAP_SHA1  (1<<5)
  52 #endif
  53 
  54 #ifndef HWCAP_SHA2
  55 #define HWCAP_SHA2  (1<<6)
  56 #endif
  57 
  58 #ifndef HWCAP_CRC32
  59 #define HWCAP_CRC32 (1<<7)
  60 #endif
  61 
  62 #ifndef HWCAP_ATOMICS
  63 #define HWCAP_ATOMICS (1<<8)
  64 #endif
  65 
  66 #ifndef HWCAP_SHA512
  67 #define HWCAP_SHA512 (1 << 21)
  68 #endif
  69 




  70 int VM_Version::_cpu;
  71 int VM_Version::_model;
  72 int VM_Version::_model2;
  73 int VM_Version::_variant;
  74 int VM_Version::_revision;
  75 int VM_Version::_stepping;
  76 bool VM_Version::_dcpop;
  77 VM_Version::PsrInfo VM_Version::_psr_info   = { 0, };
  78 
  79 static BufferBlob* stub_blob;
  80 static const int stub_size = 550;
  81 
  82 extern "C" {
  83   typedef void (*getPsrInfo_stub_t)(void*);
  84 }
  85 static getPsrInfo_stub_t getPsrInfo_stub = NULL;
  86 
  87 
  88 class VM_Version_StubGenerator: public StubCodeGenerator {
  89  public:


 274     // A73 is faster with short-and-easy-for-speculative-execution-loop
 275     if (FLAG_IS_DEFAULT(UseSimpleArrayEquals)) {
 276       FLAG_SET_DEFAULT(UseSimpleArrayEquals, true);
 277     }
 278   }
 279 
 280   if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
 281   // On an old-style /proc/cpuinfo (cpu_lines == 1), if _model is an A57 (0xd07)
 282   // we assume the worst: we could be on a big.LITTLE system with undisclosed
 283   // A53 cores which we could be swapped onto at any stage.
 284   if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
 285 
 286   sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
 287   if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);
 288   if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
 289   if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
 290   if (auxv & HWCAP_AES)   strcat(buf, ", aes");
 291   if (auxv & HWCAP_SHA1)  strcat(buf, ", sha1");
 292   if (auxv & HWCAP_SHA2)  strcat(buf, ", sha256");
 293   if (auxv & HWCAP_SHA512) strcat(buf, ", sha512");

 294   if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
 295 
 296   _features_string = os::strdup(buf);
 297 
 298   if (FLAG_IS_DEFAULT(UseCRC32)) {
 299     UseCRC32 = (auxv & HWCAP_CRC32) != 0;
 300   }
 301 
 302   if (UseCRC32 && (auxv & HWCAP_CRC32) == 0) {
 303     warning("UseCRC32 specified, but not supported on this CPU");
 304     FLAG_SET_DEFAULT(UseCRC32, false);
 305   }
 306 
 307   if (FLAG_IS_DEFAULT(UseAdler32Intrinsics)) {
 308     FLAG_SET_DEFAULT(UseAdler32Intrinsics, true);
 309   }
 310 
 311   if (UseVectorizedMismatchIntrinsic) {
 312     warning("UseVectorizedMismatchIntrinsic specified, but not available on this CPU.");
 313     FLAG_SET_DEFAULT(UseVectorizedMismatchIntrinsic, false);


 352   }
 353 
 354   if (auxv & HWCAP_CRC32) {
 355     if (FLAG_IS_DEFAULT(UseCRC32CIntrinsics)) {
 356       FLAG_SET_DEFAULT(UseCRC32CIntrinsics, true);
 357     }
 358   } else if (UseCRC32CIntrinsics) {
 359     warning("CRC32C is not available on the CPU");
 360     FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
 361   }
 362 
 363   if (FLAG_IS_DEFAULT(UseFMA)) {
 364     FLAG_SET_DEFAULT(UseFMA, true);
 365   }
 366 
 367   if (UseMD5Intrinsics) {
 368     warning("MD5 intrinsics are not available on this CPU");
 369     FLAG_SET_DEFAULT(UseMD5Intrinsics, false);
 370   }
 371 
 372   if (auxv & (HWCAP_SHA1 | HWCAP_SHA2)) {
 373     if (FLAG_IS_DEFAULT(UseSHA)) {
 374       FLAG_SET_DEFAULT(UseSHA, true);
 375     }
 376   } else if (UseSHA) {
 377     warning("SHA instructions are not available on this CPU");
 378     FLAG_SET_DEFAULT(UseSHA, false);
 379   }
 380 
 381   if (UseSHA && (auxv & HWCAP_SHA1)) {
 382     if (FLAG_IS_DEFAULT(UseSHA1Intrinsics)) {
 383       FLAG_SET_DEFAULT(UseSHA1Intrinsics, true);
 384     }
 385   } else if (UseSHA1Intrinsics) {
 386     warning("Intrinsics for SHA-1 crypto hash functions not available on this CPU.");
 387     FLAG_SET_DEFAULT(UseSHA1Intrinsics, false);
 388   }
 389 
 390   if (UseSHA && (auxv & HWCAP_SHA2)) {
 391     if (FLAG_IS_DEFAULT(UseSHA256Intrinsics)) {
 392       FLAG_SET_DEFAULT(UseSHA256Intrinsics, true);
 393     }
 394   } else if (UseSHA256Intrinsics) {
 395     warning("Intrinsics for SHA-224 and SHA-256 crypto hash functions not available on this CPU.");
 396     FLAG_SET_DEFAULT(UseSHA256Intrinsics, false);
 397   }
 398 
 399   if (UseSHA && (auxv & HWCAP_SHA512)) {
 400     // Do not auto-enable UseSHA512Intrinsics until it has been fully tested on hardware
 401     // if (FLAG_IS_DEFAULT(UseSHA512Intrinsics)) {
 402       // FLAG_SET_DEFAULT(UseSHA512Intrinsics, true);
 403     // }
 404   } else if (UseSHA512Intrinsics) {
 405     warning("Intrinsics for SHA-384 and SHA-512 crypto hash functions not available on this CPU.");
 406     FLAG_SET_DEFAULT(UseSHA512Intrinsics, false);
 407   }
 408 
 409   if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) {










 410     FLAG_SET_DEFAULT(UseSHA, false);
 411   }
 412 
 413   if (auxv & HWCAP_PMULL) {
 414     if (FLAG_IS_DEFAULT(UseGHASHIntrinsics)) {
 415       FLAG_SET_DEFAULT(UseGHASHIntrinsics, true);
 416     }
 417   } else if (UseGHASHIntrinsics) {
 418     warning("GHASH intrinsics are not available on this CPU");
 419     FLAG_SET_DEFAULT(UseGHASHIntrinsics, false);
 420   }
 421 
 422   if (is_zva_enabled()) {
 423     if (FLAG_IS_DEFAULT(UseBlockZeroing)) {
 424       FLAG_SET_DEFAULT(UseBlockZeroing, true);
 425     }
 426     if (FLAG_IS_DEFAULT(BlockZeroingLowLimit)) {
 427       FLAG_SET_DEFAULT(BlockZeroingLowLimit, 4 * VM_Version::zva_length());
 428     }
 429   } else if (UseBlockZeroing) {




  50 #ifndef HWCAP_SHA1
  51 #define HWCAP_SHA1  (1<<5)
  52 #endif
  53 
  54 #ifndef HWCAP_SHA2
  55 #define HWCAP_SHA2  (1<<6)
  56 #endif
  57 
  58 #ifndef HWCAP_CRC32
  59 #define HWCAP_CRC32 (1<<7)
  60 #endif
  61 
  62 #ifndef HWCAP_ATOMICS
  63 #define HWCAP_ATOMICS (1<<8)
  64 #endif
  65 
  66 #ifndef HWCAP_SHA512
  67 #define HWCAP_SHA512 (1 << 21)
  68 #endif
  69 
  70 #ifndef HWCAP_SHA3
  71 #define HWCAP_SHA3 (1 << 17)
  72 #endif
  73 
  74 int VM_Version::_cpu;
  75 int VM_Version::_model;
  76 int VM_Version::_model2;
  77 int VM_Version::_variant;
  78 int VM_Version::_revision;
  79 int VM_Version::_stepping;
  80 bool VM_Version::_dcpop;
  81 VM_Version::PsrInfo VM_Version::_psr_info   = { 0, };
  82 
  83 static BufferBlob* stub_blob;
  84 static const int stub_size = 550;
  85 
  86 extern "C" {
  87   typedef void (*getPsrInfo_stub_t)(void*);
  88 }
  89 static getPsrInfo_stub_t getPsrInfo_stub = NULL;
  90 
  91 
  92 class VM_Version_StubGenerator: public StubCodeGenerator {
  93  public:


 278     // A73 is faster with short-and-easy-for-speculative-execution-loop
 279     if (FLAG_IS_DEFAULT(UseSimpleArrayEquals)) {
 280       FLAG_SET_DEFAULT(UseSimpleArrayEquals, true);
 281     }
 282   }
 283 
 284   if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
 285   // On an old-style /proc/cpuinfo (cpu_lines == 1), if _model is an A57 (0xd07)
 286   // we assume the worst: we could be on a big.LITTLE system with undisclosed
 287   // A53 cores which we could be swapped onto at any stage.
 288   if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
 289 
 290   sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
 291   if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);
 292   if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
 293   if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
 294   if (auxv & HWCAP_AES)   strcat(buf, ", aes");
 295   if (auxv & HWCAP_SHA1)  strcat(buf, ", sha1");
 296   if (auxv & HWCAP_SHA2)  strcat(buf, ", sha256");
 297   if (auxv & HWCAP_SHA512) strcat(buf, ", sha512");
 298   if (auxv & HWCAP_SHA3)  strcat(buf, ", sha3");
 299   if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
 300 
 301   _features_string = os::strdup(buf);
 302 
 303   if (FLAG_IS_DEFAULT(UseCRC32)) {
 304     UseCRC32 = (auxv & HWCAP_CRC32) != 0;
 305   }
 306 
 307   if (UseCRC32 && (auxv & HWCAP_CRC32) == 0) {
 308     warning("UseCRC32 specified, but not supported on this CPU");
 309     FLAG_SET_DEFAULT(UseCRC32, false);
 310   }
 311 
 312   if (FLAG_IS_DEFAULT(UseAdler32Intrinsics)) {
 313     FLAG_SET_DEFAULT(UseAdler32Intrinsics, true);
 314   }
 315 
 316   if (UseVectorizedMismatchIntrinsic) {
 317     warning("UseVectorizedMismatchIntrinsic specified, but not available on this CPU.");
 318     FLAG_SET_DEFAULT(UseVectorizedMismatchIntrinsic, false);


 357   }
 358 
 359   if (auxv & HWCAP_CRC32) {
 360     if (FLAG_IS_DEFAULT(UseCRC32CIntrinsics)) {
 361       FLAG_SET_DEFAULT(UseCRC32CIntrinsics, true);
 362     }
 363   } else if (UseCRC32CIntrinsics) {
 364     warning("CRC32C is not available on the CPU");
 365     FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
 366   }
 367 
 368   if (FLAG_IS_DEFAULT(UseFMA)) {
 369     FLAG_SET_DEFAULT(UseFMA, true);
 370   }
 371 
 372   if (UseMD5Intrinsics) {
 373     warning("MD5 intrinsics are not available on this CPU");
 374     FLAG_SET_DEFAULT(UseMD5Intrinsics, false);
 375   }
 376 
 377   if (auxv & (HWCAP_SHA1 | HWCAP_SHA2 | HWCAP_SHA512 | HWCAP_SHA3)) {
 378     if (FLAG_IS_DEFAULT(UseSHA)) {
 379       FLAG_SET_DEFAULT(UseSHA, true);
 380     }
 381   } else if (UseSHA) {
 382     warning("SHA instructions are not available on this CPU");
 383     FLAG_SET_DEFAULT(UseSHA, false);
 384   }
 385 
 386   if (UseSHA && (auxv & HWCAP_SHA1)) {
 387     if (FLAG_IS_DEFAULT(UseSHA1Intrinsics)) {
 388       FLAG_SET_DEFAULT(UseSHA1Intrinsics, true);
 389     }
 390   } else if (UseSHA1Intrinsics) {
 391     warning("Intrinsics for SHA-1 crypto hash functions not available on this CPU.");
 392     FLAG_SET_DEFAULT(UseSHA1Intrinsics, false);
 393   }
 394 
 395   if (UseSHA && (auxv & HWCAP_SHA2)) {
 396     if (FLAG_IS_DEFAULT(UseSHA256Intrinsics)) {
 397       FLAG_SET_DEFAULT(UseSHA256Intrinsics, true);
 398     }
 399   } else if (UseSHA256Intrinsics) {
 400     warning("Intrinsics for SHA-224 and SHA-256 crypto hash functions not available on this CPU.");
 401     FLAG_SET_DEFAULT(UseSHA256Intrinsics, false);
 402   }
 403 
 404   if (UseSHA && (auxv & HWCAP_SHA512)) {
 405     // Do not auto-enable UseSHA512Intrinsics until it has been fully tested on hardware
 406     // if (FLAG_IS_DEFAULT(UseSHA512Intrinsics)) {
 407       // FLAG_SET_DEFAULT(UseSHA512Intrinsics, true);
 408     // }
 409   } else if (UseSHA512Intrinsics) {
 410     warning("Intrinsics for SHA-384 and SHA-512 crypto hash functions not available on this CPU.");
 411     FLAG_SET_DEFAULT(UseSHA512Intrinsics, false);
 412   }
 413 
 414   if (UseSHA && (auxv & HWCAP_SHA3)) {
 415     // Do not auto-enable UseSHA3Intrinsics until it has been fully tested on hardware
 416     // if (FLAG_IS_DEFAULT(UseSHA3Intrinsics)) {
 417       // FLAG_SET_DEFAULT(UseSHA3Intrinsics, true);
 418     // }
 419   } else if (UseSHA3Intrinsics) {
 420     warning("Intrinsics for SHA3-224, SHA3-256, SHA3-384 and SHA3-512 crypto hash functions not available on this CPU.");
 421     FLAG_SET_DEFAULT(UseSHA3Intrinsics, false);
 422   }
 423 
 424   if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics || UseSHA3Intrinsics)) {
 425     FLAG_SET_DEFAULT(UseSHA, false);
 426   }
 427 
 428   if (auxv & HWCAP_PMULL) {
 429     if (FLAG_IS_DEFAULT(UseGHASHIntrinsics)) {
 430       FLAG_SET_DEFAULT(UseGHASHIntrinsics, true);
 431     }
 432   } else if (UseGHASHIntrinsics) {
 433     warning("GHASH intrinsics are not available on this CPU");
 434     FLAG_SET_DEFAULT(UseGHASHIntrinsics, false);
 435   }
 436 
 437   if (is_zva_enabled()) {
 438     if (FLAG_IS_DEFAULT(UseBlockZeroing)) {
 439       FLAG_SET_DEFAULT(UseBlockZeroing, true);
 440     }
 441     if (FLAG_IS_DEFAULT(BlockZeroingLowLimit)) {
 442       FLAG_SET_DEFAULT(BlockZeroingLowLimit, 4 * VM_Version::zva_length());
 443     }
 444   } else if (UseBlockZeroing) {
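Note that, as with UseSHA512Intrinsics, the auto-enable of UseSHA3Intrinsics is deliberately left commented out (lines 414-418 above), so the new intrinsic remains opt-in even on hardware that reports HWCAP_SHA3; it does, however, participate in the final UseSHA check at line 424. The flag definition itself lives outside this file and is not shown in this diff; to exercise the new code paths during testing one would presumably run something like java -XX:+UseSHA -XX:+UseSHA3Intrinsics (whether an additional unlock option is required depends on how the flag is declared elsewhere in the patch).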

