/*
 * Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "opto/c2compiler.hpp"
#include "opto/compile.hpp"
#include "opto/optoreg.hpp"
#include "opto/output.hpp"
#include "opto/runtime.hpp"

// Register save policy and type information defined by ADLC
extern const char register_save_policy[];
extern const int  register_save_type[];

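// The retry_* strings below are sentinel failure reasons: C2 records one of
// them via Compile::record_failure() when an optimization prevents the
// compile from finishing, and compile_method() below matches on the reason
// to decide which optimization to disable before retrying.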
const char* C2Compiler::retry_no_subsuming_loads() {
  return "retry without subsuming loads";
}
const char* C2Compiler::retry_no_escape_analysis() {
  return "retry without escape analysis";
}
const char* C2Compiler::retry_class_loading_during_parsing() {
  return "retry class loading during parsing";
}
bool C2Compiler::init_c2_runtime() {

  // Check assumptions used while running ADLC
  Compile::adlc_verification();
  assert(REG_COUNT <= ConcreteRegisterImpl::number_of_registers, "incompatible register counts");

  for (int i = 0; i < ConcreteRegisterImpl::number_of_registers ; i++ ) {
      OptoReg::vm2opto[i] = OptoReg::Bad;
  }

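  // Build the VMReg -> OptoReg mapping for every register the matcher can name.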
  for( OptoReg::Name i=OptoReg::Name(0); i<OptoReg::Name(REG_COUNT); i = OptoReg::add(i,1) ) {
    VMReg r = OptoReg::as_VMReg(i);
    if (r->is_valid()) {
      OptoReg::vm2opto[r->value()] = i;
    }
  }

  // Check that runtime and architecture description agree on callee-saved-floats
  bool callee_saved_floats = false;
  for( OptoReg::Name i=OptoReg::Name(0); i<OptoReg::Name(_last_Mach_Reg); i = OptoReg::add(i,1) ) {
    // Is there a callee-saved float or double?
    if( register_save_policy[i] == 'E' /* callee-saved */ &&
       (register_save_type[i] == Op_RegF || register_save_type[i] == Op_RegD) ) {
      callee_saved_floats = true;
    }
  }

  DEBUG_ONLY( Node::init_NodeProperty(); )

  Compile::pd_compiler2_init();

  CompilerThread* thread = CompilerThread::current();

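  // Generate the shared C2 runtime stubs (e.g. the exception and uncommon trap
  // blobs); returning false here causes initialize() to mark C2 as failed.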
  HandleMark handle_mark(thread);
  return OptoRuntime::generate(thread->env());
}

void C2Compiler::initialize() {
  // The first compiler thread that gets here will initialize the
  // small amount of global state (and runtime stubs) that C2 needs.

  // A race is possible only once, at startup; after that we're fine.

  // Note that this is called from a compiler thread, not the
  // main startup thread.
  if (should_perform_init()) {
    bool successful = C2Compiler::init_c2_runtime();
    int new_state = (successful) ? initialized : failed;
    set_state(new_state);
  }
}

void C2Compiler::compile_method(ciEnv* env, ciMethod* target, int entry_bci) {
  assert(is_initialized(), "Compiler thread must be initialized");

  bool subsume_loads = SubsumeLoads;
  bool do_escape_analysis = DoEscapeAnalysis && !env->should_retain_local_variables();
  bool eliminate_boxing = EliminateAutoBox;
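  // Retry loop: a compilation that fails with one of the retry_* reasons is
  // re-run with the corresponding optimization disabled.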
  while (!env->failing()) {
    // Attempt to compile while subsuming loads into machine instructions.
    Compile C(env, this, target, entry_bci, subsume_loads, do_escape_analysis, eliminate_boxing);

    // Check result and retry if appropriate.
    if (C.failure_reason() != NULL) {
      if (C.failure_reason_is(retry_class_loading_during_parsing())) {
        env->report_failure(C.failure_reason());
        continue;  // retry
      }
      if (C.failure_reason_is(retry_no_subsuming_loads())) {
        assert(subsume_loads, "must make progress");
        subsume_loads = false;
        env->report_failure(C.failure_reason());
        continue;  // retry
      }
      if (C.failure_reason_is(retry_no_escape_analysis())) {
        assert(do_escape_analysis, "must make progress");
        do_escape_analysis = false;
        env->report_failure(C.failure_reason());
        continue;  // retry
      }
      if (C.has_boxed_value()) {
        // Recompile without boxing elimination, regardless of the failure reason.
        assert(eliminate_boxing, "must make progress");
        eliminate_boxing = false;
        env->report_failure(C.failure_reason());
        continue;  // retry
      }
      // Pass any other failure reason up to the ciEnv.
      // Note that serious, irreversible failures are already logged
      // on the ciEnv via env->record_method_not_compilable().
      env->record_failure(C.failure_reason());
    }
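    // With StressRecompilation, force extra recompilations with subsuming
    // loads and then escape analysis disabled, to exercise the retry paths.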
    if (StressRecompilation) {
      if (subsume_loads) {
        subsume_loads = false;
        continue;  // retry
      }
      if (do_escape_analysis) {
        do_escape_analysis = false;
        continue;  // retry
      }
    }

    // print inlining for last compilation only
    C.dump_print_inlining();

    // No retry; just break the loop.
    break;
  }
}

void C2Compiler::print_timers() {
  Compile::print_timers();
}

bool C2Compiler::is_intrinsic_available(methodHandle method, methodHandle compilation_context) {
  // Assume a non-virtual dispatch. A virtual dispatch is
  // possible for only a limited set of available intrinsics whereas
  // a non-virtual dispatch is possible for all available intrinsics.
  return is_intrinsic_supported(method, false) &&
         !is_intrinsic_disabled_by_flag(method, compilation_context);
}

bool C2Compiler::is_intrinsic_supported(methodHandle method, bool is_virtual) {
  vmIntrinsics::ID id = method->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::LAST_COMPILER_INLINE) {
    return false;
  }

  // Only the Object.hashCode and Object.clone intrinsics also implement a
  // virtual dispatch, because calls to these methods are expensive and both
  // methods are frequently overridden. All other intrinsics implement only a
  // non-virtual dispatch.
  if (is_virtual) {
    switch (id) {
    case vmIntrinsics::_hashCode:
    case vmIntrinsics::_clone:
      break;
    default:
      return false;
    }
  }

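  // Weed out intrinsics whose required match rules or runtime stub routines
  // are not available on this platform.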
  switch (id) {
  case vmIntrinsics::_compareTo:
    if (!Matcher::match_rule_supported(Op_StrComp)) return false;
    break;
  case vmIntrinsics::_equals:
    if (!Matcher::match_rule_supported(Op_StrEquals)) return false;
    break;
  case vmIntrinsics::_equalsC:
    if (!Matcher::match_rule_supported(Op_AryEq)) return false;
    break;
  case vmIntrinsics::_copyMemory:
    if (StubRoutines::unsafe_arraycopy() == NULL) return false;
    break;
  case vmIntrinsics::_encodeISOArray:
    if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false;
    break;
  case vmIntrinsics::_bitCount_i:
    if (!Matcher::match_rule_supported(Op_PopCountI)) return false;
    break;
  case vmIntrinsics::_bitCount_l:
    if (!Matcher::match_rule_supported(Op_PopCountL)) return false;
    break;
  case vmIntrinsics::_numberOfLeadingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false;
    break;
  case vmIntrinsics::_numberOfLeadingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false;
    break;
  case vmIntrinsics::_numberOfTrailingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false;
    break;
  case vmIntrinsics::_numberOfTrailingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false;
    break;
  case vmIntrinsics::_reverseBytes_c:
    if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
    break;
  case vmIntrinsics::_reverseBytes_s:
    if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false;
    break;
  case vmIntrinsics::_reverseBytes_i:
    if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false;
    break;
  case vmIntrinsics::_reverseBytes_l:
    if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false;
    break;
  case vmIntrinsics::_compareAndSwapObject:
#ifdef _LP64
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false;
#endif
    break;
  case vmIntrinsics::_compareAndSwapLong:
    if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false;
    break;
  case vmIntrinsics::_getAndAddInt:
    if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false;
    break;
  case vmIntrinsics::_getAndAddLong:
    if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false;
    break;
  case vmIntrinsics::_getAndSetInt:
    if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false;
    break;
  case vmIntrinsics::_getAndSetLong:
    if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false;
    break;
  case vmIntrinsics::_getAndSetObject:
#ifdef _LP64
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false;
    if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false;
    break;
#else
    if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false;
    break;
#endif
  case vmIntrinsics::_incrementExactI:
  case vmIntrinsics::_addExactI:
    if (!Matcher::match_rule_supported(Op_OverflowAddI)) return false;
    break;
  case vmIntrinsics::_incrementExactL:
  case vmIntrinsics::_addExactL:
    if (!Matcher::match_rule_supported(Op_OverflowAddL)) return false;
    break;
  case vmIntrinsics::_decrementExactI:
  case vmIntrinsics::_subtractExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false;
    break;
  case vmIntrinsics::_decrementExactL:
  case vmIntrinsics::_subtractExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false;
    break;
  case vmIntrinsics::_negateExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false;
    break;
  case vmIntrinsics::_negateExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false;
    break;
  case vmIntrinsics::_multiplyExactI:
    if (!Matcher::match_rule_supported(Op_OverflowMulI)) return false;
    break;
  case vmIntrinsics::_multiplyExactL:
    if (!Matcher::match_rule_supported(Op_OverflowMulL)) return false;
    break;
  case vmIntrinsics::_getCallerClass:
    if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return false;
    break;
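  // The intrinsics below have no additional platform prerequisites beyond the
  // checks above; they are reported as supported unconditionally.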
  case vmIntrinsics::_hashCode:
  case vmIntrinsics::_identityHashCode:
  case vmIntrinsics::_getClass:
  case vmIntrinsics::_dsin:
  case vmIntrinsics::_dcos:
  case vmIntrinsics::_dtan:
  case vmIntrinsics::_dabs:
  case vmIntrinsics::_datan2:
  case vmIntrinsics::_dsqrt:
  case vmIntrinsics::_dexp:
  case vmIntrinsics::_dlog:
  case vmIntrinsics::_dlog10:
  case vmIntrinsics::_dpow:
  case vmIntrinsics::_min:
  case vmIntrinsics::_max:
  case vmIntrinsics::_arraycopy:
  case vmIntrinsics::_indexOf:
  case vmIntrinsics::_getObject:
  case vmIntrinsics::_getBoolean:
  case vmIntrinsics::_getByte:
  case vmIntrinsics::_getShort:
  case vmIntrinsics::_getChar:
  case vmIntrinsics::_getInt:
  case vmIntrinsics::_getLong:
  case vmIntrinsics::_getFloat:
  case vmIntrinsics::_getDouble:
  case vmIntrinsics::_putObject:
  case vmIntrinsics::_putBoolean:
  case vmIntrinsics::_putByte:
  case vmIntrinsics::_putShort:
  case vmIntrinsics::_putChar:
  case vmIntrinsics::_putInt:
  case vmIntrinsics::_putLong:
  case vmIntrinsics::_putFloat:
  case vmIntrinsics::_putDouble:
  case vmIntrinsics::_getByte_raw:
  case vmIntrinsics::_getShort_raw:
  case vmIntrinsics::_getChar_raw:
  case vmIntrinsics::_getInt_raw:
  case vmIntrinsics::_getLong_raw:
  case vmIntrinsics::_getFloat_raw:
  case vmIntrinsics::_getDouble_raw:
  case vmIntrinsics::_getAddress_raw:
  case vmIntrinsics::_putByte_raw:
  case vmIntrinsics::_putShort_raw:
  case vmIntrinsics::_putChar_raw:
  case vmIntrinsics::_putInt_raw:
  case vmIntrinsics::_putLong_raw:
  case vmIntrinsics::_putFloat_raw:
  case vmIntrinsics::_putDouble_raw:
  case vmIntrinsics::_putAddress_raw:
  case vmIntrinsics::_getObjectVolatile:
  case vmIntrinsics::_getBooleanVolatile:
  case vmIntrinsics::_getByteVolatile:
  case vmIntrinsics::_getShortVolatile:
  case vmIntrinsics::_getCharVolatile:
  case vmIntrinsics::_getIntVolatile:
  case vmIntrinsics::_getLongVolatile:
  case vmIntrinsics::_getFloatVolatile:
  case vmIntrinsics::_getDoubleVolatile:
  case vmIntrinsics::_putObjectVolatile:
  case vmIntrinsics::_putBooleanVolatile:
  case vmIntrinsics::_putByteVolatile:
  case vmIntrinsics::_putShortVolatile:
  case vmIntrinsics::_putCharVolatile:
  case vmIntrinsics::_putIntVolatile:
  case vmIntrinsics::_putLongVolatile:
  case vmIntrinsics::_putFloatVolatile:
  case vmIntrinsics::_putDoubleVolatile:
  case vmIntrinsics::_getShortUnaligned:
  case vmIntrinsics::_getCharUnaligned:
  case vmIntrinsics::_getIntUnaligned:
  case vmIntrinsics::_getLongUnaligned:
  case vmIntrinsics::_putShortUnaligned:
  case vmIntrinsics::_putCharUnaligned:
  case vmIntrinsics::_putIntUnaligned:
  case vmIntrinsics::_putLongUnaligned:
  case vmIntrinsics::_compareAndSwapInt:
  case vmIntrinsics::_putOrderedObject:
  case vmIntrinsics::_putOrderedInt:
  case vmIntrinsics::_putOrderedLong:
  case vmIntrinsics::_loadFence:
  case vmIntrinsics::_storeFence:
  case vmIntrinsics::_fullFence:
  case vmIntrinsics::_currentThread:
  case vmIntrinsics::_isInterrupted:
#ifdef TRACE_HAVE_INTRINSICS
  case vmIntrinsics::_classID:
  case vmIntrinsics::_threadID:
  case vmIntrinsics::_counterTime:
#endif
  case vmIntrinsics::_currentTimeMillis:
  case vmIntrinsics::_nanoTime:
  case vmIntrinsics::_allocateInstance:
  case vmIntrinsics::_newArray:
  case vmIntrinsics::_getLength:
  case vmIntrinsics::_copyOf:
  case vmIntrinsics::_copyOfRange:
  case vmIntrinsics::_clone:
  case vmIntrinsics::_isAssignableFrom:
  case vmIntrinsics::_isInstance:
  case vmIntrinsics::_getModifiers:
  case vmIntrinsics::_isInterface:
  case vmIntrinsics::_isArray:
  case vmIntrinsics::_isPrimitive:
  case vmIntrinsics::_getSuperclass:
  case vmIntrinsics::_getClassAccessFlags:
  case vmIntrinsics::_floatToRawIntBits:
  case vmIntrinsics::_floatToIntBits:
  case vmIntrinsics::_intBitsToFloat:
  case vmIntrinsics::_doubleToRawLongBits:
  case vmIntrinsics::_doubleToLongBits:
  case vmIntrinsics::_longBitsToDouble:
  case vmIntrinsics::_Reference_get:
  case vmIntrinsics::_Class_cast:
  case vmIntrinsics::_aescrypt_encryptBlock:
  case vmIntrinsics::_aescrypt_decryptBlock:
  case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
  case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
  case vmIntrinsics::_sha_implCompress:
  case vmIntrinsics::_sha2_implCompress:
  case vmIntrinsics::_sha5_implCompress:
  case vmIntrinsics::_digestBase_implCompressMB:
  case vmIntrinsics::_multiplyToLen:
  case vmIntrinsics::_squareToLen:
  case vmIntrinsics::_mulAdd:
  case vmIntrinsics::_montgomeryMultiply:
  case vmIntrinsics::_montgomerySquare:
  case vmIntrinsics::_ghash_processBlocks:
  case vmIntrinsics::_updateCRC32:
  case vmIntrinsics::_updateBytesCRC32:
  case vmIntrinsics::_updateByteBufferCRC32:
  case vmIntrinsics::_updateBytesCRC32C:
  case vmIntrinsics::_updateDirectByteBufferCRC32C:
  case vmIntrinsics::_profileBoolean:
  case vmIntrinsics::_isCompileConstant:
    break;
  default:
    return false;
  }
  return true;
}

bool C2Compiler::is_intrinsic_disabled_by_flag(methodHandle method, methodHandle compilation_context) {
  vmIntrinsics::ID id = method->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  if (vmIntrinsics::is_disabled_by_flags(id)) {
    return true;
  }

  // Check if the intrinsic corresponding to 'method' has been disabled on
  // the command line by using the DisableIntrinsic flag (either globally
  // or on a per-method level, see src/share/vm/compiler/abstractCompiler.hpp
  // for details).
  // Usually, the compilation context is the caller of 'method'. The only
  // non-recursive case in which the compilation context is not the caller
  // of 'method' (but the method itself) is java.lang.ref.Reference::get.
  // For java.lang.ref.Reference::get, the intrinsic version is used
  // instead of the C2-compiled version so that the value in the referent
  // field can be registered by the G1 pre-barrier code. The intrinsified
  // version of Reference::get also adds a memory barrier to prevent
  // commoning reads from the referent field across a safepoint, since GC
  // can change the referent field's value. See Compile::Compile()
  // in src/share/vm/opto/compile.cpp for more details.
  ccstr disable_intr = NULL;
  if ((DisableIntrinsic[0] != '\0' && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
      (!compilation_context.is_null() &&
       CompilerOracle::has_option_value(compilation_context, "DisableIntrinsic", disable_intr) &&
       strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)
  ) {
    return true;
  }

  // -XX:-InlineNatives disables nearly all intrinsics except the ones listed in
  // the following switch statement.
  if (!InlineNatives) {
    switch (id) {
    case vmIntrinsics::_indexOf:
    case vmIntrinsics::_compareTo:
    case vmIntrinsics::_equals:
    case vmIntrinsics::_equalsC:
    case vmIntrinsics::_getAndAddInt:
    case vmIntrinsics::_getAndAddLong:
    case vmIntrinsics::_getAndSetInt:
    case vmIntrinsics::_getAndSetLong:
    case vmIntrinsics::_getAndSetObject:
    case vmIntrinsics::_loadFence:
    case vmIntrinsics::_storeFence:
    case vmIntrinsics::_fullFence:
    case vmIntrinsics::_Reference_get:
      break;
    default:
      return true;
    }
  }

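  // -XX:-InlineUnsafeOps disables only the fence and compare-and-swap
  // intrinsics listed below; all other intrinsics remain available.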
  if (!InlineUnsafeOps) {
    switch (id) {
    case vmIntrinsics::_loadFence:
    case vmIntrinsics::_storeFence:
    case vmIntrinsics::_fullFence:
    case vmIntrinsics::_compareAndSwapObject:
    case vmIntrinsics::_compareAndSwapLong:
    case vmIntrinsics::_compareAndSwapInt:
      return true;
    default:
      return false;
    }
  }

  return false;
}

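// Initial size of the code buffer for a C2 compilation when the code cache is
// segmented: space for the generated instructions, their relocation info, and
// the initial constant table.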
int C2Compiler::initial_code_buffer_size() {
  assert(SegmentedCodeCache, "Should be only used with a segmented code cache");
  return Compile::MAX_inst_size + Compile::MAX_locs_size + initial_const_capacity;
}