    for( int i= matcher._null_check_tests.size()-2; i>=0; i-=2 ) {
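      // _null_check_tests holds (test-projection, value) pairs, hence the
      // stride of two.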
      Node *proj = matcher._null_check_tests[i  ];
      Node *val  = matcher._null_check_tests[i+1];
      _bbs[proj->_idx]->implicit_null_check(this, proj, val, allowed_reasons);
      // The implicit_null_check will only perform the transformation
      // if the null branch is truly uncommon, *and* it leads to an
      // uncommon trap.  Combined with the too_many_traps guards
      // above, this prevents SEGV storms reported in 6366351,
      // by recompiling offending methods without this optimization.
    }
  }

#ifndef PRODUCT
  if (trace_opto_pipelining()) {
    tty->print("\n---- Start Local Scheduling ----\n");
  }
#endif

  // Schedule locally.  Right now a simple topological sort.
  // Later, do a real latency-aware scheduler.
  uint max_idx = C->unique();
  GrowableArray<int> ready_cnt(max_idx, max_idx, -1);
  visited.Clear();
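  // ready_cnt is indexed by node _idx; schedule_local uses it to count each
  // node's as-yet-unscheduled same-block inputs, so -1 marks entries that
  // have not been initialized for a block yet.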
  for (i = 0; i < _num_blocks; i++) {
    if (!_blocks[i]->schedule_local(this, matcher, ready_cnt, visited)) {
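      // Scheduling can fail when subsuming loads create impossible
      // anti-dependences; that case is retried without subsuming loads,
      // so only record a hard failure for any other reason.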
      if (!C->failure_reason_is(C2Compiler::retry_no_subsuming_loads())) {
        C->record_method_not_compilable("local schedule failed");
      }
      return;
    }
  }

  // If we inserted any instructions between a Call and its CatchNode,
  // clone the instructions on all paths below the Catch.
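  // (A Catch must follow its Call immediately in the final code, so any
  // code that was scheduled in between has to be duplicated into each
  // successor block instead.)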
  for( i=0; i < _num_blocks; i++ )
    _blocks[i]->call_catch_cleanup(_bbs);

#ifndef PRODUCT
  if (trace_opto_pipelining()) {
    tty->print("\n---- After GlobalCodeMotion ----\n");
    for (uint i = 0; i < _num_blocks; i++) {
      _blocks[i]->dump();