src/share/vm/opto/compile.cpp
rev 7539 : 8011858: Use Compile::live_nodes() instead of Compile::unique() in appropriate places
Reviewed-by: kvn, vlivanov
Contributed-by: vlad.ureche@gmail.com
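
Context for the change: Compile::unique() counts every node index the compiler has ever handed out, while Compile::live_nodes() subtracts the nodes destructed since then, so it is the tighter bound when sizing worklists and stacks. A minimal sketch of the relationship, using a hypothetical stand-in type rather than the real compile.hpp accessors:

    #include <cassert>

    // Hypothetical stand-in for the two Compile counters; the real
    // accessors live in compile.hpp and may differ in detail.
    struct NodeCounters {
      unsigned _unique = 0;            // node indices ever allocated
      unsigned _dead_node_count = 0;   // nodes destructed since then

      unsigned unique() const { return _unique; }
      unsigned live_nodes() const {
        assert(_dead_node_count <= _unique && "more dead nodes than allocated");
        return _unique - _dead_node_count;
      }
    };

After aggressive optimization the dead-node count can be a large share of unique(), so buffers sized from unique() overshoot the live graph; that is the motivation for both hunks below.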


Old (only these lines differ from the new version below; the surrounding context is identical):

 330   int estimated_worklist_size = unique();

3215   // Allocate stack of size C->unique()/2 to avoid frequent realloc
3216   Node_Stack nstack(unique() >> 1);


New:
 310       // reinsert into table
 311       initial_gvn()->hash_find_insert(use);
 312     }
 313     record_for_igvn(use);
 314     i -= uses_found;    // we deleted 1 or more copies of this edge
 315   }
 316 }
 317 
 318 
 319 static inline bool not_a_node(const Node* n) {
 320   if (n == NULL)                   return true;
 321   if (((intptr_t)n & 1) != 0)      return true;  // uninitialized, etc.
 322   if (*(address*)n == badAddress)  return true;  // killed by Node::destruct
 323   return false;
 324 }
 325 
 326 // Identify all nodes that are reachable from below, i.e. useful.
 327 // Use a breadth-first pass that records state in a Unique_Node_List;
 328 // recursive traversal is slower.
 329 void Compile::identify_useful_nodes(Unique_Node_List &useful) {
 330   int estimated_worklist_size = live_nodes();
 331   useful.map( estimated_worklist_size, NULL );  // preallocate space
 332 
 333   // Initialize worklist
 334   if (root() != NULL)     { useful.push(root()); }
 335   // If 'top' is cached, declare it useful to preserve the cached node
 336   if( cached_top_node() ) { useful.push(cached_top_node()); }
 337 
 338   // Push all useful nodes onto the list, breadth-first
 339   for( uint next = 0; next < useful.size(); ++next ) {
 340     assert( next < unique(), "Unique useful nodes < total nodes");
 341     Node *n  = useful.at(next);
 342     uint max = n->len();
 343     for( uint i = 0; i < max; ++i ) {
 344       Node *m = n->in(i);
 345       if (not_a_node(m))  continue;
 346       useful.push(m);
 347     }
 348   }
 349 }
 350 
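
A note on the not_a_node() filter above: it rejects the three kinds of values an input slot can hold during this phase: NULL, an odd "pointer" (Nodes are word-aligned, so a set low bit marks a sentinel or uninitialized slot), and a node whose first word was overwritten with badAddress by Node::destruct. A self-contained sketch of the same tagged-pointer idea, with hypothetical stand-ins for HotSpot's Node and badAddress:

    #include <cstdint>

    // Hypothetical stand-ins: a node whose first word is a vtable-like
    // pointer, and a poison value written over it on destruction.
    struct FakeNode { void* first_word; };
    static void* const kBadAddress = reinterpret_cast<void*>(-1);

    static inline bool looks_dead(const FakeNode* n) {
      if (n == nullptr)                                  return true;
      if ((reinterpret_cast<std::intptr_t>(n) & 1) != 0) return true;  // misaligned: sentinel value
      if (n->first_word == kBadAddress)                  return true;  // poisoned on destruction
      return false;
    }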

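identify_useful_nodes() itself is a breadth-first reachability pass in which the Unique_Node_List plays both roles: it dedups pushes and serves as the queue, so the index `next` simply chases the list's growing tail. A self-contained sketch of the pattern, with illustrative stand-ins rather than HotSpot's types:

    #include <cstddef>
    #include <unordered_set>
    #include <vector>

    struct GraphNode {
      std::vector<GraphNode*> in;   // input edges; slots may be null
    };

    // Push-once list standing in for Unique_Node_List: push() ignores
    // nodes already seen, and the vector doubles as the BFS queue.
    struct UniqueList {
      std::vector<GraphNode*> order;
      std::unordered_set<GraphNode*> seen;
      void push(GraphNode* n) { if (seen.insert(n).second) order.push_back(n); }
    };

    // Everything reachable from the roots through input edges is useful;
    // null inputs are skipped the way not_a_node() filters them above.
    void identify_useful(const std::vector<GraphNode*>& roots, UniqueList& useful) {
      for (GraphNode* r : roots)
        if (r != nullptr) useful.push(r);
      for (std::size_t next = 0; next < useful.order.size(); ++next) {
        GraphNode* n = useful.order[next];
        for (GraphNode* m : n->in) {
          if (m == nullptr) continue;
          useful.push(m);
        }
      }
    }

The real pass also preallocates the list to live_nodes() entries (the useful.map(...) call), which is exactly where the estimate switched away from unique().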

3195 bool Compile::final_graph_reshaping() {
3196   // An infinite loop may have been eliminated by the optimizer,
3197   // in which case the graph will be empty.
3198   if (root()->req() == 1) {
3199     record_method_not_compilable("trivial infinite loop");
3200     return true;
3201   }
3202 
3203   // Expensive nodes have their control input set to prevent the GVN
3204   // from freely commoning them. There's no GVN beyond this point so
3205   // no need to keep the control input. We want the expensive nodes to
3206   // be freely moved to the least frequent code path by gcm.
3207   assert(OptimizeExpensiveOps || expensive_count() == 0, "optimization off but list non-empty?");
3208   for (int i = 0; i < expensive_count(); i++) {
3209     _expensive_nodes->at(i)->set_req(0, NULL);
3210   }
3211 
3212   Final_Reshape_Counts frc;
3213 
3214   // Visit everybody reachable!
3215   // Allocate stack of size C->live_nodes()/2 to avoid frequent realloc
3216   Node_Stack nstack(live_nodes() >> 1);
3217   final_graph_reshaping_walk(nstack, root(), frc);
3218 
3219   // Check for unreachable (from below) code (i.e., infinite loops).
3220   for( uint i = 0; i < frc._tests.size(); i++ ) {
3221     MultiBranchNode *n = frc._tests[i]->as_MultiBranch();
3222     // Get number of CFG targets.
3223     // Note that PCTables include exception targets after calls.
3224     uint required_outcnt = n->required_outcnt();
3225     if (n->outcnt() != required_outcnt) {
3226       // Check for a few special cases.  Rethrow Nodes never take the
3227       // 'fall-thru' path, so the expected kid count is 1 less.
3228       if (n->is_PCTable() && n->in(0) && n->in(0)->in(0)) {
3229         if (n->in(0)->in(0)->is_Call()) {
3230           CallNode *call = n->in(0)->in(0)->as_Call();
3231           if (call->entry_point() == OptoRuntime::rethrow_stub()) {
3232             required_outcnt--;      // Rethrow always has 1 less kid
3233           } else if (call->req() > TypeFunc::Parms &&
3234                      call->is_CallDynamicJava()) {
3235             // Check for a null receiver. In such a case, the optimizer has
3236             // detected that the virtual call will always result in a null

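The second live_nodes() use sizes the iterative walk's stack up front: Node_Stack nstack(live_nodes() >> 1) reserves roughly half the live graph so final_graph_reshaping_walk() rarely grows the stack mid-traversal. A sketch of the same idea with a reserved vector as the stack (hypothetical types; the real Node_Stack also carries a per-entry input index, and the real walk marks visited nodes separately):

    #include <cstddef>
    #include <unordered_set>
    #include <vector>

    struct GraphNode { std::vector<GraphNode*> in; };

    // Iterative depth-first walk with a preallocated stack, mirroring the
    // shape of final_graph_reshaping_walk(); visit stands in for the
    // per-node reshaping work, live_node_count for C->live_nodes().
    template <typename Visit>
    void reshape_walk(GraphNode* root, std::size_t live_node_count, Visit visit) {
      std::vector<GraphNode*> stack;
      stack.reserve(live_node_count >> 1);   // avoid frequent reallocation
      std::unordered_set<GraphNode*> visited;
      if (root != nullptr) { stack.push_back(root); visited.insert(root); }
      while (!stack.empty()) {
        GraphNode* n = stack.back();
        stack.pop_back();
        visit(n);
        for (GraphNode* m : n->in)
          if (m != nullptr && visited.insert(m).second)
            stack.push_back(m);
      }
    }

Sizing from live_nodes() rather than unique() keeps the reservation proportional to the graph the walk will actually touch.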
