2329 // AddP ( base == top )
2330 //
2331 Node *base = addp->in(AddPNode::Base);
2332 if (base->uncast()->is_top()) { // The AddP case #3 and #6 and #9.
2333 base = addp->in(AddPNode::Address);
2334 while (base->is_AddP()) {
2335 // Case #6 (unsafe access) may have several chained AddP nodes.
2336 assert(base->in(AddPNode::Base)->uncast()->is_top(), "expected unsafe access address only");
2337 base = base->in(AddPNode::Address);
2338 }
2339 if (base->Opcode() == Op_CheckCastPP &&
2340 base->bottom_type()->isa_rawptr() &&
2341 _igvn->type(base->in(1))->isa_oopptr()) {
2342 base = base->in(1); // Case #9
2343 } else {
2344 Node* uncast_base = base->uncast();
2345 int opcode = uncast_base->Opcode();
2346 assert(opcode == Op_ConP || opcode == Op_ThreadLocal ||
2347 opcode == Op_CastX2P || uncast_base->is_DecodeNarrowPtr() ||
2348 (uncast_base->is_Mem() && (uncast_base->bottom_type()->isa_rawptr() != NULL)) ||
2349 (uncast_base->is_Proj() && uncast_base->in(0)->is_Allocate()) ||
2350 BarrierSet::barrier_set()->barrier_set_c2()->escape_is_barrier_node(uncast_base), "sanity");
2351 }
2352 }
2353 return base;
2354 }
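
When AddPNode::Base is top, get_addp_base follows the Address input through any chained AddP nodes (case #6, unsafe accesses) until it reaches the real base, peels the raw-pointer CheckCastPP of case #9, and otherwise only sanity-checks that the uncast base is one of the node kinds that may legitimately carry a raw address, including GC barrier nodes reported through BarrierSetC2::escape_is_barrier_node. Below is a minimal standalone sketch of that chain walk, not HotSpot code: ToyNode, toy_get_addp_base and the use of a null base as a stand-in for top are made-up names for illustration, and the case #9 handling and the whitelist assert are omitted.

#include <cassert>
#include <cstdio>

struct ToyNode {
  const char* tag;      // e.g. "AddP", "CastX2P", "ConP"
  ToyNode*    base;     // AddPNode::Base analogue; nullptr stands in for top
  ToyNode*    address;  // AddPNode::Address analogue
};

static bool toy_is_addp(const ToyNode* n) { return n->tag[0] == 'A'; }  // crude tag check, sketch only

// Follow the Address chain to the real base when Base is "top" (cases #3/#6).
static ToyNode* toy_get_addp_base(ToyNode* addp) {
  ToyNode* base = addp->base;
  if (base == nullptr) {                 // base == top: raw/unsafe address
    base = addp->address;
    while (toy_is_addp(base)) {          // unsafe access may chain several AddPs
      assert(base->base == nullptr && "expected unsafe access address only");
      base = base->address;
    }
  }
  return base;
}

int main() {
  ToyNode cast = {"CastX2P", nullptr, nullptr};            // raw address source
  ToyNode a1   = {"AddP",    nullptr, &cast};              // first offset
  ToyNode a2   = {"AddP",    nullptr, &a1};                // chained second offset
  std::printf("base: %s\n", toy_get_addp_base(&a2)->tag);  // prints "base: CastX2P"
  return 0;
}
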
2355
2356 Node* ConnectionGraph::find_second_addp(Node* addp, Node* n) {
2357 assert(addp->is_AddP() && addp->outcnt() > 0, "Don't process dead nodes");
2358 Node* addp2 = addp->raw_out(0);
2359 if (addp->outcnt() == 1 && addp2->is_AddP() &&
2360 addp2->in(AddPNode::Base) == n &&
2361 addp2->in(AddPNode::Address) == addp) {
2362 assert(addp->in(AddPNode::Base) == n, "expecting the same base");
2363 //
2364 // Find the array's offset to push it on the worklist first and,
2365 // as a result, process the array's element offset first (pushed second)
2366 // to avoid a CastPP for the array's offset.
2367 // Otherwise the inserted CastPP (LocalVar) will point to what
2368 // the AddP (Field) points to, which would be wrong since
2369 // the algorithm expects the CastPP to point to the same object
2370 // as the AddP's base CheckCastPP (LocalVar).
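
The condition checked just above this comment matches a two-AddP address chain: addp's only user must itself be an AddP that reuses the same base n and consumes addp through its Address input; that second AddP is what gets returned so the caller can queue both nodes. A minimal standalone sketch of the structural check, again with toy types rather than HotSpot's Node and def-use API (TNode, user and outcnt are hypothetical stand-ins for the real node class, raw_out(0) and outcnt()):

struct TNode {
  bool   is_addp;
  TNode* base;     // AddPNode::Base analogue
  TNode* address;  // AddPNode::Address analogue
  TNode* user;     // sole user, stand-in for raw_out(0)
  int    outcnt;   // number of users
};

// Return the chained second AddP, or nullptr when the shape does not match.
static TNode* toy_find_second_addp(TNode* addp, TNode* n) {
  if (addp->outcnt != 1 || addp->user == nullptr) return nullptr;
  TNode* addp2 = addp->user;
  if (addp2->is_addp && addp2->base == n && addp2->address == addp) {
    return addp2;   // caller queues addp2 and addp on alloc_worklist
  }
  return nullptr;
}

int main() {
  TNode base  = {false, nullptr, nullptr, nullptr, 2};
  TNode inner = {true,  &base,   &base,   nullptr, 1};
  TNode outer = {true,  &base,   &inner,  nullptr, 0};
  inner.user = &outer;
  return toy_find_second_addp(&inner, &base) == &outer ? 0 : 1;  // matches: exit 0
}
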
3068 }
3069 }
3070 }
3071 } else if (n->is_AddP()) {
3072 JavaObjectNode* jobj = unique_java_object(get_addp_base(n));
3073 if (jobj == NULL || jobj == phantom_obj) {
3074 #ifdef ASSERT
3075 ptnode_adr(get_addp_base(n)->_idx)->dump();
3076 ptnode_adr(n->_idx)->dump();
3077 assert(jobj != NULL && jobj != phantom_obj, "escaped allocation");
3078 #endif
3079 _compile->record_failure(C2Compiler::retry_no_escape_analysis());
3080 return;
3081 }
3082 Node *base = get_map(jobj->idx()); // CheckCastPP node
3083 if (!split_AddP(n, base)) continue; // wrong type from dead path
3084 } else if (n->is_Phi() ||
3085 n->is_CheckCastPP() ||
3086 n->is_EncodeP() ||
3087 n->is_DecodeN() ||
3088 BarrierSet::barrier_set()->barrier_set_c2()->escape_is_barrier_node(n) ||
3089 (n->is_ConstraintCast() && n->Opcode() == Op_CastPP)) {
3090 if (visited.test_set(n->_idx)) {
3091 assert(n->is_Phi(), "loops only through Phi's");
3092 continue; // already processed
3093 }
3094 JavaObjectNode* jobj = unique_java_object(n);
3095 if (jobj == NULL || jobj == phantom_obj) {
3096 #ifdef ASSERT
3097 ptnode_adr(n->_idx)->dump();
3098 assert(jobj != NULL && jobj != phantom_obj, "escaped allocation");
3099 #endif
3100 _compile->record_failure(C2Compiler::retry_no_escape_analysis());
3101 return;
3102 } else {
3103 Node *val = get_map(jobj->idx()); // CheckCastPP node
3104 TypeNode *tn = n->as_Type();
3105 const TypeOopPtr* tinst = igvn->type(val)->isa_oopptr();
3106 assert(tinst != NULL && tinst->is_known_instance() &&
3107 tinst->instance_id() == jobj->idx(), "instance type expected.");
3108
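
Both the AddP branch and the Phi/CheckCastPP/CastPP branch above follow the same control pattern: look up the unique JavaObject behind the node; if there is none (NULL or phantom_obj, i.e. the reference escaped or is unknown), dump the points-to nodes under ASSERT and record a failure so the compilation is retried without escape analysis; otherwise fetch the CheckCastPP created for that unique instance via get_map(jobj->idx()) and narrow the node's type to the known-instance type, with visited.test_set() ensuring a node is revisited only through Phi cycles. A minimal standalone sketch of that pattern, not HotSpot code (VNode, toy_split_pass and the std containers stand in for the real node graph, VectorSet and worklists):

#include <optional>
#include <unordered_set>
#include <vector>

struct VNode {
  int id;
  std::optional<int> unique_obj;   // unique_java_object() analogue: empty => escaped/phantom
  std::vector<VNode*> uses;        // users to keep processing; cycles possible via Phi-like nodes
};

// Returns false to signal "record_failure and retry without escape analysis".
static bool toy_split_pass(VNode* start) {
  std::unordered_set<int> visited;           // VectorSet analogue
  std::vector<VNode*> worklist{start};
  while (!worklist.empty()) {
    VNode* n = worklist.back();
    worklist.pop_back();
    if (!visited.insert(n->id).second) {
      continue;                              // already processed (loops only through Phis)
    }
    if (!n->unique_obj.has_value()) {
      return false;                          // no unique object: bail out of the whole pass
    }
    // ...retype n to the known-instance type of *n->unique_obj here...
    for (VNode* u : n->uses) {
      worklist.push_back(u);
    }
  }
  return true;
}

int main() {
  VNode a{1, 42, {}};
  VNode b{2, 42, {&a}};
  a.uses.push_back(&b);                      // cycle a <-> b, terminated by 'visited'
  return toy_split_pass(&a) ? 0 : 1;         // succeeds: exit 0
}
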
3139 // push allocation's users on appropriate worklist
3140 for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3141 Node *use = n->fast_out(i);
3142 if (use->is_Mem() && use->in(MemNode::Address) == n) {
3143 // Load/store to instance's field
3144 memnode_worklist.append_if_missing(use);
3145 } else if (use->is_MemBar()) {
3146 if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
3147 memnode_worklist.append_if_missing(use);
3148 }
3149 } else if (use->is_AddP() && use->outcnt() > 0) { // No dead nodes
3150 Node* addp2 = find_second_addp(use, n);
3151 if (addp2 != NULL) {
3152 alloc_worklist.append_if_missing(addp2);
3153 }
3154 alloc_worklist.append_if_missing(use);
3155 } else if (use->is_Phi() ||
3156 use->is_CheckCastPP() ||
3157 use->is_EncodeNarrowPtr() ||
3158 use->is_DecodeNarrowPtr() ||
3159 BarrierSet::barrier_set()->barrier_set_c2()->escape_is_barrier_node(use) ||
3160 (use->is_ConstraintCast() && use->Opcode() == Op_CastPP)) {
3161 alloc_worklist.append_if_missing(use);
3162 #ifdef ASSERT
3163 } else if (use->is_Mem()) {
3164 assert(use->in(MemNode::Address) != n, "EA: missing allocation reference path");
3165 } else if (use->is_MergeMem()) {
3166 assert(_mergemem_worklist.contains(use->as_MergeMem()), "EA: missing MergeMem node in the worklist");
3167 } else if (use->is_SafePoint()) {
3168 // Look for MergeMem nodes for calls which reference unique allocation
3169 // (through CheckCastPP nodes) even for debug info.
3170 Node* m = use->in(TypeFunc::Memory);
3171 if (m->is_MergeMem()) {
3172 assert(_mergemem_worklist.contains(m->as_MergeMem()), "EA: missing MergeMem node in the worklist");
3173 }
3174 } else if (use->Opcode() == Op_EncodeISOArray) {
3175 if (use->in(MemNode::Memory) == n || use->in(3) == n) {
3176 // EncodeISOArray overwrites destination array
3177 memnode_worklist.append_if_missing(use);
3178 }
3179 } else {
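
This loop walks every user of the allocation's CheckCastPP and sorts it onto the worklist that will later rewire it: loads and stores addressing the instance, plus MemBars on its memory, go to memnode_worklist, while AddPs (including the second AddP of an array address found by find_second_addp) and pointer-copying nodes such as Phi, CheckCastPP, the narrow-oop encode/decode nodes, CastPP and GC barrier nodes go back onto alloc_worklist; the ASSERT-only branches merely verify that memory users were already queued elsewhere. append_if_missing keeps each node queued at most once. A minimal standalone sketch of that dedup-and-classify idiom, not HotSpot code (Worklist, UserKind and classify_user are made-up names):

#include <cassert>
#include <unordered_set>
#include <vector>

// Worklist that ignores duplicates, mirroring the append_if_missing calls above.
struct Worklist {
  std::vector<int> items;                    // node indices, in insertion order
  std::unordered_set<int> seen;
  void append_if_missing(int idx) {
    if (seen.insert(idx).second) items.push_back(idx);
  }
};

enum class UserKind { Mem, MemBar, AddP, PtrCopy, Other };

// Route a user of the instance onto the worklist that will rewire it later.
static void classify_user(UserKind k, int idx, Worklist& mem_wl, Worklist& alloc_wl) {
  switch (k) {
    case UserKind::Mem:                      // load/store to the instance's field
    case UserKind::MemBar:
      mem_wl.append_if_missing(idx);
      break;
    case UserKind::AddP:                     // field/array address based on the instance
    case UserKind::PtrCopy:                  // Phi/CheckCastPP/CastPP/narrow-oop codecs/...
      alloc_wl.append_if_missing(idx);
      break;
    default:
      break;                                 // remaining kinds are only sanity-checked in debug builds
  }
}

int main() {
  Worklist mem_wl, alloc_wl;
  classify_user(UserKind::Mem,  10, mem_wl, alloc_wl);
  classify_user(UserKind::AddP, 11, mem_wl, alloc_wl);
  classify_user(UserKind::AddP, 11, mem_wl, alloc_wl);   // duplicate is dropped
  assert(mem_wl.items.size() == 1 && alloc_wl.items.size() == 1);
  return 0;
}
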