// Each call node below reports the byte offset from the start of the call
// to the point where the return address will point.
// The "return address" is the address of the call instruction, plus 8.

int MachCallStaticJavaNode::ret_addr_offset() {
  int offset = NativeCall::instruction_size;  // call; delay slot
  if (_method_handle_invoke)
    offset += 4;  // restore SP
  return offset;
}

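// A minimal sketch (illustrative only, not part of the port) of the same
// computation with SPARC's constants folded in; it assumes
// NativeCall::instruction_size == 8 (the call plus its delay slot) and one
// extra 4-byte instruction on the method-handle path to restore SP.
static int sketch_static_ret_addr_offset(bool method_handle_invoke) {
  int offset = 8;   // call; delay slot
  if (method_handle_invoke) {
    offset += 4;    // restore SP
  }
  return offset;    // 8 for a plain static call, 12 for a MH invoke
}
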
int MachCallDynamicJavaNode::ret_addr_offset() {
  int vtable_index = this->_vtable_index;
  if (vtable_index < 0) {
    // must be invalid_vtable_index, not nonvirtual_vtable_index
    assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
    return (NativeMovConstReg::instruction_size +
            NativeCall::instruction_size);  // sethi; setlo; call; delay slot
  } else {
    assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
    int entry_offset = in_bytes(Klass::vtable_start_offset()) + vtable_index*vtableEntry::size_in_bytes();
    int v_off = entry_offset + vtableEntry::method_offset_in_bytes();
    int klass_load_size;
    if (UseCompressedClassPointers) {
      assert(Universe::heap() != NULL, "java heap should be initialized");
      klass_load_size = MacroAssembler::instr_size_for_decode_klass_not_null() + 1*BytesPerInstWord;
    } else {
      klass_load_size = 1*BytesPerInstWord;
    }
    if (Assembler::is_simm13(v_off)) {
      return klass_load_size +
             (2*BytesPerInstWord +            // ld_ptr, ld_ptr
              NativeCall::instruction_size);  // call; delay slot
    } else {
      return klass_load_size +
             (4*BytesPerInstWord +            // set_hi, set, ld_ptr, ld_ptr
              NativeCall::instruction_size);  // call; delay slot
    }
  }
}

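// A worked example of the vtable arithmetic above (all concrete numbers are
// illustrative assumptions): a vtableEntry holds a single Method*, so on a
// 64-bit VM vtableEntry::size_in_bytes() is 8 and method_offset_in_bytes()
// is 0. With a hypothetical vtable_start_offset of 200 bytes and
// vtable_index == 5:
//
//   entry_offset = 200 + 5*8 = 240
//   v_off        = 240 + 0   = 240
//
// 240 fits a signed 13-bit immediate (-4096 <= v_off < 4096), so the short
// form applies. Folding in BytesPerInstWord == 4 and
// NativeCall::instruction_size == 8 gives this sketch of the else-branch:
static int sketch_dynamic_vtable_ret_addr_offset(int v_off, int klass_load_size) {
  int load_words = (v_off >= -4096 && v_off < 4096) ? 2   // ld_ptr, ld_ptr
                                                    : 4;  // sethi, or, ld_ptr, ld_ptr
  return klass_load_size + load_words*4 + 8;              // + call; delay slot
}
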
// --- Dynamic-call encoding, later in the same file; the enclosing
// "if (vtable_index < 0) {" opens before this excerpt. ---
    // must be invalid_vtable_index, not nonvirtual_vtable_index
    assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
    Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
    assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
    assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
    __ ic_call((address)$meth$$method, /*emit_delay=*/true, resolved_method_index(cbuf));
  } else {
    assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
    // Just go thru the vtable.
    // Get the receiver klass (the receiver was already checked for non-null).
    // If we end up going thru a c2i adapter, the interpreter expects the method in G5.
    int off = __ offset();
    __ load_klass(O0, G3_scratch);
    int klass_load_size;
    if (UseCompressedClassPointers) {
      assert(Universe::heap() != NULL, "java heap should be initialized");
      klass_load_size = MacroAssembler::instr_size_for_decode_klass_not_null() + 1*BytesPerInstWord;
    } else {
      klass_load_size = 1*BytesPerInstWord;
    }
    int entry_offset = in_bytes(Klass::vtable_start_offset()) + vtable_index*vtableEntry::size_in_bytes();
    int v_off = entry_offset + vtableEntry::method_offset_in_bytes();
    if (Assembler::is_simm13(v_off)) {
      __ ld_ptr(G3, v_off, G5_method);
    } else {
      // v_off does not fit a 13-bit immediate: build it in a register with
      // a sethi/or pair, then load through it.
      __ Assembler::sethi(v_off & ~0x3ff, G5_method);
      __ or3(G5_method, v_off & 0x3ff, G5_method);
      // ld_ptr, set_hi, set
      assert(__ offset() - off == klass_load_size + 2*BytesPerInstWord,
             "Unexpected instruction size(s)");
      __ ld_ptr(G3, G5_method, G5_method);
    }
    // NOTE: for vtable dispatches, the vtable entry will never be null.
    // However, it may very well end up in handle_wrong_method if the
    // method is abstract for the particular class.
    __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch);
    // Jump to the target (either compiled code or the c2i adapter).
    __ jmpl(G3_scratch, G0, O7);
    __ delayed()->nop();
  }
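
// Sketch of the sethi/or split used in the long-offset path above (checkable
// arithmetic, not VM code): sethi materializes bits [31:10] of its operand
// and or3 supplies the low 10 bits, so the pair reconstructs v_off exactly.
static int sketch_sethi_or(int v_off) {
  int hi = v_off & ~0x3ff;  // sethi(v_off & ~0x3ff, reg): high 22 bits
  int lo = v_off &  0x3ff;  // or3(reg, v_off & 0x3ff, reg): low 10 bits
  return hi | lo;           // == v_off
}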