1728 __ beq(CCR0, set_out_args);
1729 __ lwa(tmp2_reg, arrayOopDesc::length_offset_in_bytes(), reg.first()->as_Register());
1730 __ addi(tmp_reg, reg.first()->as_Register(), arrayOopDesc::base_offset_in_bytes(in_elem_type));
1731 __ bind(set_out_args);
1732 move_ptr(masm, tmp, body_arg, r_caller_sp, /*unused*/ R0);
1733 move_ptr(masm, tmp2, length_arg, r_caller_sp, /*unused*/ R0); // Same as move32_64 on PPC64.
1734 }
1735
// Under -XX:+VerifyOops, emit checks that every reference-typed argument of
// 'method' currently holds a valid oop. Register-passed oops are checked in
// place; stack-passed oops are first loaded into a scratch register.
1736 static void verify_oop_args(MacroAssembler* masm,
1737 const methodHandle& method,
1738 const BasicType* sig_bt,
1739 const VMRegPair* regs) {
1740 Register temp_reg = R19_method; // not part of any compiled calling seq
1741 if (VerifyOops) {
1742 for (int i = 0; i < method->size_of_parameters(); i++) {
1743 if (is_reference_type(sig_bt[i])) {
1744 VMReg r = regs[i].first();
1745 assert(r->is_valid(), "bad oop arg");
1746 if (r->is_stack()) {
1747 __ ld(temp_reg, reg2offset(r), R1_SP);
// Pass FILE_AND_LINE so a VerifyOops failure reports the emission site,
// consistent with every other verify_oop call in this file.
1748 __ verify_oop(temp_reg, FILE_AND_LINE);
1749 } else {
1750 __ verify_oop(r->as_Register(), FILE_AND_LINE);
1751 }
1752 }
1753 }
1754 }
1755 }
1756
1757 static void gen_special_dispatch(MacroAssembler* masm,
1758 const methodHandle& method,
1759 const BasicType* sig_bt,
1760 const VMRegPair* regs) {
1761 verify_oop_args(masm, method, sig_bt, regs);
1762 vmIntrinsics::ID iid = method->intrinsic_id();
1763
1764 // Now write the args into the outgoing interpreter space
1765 bool has_receiver = false;
1766 Register receiver_reg = noreg;
1767 int member_arg_pos = -1;
1768 Register member_reg = noreg;
1769 int ref_kind = MethodHandles::signature_polymorphic_intrinsic_ref_kind(iid);
1770 if (ref_kind != 0) {
2096
2097 Register r_carg1_jnienv = noreg;
2098 Register r_carg2_classorobject = noreg;
2099 if (!is_critical_native) {
2100 r_carg1_jnienv = out_regs[0].first()->as_Register();
2101 r_carg2_classorobject = out_regs[1].first()->as_Register();
2102 }
2103
2104
2105 // Generate the Unverified Entry Point (UEP).
2106 // --------------------------------------------------------------------------
2107 assert(start_pc == (intptr_t)__ pc(), "uep must be at start");
2108
2109 // Check ic: object class == cached class?
2110 if (!method_is_static) {
2111 Register ic = as_Register(Matcher::inline_cache_reg_encode());
2112 Register receiver_klass = r_temp_1;
2113
2114 __ cmpdi(CCR0, R3_ARG1, 0);
2115 __ beq(CCR0, ic_miss);
2116 __ verify_oop(R3_ARG1);
2117 __ load_klass(receiver_klass, R3_ARG1);
2118
2119 __ cmpd(CCR0, receiver_klass, ic);
2120 __ bne(CCR0, ic_miss);
2121 }
2122
2123
2124 // Generate the Verified Entry Point (VEP).
2125 // --------------------------------------------------------------------------
2126 vep_start_pc = (intptr_t)__ pc();
2127
2128 if (UseRTMLocking) {
2129 // Abort RTM transaction before calling JNI
2130 // because critical section can be large and
2131 // abort anyway. Also nmethod can be deoptimized.
2132 __ tabort_();
2133 }
2134
2135 if (VM_Version::supports_fast_class_init_checks() && method->needs_clinit_barrier()) {
2136 Label L_skip_barrier;
|
1728 __ beq(CCR0, set_out_args);
1729 __ lwa(tmp2_reg, arrayOopDesc::length_offset_in_bytes(), reg.first()->as_Register());
1730 __ addi(tmp_reg, reg.first()->as_Register(), arrayOopDesc::base_offset_in_bytes(in_elem_type));
1731 __ bind(set_out_args);
1732 move_ptr(masm, tmp, body_arg, r_caller_sp, /*unused*/ R0);
1733 move_ptr(masm, tmp2, length_arg, r_caller_sp, /*unused*/ R0); // Same as move32_64 on PPC64.
1734 }
1735
// Under -XX:+VerifyOops, emit checks that every reference-typed argument of
// 'method' currently holds a valid oop, whether it was passed in a register
// or on the stack.
1736 static void verify_oop_args(MacroAssembler* masm,
1737 const methodHandle& method,
1738 const BasicType* sig_bt,
1739 const VMRegPair* regs) {
// Scratch register for loading stack-passed oops; per the comment it is not
// part of any compiled calling sequence, so clobbering it here is safe.
1740 Register temp_reg = R19_method; // not part of any compiled calling seq
1741 if (VerifyOops) {
1742 for (int i = 0; i < method->size_of_parameters(); i++) {
1743 if (is_reference_type(sig_bt[i])) {
1744 VMReg r = regs[i].first();
1745 assert(r->is_valid(), "bad oop arg");
1746 if (r->is_stack()) {
// Stack-passed argument: load it into the scratch register before checking.
// FILE_AND_LINE tags the check with this emission site for diagnostics.
1747 __ ld(temp_reg, reg2offset(r), R1_SP);
1748 __ verify_oop(temp_reg, FILE_AND_LINE);
1749 } else {
// Register-passed argument: check it in place.
1750 __ verify_oop(r->as_Register(), FILE_AND_LINE);
1751 }
1752 }
1753 }
1754 }
1755 }
1756
1757 static void gen_special_dispatch(MacroAssembler* masm,
1758 const methodHandle& method,
1759 const BasicType* sig_bt,
1760 const VMRegPair* regs) {
1761 verify_oop_args(masm, method, sig_bt, regs);
1762 vmIntrinsics::ID iid = method->intrinsic_id();
1763
1764 // Now write the args into the outgoing interpreter space
1765 bool has_receiver = false;
1766 Register receiver_reg = noreg;
1767 int member_arg_pos = -1;
1768 Register member_reg = noreg;
1769 int ref_kind = MethodHandles::signature_polymorphic_intrinsic_ref_kind(iid);
1770 if (ref_kind != 0) {
2096
2097 Register r_carg1_jnienv = noreg;
2098 Register r_carg2_classorobject = noreg;
2099 if (!is_critical_native) {
2100 r_carg1_jnienv = out_regs[0].first()->as_Register();
2101 r_carg2_classorobject = out_regs[1].first()->as_Register();
2102 }
2103
2104
2105 // Generate the Unverified Entry Point (UEP).
2106 // --------------------------------------------------------------------------
2107 assert(start_pc == (intptr_t)__ pc(), "uep must be at start");
2108
2109 // Check ic: object class == cached class?
2110 if (!method_is_static) {
2111 Register ic = as_Register(Matcher::inline_cache_reg_encode());
2112 Register receiver_klass = r_temp_1;
2113
2114 __ cmpdi(CCR0, R3_ARG1, 0);
2115 __ beq(CCR0, ic_miss);
2116 __ verify_oop(R3_ARG1, FILE_AND_LINE);
2117 __ load_klass(receiver_klass, R3_ARG1);
2118
2119 __ cmpd(CCR0, receiver_klass, ic);
2120 __ bne(CCR0, ic_miss);
2121 }
2122
2123
2124 // Generate the Verified Entry Point (VEP).
2125 // --------------------------------------------------------------------------
2126 vep_start_pc = (intptr_t)__ pc();
2127
2128 if (UseRTMLocking) {
2129 // Abort RTM transaction before calling JNI
2130 // because critical section can be large and
2131 // abort anyway. Also nmethod can be deoptimized.
2132 __ tabort_();
2133 }
2134
2135 if (VM_Version::supports_fast_class_init_checks() && method->needs_clinit_barrier()) {
2136 Label L_skip_barrier;
|