/*
 * Copyright (c) 2018, Red Hat, Inc. All rights reserved.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "c1/c1_LIRAssembler.hpp"
#include "macroAssembler_aarch64.hpp"
#include "shenandoahBarrierSetAssembler_aarch64.hpp"
#include "gc_implementation/shenandoah/shenandoahBarrierSet.hpp"
#include "gc_implementation/shenandoah/shenandoahBarrierSetC1.hpp"
#include "gc_implementation/shenandoah/shenandoahForwarding.hpp"
#include "gc_implementation/shenandoah/shenandoahHeap.hpp"
#include "gc_implementation/shenandoah/shenandoahRuntime.hpp"
#include "runtime/stubCodeGenerator.hpp"
#include "runtime/thread.hpp"

ShenandoahBarrierSetAssembler* ShenandoahBarrierSetAssembler::bsasm() {
  return ShenandoahBarrierSet::barrier_set()->bsasm();
}

#define __ masm->

void ShenandoahBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm, bool dest_uninitialized,
                                                       Register src, Register dst, Register count) {
  if ((ShenandoahSATBBarrier && !dest_uninitialized) || ShenandoahStoreValEnqueueBarrier || ShenandoahLoadRefBarrier) {

    Label done;

    // Avoid calling runtime if count == 0
    __ cbz(count, done);

    // Is GC active?
    Address gc_state(rthread, in_bytes(JavaThread::gc_state_offset()));
    __ ldrb(rscratch1, gc_state);
    if (ShenandoahSATBBarrier && dest_uninitialized) {
      __ tbz(rscratch1, ShenandoahHeap::HAS_FORWARDED_BITPOS, done);
    } else {
      __ mov(rscratch2, ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::MARKING);
      __ tst(rscratch1, rscratch2);
      __ br(Assembler::EQ, done);
    }

    __ push_call_clobbered_registers();
    if (UseCompressedOops) {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::arraycopy_barrier_narrow_oop_entry), src, dst, count);
    } else {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::arraycopy_barrier_oop_entry), src, dst, count);
    }
    __ pop_call_clobbered_registers();
    __ bind(done);
  }
}

void ShenandoahBarrierSetAssembler::resolve_forward_pointer(MacroAssembler* masm, Register dst, Register tmp) {
  assert(ShenandoahCASBarrier, "should be enabled");
  Label is_null;
  __ cbz(dst, is_null);
  resolve_forward_pointer_not_null(masm, dst, tmp);
  __ bind(is_null);
}

// IMPORTANT: This must preserve all registers, even rscratch1 and rscratch2, except those explicitly
// passed in.
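// For orientation, a minimal C-style sketch of the transformation implemented below
// (illustrative only; 'mark' is a hypothetical local, not a name used in this file):
//
//   uintptr_t mark = *(uintptr_t*)(dst + oopDesc::mark_offset_in_bytes());
//   if ((mark & markOopDesc::lock_mask_in_place) == markOopDesc::marked_value) {
//     dst = mark & ~markOopDesc::lock_mask_in_place;   // forwarding pointer, tag bits cleared
//   }                                                  // otherwise dst is left untouched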
void ShenandoahBarrierSetAssembler::resolve_forward_pointer_not_null(MacroAssembler* masm, Register dst, Register tmp) {
  assert(ShenandoahCASBarrier || ShenandoahLoadRefBarrier, "should be enabled");
  // The below loads the mark word, checks if the lowest two bits are
  // set, and if so, clears the lowest two bits and copies the result
  // to dst. Otherwise it leaves dst alone.
  // Implementing this is surprisingly awkward. I do it here by:
  // - Inverting the mark word
  // - Test lowest two bits == 0
  // - If so, set the lowest two bits
  // - Invert the result back, and copy to dst

  bool borrow_reg = (tmp == noreg);
  if (borrow_reg) {
    // No free registers available. Make one useful.
    tmp = rscratch1;
    if (tmp == dst) {
      tmp = rscratch2;
    }
    __ push(RegSet::of(tmp), sp);
  }

  assert_different_registers(tmp, dst);

  Label done;
  __ ldr(tmp, Address(dst, oopDesc::mark_offset_in_bytes()));
  __ eon(tmp, tmp, zr);
  __ ands(zr, tmp, markOopDesc::lock_mask_in_place);
  __ br(Assembler::NE, done);
  __ orr(tmp, tmp, markOopDesc::marked_value);
  __ eon(dst, tmp, zr);
  __ bind(done);

  if (borrow_reg) {
    __ pop(RegSet::of(tmp), sp);
  }
}

void ShenandoahBarrierSetAssembler::load_reference_barrier_not_null(MacroAssembler* masm, Register dst) {
  assert(ShenandoahLoadRefBarrier, "Should be enabled");
  assert(dst != rscratch2, "need rscratch2");

  Label done;
  __ enter();
  Address gc_state(rthread, in_bytes(JavaThread::gc_state_offset()));
  __ ldrb(rscratch2, gc_state);

  // Check for heap stability
  __ tbz(rscratch2, ShenandoahHeap::HAS_FORWARDED_BITPOS, done);

  RegSet to_save = RegSet::of(r0);
  if (dst != r0) {
    __ push(to_save, sp);
    __ mov(r0, dst);
  }

  __ push_call_clobbered_registers();
  __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_interpreter), r0);
  __ mov(rscratch1, r0);
  __ pop_call_clobbered_registers();
  __ mov(r0, rscratch1);

  if (dst != r0) {
    __ mov(dst, r0);
    __ pop(to_save, sp);
  }

  __ bind(done);
  __ leave();
}

void ShenandoahBarrierSetAssembler::storeval_barrier(MacroAssembler* masm, Register dst, Register tmp) {
  if (ShenandoahStoreValEnqueueBarrier) {
    // Save possibly live regs.
    RegSet live_regs = RegSet::range(r0, r4) - dst;
    __ push(live_regs, sp);
    __ strd(v0, __ pre(sp, 2 * -wordSize));

    __ g1_write_barrier_pre(noreg, dst, rthread, tmp, true, false);

    // Restore possibly live regs.
    __ ldrd(v0, __ post(sp, 2 * wordSize));
    __ pop(live_regs, sp);
  }
}

void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm, Register dst) {
  if (ShenandoahLoadRefBarrier) {
    Label is_null;
    __ cbz(dst, is_null);
    load_reference_barrier_not_null(masm, dst);
    __ bind(is_null);
  }
}

void ShenandoahBarrierSetAssembler::cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
                                                bool acquire, bool release, bool weak, bool is_cae,
                                                Register result) {

  Register tmp1 = rscratch1;
  Register tmp2 = rscratch2;
  bool is_narrow = UseCompressedOops;
  Assembler::operand_size size = is_narrow ? Assembler::word : Assembler::xword;

  assert_different_registers(addr, expected, new_val, tmp1, tmp2);

  Label retry, done, fail;

  // CAS, using LL/SC pair.
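  // Rough pseudocode sketch of the sequence emitted below (comments only, not emitted code):
  //
  //   retry:
  //     cur = load_exclusive(addr)
  //     if (cur != expected)                 goto fail   // may be a false negative under Shenandoah
  //     if (!store_exclusive(addr, new_val)) goto retry  // plain LL/SC failure (strong CAS only)
  //     goto done
  //   fail:
  //     // 'cur' and 'expected' may refer to different copies of the same object
  //     // (one from-space, one to-space). Resolve both through the forwarding
  //     // pointer; if they then match, retry with 'cur' as the new expected value.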
  __ bind(retry);
  __ load_exclusive(tmp1, addr, size, acquire);
  if (is_narrow) {
    __ cmpw(tmp1, expected);
  } else {
    __ cmp(tmp1, expected);
  }
  __ br(Assembler::NE, fail);
  __ store_exclusive(tmp2, new_val, addr, size, release);
  if (weak) {
    __ cmpw(tmp2, 0u); // If the store fails, return NE to our caller
  } else {
    __ cbnzw(tmp2, retry);
  }
  __ b(done);

  __ bind(fail);
  // Check if rb(expected)==rb(tmp1)
  // Shuffle registers so that we have memory value ready for next expected.
  __ mov(tmp2, expected);
  __ mov(expected, tmp1);
  if (is_narrow) {
    __ decode_heap_oop(tmp1, tmp1);
    __ decode_heap_oop(tmp2, tmp2);
  }
  resolve_forward_pointer(masm, tmp1);
  resolve_forward_pointer(masm, tmp2);
  __ cmp(tmp1, tmp2);
  // Retry with expected now being the value we just loaded from addr.
  __ br(Assembler::EQ, retry);
  if (is_cae && is_narrow) {
    // For cmp-and-exchange and narrow oops, we need to restore
    // the compressed old-value. We moved it to 'expected' a few lines up.
    __ mov(tmp1, expected);
  }
  __ bind(done);

  if (is_cae) {
    __ mov(result, tmp1);
  } else {
    __ cset(result, Assembler::EQ);
  }
}

#undef __

#ifdef COMPILER1

#define __ ce->masm()->

void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {

  Register obj = stub->obj()->as_register();
  Register res = stub->result()->as_register();

  Label done;

  __ bind(*stub->entry());

  if (res != obj) {
    __ mov(res, obj);
  }
  // Check for null.
  __ cbz(res, done);

  load_reference_barrier_not_null(ce->masm(), res);

  __ bind(done);
  __ b(*stub->continuation());
}

#undef __

#endif // COMPILER1