< prev index next >

src/hotspot/cpu/aarch64/gc/g1/g1BarrierSetAssembler_aarch64.cpp

Print this page




 258   ModRefBarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
 259   if (on_oop && on_reference) {
 260     // LR is live.  It must be saved around calls.
 261     __ enter(); // barrier may call runtime
 262     // Generate the G1 pre-barrier code to log the value of
 263     // the referent field in an SATB buffer.
 264     g1_write_barrier_pre(masm /* masm */,
 265                          noreg /* obj */,
 266                          dst /* pre_val */,
 267                          rthread /* thread */,
 268                          tmp1 /* tmp */,
 269                          true /* tosca_live */,
 270                          true /* expand_call */);
 271     __ leave();
 272   }
 273 }
 274 
 275 void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
 276                                          Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
 277 
       // Emit an oop store to `dst` wrapped in the G1 write barriers:
       //   1. SATB pre-write barrier, which logs the field's previous value;
       //   2. the plain store itself (via BarrierSetAssembler::store_at);
       //   3. card-marking post-write barrier for the new value.
       // val == noreg means there is no new value in a register (per the
       // ModRef store convention this presumably encodes a null store --
       // confirm against ModRefBarrierSetAssembler); the post barrier is
       // skipped in that case.  tmp1/tmp2 are scratch registers handed to
       // the barrier helpers; r3 is clobbered as the flattened address.




       // Default the third temp if the caller did not supply one.
       // NOTE(review): tmp3 is never referenced below in this version of the
       // function, so this assignment looks dead -- verify before relying on
       // r8 being free here.
 278    if (tmp3 == noreg) {
 279        tmp3 = r8;  
 280    }


 281 
       // Materialize the full field address in r3 so the pre barrier, the
       // store and the post barrier all see the same flattened address.
 282   // flatten object address if needed
 283   if (dst.index() == noreg && dst.offset() == 0) {
 284     if (dst.base() != r3) {
 285       __ mov(r3, dst.base());
 286     }
 287   } else {
 288     __ lea(r3, dst);
 289   }
 290 


       // SATB pre barrier: records the value currently in the field (loaded
       // by the helper into tmp2 /* pre_val */) so concurrent marking does
       // not lose it.  tosca_live tracks whether a live value sits in the
       // return register across the barrier.
 291   g1_write_barrier_pre(masm,
 292                        r3 /* obj */,
 293                        tmp2 /* pre_val */,
 294                        rthread /* thread */,
 295                        tmp1  /* tmp */,
 296                        val != noreg /* tosca_live */,
 297                        false /* expand_call */);

 298 
 299   if (val == noreg) {
       // No value register: perform the raw store and skip the post barrier.
 300     BarrierSetAssembler::store_at(masm, decorators, type, Address(r3, 0), noreg, noreg, noreg, noreg);
 301   } else {
 302     // G1 barrier needs uncompressed oop for region cross check.
 303     Register new_val = val;

 304     if (UseCompressedOops) {
       // Keep an uncompressed copy in rscratch2: store_at may encode
       // (compress) val in place -- presumably; confirm store_at's register
       // side effects -- and the post barrier needs the full-width oop for
       // its cross-region check.
 305       new_val = rscratch2;

 306       __ mov(new_val, val);
 307     }
 308     BarrierSetAssembler::store_at(masm, decorators, type, Address(r3, 0), val, noreg, noreg, noreg);




       // Card-marking post barrier: dirties the card for r3 when the store
       // creates an interesting cross-region reference.
 309     g1_write_barrier_post(masm,
 310                           r3 /* store_adr */,
 311                           new_val /* new_val */,
 312                           rthread /* thread */,
 313                           tmp1 /* tmp */,
 314                           tmp2 /* tmp2 */);

 315   }
 316 
 317 }
 318 
 319 #ifdef COMPILER1
 320 
 321 #undef __
 322 #define __ ce->masm()->
 323 
 324 void G1BarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, G1PreBarrierStub* stub) {
 325   G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 326   // At this point we know that marking is in progress.
 327   // If do_load() is true then we have to emit the
 328   // load of the previous value; otherwise it has already
 329   // been loaded into _pre_val.
 330 
 331   __ bind(*stub->entry());
 332 
 333   assert(stub->pre_val()->is_register(), "Precondition.");
 334 




 258   ModRefBarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
 259   if (on_oop && on_reference) {
 260     // LR is live.  It must be saved around calls.
 261     __ enter(); // barrier may call runtime
 262     // Generate the G1 pre-barrier code to log the value of
 263     // the referent field in an SATB buffer.
 264     g1_write_barrier_pre(masm /* masm */,
 265                          noreg /* obj */,
 266                          dst /* pre_val */,
 267                          rthread /* thread */,
 268                          tmp1 /* tmp */,
 269                          true /* tosca_live */,
 270                          true /* expand_call */);
 271     __ leave();
 272   }
 273 }
 274 
 275 void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
 276                                          Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
 277 
       // Emit an oop store to `dst` wrapped in the G1 write barriers.
       // val == noreg means there is no new value in a register (presumably a
       // null store -- confirm against the ModRef store convention).  tmp1 is
       // clobbered to hold the flattened field address; tmp2/tmp3 are scratch
       // registers for the barrier helpers.
       //
       // Decorator-driven gating:
       //   - the SATB pre barrier is emitted only for AS_NORMAL accesses;
       //   - the card-marking post barrier is emitted only when an actual
       //     value is stored (val != noreg) into the heap (IN_HEAP).
       // IS_DEST_UNINITIALIZED stores are rejected outright below.
 278   bool in_heap = (decorators & IN_HEAP) != 0;
 279   bool as_normal = (decorators & AS_NORMAL) != 0;
 280   assert((decorators & IS_DEST_UNINITIALIZED) == 0, "unsupported");
 281 
 282   bool needs_pre_barrier = as_normal;
 283   bool needs_post_barrier = (val != noreg && in_heap);
 284 
 285 
       // Default the third temp if the caller did not supply one.
       // NOTE(review): rscratch2 is chosen here, so callers must not expect
       // rscratch2 to survive this store -- verify no caller depends on it.
 286    if (tmp3 == noreg) {
 287      tmp3 = rscratch2;
 288    }
 289    // assert_different_registers(val, tmp1, tmp2, tmp3, rscratch1, rscratch2);
 290    assert_different_registers(val, tmp1, tmp2, tmp3);
 291 
       // Materialize the full field address in tmp1 so the pre barrier, the
       // store and the post barrier all see the same flattened address.
 292   // flatten object address if needed
 293   if (dst.index() == noreg && dst.offset() == 0) {
 294     if (dst.base() != tmp1) {
 295       __ mov(tmp1, dst.base());
 296     }
 297   } else {
 298     __ lea(tmp1, dst);
 299   }
 300 
 301 
       // SATB pre barrier: records the value currently in the field (loaded
       // by the helper into tmp2 /* pre_val */) so concurrent marking does
       // not lose it.
 302   if (needs_pre_barrier) {
 303       g1_write_barrier_pre(masm,
 304                        tmp1 /* obj */,
 305                        tmp2 /* pre_val */,  
 306                        rthread /* thread */,
 307                        tmp3  /* tmp */,
 308                        val != noreg /* tosca_live */,
 309                        false /* expand_call */);
 310   }
 311 
 312   if (val == noreg) {
       // No value register: perform the raw store and skip the post barrier.
 313     BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp1, 0), noreg, noreg, noreg, noreg);
 314   } else {
 315     // G1 barrier needs uncompressed oop for region cross check.
 316     Register new_val = val;
       // Preserve an uncompressed copy of the new value for the post
       // barrier; store_at may encode (compress) val in place -- presumably;
       // confirm store_at's register side effects.  Only needed when the
       // post barrier will actually run.
 317     if (needs_post_barrier) {
 318       if (UseCompressedOops) { 
 319         // FIXME: Refactor the code to avoid usage of r19 and stay within tmpX
 320         new_val = r19;
 321         __ mov(new_val, val);
 322       }
 323    }
 324 
 325    BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp1, 0), val, noreg, noreg, noreg);
 326 
       // Card-marking post barrier: dirties the card for tmp1 when the store
       // creates an interesting cross-region reference.
 327     if (needs_post_barrier) {
 328        g1_write_barrier_post(masm,
 329                           tmp1 /* store_adr */,
 330                           new_val /* new_val */,
 331                           rthread /* thread */,
 332                           tmp2 /* tmp */,
 333                           tmp3 /* tmp2 */);
 334    }
 335  }
 336 
 337 }
 338 
 339 #ifdef COMPILER1
 340 
 341 #undef __
 342 #define __ ce->masm()->
 343 
 344 void G1BarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, G1PreBarrierStub* stub) {
 345   G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 346   // At this point we know that marking is in progress.
 347   // If do_load() is true then we have to emit the
 348   // load of the previous value; otherwise it has already
 349   // been loaded into _pre_val.
 350 
 351   __ bind(*stub->entry());
 352 
 353   assert(stub->pre_val()->is_register(), "Precondition.");
 354 


< prev index next >