< prev index next >

src/hotspot/cpu/x86/templateTable_x86.cpp

Print this page




2460 }
2461 
// Generate the ifnull / ifnonnull bytecodes: pop the object reference
// (in rax, atos) and branch when it satisfies condition 'cc'
// (equal => reference is null, not_equal => reference is non-null).
2462 void TemplateTable::if_nullcmp(Condition cc) {
2463   transition(atos, vtos);   // consumes the reference on TOS
2464   // assume branch is more often taken than not (loops use backward branches)
2465   Label not_taken;
2466   __ testptr(rax, rax);     // sets ZF iff the reference is null
2467   __ jcc(j_not(cc), not_taken);   // skip the branch when cc does not hold
2468   branch(false, false);     // emit the taken-branch code (bci update etc.)
2469   __ bind(not_taken);
2470   __ profile_not_taken_branch(rax);   // record not-taken edge for profiling
2471 }
2472 
// Generate if_acmpeq / if_acmpne: reference-identity compare of two oops.
// rdx = first operand (popped from the stack), rax = second operand (atos).
// Under Valhalla a value object must never compare equal by reference (see
// the cc inversion below), so extra mark-word checks are emitted; two
// code-generation strategies exist, selected by UsePointerPerturbation.
2473 void TemplateTable::if_acmp(Condition cc) {
2474   transition(atos, vtos);
2475   // assume branch is more often taken than not (loops use backward branches)
2476   Label taken, not_taken;
2477   __ pop_ptr(rdx);   // first operand; the second stays in rax
2478 
     // A value object is identified by its mark word matching the
     // always-locked bit pattern.
2479   const int is_value_mask = markOopDesc::always_locked_pattern;
2480   if (EnableValhalla && UsePointerPerturbation) {
       // Perturbation strategy: before the pointer compare, set the low bit
       // of rdx when it refers to a value object, so the identity compare
       // below can never report equality for a value operand.
2481     Label is_null;
2482     __ testptr(rdx, rdx);
2483     __ jcc(Assembler::zero, is_null);   // null is never a value object
2484     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2485     __ andptr(rbx, is_value_mask);
2486     __ cmpl(rbx, is_value_mask);        // mark word tagged always-locked?
2487     __ setb(Assembler::equal, rbx);     // rbx = 1 iff rdx is a value object
2488     __ movzbl(rbx, rbx);
2489     __ orptr(rdx, rbx);                 // perturb the pointer for values
2490     __ bind(is_null);
2491   }
2492 
2493   __ cmpoop(rdx, rax);   // identity compare (handles compressed oops)
2494 
2495   if (EnableValhalla && !UsePointerPerturbation) {
       // Explicit-check strategy: unequal pointers decide the outcome at
       // once; equal pointers with a null operand mean null == null; equal
       // non-null pointers denote the same object, which must still take
       // the not-equal outcome when it is a value object.
2496     __ jcc(Assembler::notEqual, (cc == not_equal) ? taken : not_taken);
2497     __ testptr(rdx, rdx);
2498     __ jcc(Assembler::zero, (cc == equal) ? taken : not_taken);   // null == null
2499     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2500     __ andptr(rbx, is_value_mask);
2501     __ cmpl(rbx, is_value_mask);   // ZF set iff rdx is a value object
       // Invert cc: for an identical value-object reference the final jcc
       // must route acmpeq to not_taken and acmpne to taken.
2502     cc = (cc == equal) ? not_equal : equal;
2503   }
2504 
2505   __ jcc(j_not(cc), not_taken);
2506   __ bind(taken);
2507   branch(false, false);   // emit the taken-branch code
2508   __ bind(not_taken);
2509   __ profile_not_taken_branch(rax);   // record not-taken edge for profiling
2510 }
2511 
2512 void TemplateTable::ret() {
2513   transition(vtos, vtos);
2514   locals_index(rbx);
2515   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp




2460 }
2461 
// Generate the ifnull / ifnonnull bytecodes: pop the object reference
// (in rax, atos) and branch when it satisfies condition 'cc'
// (equal => reference is null, not_equal => reference is non-null).
2462 void TemplateTable::if_nullcmp(Condition cc) {
2463   transition(atos, vtos);   // consumes the reference on TOS
2464   // assume branch is more often taken than not (loops use backward branches)
2465   Label not_taken;
2466   __ testptr(rax, rax);     // sets ZF iff the reference is null
2467   __ jcc(j_not(cc), not_taken);   // skip the branch when cc does not hold
2468   branch(false, false);     // emit the taken-branch code (bci update etc.)
2469   __ bind(not_taken);
2470   __ profile_not_taken_branch(rax);   // record not-taken edge for profiling
2471 }
2472 
// Generate if_acmpeq / if_acmpne: reference-identity compare of two oops.
// rdx = first operand (popped from the stack), rax = second operand (atos).
// Under Valhalla a value object must never compare equal by reference (see
// the cc inversion below), so extra mark-word checks are emitted.
// ACmpOnValues selects the code-generation strategy: mode 1 uses pointer
// perturbation, other modes use explicit post-compare checks
// (NOTE(review): exact meaning of the other ACmpOnValues modes is not
// visible here — confirm against the flag's declaration).
2473 void TemplateTable::if_acmp(Condition cc) {
2474   transition(atos, vtos);
2475   // assume branch is more often taken than not (loops use backward branches)
2476   Label taken, not_taken;
2477   __ pop_ptr(rdx);   // first operand; the second stays in rax
2478 
     // A value object is identified by its mark word matching the
     // always-locked bit pattern.
2479   const int is_value_mask = markOopDesc::always_locked_pattern;
2480   if (EnableValhalla && ACmpOnValues == 1) {
       // Perturbation strategy: before the pointer compare, set the low bit
       // of rdx when it refers to a value object, so the identity compare
       // below can never report equality for a value operand.
2481     Label is_null;
2482     __ testptr(rdx, rdx);
2483     __ jcc(Assembler::zero, is_null);   // null is never a value object
2484     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2485     __ andptr(rbx, is_value_mask);
2486     __ cmpl(rbx, is_value_mask);        // mark word tagged always-locked?
2487     __ setb(Assembler::equal, rbx);     // rbx = 1 iff rdx is a value object
2488     __ movzbl(rbx, rbx);
2489     __ orptr(rdx, rbx);                 // perturb the pointer for values
2490     __ bind(is_null);
2491   }
2492 
2493   __ cmpoop(rdx, rax);   // identity compare (handles compressed oops)
2494 
2495   if (EnableValhalla && ACmpOnValues != 1) {
       // Explicit-check strategy: unequal pointers decide the outcome at
       // once; equal pointers with a null operand mean null == null; equal
       // non-null pointers denote the same object, which must still take
       // the not-equal outcome when it is a value object.
2496     __ jcc(Assembler::notEqual, (cc == not_equal) ? taken : not_taken);
2497     __ testptr(rdx, rdx);
2498     __ jcc(Assembler::zero, (cc == equal) ? taken : not_taken);   // null == null
2499     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2500     __ andptr(rbx, is_value_mask);
2501     __ cmpl(rbx, is_value_mask);   // ZF set iff rdx is a value object
       // Invert cc: for an identical value-object reference the final jcc
       // must route acmpeq to not_taken and acmpne to taken.
2502     cc = (cc == equal) ? not_equal : equal;
2503   }
2504 
2505   __ jcc(j_not(cc), not_taken);
2506   __ bind(taken);
2507   branch(false, false);   // emit the taken-branch code
2508   __ bind(not_taken);
2509   __ profile_not_taken_branch(rax);   // record not-taken edge for profiling
2510 }
2511 
2512 void TemplateTable::ret() {
2513   transition(vtos, vtos);
2514   locals_index(rbx);
2515   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp


< prev index next >