
src/hotspot/cpu/x86/gc/z/z_x86_64.ad

   9 // This code is distributed in the hope that it will be useful, but WITHOUT
  10 // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11 // FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12 // version 2 for more details (a copy is included in the LICENSE file that
  13 // accompanied this code).
  14 //
  15 // You should have received a copy of the GNU General Public License version
  16 // 2 along with this work; if not, write to the Free Software Foundation,
  17 // Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18 //
  19 // Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20 // or visit www.oracle.com if you need additional information or have any
  21 // questions.
  22 //
  23 
  24 source %{
  25 
  26 #include "gc/z/zBarrierSetAssembler.hpp"
  27 
  28 static void z_load_barrier_slow_reg(MacroAssembler& _masm, Register dst, Address src, bool weak) {
  29   assert(dst != r12, "Invalid register");
  30   assert(dst != r15, "Invalid register");
  31   assert(dst != rsp, "Invalid register");
  32 
  33   const address stub = weak ? ZBarrierSet::assembler()->load_barrier_weak_slow_stub(dst)
  34                             : ZBarrierSet::assembler()->load_barrier_slow_stub(dst);
  35   __ lea(dst, src);
  36   __ call(RuntimeAddress(stub));
  37 }
  38 
  39 %}
  40 
  41 // For XMM and YMM enabled processors
  42 instruct zLoadBarrierSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
  43                                       rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
  44                                       rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
  45                                       rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
  46                                       rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
  47 
  48   match(Set dst (LoadBarrierSlowReg src));
  49   predicate(UseAVX <= 2);
  50 
  51   effect(DEF dst, KILL cr,




   9 // This code is distributed in the hope that it will be useful, but WITHOUT
  10 // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11 // FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12 // version 2 for more details (a copy is included in the LICENSE file that
  13 // accompanied this code).
  14 //
  15 // You should have received a copy of the GNU General Public License version
  16 // 2 along with this work; if not, write to the Free Software Foundation,
  17 // Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18 //
  19 // Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20 // or visit www.oracle.com if you need additional information or have any
  21 // questions.
  22 //
  23 
  24 source %{
  25 
  26 #include "gc/z/zBarrierSetAssembler.hpp"
  27 
  28 static void z_load_barrier_slow_reg(MacroAssembler& _masm, Register dst, Address src, bool weak) {
  29   assert(dst != rsp, "Invalid register");
  30   assert(dst != r15, "Invalid register");
  31 
  32   const address stub = weak ? ZBarrierSet::assembler()->load_barrier_weak_slow_stub(dst)
  33                             : ZBarrierSet::assembler()->load_barrier_slow_stub(dst);
  34   __ lea(dst, src);
  35   __ call(RuntimeAddress(stub));
  36 }
  37 
  38 %}
  39 
  40 // For XMM and YMM enabled processors
  41 instruct zLoadBarrierSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
  42                                       rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
  43                                       rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
  44                                       rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
  45                                       rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
  46 
  47   match(Set dst (LoadBarrierSlowReg src));
  48   predicate(UseAVX <= 2);
  49 
  50   effect(DEF dst, KILL cr,

