src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp

@@ -26,10 +26,13 @@
 #ifndef CPU_AARCH64_MACROASSEMBLER_AARCH64_HPP
 #define CPU_AARCH64_MACROASSEMBLER_AARCH64_HPP
 
 #include "asm/assembler.hpp"
 #include "oops/compressedOops.hpp"
+#include "utilities/macros.hpp"
+#include "runtime/signature.hpp"
+
 
 class ciValueKlass;
 
 // MacroAssembler extends Assembler by frequently used macros.
 //

@@ -1165,13 +1168,31 @@
      reg_written
   };
 
   void verified_entry(Compile* C, int sp_inc);
 
+  int store_value_type_fields_to_buf(ciValueKlass* vk, bool from_interpreter = true);
+
 // Unpack all value type arguments passed as oops 
   void unpack_value_args(Compile* C, bool receiver_only);
-  void store_value_type_fields_to_buf(ciValueKlass* vk);
+  bool move_helper(VMReg from, VMReg to, BasicType bt, RegState reg_state[], int ret_off, int extra_stack_offset);
+  bool unpack_value_helper(const GrowableArray<SigEntry>* sig, int& sig_index, VMReg from, VMRegPair* regs_to, int& to_index,
+                           RegState reg_state[], int ret_off, int extra_stack_offset);
+  bool pack_value_helper(const GrowableArray<SigEntry>* sig, int& sig_index, int vtarg_index,
+                         VMReg to, VMRegPair* regs_from, int regs_from_count, int& from_index, RegState reg_state[],
+                         int ret_off, int extra_stack_offset);
+  void restore_stack(Compile* C);
+
+  int shuffle_value_args(bool is_packing, bool receiver_only, int extra_stack_offset,
+                         BasicType* sig_bt, const GrowableArray<SigEntry>* sig_cc,
+                         int args_passed, int args_on_stack, VMRegPair* regs,
+                         int args_passed_to, int args_on_stack_to, VMRegPair* regs_to);
+  bool shuffle_value_args_spill(bool is_packing, const GrowableArray<SigEntry>* sig_cc, int sig_cc_index,
+                                VMRegPair* regs_from, int from_index, int regs_from_count,
+                                RegState* reg_state, int sp_inc, int extra_stack_offset);
+  VMReg spill_reg_for(VMReg reg);
+
 
   void tableswitch(Register index, jint lowbound, jint highbound,
                    Label &jumptable, Label &jumptable_end, int stride = 1) {
     adr(rscratch1, jumptable);
     subsw(rscratch2, index, lowbound);

@@ -1390,10 +1411,13 @@
       spill(tmp1, true, dst_offset);
       unspill(tmp1, true, src_offset+8);
       spill(tmp1, true, dst_offset+8);
     }
   }
+
+  #include "asm/macroAssembler_common.hpp"
+
 };
 
 #ifdef ASSERT
 inline bool AbstractAssembler::pd_check_instruction_mark() { return false; }
 #endif
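
A note on the new helpers above: move_helper, unpack_value_helper and pack_value_helper each take a RegState array and return a bool, which suggests the caller retries moves until every destination register is free, falling back to a spill (spill_reg_for, shuffle_value_args_spill) when a cycle blocks progress. The following standalone sketch is not HotSpot code; the Move struct, the retry loop and the enum values other than reg_written are invented here purely to illustrate why per-register state tracking is needed when shuffling arguments between two calling conventions.

// Standalone illustration only -- invented names, not the HotSpot implementation.
#include <cstdio>
#include <vector>

enum RegState { reg_readonly, reg_writable, reg_written };   // reg_written mirrors the enum above; the other values are assumed

struct Move { int from; int to; };                           // hypothetical register-to-register move

int main() {
  // Registers r0..r3; r0 and r1 hold live incoming arguments and must be read before being overwritten.
  RegState state[4] = { reg_readonly, reg_readonly, reg_writable, reg_writable };
  std::vector<Move> moves = { {1, 0}, {0, 2} };              // r0 -> r2 has to be emitted before r1 -> r0

  bool progress = true;
  while (progress) {                                         // retry until no move can make progress
    progress = false;
    for (auto it = moves.begin(); it != moves.end(); ) {
      if (state[it->to] == reg_writable) {                   // destination is free: safe to emit the move
        std::printf("mov r%d <- r%d\n", it->to, it->from);
        state[it->to]   = reg_written;                       // destination now holds its final value
        state[it->from] = reg_writable;                      // source has been consumed and may be reused
        it = moves.erase(it);
        progress = true;
      } else {
        ++it;                                                // destination still live, try again next pass
      }
    }
  }
  return moves.empty() ? 0 : 1;                              // leftover moves would indicate a cycle that needs a spill register
}

In the patch itself the moves are described by VMReg/VMRegPair descriptors and may target stack slots as well (note the ret_off, extra_stack_offset and args_on_stack parameters), but the retry-until-written discipline is the part the RegState array encodes.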