187
188 const Register cache = G3_scratch;
189 const Register index = G1_scratch;
190 __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
191
192 const Register flags = cache;
193 __ ld_ptr(cache, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset(), flags);
194 const Register parameter_size = flags;
195 __ and3(flags, ConstantPoolCacheEntry::parameter_size_mask, parameter_size); // argument size in words
196 __ sll(parameter_size, Interpreter::logStackElementSize, parameter_size); // each argument size in bytes
197 __ add(Lesp, parameter_size, Lesp); // pop arguments
198 __ dispatch_next(state, step);
199
200 return entry;
201 }
202
203
// Entry point used to resume interpretation after a deoptimization.
// `state` is the tos (top-of-stack) cached state expected on entry and
// `step` the bcp increment applied before dispatching; both are
// forwarded unchanged to dispatch_next.
address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, int step) {
  address entry = __ pc();
  __ get_constant_pool_cache(LcpoolCache); // load LcpoolCache
  // If the current thread has a pending exception installed, throw it
  // now instead of dispatching to the next bytecode.
  { Label L;
    Address exception_addr(G2_thread, Thread::pending_exception_offset());
    __ ld_ptr(exception_addr, Gtemp);  // Load pending exception.
    __ br_null_short(Gtemp, Assembler::pt, L); // null => nothing pending, dispatch
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_pending_exception));
    __ should_not_reach_here(); // throw_pending_exception unwinds; control must not return here
    __ bind(L);
  }
  __ dispatch_next(state, step); // continue with the next bytecode
  return entry;
}
218
219 // A result handler converts/unboxes a native call result into
220 // a java interpreter/compiler result. The current frame is an
221 // interpreter frame. The activation frame unwind code must be
222 // consistent with that of TemplateTable::_return(...). In the
223 // case of native methods, the caller's SP was not modified.
224 address TemplateInterpreterGenerator::generate_result_handler_for(BasicType type) {
225 address entry = __ pc();
226 Register Itos_i = Otos_i ->after_save();
332 __ ld(profile_limit, G1_scratch);
333 __ cmp_and_br_short(O0, G1_scratch, Assembler::lessUnsigned, Assembler::pn, *profile_method_continue);
334
335 // if no method data exists, go to profile_method
336 __ test_method_data_pointer(*profile_method);
337 }
338
339 Address invocation_limit(G3_method_counters, in_bytes(MethodCounters::interpreter_invocation_limit_offset()));
340 __ ld(invocation_limit, G3_scratch);
341 __ cmp(O0, G3_scratch);
342 __ br(Assembler::greaterEqualUnsigned, false, Assembler::pn, *overflow); // Far distance
343 __ delayed()->nop();
344 __ bind(done);
345 }
346
347 }
348
349 // Allocate monitor and lock method (asm interpreter)
350 // ebx - Method*
351 //
352 void InterpreterGenerator::lock_method(void) {
353 __ ld(Lmethod, in_bytes(Method::access_flags_offset()), O0); // Load access flags.
354
355 #ifdef ASSERT
356 { Label ok;
357 __ btst(JVM_ACC_SYNCHRONIZED, O0);
358 __ br( Assembler::notZero, false, Assembler::pt, ok);
359 __ delayed()->nop();
360 __ stop("method doesn't need synchronization");
361 __ bind(ok);
362 }
363 #endif // ASSERT
364
365 // get synchronization object to O0
366 { Label done;
367 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
368 __ btst(JVM_ACC_STATIC, O0);
369 __ br( Assembler::zero, true, Assembler::pt, done);
370 __ delayed()->ld_ptr(Llocals, Interpreter::local_offset_in_bytes(0), O0); // get receiver for not-static case
371
372 __ ld_ptr( Lmethod, in_bytes(Method::const_offset()), O0);
|
187
188 const Register cache = G3_scratch;
189 const Register index = G1_scratch;
190 __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
191
192 const Register flags = cache;
193 __ ld_ptr(cache, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset(), flags);
194 const Register parameter_size = flags;
195 __ and3(flags, ConstantPoolCacheEntry::parameter_size_mask, parameter_size); // argument size in words
196 __ sll(parameter_size, Interpreter::logStackElementSize, parameter_size); // each argument size in bytes
197 __ add(Lesp, parameter_size, Lesp); // pop arguments
198 __ dispatch_next(state, step);
199
200 return entry;
201 }
202
203
// Entry point used to resume interpretation after a deoptimization.
// `state` is the tos (top-of-stack) cached state expected on entry and
// `step` the bcp increment applied before dispatching; both are
// forwarded unchanged to dispatch_next.
address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, int step) {
  address entry = __ pc();
  __ get_constant_pool_cache(LcpoolCache); // load LcpoolCache
#if INCLUDE_JVMCI
  // Check if we need to take lock at entry of synchronized method.
  if (UseJVMCICompiler) {
    Label L;
    Address pending_monitor_enter_addr(G2_thread, JavaThread::pending_monitorenter_offset());
    __ ldbool(pending_monitor_enter_addr, Gtemp); // Load if pending monitor enter
    __ cmp_and_br_short(Gtemp, G0, Assembler::equal, Assembler::pn, L); // flag clear => nothing to do
    // Clear flag.
    __ stbool(G0, pending_monitor_enter_addr);
    // Take lock.
    // NOTE(review): presumably the flag marks a deopt that happened before the
    // method's monitor was acquired -- confirm against the JVMCI deopt protocol.
    lock_method();
    __ bind(L);
  }
#endif
  // If the current thread has a pending exception installed, throw it
  // now instead of dispatching to the next bytecode.
  { Label L;
    Address exception_addr(G2_thread, Thread::pending_exception_offset());
    __ ld_ptr(exception_addr, Gtemp); // Load pending exception.
    __ br_null_short(Gtemp, Assembler::pt, L); // null => nothing pending, dispatch
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_pending_exception));
    __ should_not_reach_here(); // throw_pending_exception unwinds; control must not return here
    __ bind(L);
  }
  __ dispatch_next(state, step); // continue with the next bytecode
  return entry;
}
232
233 // A result handler converts/unboxes a native call result into
234 // a java interpreter/compiler result. The current frame is an
235 // interpreter frame. The activation frame unwind code must be
236 // consistent with that of TemplateTable::_return(...). In the
237 // case of native methods, the caller's SP was not modified.
238 address TemplateInterpreterGenerator::generate_result_handler_for(BasicType type) {
239 address entry = __ pc();
240 Register Itos_i = Otos_i ->after_save();
346 __ ld(profile_limit, G1_scratch);
347 __ cmp_and_br_short(O0, G1_scratch, Assembler::lessUnsigned, Assembler::pn, *profile_method_continue);
348
349 // if no method data exists, go to profile_method
350 __ test_method_data_pointer(*profile_method);
351 }
352
353 Address invocation_limit(G3_method_counters, in_bytes(MethodCounters::interpreter_invocation_limit_offset()));
354 __ ld(invocation_limit, G3_scratch);
355 __ cmp(O0, G3_scratch);
356 __ br(Assembler::greaterEqualUnsigned, false, Assembler::pn, *overflow); // Far distance
357 __ delayed()->nop();
358 __ bind(done);
359 }
360
361 }
362
363 // Allocate monitor and lock method (asm interpreter)
364 // ebx - Method*
365 //
366 void TemplateInterpreterGenerator::lock_method() {
367 __ ld(Lmethod, in_bytes(Method::access_flags_offset()), O0); // Load access flags.
368
369 #ifdef ASSERT
370 { Label ok;
371 __ btst(JVM_ACC_SYNCHRONIZED, O0);
372 __ br( Assembler::notZero, false, Assembler::pt, ok);
373 __ delayed()->nop();
374 __ stop("method doesn't need synchronization");
375 __ bind(ok);
376 }
377 #endif // ASSERT
378
379 // get synchronization object to O0
380 { Label done;
381 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
382 __ btst(JVM_ACC_STATIC, O0);
383 __ br( Assembler::zero, true, Assembler::pt, done);
384 __ delayed()->ld_ptr(Llocals, Interpreter::local_offset_in_bytes(0), O0); // get receiver for not-static case
385
386 __ ld_ptr( Lmethod, in_bytes(Method::const_offset()), O0);
|