1 /*
2 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/metadataOnStackMark.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "code/debugInfoRec.hpp"
29 #include "gc_interface/collectedHeap.inline.hpp"
30 #include "interpreter/bytecodeStream.hpp"
31 #include "interpreter/bytecodeTracer.hpp"
32 #include "interpreter/bytecodes.hpp"
33 #include "interpreter/interpreter.hpp"
34 #include "interpreter/oopMapCache.hpp"
35 #include "memory/gcLocker.hpp"
36 #include "memory/generation.hpp"
37 #include "memory/heapInspection.hpp"
38 #include "memory/metadataFactory.hpp"
39 #include "memory/oopFactory.hpp"
40 #include "oops/constMethod.hpp"
41 #include "oops/methodData.hpp"
42 #include "oops/method.hpp"
43 #include "oops/oop.inline.hpp"
44 #include "oops/symbol.hpp"
45 #include "prims/jvmtiExport.hpp"
46 #include "prims/methodHandles.hpp"
47 #include "prims/nativeLookup.hpp"
48 #include "runtime/arguments.hpp"
49 #include "runtime/compilationPolicy.hpp"
50 #include "runtime/frame.inline.hpp"
51 #include "runtime/handles.inline.hpp"
52 #include "runtime/orderAccess.inline.hpp"
53 #include "runtime/relocator.hpp"
54 #include "runtime/sharedRuntime.hpp"
55 #include "runtime/signature.hpp"
56 #include "utilities/quickSort.hpp"
57 #include "utilities/xmlstream.hpp"
58
59 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
60
61 // Implementation of Method
62
63 Method* Method::allocate(ClassLoaderData* loader_data,
64 int byte_code_size,
65 AccessFlags access_flags,
66 InlineTableSizes* sizes,
67 ConstMethod::MethodType method_type,
68 TRAPS) {
69 assert(!access_flags.is_native() || byte_code_size == 0,
70 "native methods should not contain byte codes");
71 ConstMethod* cm = ConstMethod::allocate(loader_data,
72 byte_code_size,
73 sizes,
74 method_type,
75 CHECK_NULL);
76
77 int size = Method::size(access_flags.is_native());
78
79 return new (loader_data, size, false, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, size);
80 }
81
82 Method::Method(ConstMethod* xconst, AccessFlags access_flags, int size) {
83 No_Safepoint_Verifier no_safepoint;
84 set_constMethod(xconst);
85 set_access_flags(access_flags);
86 set_method_size(size);
87 set_intrinsic_id(vmIntrinsics::_none);
88 set_jfr_towrite(false);
89 set_force_inline(false);
90 set_hidden(false);
91 set_dont_inline(false);
92 set_has_injected_profile(false);
93 set_method_data(NULL);
94 clear_method_counters();
95 set_vtable_index(Method::garbage_vtable_index);
96
97 // Fix and bury in Method*
set_interpreter_entry(NULL); // sets i2i entry and _from_interpreted_entry
99 set_adapter_entry(NULL);
100 clear_code(); // from_c/from_i get set to c2i/i2i
101
102 if (access_flags.is_native()) {
103 clear_native_function();
104 set_signature_handler(NULL);
105 }
106
107 NOT_PRODUCT(set_compiled_invocation_count(0);)
108 }
109
110 // Release Method*. The nmethod will be gone when we get here because
111 // we've walked the code cache.
112 void Method::deallocate_contents(ClassLoaderData* loader_data) {
113 MetadataFactory::free_metadata(loader_data, constMethod());
114 set_constMethod(NULL);
115 MetadataFactory::free_metadata(loader_data, method_data());
116 set_method_data(NULL);
117 MetadataFactory::free_metadata(loader_data, method_counters());
118 clear_method_counters();
119 // The nmethod will be gone when we get here.
120 if (code() != NULL) _code = NULL;
121 }
122
123 address Method::get_i2c_entry() {
124 assert(_adapter != NULL, "must have");
125 return _adapter->get_i2c_entry();
126 }
127
128 address Method::get_c2i_entry() {
129 assert(_adapter != NULL, "must have");
130 return _adapter->get_c2i_entry();
131 }
132
133 address Method::get_c2i_unverified_entry() {
134 assert(_adapter != NULL, "must have");
135 return _adapter->get_c2i_unverified_entry();
136 }
137
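// The returned string has the form "<external class name>.<method name><signature>",
// e.g. "java.lang.String.indexOf(I)I".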
138 char* Method::name_and_sig_as_C_string() const {
139 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
140 }
141
142 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
143 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
144 }
145
146 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
147 const char* klass_name = klass->external_name();
148 int klass_name_len = (int)strlen(klass_name);
149 int method_name_len = method_name->utf8_length();
150 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
151 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
152 strcpy(dest, klass_name);
153 dest[klass_name_len] = '.';
154 strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
155 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
156 dest[len] = 0;
157 return dest;
158 }
159
160 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
161 Symbol* klass_name = klass->name();
162 klass_name->as_klass_external_name(buf, size);
163 int len = (int)strlen(buf);
164
165 if (len < size - 1) {
166 buf[len++] = '.';
167
168 method_name->as_C_string(&(buf[len]), size - len);
169 len = (int)strlen(buf);
170
171 signature->as_C_string(&(buf[len]), size - len);
172 }
173
174 return buf;
175 }
176
177 int Method::fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS) {
178 // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
179 // access exception table
180 ExceptionTable table(mh());
181 int length = table.length();
182 // iterate through all entries sequentially
183 constantPoolHandle pool(THREAD, mh->constants());
184 for (int i = 0; i < length; i ++) {
// Re-acquire the table in case a GC happened
186 ExceptionTable table(mh());
187 int beg_bci = table.start_pc(i);
188 int end_bci = table.end_pc(i);
189 assert(beg_bci <= end_bci, "inconsistent exception table");
190 if (beg_bci <= throw_bci && throw_bci < end_bci) {
191 // exception handler bci range covers throw_bci => investigate further
192 int handler_bci = table.handler_pc(i);
193 int klass_index = table.catch_type_index(i);
if (klass_index == 0) {
// a catch_type_index of 0 is a catch-all handler (e.g. a finally block)
return handler_bci;
} else if (ex_klass.is_null()) {
// the caller is asking about any exception type
return handler_bci;
198 } else {
199 // we know the exception class => get the constraint class
200 // this may require loading of the constraint class; if verification
201 // fails or some other exception occurs, return handler_bci
202 Klass* k = pool->klass_at(klass_index, CHECK_(handler_bci));
203 KlassHandle klass = KlassHandle(THREAD, k);
204 assert(klass.not_null(), "klass not loaded");
205 if (ex_klass->is_subtype_of(klass())) {
206 return handler_bci;
207 }
208 }
209 }
210 }
211
212 return -1;
213 }
214
215 void Method::mask_for(int bci, InterpreterOopMap* mask) {
216
217 Thread* myThread = Thread::current();
218 methodHandle h_this(myThread, this);
219 #ifdef ASSERT
220 bool has_capability = myThread->is_VM_thread() ||
221 myThread->is_ConcurrentGC_thread() ||
222 myThread->is_GC_task_thread();
223
224 if (!has_capability) {
225 if (!VerifyStack && !VerifyLastFrame) {
226 // verify stack calls this outside VM thread
227 warning("oopmap should only be accessed by the "
228 "VM, GC task or CMS threads (or during debugging)");
229 InterpreterOopMap local_mask;
230 method_holder()->mask_for(h_this, bci, &local_mask);
231 local_mask.print();
232 }
233 }
234 #endif
235 method_holder()->mask_for(h_this, bci, mask);
236 return;
237 }
238
239
240 int Method::bci_from(address bcp) const {
241 #ifdef ASSERT
242 { ResourceMark rm;
assert((is_native() && bcp == code_base()) || contains(bcp) || is_error_reported(),
244 err_msg("bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s", bcp, name_and_sig_as_C_string()));
245 }
246 #endif
247 return bcp - code_base();
248 }
249
250
251 // Return (int)bcx if it appears to be a valid BCI.
252 // Return bci_from((address)bcx) if it appears to be a valid BCP.
253 // Return -1 otherwise.
254 // Used by profiling code, when invalid data is a possibility.
255 // The caller is responsible for validating the Method* itself.
256 int Method::validate_bci_from_bcx(intptr_t bcx) const {
257 // keep bci as -1 if not a valid bci
258 int bci = -1;
259 if (bcx == 0 || (address)bcx == code_base()) {
// code_size() may return 0; we allow 0 here because the method may be native
262 bci = 0;
263 } else if (frame::is_bci(bcx)) {
264 if (bcx < code_size()) {
265 bci = (int)bcx;
266 }
267 } else if (contains((address)bcx)) {
268 bci = (address)bcx - code_base();
269 }
270 // Assert that if we have dodged any asserts, bci is negative.
271 assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
272 return bci;
273 }
274
275 address Method::bcp_from(int bci) const {
276 assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), err_msg("illegal bci: %d", bci));
277 address bcp = code_base() + bci;
assert((is_native() && bcp == code_base()) || contains(bcp), "bcp doesn't belong to this method");
279 return bcp;
280 }
281
282
283 int Method::size(bool is_native) {
284 // If native, then include pointers for native_function and signature_handler
285 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
286 int extra_words = align_size_up(extra_bytes, BytesPerWord) / BytesPerWord;
287 return align_object_size(header_size() + extra_words);
288 }
289
290
291 Symbol* Method::klass_name() const {
292 Klass* k = method_holder();
293 assert(k->is_klass(), "must be klass");
294 InstanceKlass* ik = (InstanceKlass*) k;
295 return ik->name();
296 }
297
298
// Attempt to return the Method to its original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.
303
304 void Method::remove_unshareable_info() {
305 unlink_method();
306 }
307
308
309 bool Method::was_executed_more_than(int n) {
// The invocation counter is reset when the Method* is compiled.
// If the method has compiled code we therefore assume it has
// been executed more than n times.
313 if (is_accessor() || is_empty_method() || (code() != NULL)) {
314 // interpreter doesn't bump invocation counter of trivial methods
315 // compiler does not bump invocation counter of compiled methods
316 return true;
317 }
318 else if ((method_counters() != NULL &&
319 method_counters()->invocation_counter()->carry()) ||
320 (method_data() != NULL &&
321 method_data()->invocation_counter()->carry())) {
322 // The carry bit is set when the counter overflows and causes
323 // a compilation to occur. We don't know how many times
324 // the counter has been reset, so we simply assume it has
325 // been executed more than n times.
326 return true;
327 } else {
328 return invocation_count() > n;
329 }
330 }
331
332 #ifndef PRODUCT
333 void Method::print_invocation_count() {
334 if (is_static()) tty->print("static ");
335 if (is_final()) tty->print("final ");
336 if (is_synchronized()) tty->print("synchronized ");
337 if (is_native()) tty->print("native ");
338 method_holder()->name()->print_symbol_on(tty);
339 tty->print(".");
340 name()->print_symbol_on(tty);
341 signature()->print_symbol_on(tty);
342
343 if (WizardMode) {
344 // dump the size of the byte codes
345 tty->print(" {%d}", code_size());
346 }
347 tty->cr();
348
349 tty->print_cr (" interpreter_invocation_count: %8d ", interpreter_invocation_count());
350 tty->print_cr (" invocation_counter: %8d ", invocation_count());
351 tty->print_cr (" backedge_counter: %8d ", backedge_count());
352 if (CountCompiledCalls) {
353 tty->print_cr (" compiled_invocation_count: %8d ", compiled_invocation_count());
354 }
355
356 }
357 #endif
358
359 // Build a MethodData* object to hold information about this method
360 // collected in the interpreter.
361 void Method::build_interpreter_method_data(methodHandle method, TRAPS) {
// Do not profile the method if the current thread holds the pending list lock,
// which avoids deadlock when acquiring the MethodData_lock.
364 if (InstanceRefKlass::owns_pending_list_lock((JavaThread*)THREAD)) {
365 return;
366 }
367
368 // Grab a lock here to prevent multiple
369 // MethodData*s from being created.
370 MutexLocker ml(MethodData_lock, THREAD);
371 if (method->method_data() == NULL) {
372 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
373 MethodData* method_data = MethodData::allocate(loader_data, method, CHECK);
374 method->set_method_data(method_data);
375 if (PrintMethodData && (Verbose || WizardMode)) {
376 ResourceMark rm(THREAD);
377 tty->print("build_interpreter_method_data for ");
378 method->print_name(tty);
379 tty->cr();
380 // At the end of the run, the MDO, full of data, will be dumped.
381 }
382 }
383 }
384
385 MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
386 methodHandle mh(m);
387 ClassLoaderData* loader_data = mh->method_holder()->class_loader_data();
388 MethodCounters* counters = MethodCounters::allocate(loader_data, CHECK_NULL);
389 if (!mh->init_method_counters(counters)) {
390 MetadataFactory::free_metadata(loader_data, counters);
391 }
392 return mh->method_counters();
393 }
394
395 void Method::cleanup_inline_caches() {
396 // The current system doesn't use inline caches in the interpreter
397 // => nothing to do (keep this method around for future use)
398 }
399
400
401 int Method::extra_stack_words() {
402 // not an inline function, to avoid a header dependency on Interpreter
403 return extra_stack_entries() * Interpreter::stackElementSize;
404 }
405
406
407 void Method::compute_size_of_parameters(Thread *thread) {
408 ArgumentSizeComputer asc(signature());
409 set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));
410 }
411
412 BasicType Method::result_type() const {
413 ResultTypeFinder rtf(signature());
414 return rtf.type();
415 }
416
417
418 bool Method::is_empty_method() const {
419 return code_size() == 1
420 && *code_base() == Bytecodes::_return;
421 }
422
423
424 bool Method::is_vanilla_constructor() const {
425 // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
426 // which only calls the superclass vanilla constructor and possibly does stores of
427 // zero constants to local fields:
428 //
429 // aload_0
430 // invokespecial
431 // indexbyte1
432 // indexbyte2
433 //
434 // followed by an (optional) sequence of:
435 //
436 // aload_0
437 // aconst_null / iconst_0 / fconst_0 / dconst_0
438 // putfield
439 // indexbyte1
440 // indexbyte2
441 //
442 // followed by:
443 //
444 // return
445
446 assert(name() == vmSymbols::object_initializer_name(), "Should only be called for default constructors");
447 assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
448 int size = code_size();
// Check if the size matches: the prologue (aload_0, invokespecial, two index
// bytes) plus the final return is 5 bytes, and each optional field store
// (aload_0, zero constant, putfield, two index bytes) adds another 5 bytes.
if (size == 0 || size % 5 != 0) return false;
451 address cb = code_base();
452 int last = size - 1;
453 if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
454 // Does not call superclass default constructor
455 return false;
456 }
457 // Check optional sequence
458 for (int i = 4; i < last; i += 5) {
459 if (cb[i] != Bytecodes::_aload_0) return false;
460 if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
461 if (cb[i+2] != Bytecodes::_putfield) return false;
462 }
463 return true;
464 }
465
466
467 bool Method::compute_has_loops_flag() {
468 BytecodeStream bcs(this);
469 Bytecodes::Code bc;
470
471 while ((bc = bcs.next()) >= 0) {
switch (bc) {
473 case Bytecodes::_ifeq:
474 case Bytecodes::_ifnull:
475 case Bytecodes::_iflt:
476 case Bytecodes::_ifle:
477 case Bytecodes::_ifne:
478 case Bytecodes::_ifnonnull:
479 case Bytecodes::_ifgt:
480 case Bytecodes::_ifge:
481 case Bytecodes::_if_icmpeq:
482 case Bytecodes::_if_icmpne:
483 case Bytecodes::_if_icmplt:
484 case Bytecodes::_if_icmpgt:
485 case Bytecodes::_if_icmple:
486 case Bytecodes::_if_icmpge:
487 case Bytecodes::_if_acmpeq:
488 case Bytecodes::_if_acmpne:
489 case Bytecodes::_goto:
490 case Bytecodes::_jsr:
if (bcs.dest() < bcs.next_bci()) _access_flags.set_has_loops();
492 break;
493
494 case Bytecodes::_goto_w:
495 case Bytecodes::_jsr_w:
if (bcs.dest_w() < bcs.next_bci()) _access_flags.set_has_loops();
497 break;
498 }
499 }
500 _access_flags.set_loops_flag_init();
501 return _access_flags.has_loops();
502 }
503
504 bool Method::is_final_method(AccessFlags class_access_flags) const {
505 // or "does_not_require_vtable_entry"
506 // default method or overpass can occur, is not final (reuses vtable entry)
507 // private methods get vtable entries for backward class compatibility.
508 if (is_overpass() || is_default_method()) return false;
509 return is_final() || class_access_flags.is_final();
510 }
511
512 bool Method::is_final_method() const {
513 return is_final_method(method_holder()->access_flags());
514 }
515
516 bool Method::is_default_method() const {
517 if (method_holder() != NULL &&
518 method_holder()->is_interface() &&
519 !is_abstract()) {
520 return true;
521 } else {
522 return false;
523 }
524 }
525
526 bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {
527 if (is_final_method(class_access_flags)) return true;
528 #ifdef ASSERT
529 ResourceMark rm;
530 bool is_nonv = (vtable_index() == nonvirtual_vtable_index);
531 if (class_access_flags.is_interface()) {
532 assert(is_nonv == is_static(), err_msg("is_nonv=%s", name_and_sig_as_C_string()));
533 }
534 #endif
535 assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");
536 return vtable_index() == nonvirtual_vtable_index;
537 }
538
539 bool Method::can_be_statically_bound() const {
540 return can_be_statically_bound(method_holder()->access_flags());
541 }
542
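// An accessor is the 5-byte bytecode pattern of a trivial field getter,
// e.g.: aload_0; getfield #<index> (3 bytes); ireturn or areturn.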
543 bool Method::is_accessor() const {
544 if (code_size() != 5) return false;
545 if (size_of_parameters() != 1) return false;
546 if (java_code_at(0) != Bytecodes::_aload_0 ) return false;
547 if (java_code_at(1) != Bytecodes::_getfield) return false;
548 if (java_code_at(4) != Bytecodes::_areturn &&
549 java_code_at(4) != Bytecodes::_ireturn ) return false;
550 return true;
551 }
552
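// Matches a single constant push followed by a return, e.g. "iconst_0; ireturn"
// (2 bytes) or "bipush 42; ireturn" (3 bytes).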
553 bool Method::is_constant_getter() const {
554 int last_index = code_size() - 1;
555 // Check if the first 1-3 bytecodes are a constant push
556 // and the last bytecode is a return.
557 return (2 <= code_size() && code_size() <= 4 &&
558 Bytecodes::is_const(java_code_at(0)) &&
559 Bytecodes::length_for(java_code_at(0)) == last_index &&
560 Bytecodes::is_return(java_code_at(last_index)));
561 }
562
563 bool Method::is_initializer() const {
564 return name() == vmSymbols::object_initializer_name() || is_static_initializer();
565 }
566
567 bool Method::has_valid_initializer_flags() const {
568 return (is_static() ||
569 method_holder()->major_version() < 51);
570 }
571
572 bool Method::is_static_initializer() const {
573 // For classfiles version 51 or greater, ensure that the clinit method is
574 // static. Non-static methods with the name "<clinit>" are not static
575 // initializers. (older classfiles exempted for backward compatibility)
576 return name() == vmSymbols::class_initializer_name() &&
577 has_valid_initializer_flags();
578 }
579
580
581 objArrayHandle Method::resolved_checked_exceptions_impl(Method* this_oop, TRAPS) {
582 int length = this_oop->checked_exceptions_length();
583 if (length == 0) { // common case
584 return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());
585 } else {
586 methodHandle h_this(THREAD, this_oop);
587 objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
588 objArrayHandle mirrors (THREAD, m_oop);
589 for (int i = 0; i < length; i++) {
590 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
591 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
592 assert(k->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
593 mirrors->obj_at_put(i, k->java_mirror());
594 }
595 return mirrors;
596 }
}
598
599
600 int Method::line_number_from_bci(int bci) const {
601 if (bci == SynchronizationEntryBCI) bci = 0;
assert(bci == 0 || (0 <= bci && bci < code_size()), "illegal bci");
603 int best_bci = 0;
604 int best_line = -1;
605
606 if (has_linenumber_table()) {
607 // The line numbers are a short array of 2-tuples [start_pc, line_number].
608 // Not necessarily sorted and not necessarily one-to-one.
609 CompressedLineNumberReadStream stream(compressed_linenumber_table());
610 while (stream.read_pair()) {
611 if (stream.bci() == bci) {
612 // perfect match
613 return stream.line();
614 } else {
615 // update best_bci/line
616 if (stream.bci() < bci && stream.bci() >= best_bci) {
617 best_bci = stream.bci();
618 best_line = stream.line();
619 }
620 }
621 }
622 }
623 return best_line;
624 }
625
626
627 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
if (constants()->tag_at(klass_index).is_unresolved_klass()) {
629 Thread *thread = Thread::current();
630 Symbol* klass_name = constants()->klass_name_at(klass_index);
631 Handle loader(thread, method_holder()->class_loader());
632 Handle prot (thread, method_holder()->protection_domain());
633 return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
634 } else {
635 return true;
636 }
637 }
638
639
640 bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
641 int klass_index = constants()->klass_ref_index_at(refinfo_index);
642 if (must_be_resolved) {
// Make sure the klass is resolved in the constant pool.
644 if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
645 }
646 return is_klass_loaded_by_klass_index(klass_index);
647 }
648
649
650 void Method::set_native_function(address function, bool post_event_flag) {
651 assert(function != NULL, "use clear_native_function to unregister natives");
652 assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
653 address* native_function = native_function_addr();
654
// Multiple threads can race to install the same native function;
// installing it once is plenty.
657 address current = *native_function;
658 if (current == function) return;
659 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
660 function != NULL) {
661 // native_method_throw_unsatisfied_link_error_entry() should only
662 // be passed when post_event_flag is false.
663 assert(function !=
664 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
665 "post_event_flag mis-match");
666
// post the bind event, and possibly change the bind function
668 JvmtiExport::post_native_method_bind(this, &function);
669 }
670 *native_function = function;
// This function can be called more than once. We must make sure that we always
// use the latest registered method -> check if a stub has already been generated.
// If so, we have to make it not_entrant.
674 nmethod* nm = code(); // Put it into local variable to guard against concurrent updates
675 if (nm != NULL) {
676 nm->make_not_entrant();
677 }
678 }
679
680
681 bool Method::has_native_function() const {
682 if (is_method_handle_intrinsic())
683 return false; // special-cased in SharedRuntime::generate_native_wrapper
684 address func = native_function();
685 return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
686 }
687
688
689 void Method::clear_native_function() {
690 // Note: is_method_handle_intrinsic() is allowed here.
691 set_native_function(
692 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
693 !native_bind_event_is_interesting);
694 clear_code();
695 }
696
697 address Method::critical_native_function() {
698 methodHandle mh(this);
699 return NativeLookup::lookup_critical_entry(mh);
700 }
701
702
703 void Method::set_signature_handler(address handler) {
704 address* signature_handler = signature_handler_addr();
705 *signature_handler = handler;
706 }
707
708
709 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
710 if (PrintCompilation && report) {
711 ttyLocker ttyl;
712 tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
713 if (comp_level == CompLevel_all) {
714 tty->print("all levels ");
715 } else {
716 tty->print("levels ");
717 for (int i = (int)CompLevel_none; i <= comp_level; i++) {
718 tty->print("%d ", i);
719 }
720 }
721 this->print_short_name(tty);
722 int size = this->code_size();
723 if (size > 0) {
724 tty->print(" (%d bytes)", size);
725 }
726 if (reason != NULL) {
727 tty->print(" %s", reason);
728 }
729 tty->cr();
730 }
731 if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
732 ttyLocker ttyl;
733 xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",
734 os::current_thread_id(), is_osr, comp_level);
735 if (reason != NULL) {
736 xtty->print(" reason=\'%s\'", reason);
737 }
738 xtty->method(this);
739 xtty->stamp();
740 xtty->end_elem();
741 }
742 }
743
744 bool Method::is_always_compilable() const {
745 // Generated adapters must be compiled
746 if (is_method_handle_intrinsic() && is_synthetic()) {
747 assert(!is_not_c1_compilable(), "sanity check");
748 assert(!is_not_c2_compilable(), "sanity check");
749 return true;
750 }
751
752 return false;
753 }
754
755 bool Method::is_not_compilable(int comp_level) const {
756 if (number_of_breakpoints() > 0)
757 return true;
758 if (is_always_compilable())
759 return false;
760 if (comp_level == CompLevel_any)
761 return is_not_c1_compilable() || is_not_c2_compilable();
762 if (is_c1_compile(comp_level))
763 return is_not_c1_compilable();
764 if (is_c2_compile(comp_level))
765 return is_not_c2_compilable();
766 return false;
767 }
768
// Call this when the compiler finds that this method is not compilable.
770 void Method::set_not_compilable(int comp_level, bool report, const char* reason) {
771 if (is_always_compilable()) {
772 // Don't mark a method which should be always compilable
773 return;
774 }
775 print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
776 if (comp_level == CompLevel_all) {
777 set_not_c1_compilable();
778 set_not_c2_compilable();
779 } else {
780 if (is_c1_compile(comp_level))
781 set_not_c1_compilable();
782 if (is_c2_compile(comp_level))
783 set_not_c2_compilable();
784 }
785 CompilationPolicy::policy()->disable_compilation(this);
786 assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");
787 }
788
789 bool Method::is_not_osr_compilable(int comp_level) const {
790 if (is_not_compilable(comp_level))
791 return true;
792 if (comp_level == CompLevel_any)
793 return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
794 if (is_c1_compile(comp_level))
795 return is_not_c1_osr_compilable();
796 if (is_c2_compile(comp_level))
797 return is_not_c2_osr_compilable();
798 return false;
799 }
800
801 void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
802 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
803 if (comp_level == CompLevel_all) {
804 set_not_c1_osr_compilable();
805 set_not_c2_osr_compilable();
806 } else {
807 if (is_c1_compile(comp_level))
808 set_not_c1_osr_compilable();
809 if (is_c2_compile(comp_level))
810 set_not_c2_osr_compilable();
811 }
812 CompilationPolicy::policy()->disable_compilation(this);
813 assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
814 }
815
816 // Revert to using the interpreter and clear out the nmethod
817 void Method::clear_code() {
818
// _adapter may be NULL if c2i adapters have not been made yet.
// This should only happen at allocation time.
821 if (_adapter == NULL) {
822 _from_compiled_entry = NULL;
823 } else {
824 _from_compiled_entry = _adapter->get_c2i_entry();
825 }
826 OrderAccess::storestore();
827 _from_interpreted_entry = _i2i_entry;
828 OrderAccess::storestore();
829 _code = NULL;
830 }
831
832 // Called by class data sharing to remove any entry points (which are not shared)
833 void Method::unlink_method() {
834 _code = NULL;
835 _i2i_entry = NULL;
836 _from_interpreted_entry = NULL;
837 if (is_native()) {
838 *native_function_addr() = NULL;
839 set_signature_handler(NULL);
840 }
841 NOT_PRODUCT(set_compiled_invocation_count(0);)
842 _adapter = NULL;
843 _from_compiled_entry = NULL;
844
845 // In case of DumpSharedSpaces, _method_data should always be NULL.
846 //
847 // During runtime (!DumpSharedSpaces), when we are cleaning a
848 // shared class that failed to load, this->link_method() may
849 // have already been called (before an exception happened), so
850 // this->_method_data may not be NULL.
851 assert(!DumpSharedSpaces || _method_data == NULL, "unexpected method data?");
852
853 set_method_data(NULL);
854 clear_method_counters();
855 }
856
// Called when the method_holder is getting linked. Set up entry points so the method
// is ready to be called from the interpreter, compiler, and vtables.
859 void Method::link_method(methodHandle h_method, TRAPS) {
860 // If the code cache is full, we may reenter this function for the
861 // leftover methods that weren't linked.
862 if (_i2i_entry != NULL) return;
863
864 assert(_adapter == NULL, "init'd to NULL" );
865 assert( _code == NULL, "nothing compiled yet" );
866
// Set up the interpreter entry point
868 assert(this == h_method(), "wrong h_method()" );
869 address entry = Interpreter::entry_for_method(h_method);
870 assert(entry != NULL, "interpreter entry must be non-null");
871 // Sets both _i2i_entry and _from_interpreted_entry
872 set_interpreter_entry(entry);
873
874 // Don't overwrite already registered native entries.
875 if (is_native() && !has_native_function()) {
876 set_native_function(
877 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
878 !native_bind_event_is_interesting);
879 }
880
// Set up the compiler entry point. This is made eagerly, so we do not need
882 // special handling of vtables. An alternative is to make adapters more
883 // lazily by calling make_adapter() from from_compiled_entry() for the
884 // normal calls. For vtable calls life gets more complicated. When a
885 // call-site goes mega-morphic we need adapters in all methods which can be
886 // called from the vtable. We need adapters on such methods that get loaded
887 // later. Ditto for mega-morphic itable calls. If this proves to be a
888 // problem we'll make these lazily later.
889 (void) make_adapters(h_method, CHECK);
890
// ONLY USE the h_method now, as make_adapters may have blocked
892
893 }
894
895 address Method::make_adapters(methodHandle mh, TRAPS) {
896 // Adapters for compiled code are made eagerly here. They are fairly
897 // small (generally < 100 bytes) and quick to make (and cached and shared)
898 // so making them eagerly shouldn't be too expensive.
899 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
if (adapter == NULL) {
901 THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");
902 }
903
904 mh->set_adapter_entry(adapter);
905 mh->_from_compiled_entry = adapter->get_c2i_entry();
906 return adapter->get_c2i_entry();
907 }
908
909 void Method::restore_unshareable_info(TRAPS) {
910 // Since restore_unshareable_info can be called more than once for a method, don't
911 // redo any work. If this field is restored, there is nothing to do.
912 if (_from_compiled_entry == NULL) {
913 // restore method's vtable by calling a virtual function
914 restore_vtable();
915
916 methodHandle mh(THREAD, this);
917 link_method(mh, CHECK);
918 }
919 }
920
921
// verified_code_entry() must be called when an invoke is resolved
// on this method.

// It returns the compiled code entry point, after asserting it is not null.
// This function is called after potential safepoints, so that the nmethod
// or adapter it points to is still live and valid.
// This function must not hit a safepoint!
929 address Method::verified_code_entry() {
930 debug_only(No_Safepoint_Verifier nsv;)
931 assert(_from_compiled_entry != NULL, "must be set");
932 return _from_compiled_entry;
933 }
934
935 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
936 // (could be racing a deopt).
937 // Not inline to avoid circular ref.
938 bool Method::check_code() const {
// _code may be cached in a register or local; there's a race on the value of the field.
940 nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
941 return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
942 }
943
// Install compiled code. Once installed, it can execute instantly.
945 void Method::set_code(methodHandle mh, nmethod *code) {
assert(code, "use clear_code to remove code");
assert(mh->check_code(), "");
948
949 guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
950
951 // These writes must happen in this order, because the interpreter will
952 // directly jump to from_interpreted_entry which jumps to an i2c adapter
953 // which jumps to _from_compiled_entry.
954 mh->_code = code; // Assign before allowing compiled code to exec
955
956 int comp_level = code->comp_level();
957 // In theory there could be a race here. In practice it is unlikely
958 // and not worth worrying about.
959 if (comp_level > mh->highest_comp_level()) {
960 mh->set_highest_comp_level(comp_level);
961 }
962
963 OrderAccess::storestore();
964 #ifdef SHARK
965 mh->_from_interpreted_entry = code->insts_begin();
966 #else //!SHARK
967 mh->_from_compiled_entry = code->verified_entry_point();
968 OrderAccess::storestore();
// Now the compiled code can execute.
970 if (!mh->is_method_handle_intrinsic())
971 mh->_from_interpreted_entry = mh->get_i2c_entry();
972 #endif //!SHARK
973 }
974
975
976 bool Method::is_overridden_in(Klass* k) const {
977 InstanceKlass* ik = InstanceKlass::cast(k);
978
979 if (ik->is_interface()) return false;
980
// If the method's holder is an interface, we skip it, except if the
// method is a miranda method
if (method_holder()->is_interface()) {
// Check that the method is not a miranda method
if (ik->lookup_method(name(), signature()) == NULL) {
// No implementation exists, so it is a miranda method
987 return false;
988 }
989 return true;
990 }
991
assert(ik->is_subclass_of(method_holder()), "should be subclass");
993 assert(ik->vtable() != NULL, "vtable should exist");
994 if (!has_vtable_index()) {
995 return false;
996 } else {
997 Method* vt_m = ik->method_at_vtable(vtable_index());
998 return vt_m != this;
999 }
1000 }
1001
1002
1003 // give advice about whether this Method* should be cached or not
1004 bool Method::should_not_be_cached() const {
1005 if (is_old()) {
1006 // This method has been redefined. It is either EMCP or obsolete
1007 // and we don't want to cache it because that would pin the method
1008 // down and prevent it from being collectible if and when it
1009 // finishes executing.
1010 return true;
1011 }
1012
1013 // caching this method should be just fine
1014 return false;
1015 }
1016
1017
1018 /**
1019 * Returns true if this is one of the specially treated methods for
1020 * security related stack walks (like Reflection.getCallerClass).
1021 */
1022 bool Method::is_ignored_by_security_stack_walk() const {
1023 const bool use_new_reflection = JDK_Version::is_gte_jdk14x_version() && UseNewReflection;
1024
1025 if (intrinsic_id() == vmIntrinsics::_invoke) {
1026 // This is Method.invoke() -- ignore it
1027 return true;
1028 }
1029 if (use_new_reflection &&
1030 method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {
// This is an auxiliary frame -- ignore it
1032 return true;
1033 }
1034 if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {
1035 // This is an internal adapter frame for method handles -- ignore it
1036 return true;
1037 }
1038 return false;
1039 }
1040
1041
1042 // Constant pool structure for invoke methods:
1043 enum {
1044 _imcp_invoke_name = 1, // utf8: 'invokeExact', etc.
1045 _imcp_invoke_signature, // utf8: (variable Symbol*)
1046 _imcp_limit
1047 };
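// For example, the invokeExact intrinsic gets a tiny constant pool whose
// _imcp_invoke_name slot holds "invokeExact" and whose _imcp_invoke_signature
// slot holds the signature passed to make_method_handle_intrinsic below.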
1048
1049 // Test if this method is an MH adapter frame generated by Java code.
1050 // Cf. java/lang/invoke/InvokerBytecodeGenerator
1051 bool Method::is_compiled_lambda_form() const {
1052 return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;
1053 }
1054
1055 // Test if this method is an internal MH primitive method.
1056 bool Method::is_method_handle_intrinsic() const {
1057 vmIntrinsics::ID iid = intrinsic_id();
1058 return (MethodHandles::is_signature_polymorphic(iid) &&
1059 MethodHandles::is_signature_polymorphic_intrinsic(iid));
1060 }
1061
1062 bool Method::has_member_arg() const {
1063 vmIntrinsics::ID iid = intrinsic_id();
1064 return (MethodHandles::is_signature_polymorphic(iid) &&
1065 MethodHandles::has_member_arg(iid));
1066 }
1067
1068 // Make an instance of a signature-polymorphic internal MH primitive.
1069 methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
1070 Symbol* signature,
1071 TRAPS) {
1072 ResourceMark rm;
1073 methodHandle empty;
1074
1075 KlassHandle holder = SystemDictionary::MethodHandle_klass();
1076 Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);
1077 assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
1078 if (TraceMethodHandles) {
1079 tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
1080 }
1081
1082 // invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
1083 name->increment_refcount();
1084 signature->increment_refcount();
1085
1086 int cp_length = _imcp_limit;
1087 ClassLoaderData* loader_data = holder->class_loader_data();
1088 constantPoolHandle cp;
1089 {
1090 ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
1091 cp = constantPoolHandle(THREAD, cp_oop);
1092 }
1093 cp->set_pool_holder(InstanceKlass::cast(holder()));
1094 cp->symbol_at_put(_imcp_invoke_name, name);
1095 cp->symbol_at_put(_imcp_invoke_signature, signature);
1096 cp->set_has_preresolution();
1097
1098 // decide on access bits: public or not?
1099 int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
1100 bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
1101 if (must_be_static) flags_bits |= JVM_ACC_STATIC;
1102 assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
1103
1104 methodHandle m;
1105 {
1106 InlineTableSizes sizes;
1107 Method* m_oop = Method::allocate(loader_data, 0,
1108 accessFlags_from(flags_bits), &sizes,
1109 ConstMethod::NORMAL, CHECK_(empty));
1110 m = methodHandle(THREAD, m_oop);
1111 }
1112 m->set_constants(cp());
1113 m->set_name_index(_imcp_invoke_name);
1114 m->set_signature_index(_imcp_invoke_signature);
1115 assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
1116 assert(m->signature() == signature, "");
1117 ResultTypeFinder rtf(signature);
1118 m->constMethod()->set_result_type(rtf.type());
1119 m->compute_size_of_parameters(THREAD);
1120 m->init_intrinsic_id();
1121 assert(m->is_method_handle_intrinsic(), "");
1122 #ifdef ASSERT
1123 if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id())) m->print();
1124 assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");
1125 assert(m->intrinsic_id() == iid, "correctly predicted iid");
1126 #endif //ASSERT
1127
1128 // Finally, set up its entry points.
1129 assert(m->can_be_statically_bound(), "");
1130 m->set_vtable_index(Method::nonvirtual_vtable_index);
1131 m->link_method(m, CHECK_(empty));
1132
1133 if (TraceMethodHandles && (Verbose || WizardMode))
1134 m->print_on(tty);
1135
1136 return m;
1137 }
1138
1139 Klass* Method::check_non_bcp_klass(Klass* klass) {
1140 if (klass != NULL && klass->class_loader() != NULL) {
1141 if (klass->oop_is_objArray())
1142 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1143 return klass;
1144 }
1145 return NULL;
1146 }
1147
1148
1149 methodHandle Method::clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
1150 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
1151 // Code below does not work for native methods - they should never get rewritten anyway
1152 assert(!m->is_native(), "cannot rewrite native methods");
1153 // Allocate new Method*
1154 AccessFlags flags = m->access_flags();
1155
1156 ConstMethod* cm = m->constMethod();
1157 int checked_exceptions_len = cm->checked_exceptions_length();
1158 int localvariable_len = cm->localvariable_table_length();
1159 int exception_table_len = cm->exception_table_length();
1160 int method_parameters_len = cm->method_parameters_length();
1161 int method_annotations_len = cm->method_annotations_length();
1162 int parameter_annotations_len = cm->parameter_annotations_length();
1163 int type_annotations_len = cm->type_annotations_length();
1164 int default_annotations_len = cm->default_annotations_length();
1165
1166 InlineTableSizes sizes(
1167 localvariable_len,
1168 new_compressed_linenumber_size,
1169 exception_table_len,
1170 checked_exceptions_len,
1171 method_parameters_len,
1172 cm->generic_signature_index(),
1173 method_annotations_len,
1174 parameter_annotations_len,
1175 type_annotations_len,
1176 default_annotations_len,
1177 0);
1178
1179 ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
1180 Method* newm_oop = Method::allocate(loader_data,
1181 new_code_length,
1182 flags,
1183 &sizes,
1184 m->method_type(),
1185 CHECK_(methodHandle()));
1186 methodHandle newm (THREAD, newm_oop);
1187 int new_method_size = newm->method_size();
1188
1189 // Create a shallow copy of Method part, but be careful to preserve the new ConstMethod*
1190 ConstMethod* newcm = newm->constMethod();
1191 int new_const_method_size = newm->constMethod()->size();
1192
1193 memcpy(newm(), m(), sizeof(Method));
1194
1195 // Create shallow copy of ConstMethod.
1196 memcpy(newcm, m->constMethod(), sizeof(ConstMethod));
1197
1198 // Reset correct method/const method, method size, and parameter info
1199 newm->set_constMethod(newcm);
1200 newm->constMethod()->set_code_size(new_code_length);
1201 newm->constMethod()->set_constMethod_size(new_const_method_size);
1202 newm->set_method_size(new_method_size);
1203 assert(newm->code_size() == new_code_length, "check");
1204 assert(newm->method_parameters_length() == method_parameters_len, "check");
1205 assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
1206 assert(newm->exception_table_length() == exception_table_len, "check");
1207 assert(newm->localvariable_table_length() == localvariable_len, "check");
1208 // Copy new byte codes
1209 memcpy(newm->code_base(), new_code, new_code_length);
1210 // Copy line number table
1211 if (new_compressed_linenumber_size > 0) {
1212 memcpy(newm->compressed_linenumber_table(),
1213 new_compressed_linenumber_table,
1214 new_compressed_linenumber_size);
1215 }
1216 // Copy method_parameters
1217 if (method_parameters_len > 0) {
1218 memcpy(newm->method_parameters_start(),
1219 m->method_parameters_start(),
1220 method_parameters_len * sizeof(MethodParametersElement));
1221 }
1222 // Copy checked_exceptions
1223 if (checked_exceptions_len > 0) {
1224 memcpy(newm->checked_exceptions_start(),
1225 m->checked_exceptions_start(),
1226 checked_exceptions_len * sizeof(CheckedExceptionElement));
1227 }
1228 // Copy exception table
1229 if (exception_table_len > 0) {
1230 memcpy(newm->exception_table_start(),
1231 m->exception_table_start(),
1232 exception_table_len * sizeof(ExceptionTableElement));
1233 }
1234 // Copy local variable number table
1235 if (localvariable_len > 0) {
1236 memcpy(newm->localvariable_table_start(),
1237 m->localvariable_table_start(),
1238 localvariable_len * sizeof(LocalVariableTableElement));
1239 }
1240 // Copy stackmap table
1241 if (m->has_stackmap_table()) {
1242 int code_attribute_length = m->stackmap_data()->length();
1243 Array<u1>* stackmap_data =
1244 MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);
1245 memcpy((void*)stackmap_data->adr_at(0),
1246 (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
1247 newm->set_stackmap_data(stackmap_data);
1248 }
1249
1250 // copy annotations over to new method
1251 newcm->copy_annotations_from(cm);
1252 return newm;
1253 }
1254
1255 vmSymbols::SID Method::klass_id_for_intrinsics(Klass* holder) {
// If the loader is not the default (boot) loader (i.e., != NULL), we can't know
// the intrinsics because we are not loading from core libraries.
// Exception: the AES intrinsics come from lib/ext/sunjce_provider.jar,
// which does not use the default class loader, so we check for its loader here.
1260 InstanceKlass* ik = InstanceKlass::cast(holder);
1261 if ((ik->class_loader() != NULL) && !SystemDictionary::is_ext_class_loader(ik->class_loader())) {
1262 return vmSymbols::NO_SID; // regardless of name, no intrinsics here
1263 }
1264
1265 // see if the klass name is well-known:
1266 Symbol* klass_name = ik->name();
1267 return vmSymbols::find_sid(klass_name);
1268 }
1269
1270 void Method::init_intrinsic_id() {
1271 assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
1272 const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
1273 assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
1274 assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");
1275
1276 // the klass name is well-known:
1277 vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());
1278 assert(klass_id != vmSymbols::NO_SID, "caller responsibility");
1279
1280 // ditto for method and signature:
1281 vmSymbols::SID name_id = vmSymbols::find_sid(name());
1282 if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1283 && name_id == vmSymbols::NO_SID)
1284 return;
1285 vmSymbols::SID sig_id = vmSymbols::find_sid(signature());
1286 if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1287 && sig_id == vmSymbols::NO_SID) return;
1288 jshort flags = access_flags().as_short();
1289
1290 vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1291 if (id != vmIntrinsics::_none) {
1292 set_intrinsic_id(id);
1293 return;
1294 }
1295
1296 // A few slightly irregular cases:
1297 switch (klass_id) {
1298 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):
1299 // Second chance: check in regular Math.
1300 switch (name_id) {
1301 case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):
1302 case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):
1303 case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):
1304 // pretend it is the corresponding method in the non-strict class:
1305 klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
1306 id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1307 break;
1308 }
1309 break;
1310
1311 // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.
1312 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
1313 if (!is_native()) break;
1314 id = MethodHandles::signature_polymorphic_name_id(method_holder(), name());
1315 if (is_static() != MethodHandles::is_signature_polymorphic_static(id))
1316 id = vmIntrinsics::_none;
1317 break;
1318 }
1319
1320 if (id != vmIntrinsics::_none) {
1321 // Set up its iid. It is an alias method.
1322 set_intrinsic_id(id);
1323 return;
1324 }
1325 }
1326
1327 // These two methods are static since a GC may move the Method
1328 bool Method::load_signature_classes(methodHandle m, TRAPS) {
1329 if (THREAD->is_Compiler_thread()) {
// There is nothing useful this routine can do from within the compiler thread.
1331 // Hopefully, the signature contains only well-known classes.
1332 // We could scan for this and return true/false, but the caller won't care.
1333 return false;
1334 }
1335 bool sig_is_loaded = true;
1336 Handle class_loader(THREAD, m->method_holder()->class_loader());
1337 Handle protection_domain(THREAD, m->method_holder()->protection_domain());
1338 ResourceMark rm(THREAD);
1339 Symbol* signature = m->signature();
for (SignatureStream ss(signature); !ss.is_done(); ss.next()) {
1341 if (ss.is_object()) {
1342 Symbol* sym = ss.as_symbol(CHECK_(false));
1343 Symbol* name = sym;
1344 Klass* klass = SystemDictionary::resolve_or_null(name, class_loader,
1345 protection_domain, THREAD);
1346 // We are loading classes eagerly. If a ClassNotFoundException or
1347 // a LinkageError was generated, be sure to ignore it.
1348 if (HAS_PENDING_EXCEPTION) {
1349 if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||
1350 PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {
1351 CLEAR_PENDING_EXCEPTION;
1352 } else {
1353 return false;
1354 }
1355 }
if (klass == NULL) { sig_is_loaded = false; }
1357 }
1358 }
1359 return sig_is_loaded;
1360 }
1361
1362 bool Method::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {
1363 Handle class_loader(THREAD, m->method_holder()->class_loader());
1364 Handle protection_domain(THREAD, m->method_holder()->protection_domain());
1365 ResourceMark rm(THREAD);
1366 Symbol* signature = m->signature();
for (SignatureStream ss(signature); !ss.is_done(); ss.next()) {
1368 if (ss.type() == T_OBJECT) {
1369 Symbol* name = ss.as_symbol_or_null();
1370 if (name == NULL) return true;
1371 Klass* klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);
1372 if (klass == NULL) return true;
1373 }
1374 }
1375 return false;
1376 }
1377
// Exposed so field engineers can debug the VM
1379 void Method::print_short_name(outputStream* st) {
1380 ResourceMark rm;
1381 #ifdef PRODUCT
1382 st->print(" %s::", method_holder()->external_name());
1383 #else
1384 st->print(" %s::", method_holder()->internal_name());
1385 #endif
1386 name()->print_symbol_on(st);
1387 if (WizardMode) signature()->print_symbol_on(st);
1388 else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1389 MethodHandles::print_as_basic_type_signature_on(st, signature(), true);
1390 }
1391
// Comparator for sorting an object array containing
// Method*s.
1394 static int method_comparator(Method* a, Method* b) {
1395 return a->name()->fast_compare(b->name());
1396 }
1397
1398 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
// default_methods also uses this (without resetting the idnums) for fast find_method
1400 void Method::sort_methods(Array<Method*>* methods, bool idempotent, bool set_idnums) {
1401 int length = methods->length();
1402 if (length > 1) {
1403 {
1404 No_Safepoint_Verifier nsv;
1405 QuickSort::sort<Method*>(methods->data(), length, method_comparator, idempotent);
1406 }
1407 // Reset method ordering
1408 if (set_idnums) {
1409 for (int i = 0; i < length; i++) {
1410 Method* m = methods->at(i);
1411 m->set_method_idnum(i);
1412 m->set_orig_method_idnum(i);
1413 }
1414 }
1415 }
1416 }
1417
1418 //-----------------------------------------------------------------------------------
1419 // Non-product code unless JVM/TI needs it
1420
1421 #if !defined(PRODUCT) || INCLUDE_JVMTI
1422 class SignatureTypePrinter : public SignatureTypeNames {
1423 private:
1424 outputStream* _st;
1425 bool _use_separator;
1426
1427 void type_name(const char* name) {
1428 if (_use_separator) _st->print(", ");
1429 _st->print("%s", name);
1430 _use_separator = true;
1431 }
1432
1433 public:
1434 SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1435 _st = st;
1436 _use_separator = false;
1437 }
1438
1439 void print_parameters() { _use_separator = false; iterate_parameters(); }
1440 void print_returntype() { _use_separator = false; iterate_returntype(); }
1441 };
1442
1443
1444 void Method::print_name(outputStream* st) {
1445 Thread *thread = Thread::current();
1446 ResourceMark rm(thread);
1447 SignatureTypePrinter sig(signature(), st);
1448 st->print("%s ", is_static() ? "static" : "virtual");
1449 sig.print_returntype();
1450 st->print(" %s.", method_holder()->internal_name());
1451 name()->print_symbol_on(st);
1452 st->print("(");
1453 sig.print_parameters();
1454 st->print(")");
1455 }
1456 #endif // !PRODUCT || INCLUDE_JVMTI
1457
1458
1459 //-----------------------------------------------------------------------------------
1460 // Non-product code
1461
1462 #ifndef PRODUCT
1463 void Method::print_codes_on(outputStream* st) const {
1464 print_codes_on(0, code_size(), st);
1465 }
1466
1467 void Method::print_codes_on(int from, int to, outputStream* st) const {
1468 Thread *thread = Thread::current();
1469 ResourceMark rm(thread);
1470 methodHandle mh (thread, (Method*)this);
1471 BytecodeStream s(mh);
1472 s.set_interval(from, to);
1473 BytecodeTracer::set_closure(BytecodeTracer::std_closure());
1474 while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);
1475 }
1476 #endif // not PRODUCT
1477
1478
1479 // Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
1480 // between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
1481 // we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
1482 // as end-of-stream terminator.
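// For example (mirroring read_pair() below): deltas (bci 4, line 2) encode as
// the single byte (4 << 3) | 2 == 0x22. A single byte of 0x00 or 0xFF can never
// be produced by this form, since those values are reserved for the terminator
// and the escape character.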
1483
1484 void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {
// The bci and line number deltas do not compress into a single byte.
1486 // Write out escape character and use regular compression for bci and line number.
1487 write_byte((jubyte)0xFF);
1488 write_signed_int(bci_delta);
1489 write_signed_int(line_delta);
1490 }
1491
1492 // See comment in method.hpp which explains why this exists.
1493 #if defined(_M_AMD64) && _MSC_VER >= 1400
1494 #pragma optimize("", off)
1495 void CompressedLineNumberWriteStream::write_pair(int bci, int line) {
1496 write_pair_inline(bci, line);
1497 }
1498 #pragma optimize("", on)
1499 #endif
1500
1501 CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
1502 _bci = 0;
1503 _line = 0;
}
1505
1506
1507 bool CompressedLineNumberReadStream::read_pair() {
1508 jubyte next = read_byte();
1509 // Check for terminator
1510 if (next == 0) return false;
1511 if (next == 0xFF) {
1512 // Escape character, regular compression used
1513 _bci += read_signed_int();
1514 _line += read_signed_int();
1515 } else {
1516 // Single byte compression used
1517 _bci += next >> 3;
1518 _line += next & 0x7;
1519 }
1520 return true;
1521 }
1522
1523
1524 Bytecodes::Code Method::orig_bytecode_at(int bci) const {
1525 BreakpointInfo* bp = method_holder()->breakpoints();
1526 for (; bp != NULL; bp = bp->next()) {
1527 if (bp->match(this, bci)) {
1528 return bp->orig_bytecode();
1529 }
1530 }
1531 {
1532 ResourceMark rm;
1533 fatal(err_msg("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci));
1534 }
1535 return Bytecodes::_shouldnotreachhere;
1536 }
1537
1538 void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
1539 assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
1540 BreakpointInfo* bp = method_holder()->breakpoints();
1541 for (; bp != NULL; bp = bp->next()) {
1542 if (bp->match(this, bci)) {
1543 bp->set_orig_bytecode(code);
1544 // and continue, in case there is more than one
1545 }
1546 }
1547 }
1548
1549 void Method::set_breakpoint(int bci) {
1550 InstanceKlass* ik = method_holder();
1551 BreakpointInfo *bp = new BreakpointInfo(this, bci);
1552 bp->set_next(ik->breakpoints());
1553 ik->set_breakpoints(bp);
  // Do this last: patch the bytecode only after the BreakpointInfo is linked
  // into the holder's list, so the original bytecode can already be recovered
  // via orig_bytecode_at().
1555 bp->set(this);
1556 }
1557
1558 static void clear_matches(Method* m, int bci) {
1559 InstanceKlass* ik = m->method_holder();
1560 BreakpointInfo* prev_bp = NULL;
1561 BreakpointInfo* next_bp;
1562 for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
1563 next_bp = bp->next();
    // A bci value of -1 is used to delete all breakpoints in method m (e.g. clear_all_breakpoints).
1565 if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
1566 // do this first:
1567 bp->clear(m);
1568 // unhook it
1569 if (prev_bp != NULL)
1570 prev_bp->set_next(next_bp);
1571 else
1572 ik->set_breakpoints(next_bp);
1573 delete bp;
      // When a class is redefined, JVMTI sets a breakpoint in all versions of the
      // EMCP methods at the same location, so the BreakpointInfo list can contain
      // multiple nodes matching (method_index, bci). For a clear_breakpoint
      // request we delete just one breakpoint and keep the BreakpointInfo nodes
      // of the other method versions for future clear_breakpoint requests.
      // A bci value of -1 clears all breakpoints (see clear_all_breakpoints),
      // which is called when the class is unloaded; then we delete the breakpoint
      // information for all versions of the method. We may not correctly restore
      // the original bytecode in every method version, but that is ok: the class
      // is being unloaded, so these methods won't be used anymore.
1584 if (bci >= 0) {
1585 break;
1586 }
1587 } else {
1588 // This one is a keeper.
1589 prev_bp = bp;
1590 }
1591 }
1592 }
1593
1594 void Method::clear_breakpoint(int bci) {
1595 assert(bci >= 0, "");
1596 clear_matches(this, bci);
1597 }
1598
1599 void Method::clear_all_breakpoints() {
1600 clear_matches(this, -1);
1601 }
1602
1603
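// With TieredCompilation the counts are split between the MethodCounters and
// the MethodData: the two counters are summed, and if either has overflowed
// (its carry bit is set) the result saturates at InvocationCounter::count_limit.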
1604 int Method::invocation_count() {
1605 MethodCounters *mcs = method_counters();
1606 if (TieredCompilation) {
1607 MethodData* const mdo = method_data();
1608 if (((mcs != NULL) ? mcs->invocation_counter()->carry() : false) ||
1609 ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
1610 return InvocationCounter::count_limit;
1611 } else {
1612 return ((mcs != NULL) ? mcs->invocation_counter()->count() : 0) +
1613 ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
1614 }
1615 } else {
1616 return (mcs == NULL) ? 0 : mcs->invocation_counter()->count();
1617 }
1618 }
1619
1620 int Method::backedge_count() {
1621 MethodCounters *mcs = method_counters();
1622 if (TieredCompilation) {
1623 MethodData* const mdo = method_data();
1624 if (((mcs != NULL) ? mcs->backedge_counter()->carry() : false) ||
1625 ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
1626 return InvocationCounter::count_limit;
1627 } else {
1628 return ((mcs != NULL) ? mcs->backedge_counter()->count() : 0) +
1629 ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
1630 }
1631 } else {
1632 return (mcs == NULL) ? 0 : mcs->backedge_counter()->count();
1633 }
1634 }
1635
1636 int Method::highest_comp_level() const {
1637 const MethodCounters* mcs = method_counters();
1638 if (mcs != NULL) {
1639 return mcs->highest_comp_level();
1640 } else {
1641 return CompLevel_none;
1642 }
1643 }
1644
1645 int Method::highest_osr_comp_level() const {
1646 const MethodCounters* mcs = method_counters();
1647 if (mcs != NULL) {
1648 return mcs->highest_osr_comp_level();
1649 } else {
1650 return CompLevel_none;
1651 }
1652 }
1653
1654 void Method::set_highest_comp_level(int level) {
1655 MethodCounters* mcs = method_counters();
1656 if (mcs != NULL) {
1657 mcs->set_highest_comp_level(level);
1658 }
1659 }
1660
1661 void Method::set_highest_osr_comp_level(int level) {
1662 MethodCounters* mcs = method_counters();
1663 if (mcs != NULL) {
1664 mcs->set_highest_osr_comp_level(level);
1665 }
1666 }
1667
1668 BreakpointInfo::BreakpointInfo(Method* m, int bci) {
1669 _bci = bci;
1670 _name_index = m->name_index();
1671 _signature_index = m->signature_index();
1672 _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
1673 if (_orig_bytecode == Bytecodes::_breakpoint)
1674 _orig_bytecode = m->orig_bytecode_at(_bci);
1675 _next = NULL;
1676 }
1677
1678 void BreakpointInfo::set(Method* method) {
1679 #ifdef ASSERT
1680 {
1681 Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
1682 if (code == Bytecodes::_breakpoint)
1683 code = method->orig_bytecode_at(_bci);
1684 assert(orig_bytecode() == code, "original bytecode must be the same");
1685 }
1686 #endif
1687 Thread *thread = Thread::current();
1688 *method->bcp_from(_bci) = Bytecodes::_breakpoint;
1689 method->incr_number_of_breakpoints(thread);
1690 SystemDictionary::notice_modification();
1691 {
1692 // Deoptimize all dependents on this method
1693 HandleMark hm(thread);
1694 methodHandle mh(thread, method);
1695 Universe::flush_dependents_on_method(mh);
1696 }
1697 }
1698
1699 void BreakpointInfo::clear(Method* method) {
1700 *method->bcp_from(_bci) = orig_bytecode();
1701 assert(method->number_of_breakpoints() > 0, "must not go negative");
1702 method->decr_number_of_breakpoints(Thread::current());
1703 }
1704
1705 // jmethodID handling
1706
// This is a block-allocating object, sort of like JNIHandleBlock, only a
// lot simpler. There aren't many of these, they aren't long, and they are
// rarely deleted, so we can get away with some suboptimal choices.
// It's allocated on the CHeap because once we allocate a jmethodID, we can
// never get rid of it.
// It would be nice to be able to parameterize the number of methods for
// the null_class_loader, but then we'd have to turn this and ClassLoaderData
// into templates.

// I feel like this brain-dead class should exist somewhere in the STL.
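//
// A sketch of how an id round-trips (illustrative only; 'cld' and 'm' are
// assumed to be a ClassLoaderData* and a Method* already in hand):
//
//   jmethodID id = Method::make_jmethod_id(cld, m);
//   Method* same = *(Method**)id;   // a jmethodID is a pointer to a Method* slot
//   assert(same == m, "holds until the id is repointed during redefinition");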
1717
1718 class JNIMethodBlock : public CHeapObj<mtClass> {
1719 enum { number_of_methods = 8 };
1720
1721 Method* _methods[number_of_methods];
1722 int _top;
1723 JNIMethodBlock* _next;
1724 public:
1725 static Method* const _free_method;
1726
  JNIMethodBlock() : _top(0), _next(NULL) {
    for (int i = 0; i < number_of_methods; i++) _methods[i] = _free_method;
  }
1730
1731 Method** add_method(Method* m) {
1732 if (_top < number_of_methods) {
      // _top points to the next free entry.
1734 int i = _top;
1735 _methods[i] = m;
1736 _top++;
1737 return &_methods[i];
1738 } else if (_top == number_of_methods) {
1739 // if the next free entry ran off the block see if there's a free entry
      for (int i = 0; i < number_of_methods; i++) {
1741 if (_methods[i] == _free_method) {
1742 _methods[i] = m;
1743 return &_methods[i];
1744 }
1745 }
1746 // Only check each block once for frees. They're very unlikely.
      // Increment _top past the end of the block.
1748 _top++;
1749 }
1750 // need to allocate a next block.
1751 if (_next == NULL) {
1752 _next = new JNIMethodBlock();
1753 }
1754 return _next->add_method(m);
1755 }
1756
1757 bool contains(Method** m) {
1758 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
1760 if (&(b->_methods[i]) == m) {
1761 return true;
1762 }
1763 }
1764 }
1765 return false; // not found
1766 }
1767
1768 // Doesn't really destroy it, just marks it as free so it can be reused.
1769 void destroy_method(Method** m) {
1770 #ifdef ASSERT
1771 assert(contains(m), "should be a methodID");
1772 #endif // ASSERT
1773 *m = _free_method;
1774 }
1775
  // During class unloading the methods are cleared, which is different
  // from being freed.
1778 void clear_all_methods() {
1779 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
1781 b->_methods[i] = NULL;
1782 }
1783 }
1784 }
1785 #ifndef PRODUCT
1786 int count_methods() {
1787 // count all allocated methods
1788 int count = 0;
1789 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
1791 if (b->_methods[i] != _free_method) count++;
1792 }
1793 }
1794 return count;
1795 }
1796 #endif // PRODUCT
1797 };
1798
1799 // Something that can't be mistaken for an address or a markOop
1800 Method* const JNIMethodBlock::_free_method = (Method*)55;
1801
1802 // Add a method id to the jmethod_ids
1803 jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) {
1804 ClassLoaderData* cld = loader_data;
1805
1806 if (!SafepointSynchronize::is_at_safepoint()) {
1807 // Have to add jmethod_ids() to class loader data thread-safely.
1808 // Also have to add the method to the list safely, which the cld lock
1809 // protects as well.
1810 MutexLockerEx ml(cld->metaspace_lock(), Mutex::_no_safepoint_check_flag);
1811 if (cld->jmethod_ids() == NULL) {
1812 cld->set_jmethod_ids(new JNIMethodBlock());
1813 }
1814 // jmethodID is a pointer to Method*
1815 return (jmethodID)cld->jmethod_ids()->add_method(m);
1816 } else {
1817 // At safepoint, we are single threaded and can set this.
1818 if (cld->jmethod_ids() == NULL) {
1819 cld->set_jmethod_ids(new JNIMethodBlock());
1820 }
1821 // jmethodID is a pointer to Method*
1822 return (jmethodID)cld->jmethod_ids()->add_method(m);
1823 }
1824 }
1825
1826 // Mark a jmethodID as free. This is called when there is a data race in
1827 // InstanceKlass while creating the jmethodID cache.
1828 void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
1829 ClassLoaderData* cld = loader_data;
1830 Method** ptr = (Method**)m;
1831 assert(cld->jmethod_ids() != NULL, "should have method handles");
1832 cld->jmethod_ids()->destroy_method(ptr);
1833 }
1834
1835 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
1836 // Can't assert the method_holder is the same because the new method has the
1837 // scratch method holder.
1838 assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
1839 == new_method->method_holder()->class_loader(),
1840 "changing to a different class loader");
1841 // Just change the method in place, jmethodID pointer doesn't change.
1842 *((Method**)jmid) = new_method;
1843 }
1844
1845 bool Method::is_method_id(jmethodID mid) {
1846 Method* m = resolve_jmethod_id(mid);
1847 assert(m != NULL, "should be called with non-null method");
1848 InstanceKlass* ik = m->method_holder();
1849 if (ik == NULL) {
1850 return false;
1851 }
1852 ClassLoaderData* cld = ik->class_loader_data();
1853 if (cld->jmethod_ids() == NULL) return false;
1854 return (cld->jmethod_ids()->contains((Method**)mid));
1855 }
1856
1857 Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
1858 if (mid == NULL) return NULL;
1859 if (!Method::is_method_id(mid)) {
1860 return NULL;
1861 }
1862 Method* o = resolve_jmethod_id(mid);
1863 if (o == NULL || o == JNIMethodBlock::_free_method || !((Metadata*)o)->is_method()) {
1864 return NULL;
1865 }
1866 return o;
}
1868
1869 void Method::set_on_stack(const bool value) {
  // Set both the method itself and its constant pool: a constant pool marked
  // on-stack means that some method referring to it is also on the stack.
1872 constants()->set_on_stack(value);
1873
1874 bool succeeded = _access_flags.set_on_stack(value);
1875 if (value && succeeded) {
1876 MetadataOnStackMark::record(this, Thread::current());
1877 }
1878 }
1879
1880 // Called when the class loader is unloaded to make all methods weak.
1881 void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
1882 loader_data->jmethod_ids()->clear_all_methods();
1883 }
1884
1885 bool Method::has_method_vptr(const void* ptr) {
1886 Method m;
1887 // This assumes that the vtbl pointer is the first word of a C++ object.
1888 // This assumption is also in universe.cpp patch_klass_vtble
1889 void* vtbl2 = dereference_vptr((const void*)&m);
1890 void* this_vtbl = dereference_vptr(ptr);
1891 return vtbl2 == this_vtbl;
1892 }
1893
// Check that this pointer is valid by verifying that the vtbl pointer matches
1895 bool Method::is_valid_method() const {
1896 if (this == NULL) {
1897 return false;
1898 } else if (!is_metaspace_object()) {
1899 return false;
1900 } else {
1901 return has_method_vptr((const void*)this);
1902 }
1903 }
1904
1905 #ifndef PRODUCT
1906 void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
1907 out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
1908 }
1909 #endif // PRODUCT
1910
1911
1912 // Printing
1913
1914 #ifndef PRODUCT
1915
1916 void Method::print_on(outputStream* st) const {
1917 ResourceMark rm;
1918 assert(is_method(), "must be method");
1919 st->print_cr("%s", internal_name());
1920 // get the effect of PrintOopAddress, always, for methods:
  st->print_cr(" - this oop: " INTPTR_FORMAT, (intptr_t)this);
1922 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
  st->print (" - constants: " INTPTR_FORMAT " ", (address)constants());
1924 constants()->print_value_on(st); st->cr();
1925 st->print (" - access: 0x%x ", access_flags().as_int()); access_flags().print_on(st); st->cr();
1926 st->print (" - name: "); name()->print_value_on(st); st->cr();
1927 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
1928 st->print_cr(" - max stack: %d", max_stack());
1929 st->print_cr(" - max locals: %d", max_locals());
1930 st->print_cr(" - size of params: %d", size_of_parameters());
1931 st->print_cr(" - method size: %d", method_size());
1932 if (intrinsic_id() != vmIntrinsics::_none)
1933 st->print_cr(" - intrinsic id: %d %s", intrinsic_id(), vmIntrinsics::name_at(intrinsic_id()));
1934 if (highest_comp_level() != CompLevel_none)
1935 st->print_cr(" - highest level: %d", highest_comp_level());
1936 st->print_cr(" - vtable index: %d", _vtable_index);
1937 st->print_cr(" - i2i entry: " INTPTR_FORMAT, interpreter_entry());
1938 st->print( " - adapters: ");
1939 AdapterHandlerEntry* a = ((Method*)this)->adapter();
1940 if (a == NULL)
1941 st->print_cr(INTPTR_FORMAT, a);
1942 else
1943 a->print_adapter_on(st);
1944 st->print_cr(" - compiled entry " INTPTR_FORMAT, from_compiled_entry());
1945 st->print_cr(" - code size: %d", code_size());
1946 if (code_size() != 0) {
1947 st->print_cr(" - code start: " INTPTR_FORMAT, code_base());
1948 st->print_cr(" - code end (excl): " INTPTR_FORMAT, code_base() + code_size());
1949 }
1950 if (method_data() != NULL) {
1951 st->print_cr(" - method data: " INTPTR_FORMAT, (address)method_data());
1952 }
1953 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
1954 if (checked_exceptions_length() > 0) {
1955 CheckedExceptionElement* table = checked_exceptions_start();
1956 st->print_cr(" - checked ex start: " INTPTR_FORMAT, table);
1957 if (Verbose) {
1958 for (int i = 0; i < checked_exceptions_length(); i++) {
1959 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
1960 }
1961 }
1962 }
1963 if (has_linenumber_table()) {
1964 u_char* table = compressed_linenumber_table();
1965 st->print_cr(" - linenumber start: " INTPTR_FORMAT, table);
1966 if (Verbose) {
1967 CompressedLineNumberReadStream stream(table);
1968 while (stream.read_pair()) {
1969 st->print_cr(" - line %d: %d", stream.line(), stream.bci());
1970 }
1971 }
1972 }
1973 st->print_cr(" - localvar length: %d", localvariable_table_length());
1974 if (localvariable_table_length() > 0) {
1975 LocalVariableTableElement* table = localvariable_table_start();
1976 st->print_cr(" - localvar start: " INTPTR_FORMAT, table);
1977 if (Verbose) {
1978 for (int i = 0; i < localvariable_table_length(); i++) {
1979 int bci = table[i].start_bci;
1980 int len = table[i].length;
1981 const char* name = constants()->printable_name_at(table[i].name_cp_index);
1982 const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
1983 int slot = table[i].slot;
1984 st->print_cr(" - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
1985 }
1986 }
1987 }
1988 if (code() != NULL) {
1989 st->print (" - compiled code: ");
1990 code()->print_value_on(st);
1991 }
1992 if (is_native()) {
1993 st->print_cr(" - native function: " INTPTR_FORMAT, native_function());
1994 st->print_cr(" - signature handler: " INTPTR_FORMAT, signature_handler());
1995 }
1996 }
1997
1998 #endif //PRODUCT
1999
2000 void Method::print_value_on(outputStream* st) const {
2001 assert(is_method(), "must be method");
2002 st->print("%s", internal_name());
2003 print_address_on(st);
2004 st->print(" ");
2005 name()->print_value_on(st);
2006 st->print(" ");
2007 signature()->print_value_on(st);
2008 st->print(" in ");
2009 method_holder()->print_value_on(st);
2010 if (WizardMode) st->print("#%d", _vtable_index);
2011 if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
2012 if (WizardMode && code() != NULL) st->print(" ((nmethod*)%p)", code());
2013 }
2014
2015 #if INCLUDE_SERVICES
2016 // Size Statistics
2017 void Method::collect_statistics(KlassSizeStats *sz) const {
2018 int mysize = sz->count(this);
2019 sz->_method_bytes += mysize;
2020 sz->_method_all_bytes += mysize;
2021 sz->_rw_bytes += mysize;
2022
2023 if (constMethod()) {
2024 constMethod()->collect_statistics(sz);
2025 }
2026 if (method_data()) {
2027 method_data()->collect_statistics(sz);
2028 }
2029 }
2030 #endif // INCLUDE_SERVICES
2031
2032 // Verification
2033
2034 void Method::verify_on(outputStream* st) {
2035 guarantee(is_method(), "object must be method");
2036 guarantee(constants()->is_constantPool(), "should be constant pool");
2037 guarantee(constMethod()->is_constMethod(), "should be ConstMethod*");
2038 MethodData* md = method_data();
2039 guarantee(md == NULL ||
2040 md->is_methodData(), "should be method data");
2041 }
--- EOF ---