Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/oops/method.cpp
32285 views
/*1* Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#include "precompiled.hpp"25#include "classfile/metadataOnStackMark.hpp"26#include "classfile/systemDictionary.hpp"27#include "code/debugInfoRec.hpp"28#include "gc_interface/collectedHeap.inline.hpp"29#include "interpreter/bytecodeStream.hpp"30#include "interpreter/bytecodeTracer.hpp"31#include "interpreter/bytecodes.hpp"32#include "interpreter/interpreter.hpp"33#include "interpreter/oopMapCache.hpp"34#include "memory/gcLocker.hpp"35#include "memory/generation.hpp"36#include "memory/heapInspection.hpp"37#include "memory/metadataFactory.hpp"38#include "memory/metaspaceShared.hpp"39#include "memory/oopFactory.hpp"40#include "oops/constMethod.hpp"41#include "oops/methodData.hpp"42#include "oops/method.hpp"43#include "oops/oop.inline.hpp"44#include "oops/symbol.hpp"45#include "prims/jvmtiExport.hpp"46#include "prims/methodHandles.hpp"47#include "prims/nativeLookup.hpp"48#include "runtime/arguments.hpp"49#include "runtime/compilationPolicy.hpp"50#include "runtime/frame.inline.hpp"51#include "runtime/handles.inline.hpp"52#include "runtime/orderAccess.inline.hpp"53#include "runtime/relocator.hpp"54#include "runtime/sharedRuntime.hpp"55#include "runtime/signature.hpp"56#include "utilities/quickSort.hpp"57#include "utilities/xmlstream.hpp"5859PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC6061// Implementation of Method6263Method* Method::allocate(ClassLoaderData* loader_data,64int byte_code_size,65AccessFlags access_flags,66InlineTableSizes* sizes,67ConstMethod::MethodType method_type,68TRAPS) {69assert(!access_flags.is_native() || byte_code_size == 0,70"native methods should not contain byte codes");71ConstMethod* cm = ConstMethod::allocate(loader_data,72byte_code_size,73sizes,74method_type,75CHECK_NULL);7677int size = Method::size(access_flags.is_native());7879return new (loader_data, size, false, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, size);80}8182Method::Method(ConstMethod* xconst, AccessFlags access_flags, int size) {83No_Safepoint_Verifier no_safepoint;84set_constMethod(xconst);85set_access_flags(access_flags);86set_method_size(size);87set_intrinsic_id(vmIntrinsics::_none);88set_jfr_towrite(false);89set_force_inline(false);90set_hidden(false);91set_dont_inline(false);92set_has_injected_profile(false);93set_running_emcp(false);94set_method_data(NULL);95clear_method_counters();96set_vtable_index(Method::garbage_vtable_index);9798// Fix and bury in Method*99set_interpreter_entry(NULL); // sets i2i entry and 
from_int100set_adapter_entry(NULL);101clear_code(false /* don't need a lock */); // from_c/from_i get set to c2i/i2i102103if (access_flags.is_native()) {104clear_native_function();105set_signature_handler(NULL);106}107108NOT_PRODUCT(set_compiled_invocation_count(0);)109}110111// Release Method*. The nmethod will be gone when we get here because112// we've walked the code cache.113void Method::deallocate_contents(ClassLoaderData* loader_data) {114clear_jmethod_id(loader_data);115MetadataFactory::free_metadata(loader_data, constMethod());116set_constMethod(NULL);117MetadataFactory::free_metadata(loader_data, method_data());118set_method_data(NULL);119MetadataFactory::free_metadata(loader_data, method_counters());120clear_method_counters();121// The nmethod will be gone when we get here.122if (code() != NULL) _code = NULL;123}124125address Method::get_i2c_entry() {126assert(_adapter != NULL, "must have");127return _adapter->get_i2c_entry();128}129130address Method::get_c2i_entry() {131assert(_adapter != NULL, "must have");132return _adapter->get_c2i_entry();133}134135address Method::get_c2i_unverified_entry() {136assert(_adapter != NULL, "must have");137return _adapter->get_c2i_unverified_entry();138}139140char* Method::name_and_sig_as_C_string() const {141return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());142}143144char* Method::name_and_sig_as_C_string(char* buf, int size) const {145return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);146}147148char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {149const char* klass_name = klass->external_name();150int klass_name_len = (int)strlen(klass_name);151int method_name_len = method_name->utf8_length();152int len = klass_name_len + 1 + method_name_len + signature->utf8_length();153char* dest = NEW_RESOURCE_ARRAY(char, len + 1);154strcpy(dest, klass_name);155dest[klass_name_len] = '.';156strcpy(&dest[klass_name_len + 1], method_name->as_C_string());157strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());158dest[len] = 0;159return dest;160}161162char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {163Symbol* klass_name = klass->name();164klass_name->as_klass_external_name(buf, size);165int len = (int)strlen(buf);166167if (len < size - 1) {168buf[len++] = '.';169170method_name->as_C_string(&(buf[len]), size - len);171len = (int)strlen(buf);172173signature->as_C_string(&(buf[len]), size - len);174}175176return buf;177}178179int Method::fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS) {180// exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)181// access exception table182ExceptionTable table(mh());183int length = table.length();184// iterate through all entries sequentially185constantPoolHandle pool(THREAD, mh->constants());186for (int i = 0; i < length; i ++) {187//reacquire the table in case a GC happened188ExceptionTable table(mh());189int beg_bci = table.start_pc(i);190int end_bci = table.end_pc(i);191assert(beg_bci <= end_bci, "inconsistent exception table");192if (beg_bci <= throw_bci && throw_bci < end_bci) {193// exception handler bci range covers throw_bci => investigate further194int handler_bci = table.handler_pc(i);195int klass_index = table.catch_type_index(i);196if (klass_index == 0) {197return handler_bci;198} else if (ex_klass.is_null()) {199return 
handler_bci;200} else {201// we know the exception class => get the constraint class202// this may require loading of the constraint class; if verification203// fails or some other exception occurs, return handler_bci204Klass* k = pool->klass_at(klass_index, CHECK_(handler_bci));205KlassHandle klass = KlassHandle(THREAD, k);206assert(klass.not_null(), "klass not loaded");207if (ex_klass->is_subtype_of(klass())) {208return handler_bci;209}210}211}212}213214return -1;215}216217void Method::mask_for(int bci, InterpreterOopMap* mask) {218219Thread* myThread = Thread::current();220methodHandle h_this(myThread, this);221#ifdef ASSERT222bool has_capability = myThread->is_VM_thread() ||223myThread->is_ConcurrentGC_thread() ||224myThread->is_GC_task_thread();225226if (!has_capability) {227if (!VerifyStack && !VerifyLastFrame) {228// verify stack calls this outside VM thread229warning("oopmap should only be accessed by the "230"VM, GC task or CMS threads (or during debugging)");231InterpreterOopMap local_mask;232method_holder()->mask_for(h_this, bci, &local_mask);233local_mask.print();234}235}236#endif237method_holder()->mask_for(h_this, bci, mask);238return;239}240241242int Method::bci_from(address bcp) const {243#ifdef ASSERT244{ ResourceMark rm;245assert(is_native() && bcp == code_base() || contains(bcp) || is_error_reported(),246err_msg("bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s", bcp, name_and_sig_as_C_string()));247}248#endif249return bcp - code_base();250}251252253// Return (int)bcx if it appears to be a valid BCI.254// Return bci_from((address)bcx) if it appears to be a valid BCP.255// Return -1 otherwise.256// Used by profiling code, when invalid data is a possibility.257// The caller is responsible for validating the Method* itself.258int Method::validate_bci_from_bcx(intptr_t bcx) const {259// keep bci as -1 if not a valid bci260int bci = -1;261if (bcx == 0 || (address)bcx == code_base()) {262// code_size() may return 0 and we allow 0 here263// the method may be native264bci = 0;265} else if (frame::is_bci(bcx)) {266if (bcx < code_size()) {267bci = (int)bcx;268}269} else if (contains((address)bcx)) {270bci = (address)bcx - code_base();271}272// Assert that if we have dodged any asserts, bci is negative.273assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");274return bci;275}276277address Method::bcp_from(int bci) const {278assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), err_msg("illegal bci: %d", bci));279address bcp = code_base() + bci;280assert(is_native() && bcp == code_base() || contains(bcp), "bcp doesn't belong to this method");281return bcp;282}283284285int Method::size(bool is_native) {286// If native, then include pointers for native_function and signature_handler287int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;288int extra_words = align_size_up(extra_bytes, BytesPerWord) / BytesPerWord;289return align_object_size(header_size() + extra_words);290}291292293Symbol* Method::klass_name() const {294Klass* k = method_holder();295assert(k->is_klass(), "must be klass");296InstanceKlass* ik = (InstanceKlass*) k;297return ik->name();298}299300301// Attempt to return method oop to original state. Clear any pointers302// (to objects outside the shared spaces). We won't be able to predict303// where they should point in a new JVM. 
Further initialize some304// entries now in order allow them to be write protected later.305306void Method::remove_unshareable_info() {307unlink_method();308}309310void Method::set_vtable_index(int index) {311if (is_shared() && !MetaspaceShared::remapped_readwrite()) {312// At runtime initialize_vtable is rerun as part of link_class_impl()313// for a shared class loaded by the non-boot loader to obtain the loader314// constraints based on the runtime classloaders' context.315return; // don't write into the shared class316} else {317_vtable_index = index;318}319}320321void Method::set_itable_index(int index) {322if (is_shared() && !MetaspaceShared::remapped_readwrite()) {323// At runtime initialize_itable is rerun as part of link_class_impl()324// for a shared class loaded by the non-boot loader to obtain the loader325// constraints based on the runtime classloaders' context. The dumptime326// itable index should be the same as the runtime index.327assert(_vtable_index == itable_index_max - index,328"archived itable index is different from runtime index");329return; // don’t write into the shared class330} else {331_vtable_index = itable_index_max - index;332}333assert(valid_itable_index(), "");334}335336337338bool Method::was_executed_more_than(int n) {339// Invocation counter is reset when the Method* is compiled.340// If the method has compiled code we therefore assume it has341// be excuted more than n times.342if (is_accessor() || is_empty_method() || (code() != NULL)) {343// interpreter doesn't bump invocation counter of trivial methods344// compiler does not bump invocation counter of compiled methods345return true;346}347else if ((method_counters() != NULL &&348method_counters()->invocation_counter()->carry()) ||349(method_data() != NULL &&350method_data()->invocation_counter()->carry())) {351// The carry bit is set when the counter overflows and causes352// a compilation to occur. 
We don't know how many times353// the counter has been reset, so we simply assume it has354// been executed more than n times.355return true;356} else {357return invocation_count() > n;358}359}360361#ifndef PRODUCT362void Method::print_invocation_count() {363if (is_static()) tty->print("static ");364if (is_final()) tty->print("final ");365if (is_synchronized()) tty->print("synchronized ");366if (is_native()) tty->print("native ");367method_holder()->name()->print_symbol_on(tty);368tty->print(".");369name()->print_symbol_on(tty);370signature()->print_symbol_on(tty);371372if (WizardMode) {373// dump the size of the byte codes374tty->print(" {%d}", code_size());375}376tty->cr();377378tty->print_cr (" interpreter_invocation_count: %8d ", interpreter_invocation_count());379tty->print_cr (" invocation_counter: %8d ", invocation_count());380tty->print_cr (" backedge_counter: %8d ", backedge_count());381if (CountCompiledCalls) {382tty->print_cr (" compiled_invocation_count: %8d ", compiled_invocation_count());383}384385}386#endif387388// Build a MethodData* object to hold information about this method389// collected in the interpreter.390void Method::build_interpreter_method_data(methodHandle method, TRAPS) {391// Do not profile method if current thread holds the pending list lock,392// which avoids deadlock for acquiring the MethodData_lock.393if (InstanceRefKlass::owns_pending_list_lock((JavaThread*)THREAD)) {394return;395}396397// Grab a lock here to prevent multiple398// MethodData*s from being created.399MutexLocker ml(MethodData_lock, THREAD);400if (method->method_data() == NULL) {401ClassLoaderData* loader_data = method->method_holder()->class_loader_data();402MethodData* method_data = MethodData::allocate(loader_data, method, CHECK);403method->set_method_data(method_data);404if (PrintMethodData && (Verbose || WizardMode)) {405ResourceMark rm(THREAD);406tty->print("build_interpreter_method_data for ");407method->print_name(tty);408tty->cr();409// At the end of the run, the MDO, full of data, will be dumped.410}411}412}413414MethodCounters* Method::build_method_counters(Method* m, TRAPS) {415methodHandle mh(m);416ClassLoaderData* loader_data = mh->method_holder()->class_loader_data();417MethodCounters* counters = MethodCounters::allocate(loader_data, CHECK_NULL);418if (!mh->init_method_counters(counters)) {419MetadataFactory::free_metadata(loader_data, counters);420}421return mh->method_counters();422}423424void Method::cleanup_inline_caches() {425// The current system doesn't use inline caches in the interpreter426// => nothing to do (keep this method around for future use)427}428429430int Method::extra_stack_words() {431// not an inline function, to avoid a header dependency on Interpreter432return extra_stack_entries() * Interpreter::stackElementSize;433}434435436void Method::compute_size_of_parameters(Thread *thread) {437ArgumentSizeComputer asc(signature());438set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));439}440441BasicType Method::result_type() const {442ResultTypeFinder rtf(signature());443return rtf.type();444}445446447bool Method::is_empty_method() const {448return code_size() == 1449&& *code_base() == Bytecodes::_return;450}451452453bool Method::is_vanilla_constructor() const {454// Returns true if this method is a vanilla constructor, i.e. 
an "<init>" "()V" method455// which only calls the superclass vanilla constructor and possibly does stores of456// zero constants to local fields:457//458// aload_0459// invokespecial460// indexbyte1461// indexbyte2462//463// followed by an (optional) sequence of:464//465// aload_0466// aconst_null / iconst_0 / fconst_0 / dconst_0467// putfield468// indexbyte1469// indexbyte2470//471// followed by:472//473// return474475assert(name() == vmSymbols::object_initializer_name(), "Should only be called for default constructors");476assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");477int size = code_size();478// Check if size match479if (size == 0 || size % 5 != 0) return false;480address cb = code_base();481int last = size - 1;482if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {483// Does not call superclass default constructor484return false;485}486// Check optional sequence487for (int i = 4; i < last; i += 5) {488if (cb[i] != Bytecodes::_aload_0) return false;489if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;490if (cb[i+2] != Bytecodes::_putfield) return false;491}492return true;493}494495496bool Method::compute_has_loops_flag() {497BytecodeStream bcs(this);498Bytecodes::Code bc;499500while ((bc = bcs.next()) >= 0) {501switch( bc ) {502case Bytecodes::_ifeq:503case Bytecodes::_ifnull:504case Bytecodes::_iflt:505case Bytecodes::_ifle:506case Bytecodes::_ifne:507case Bytecodes::_ifnonnull:508case Bytecodes::_ifgt:509case Bytecodes::_ifge:510case Bytecodes::_if_icmpeq:511case Bytecodes::_if_icmpne:512case Bytecodes::_if_icmplt:513case Bytecodes::_if_icmpgt:514case Bytecodes::_if_icmple:515case Bytecodes::_if_icmpge:516case Bytecodes::_if_acmpeq:517case Bytecodes::_if_acmpne:518case Bytecodes::_goto:519case Bytecodes::_jsr:520if( bcs.dest() < bcs.next_bci() ) _access_flags.set_has_loops();521break;522523case Bytecodes::_goto_w:524case Bytecodes::_jsr_w:525if( bcs.dest_w() < bcs.next_bci() ) _access_flags.set_has_loops();526break;527}528}529_access_flags.set_loops_flag_init();530return _access_flags.has_loops();531}532533bool Method::is_final_method(AccessFlags class_access_flags) const {534// or "does_not_require_vtable_entry"535// default method or overpass can occur, is not final (reuses vtable entry)536// private methods get vtable entries for backward class compatibility.537if (is_overpass() || is_default_method()) return false;538return is_final() || class_access_flags.is_final();539}540541bool Method::is_final_method() const {542return is_final_method(method_holder()->access_flags());543}544545bool Method::is_default_method() const {546if (method_holder() != NULL &&547method_holder()->is_interface() &&548!is_abstract()) {549return true;550} else {551return false;552}553}554555bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {556if (is_final_method(class_access_flags)) return true;557#ifdef ASSERT558ResourceMark rm;559bool is_nonv = (vtable_index() == nonvirtual_vtable_index);560if (class_access_flags.is_interface()) {561assert(is_nonv == is_static(), err_msg("is_nonv=%s", name_and_sig_as_C_string()));562}563#endif564assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");565return vtable_index() == nonvirtual_vtable_index;566}567568bool Method::can_be_statically_bound() const {569return can_be_statically_bound(method_holder()->access_flags());570}571572bool Method::is_accessor() 
const {573if (code_size() != 5) return false;574if (size_of_parameters() != 1) return false;575if (java_code_at(0) != Bytecodes::_aload_0 ) return false;576if (java_code_at(1) != Bytecodes::_getfield) return false;577if (java_code_at(4) != Bytecodes::_areturn &&578java_code_at(4) != Bytecodes::_ireturn ) return false;579return true;580}581582bool Method::is_constant_getter() const {583int last_index = code_size() - 1;584// Check if the first 1-3 bytecodes are a constant push585// and the last bytecode is a return.586return (2 <= code_size() && code_size() <= 4 &&587Bytecodes::is_const(java_code_at(0)) &&588Bytecodes::length_for(java_code_at(0)) == last_index &&589Bytecodes::is_return(java_code_at(last_index)));590}591592bool Method::is_initializer() const {593return is_object_initializer() || is_static_initializer();594}595596bool Method::has_valid_initializer_flags() const {597return (is_static() ||598method_holder()->major_version() < 51);599}600601bool Method::is_static_initializer() const {602// For classfiles version 51 or greater, ensure that the clinit method is603// static. Non-static methods with the name "<clinit>" are not static604// initializers. (older classfiles exempted for backward compatibility)605return name() == vmSymbols::class_initializer_name() &&606has_valid_initializer_flags();607}608609bool Method::is_object_initializer() const {610return name() == vmSymbols::object_initializer_name();611}612613objArrayHandle Method::resolved_checked_exceptions_impl(Method* this_oop, TRAPS) {614int length = this_oop->checked_exceptions_length();615if (length == 0) { // common case616return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());617} else {618methodHandle h_this(THREAD, this_oop);619objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));620objArrayHandle mirrors (THREAD, m_oop);621for (int i = 0; i < length; i++) {622CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe623Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));624assert(k->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");625mirrors->obj_at_put(i, k->java_mirror());626}627return mirrors;628}629};630631632int Method::line_number_from_bci(int bci) const {633if (bci == SynchronizationEntryBCI) bci = 0;634assert(bci == 0 || 0 <= bci && bci < code_size(), "illegal bci");635int best_bci = 0;636int best_line = -1;637638if (has_linenumber_table()) {639// The line numbers are a short array of 2-tuples [start_pc, line_number].640// Not necessarily sorted and not necessarily one-to-one.641CompressedLineNumberReadStream stream(compressed_linenumber_table());642while (stream.read_pair()) {643if (stream.bci() == bci) {644// perfect match645return stream.line();646} else {647// update best_bci/line648if (stream.bci() < bci && stream.bci() >= best_bci) {649best_bci = stream.bci();650best_line = stream.line();651}652}653}654}655return best_line;656}657658659bool Method::is_klass_loaded_by_klass_index(int klass_index) const {660if( constants()->tag_at(klass_index).is_unresolved_klass() ) {661Thread *thread = Thread::current();662Symbol* klass_name = constants()->klass_name_at(klass_index);663Handle loader(thread, method_holder()->class_loader());664Handle prot (thread, method_holder()->protection_domain());665return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;666} else {667return true;668}669}670671672bool 
Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {673int klass_index = constants()->klass_ref_index_at(refinfo_index);674if (must_be_resolved) {675// Make sure klass is resolved in constantpool.676if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;677}678return is_klass_loaded_by_klass_index(klass_index);679}680681682void Method::set_native_function(address function, bool post_event_flag) {683assert(function != NULL, "use clear_native_function to unregister natives");684assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");685address* native_function = native_function_addr();686687// We can see racers trying to place the same native function into place. Once688// is plenty.689address current = *native_function;690if (current == function) return;691if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&692function != NULL) {693// native_method_throw_unsatisfied_link_error_entry() should only694// be passed when post_event_flag is false.695assert(function !=696SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),697"post_event_flag mis-match");698699// post the bind event, and possible change the bind function700JvmtiExport::post_native_method_bind(this, &function);701}702*native_function = function;703// This function can be called more than once. We must make sure that we always704// use the latest registered method -> check if a stub already has been generated.705// If so, we have to make it not_entrant.706nmethod* nm = code(); // Put it into local variable to guard against concurrent updates707if (nm != NULL) {708nm->make_not_entrant();709}710}711712713bool Method::has_native_function() const {714if (is_method_handle_intrinsic())715return false; // special-cased in SharedRuntime::generate_native_wrapper716address func = native_function();717return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());718}719720721void Method::clear_native_function() {722// Note: is_method_handle_intrinsic() is allowed here.723set_native_function(724SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),725!native_bind_event_is_interesting);726clear_code();727}728729address Method::critical_native_function() {730methodHandle mh(this);731return NativeLookup::lookup_critical_entry(mh);732}733734735void Method::set_signature_handler(address handler) {736address* signature_handler = signature_handler_addr();737*signature_handler = handler;738}739740741void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {742if (PrintCompilation && report) {743ttyLocker ttyl;744tty->print("made not %scompilable on ", is_osr ? 
"OSR " : "");745if (comp_level == CompLevel_all) {746tty->print("all levels ");747} else {748tty->print("levels ");749for (int i = (int)CompLevel_none; i <= comp_level; i++) {750tty->print("%d ", i);751}752}753this->print_short_name(tty);754int size = this->code_size();755if (size > 0) {756tty->print(" (%d bytes)", size);757}758if (reason != NULL) {759tty->print(" %s", reason);760}761tty->cr();762}763if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {764ttyLocker ttyl;765xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",766os::current_thread_id(), is_osr, comp_level);767if (reason != NULL) {768xtty->print(" reason=\'%s\'", reason);769}770xtty->method(this);771xtty->stamp();772xtty->end_elem();773}774}775776bool Method::is_always_compilable() const {777// Generated adapters must be compiled778if (is_method_handle_intrinsic() && is_synthetic()) {779assert(!is_not_c1_compilable(), "sanity check");780assert(!is_not_c2_compilable(), "sanity check");781return true;782}783784return false;785}786787bool Method::is_not_compilable(int comp_level) const {788if (number_of_breakpoints() > 0)789return true;790if (is_always_compilable())791return false;792if (comp_level == CompLevel_any)793return is_not_c1_compilable() || is_not_c2_compilable();794if (is_c1_compile(comp_level))795return is_not_c1_compilable();796if (is_c2_compile(comp_level))797return is_not_c2_compilable();798return false;799}800801// call this when compiler finds that this method is not compilable802void Method::set_not_compilable(int comp_level, bool report, const char* reason) {803if (is_always_compilable()) {804// Don't mark a method which should be always compilable805return;806}807print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);808if (comp_level == CompLevel_all) {809set_not_c1_compilable();810set_not_c2_compilable();811} else {812if (is_c1_compile(comp_level))813set_not_c1_compilable();814if (is_c2_compile(comp_level))815set_not_c2_compilable();816}817CompilationPolicy::policy()->disable_compilation(this);818assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");819}820821bool Method::is_not_osr_compilable(int comp_level) const {822if (is_not_compilable(comp_level))823return true;824if (comp_level == CompLevel_any)825return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();826if (is_c1_compile(comp_level))827return is_not_c1_osr_compilable();828if (is_c2_compile(comp_level))829return is_not_c2_osr_compilable();830return false;831}832833void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {834print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);835if (comp_level == CompLevel_all) {836set_not_c1_osr_compilable();837set_not_c2_osr_compilable();838} else {839if (is_c1_compile(comp_level))840set_not_c1_osr_compilable();841if (is_c2_compile(comp_level))842set_not_c2_osr_compilable();843}844CompilationPolicy::policy()->disable_compilation(this);845assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");846}847848// Revert to using the interpreter and clear out the nmethod849void Method::clear_code(bool acquire_lock /* = true */) {850MutexLockerEx pl(acquire_lock ? 
Patching_lock : NULL, Mutex::_no_safepoint_check_flag);851// this may be NULL if c2i adapters have not been made yet852// Only should happen at allocate time.853if (_adapter == NULL) {854_from_compiled_entry = NULL;855} else {856_from_compiled_entry = _adapter->get_c2i_entry();857}858OrderAccess::storestore();859_from_interpreted_entry = _i2i_entry;860OrderAccess::storestore();861_code = NULL;862}863864// Called by class data sharing to remove any entry points (which are not shared)865void Method::unlink_method() {866_code = NULL;867_i2i_entry = NULL;868_from_interpreted_entry = NULL;869if (is_native()) {870*native_function_addr() = NULL;871set_signature_handler(NULL);872}873NOT_PRODUCT(set_compiled_invocation_count(0);)874_adapter = NULL;875_from_compiled_entry = NULL;876877// In case of DumpSharedSpaces, _method_data should always be NULL.878//879// During runtime (!DumpSharedSpaces), when we are cleaning a880// shared class that failed to load, this->link_method() may881// have already been called (before an exception happened), so882// this->_method_data may not be NULL.883assert(!DumpSharedSpaces || _method_data == NULL, "unexpected method data?");884885set_method_data(NULL);886clear_method_counters();887}888889// Called when the method_holder is getting linked. Setup entrypoints so the method890// is ready to be called from interpreter, compiler, and vtables.891void Method::link_method(methodHandle h_method, TRAPS) {892// If the code cache is full, we may reenter this function for the893// leftover methods that weren't linked.894if (_i2i_entry != NULL) return;895896assert(_adapter == NULL, "init'd to NULL" );897assert( _code == NULL, "nothing compiled yet" );898899// Setup interpreter entrypoint900assert(this == h_method(), "wrong h_method()" );901address entry = Interpreter::entry_for_method(h_method);902assert(entry != NULL, "interpreter entry must be non-null");903// Sets both _i2i_entry and _from_interpreted_entry904set_interpreter_entry(entry);905906// Don't overwrite already registered native entries.907if (is_native() && !has_native_function()) {908set_native_function(909SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),910!native_bind_event_is_interesting);911}912913// Setup compiler entrypoint. This is made eagerly, so we do not need914// special handling of vtables. An alternative is to make adapters more915// lazily by calling make_adapter() from from_compiled_entry() for the916// normal calls. For vtable calls life gets more complicated. When a917// call-site goes mega-morphic we need adapters in all methods which can be918// called from the vtable. We need adapters on such methods that get loaded919// later. Ditto for mega-morphic itable calls. If this proves to be a920// problem we'll make these lazily later.921(void) make_adapters(h_method, CHECK);922923// ONLY USE the h_method now as make_adapter may have blocked924925}926927address Method::make_adapters(methodHandle mh, TRAPS) {928// Adapters for compiled code are made eagerly here. 
They are fairly929// small (generally < 100 bytes) and quick to make (and cached and shared)930// so making them eagerly shouldn't be too expensive.931AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);932if (adapter == NULL ) {933THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");934}935936mh->set_adapter_entry(adapter);937mh->_from_compiled_entry = adapter->get_c2i_entry();938return adapter->get_c2i_entry();939}940941void Method::restore_unshareable_info(TRAPS) {942// Since restore_unshareable_info can be called more than once for a method, don't943// redo any work. If this field is restored, there is nothing to do.944if (_from_compiled_entry == NULL) {945// restore method's vtable by calling a virtual function946restore_vtable();947948methodHandle mh(THREAD, this);949link_method(mh, CHECK);950}951}952953954// The verified_code_entry() must be called when a invoke is resolved955// on this method.956957// It returns the compiled code entry point, after asserting not null.958// This function is called after potential safepoints so that nmethod959// or adapter that it points to is still live and valid.960// This function must not hit a safepoint!961address Method::verified_code_entry() {962debug_only(No_Safepoint_Verifier nsv;)963assert(_from_compiled_entry != NULL, "must be set");964return _from_compiled_entry;965}966967// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all968// (could be racing a deopt).969// Not inline to avoid circular ref.970bool Method::check_code() const {971// cached in a register or local. There's a race on the value of the field.972nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);973return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());974}975976// Install compiled code. Instantly it can execute.977void Method::set_code(methodHandle mh, nmethod *code) {978MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);979assert( code, "use clear_code to remove code" );980assert( mh->check_code(), "" );981982guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");983984// These writes must happen in this order, because the interpreter will985// directly jump to from_interpreted_entry which jumps to an i2c adapter986// which jumps to _from_compiled_entry.987mh->_code = code; // Assign before allowing compiled code to exec988989int comp_level = code->comp_level();990// In theory there could be a race here. 
In practice it is unlikely991// and not worth worrying about.992if (comp_level > mh->highest_comp_level()) {993mh->set_highest_comp_level(comp_level);994}995996OrderAccess::storestore();997#ifdef SHARK998mh->_from_interpreted_entry = code->insts_begin();999#else //!SHARK1000mh->_from_compiled_entry = code->verified_entry_point();1001OrderAccess::storestore();1002// Instantly compiled code can execute.1003if (!mh->is_method_handle_intrinsic())1004mh->_from_interpreted_entry = mh->get_i2c_entry();1005#endif //!SHARK1006}100710081009bool Method::is_overridden_in(Klass* k) const {1010InstanceKlass* ik = InstanceKlass::cast(k);10111012if (ik->is_interface()) return false;10131014// If method is an interface, we skip it - except if it1015// is a miranda method1016if (method_holder()->is_interface()) {1017// Check that method is not a miranda method1018if (ik->lookup_method(name(), signature()) == NULL) {1019// No implementation exist - so miranda method1020return false;1021}1022return true;1023}10241025assert(ik->is_subclass_of(method_holder()), "should be subklass");1026assert(ik->vtable() != NULL, "vtable should exist");1027if (!has_vtable_index()) {1028return false;1029} else {1030Method* vt_m = ik->method_at_vtable(vtable_index());1031return vt_m != this;1032}1033}103410351036// give advice about whether this Method* should be cached or not1037bool Method::should_not_be_cached() const {1038if (is_old()) {1039// This method has been redefined. It is either EMCP or obsolete1040// and we don't want to cache it because that would pin the method1041// down and prevent it from being collectible if and when it1042// finishes executing.1043return true;1044}10451046// caching this method should be just fine1047return false;1048}104910501051/**1052* Returns true if this is one of the specially treated methods for1053* security related stack walks (like Reflection.getCallerClass).1054*/1055bool Method::is_ignored_by_security_stack_walk() const {1056const bool use_new_reflection = JDK_Version::is_gte_jdk14x_version() && UseNewReflection;10571058if (intrinsic_id() == vmIntrinsics::_invoke) {1059// This is Method.invoke() -- ignore it1060return true;1061}1062if (use_new_reflection &&1063method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {1064// This is an auxilary frame -- ignore it1065return true;1066}1067if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {1068// This is an internal adapter frame for method handles -- ignore it1069return true;1070}1071return false;1072}107310741075// Constant pool structure for invoke methods:1076enum {1077_imcp_invoke_name = 1, // utf8: 'invokeExact', etc.1078_imcp_invoke_signature, // utf8: (variable Symbol*)1079_imcp_limit1080};10811082// Test if this method is an MH adapter frame generated by Java code.1083// Cf. 
java/lang/invoke/InvokerBytecodeGenerator1084bool Method::is_compiled_lambda_form() const {1085return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;1086}10871088// Test if this method is an internal MH primitive method.1089bool Method::is_method_handle_intrinsic() const {1090vmIntrinsics::ID iid = intrinsic_id();1091return (MethodHandles::is_signature_polymorphic(iid) &&1092MethodHandles::is_signature_polymorphic_intrinsic(iid));1093}10941095bool Method::has_member_arg() const {1096vmIntrinsics::ID iid = intrinsic_id();1097return (MethodHandles::is_signature_polymorphic(iid) &&1098MethodHandles::has_member_arg(iid));1099}11001101// Make an instance of a signature-polymorphic internal MH primitive.1102methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,1103Symbol* signature,1104TRAPS) {1105ResourceMark rm;1106methodHandle empty;11071108KlassHandle holder = SystemDictionary::MethodHandle_klass();1109Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);1110assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");1111if (TraceMethodHandles) {1112tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());1113}11141115// invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)1116name->increment_refcount();1117signature->increment_refcount();11181119int cp_length = _imcp_limit;1120ClassLoaderData* loader_data = holder->class_loader_data();1121constantPoolHandle cp;1122{1123ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));1124cp = constantPoolHandle(THREAD, cp_oop);1125}1126cp->set_pool_holder(InstanceKlass::cast(holder()));1127cp->symbol_at_put(_imcp_invoke_name, name);1128cp->symbol_at_put(_imcp_invoke_signature, signature);1129cp->set_has_preresolution();11301131// decide on access bits: public or not?1132int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);1133bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);1134if (must_be_static) flags_bits |= JVM_ACC_STATIC;1135assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");11361137methodHandle m;1138{1139InlineTableSizes sizes;1140Method* m_oop = Method::allocate(loader_data, 0,1141accessFlags_from(flags_bits), &sizes,1142ConstMethod::NORMAL, CHECK_(empty));1143m = methodHandle(THREAD, m_oop);1144}1145m->set_constants(cp());1146m->set_name_index(_imcp_invoke_name);1147m->set_signature_index(_imcp_invoke_signature);1148assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");1149assert(m->signature() == signature, "");1150ResultTypeFinder rtf(signature);1151m->constMethod()->set_result_type(rtf.type());1152m->compute_size_of_parameters(THREAD);1153m->init_intrinsic_id();1154assert(m->is_method_handle_intrinsic(), "");1155#ifdef ASSERT1156if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id())) m->print();1157assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");1158assert(m->intrinsic_id() == iid, "correctly predicted iid");1159#endif //ASSERT11601161// Finally, set up its entry points.1162assert(m->can_be_statically_bound(), "");1163m->set_vtable_index(Method::nonvirtual_vtable_index);1164m->link_method(m, CHECK_(empty));11651166if (TraceMethodHandles && (Verbose || WizardMode))1167m->print_on(tty);11681169return m;1170}11711172Klass* Method::check_non_bcp_klass(Klass* klass) {1173if (klass != NULL && klass->class_loader() != NULL) {1174if 
(klass->oop_is_objArray())1175klass = ObjArrayKlass::cast(klass)->bottom_klass();1176return klass;1177}1178return NULL;1179}118011811182methodHandle Method::clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,1183u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {1184// Code below does not work for native methods - they should never get rewritten anyway1185assert(!m->is_native(), "cannot rewrite native methods");1186// Allocate new Method*1187AccessFlags flags = m->access_flags();11881189ConstMethod* cm = m->constMethod();1190int checked_exceptions_len = cm->checked_exceptions_length();1191int localvariable_len = cm->localvariable_table_length();1192int exception_table_len = cm->exception_table_length();1193int method_parameters_len = cm->method_parameters_length();1194int method_annotations_len = cm->method_annotations_length();1195int parameter_annotations_len = cm->parameter_annotations_length();1196int type_annotations_len = cm->type_annotations_length();1197int default_annotations_len = cm->default_annotations_length();11981199InlineTableSizes sizes(1200localvariable_len,1201new_compressed_linenumber_size,1202exception_table_len,1203checked_exceptions_len,1204method_parameters_len,1205cm->generic_signature_index(),1206method_annotations_len,1207parameter_annotations_len,1208type_annotations_len,1209default_annotations_len,12100);12111212ClassLoaderData* loader_data = m->method_holder()->class_loader_data();1213Method* newm_oop = Method::allocate(loader_data,1214new_code_length,1215flags,1216&sizes,1217m->method_type(),1218CHECK_(methodHandle()));1219methodHandle newm (THREAD, newm_oop);1220int new_method_size = newm->method_size();12211222// Create a shallow copy of Method part, but be careful to preserve the new ConstMethod*1223ConstMethod* newcm = newm->constMethod();1224int new_const_method_size = newm->constMethod()->size();12251226memcpy(newm(), m(), sizeof(Method));12271228// Create shallow copy of ConstMethod.1229memcpy(newcm, m->constMethod(), sizeof(ConstMethod));12301231// Reset correct method/const method, method size, and parameter info1232newm->set_constMethod(newcm);1233newm->constMethod()->set_code_size(new_code_length);1234newm->constMethod()->set_constMethod_size(new_const_method_size);1235newm->set_method_size(new_method_size);1236assert(newm->code_size() == new_code_length, "check");1237assert(newm->method_parameters_length() == method_parameters_len, "check");1238assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");1239assert(newm->exception_table_length() == exception_table_len, "check");1240assert(newm->localvariable_table_length() == localvariable_len, "check");1241// Copy new byte codes1242memcpy(newm->code_base(), new_code, new_code_length);1243// Copy line number table1244if (new_compressed_linenumber_size > 0) {1245memcpy(newm->compressed_linenumber_table(),1246new_compressed_linenumber_table,1247new_compressed_linenumber_size);1248}1249// Copy method_parameters1250if (method_parameters_len > 0) {1251memcpy(newm->method_parameters_start(),1252m->method_parameters_start(),1253method_parameters_len * sizeof(MethodParametersElement));1254}1255// Copy checked_exceptions1256if (checked_exceptions_len > 0) {1257memcpy(newm->checked_exceptions_start(),1258m->checked_exceptions_start(),1259checked_exceptions_len * sizeof(CheckedExceptionElement));1260}1261// Copy exception table1262if (exception_table_len > 0) 
{1263memcpy(newm->exception_table_start(),1264m->exception_table_start(),1265exception_table_len * sizeof(ExceptionTableElement));1266}1267// Copy local variable number table1268if (localvariable_len > 0) {1269memcpy(newm->localvariable_table_start(),1270m->localvariable_table_start(),1271localvariable_len * sizeof(LocalVariableTableElement));1272}1273// Copy stackmap table1274if (m->has_stackmap_table()) {1275int code_attribute_length = m->stackmap_data()->length();1276Array<u1>* stackmap_data =1277MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);1278memcpy((void*)stackmap_data->adr_at(0),1279(void*)m->stackmap_data()->adr_at(0), code_attribute_length);1280newm->set_stackmap_data(stackmap_data);1281}12821283// copy annotations over to new method1284newcm->copy_annotations_from(cm);1285return newm;1286}12871288vmSymbols::SID Method::klass_id_for_intrinsics(Klass* holder) {1289// if loader is not the default loader (i.e., != NULL), we can't know the intrinsics1290// because we are not loading from core libraries1291// exception: the AES intrinsics come from lib/ext/sunjce_provider.jar1292// which does not use the class default class loader so we check for its loader here1293InstanceKlass* ik = InstanceKlass::cast(holder);1294if ((ik->class_loader() != NULL) && !SystemDictionary::is_ext_class_loader(ik->class_loader())) {1295return vmSymbols::NO_SID; // regardless of name, no intrinsics here1296}12971298// see if the klass name is well-known:1299Symbol* klass_name = ik->name();1300return vmSymbols::find_sid(klass_name);1301}13021303void Method::init_intrinsic_id() {1304assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");1305const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));1306assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");1307assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");13081309// the klass name is well-known:1310vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());1311assert(klass_id != vmSymbols::NO_SID, "caller responsibility");13121313// ditto for method and signature:1314vmSymbols::SID name_id = vmSymbols::find_sid(name());1315if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)1316&& name_id == vmSymbols::NO_SID)1317return;1318vmSymbols::SID sig_id = vmSymbols::find_sid(signature());1319if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)1320&& sig_id == vmSymbols::NO_SID) return;1321jshort flags = access_flags().as_short();13221323vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);1324if (id != vmIntrinsics::_none) {1325set_intrinsic_id(id);1326return;1327}13281329// A few slightly irregular cases:1330switch (klass_id) {1331case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):1332// Second chance: check in regular Math.1333switch (name_id) {1334case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):1335case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):1336case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):1337// pretend it is the corresponding method in the non-strict class:1338klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);1339id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);1340break;1341}1342break;13431344// Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.1345case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):1346if (!is_native()) break;1347id = MethodHandles::signature_polymorphic_name_id(method_holder(), 
name());1348if (is_static() != MethodHandles::is_signature_polymorphic_static(id))1349id = vmIntrinsics::_none;1350break;1351}13521353if (id != vmIntrinsics::_none) {1354// Set up its iid. It is an alias method.1355set_intrinsic_id(id);1356return;1357}1358}13591360// These two methods are static since a GC may move the Method1361bool Method::load_signature_classes(methodHandle m, TRAPS) {1362if (THREAD->is_Compiler_thread()) {1363// There is nothing useful this routine can do from within the Compile thread.1364// Hopefully, the signature contains only well-known classes.1365// We could scan for this and return true/false, but the caller won't care.1366return false;1367}1368bool sig_is_loaded = true;1369Handle class_loader(THREAD, m->method_holder()->class_loader());1370Handle protection_domain(THREAD, m->method_holder()->protection_domain());1371ResourceMark rm(THREAD);1372Symbol* signature = m->signature();1373for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {1374if (ss.is_object()) {1375Symbol* sym = ss.as_symbol(CHECK_(false));1376Symbol* name = sym;1377Klass* klass = SystemDictionary::resolve_or_null(name, class_loader,1378protection_domain, THREAD);1379// We are loading classes eagerly. If a ClassNotFoundException or1380// a LinkageError was generated, be sure to ignore it.1381if (HAS_PENDING_EXCEPTION) {1382if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||1383PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {1384CLEAR_PENDING_EXCEPTION;1385} else {1386return false;1387}1388}1389if( klass == NULL) { sig_is_loaded = false; }1390}1391}1392return sig_is_loaded;1393}13941395bool Method::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {1396Handle class_loader(THREAD, m->method_holder()->class_loader());1397Handle protection_domain(THREAD, m->method_holder()->protection_domain());1398ResourceMark rm(THREAD);1399Symbol* signature = m->signature();1400for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {1401if (ss.type() == T_OBJECT) {1402Symbol* name = ss.as_symbol_or_null();1403if (name == NULL) return true;1404Klass* klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);1405if (klass == NULL) return true;1406}1407}1408return false;1409}14101411// Exposed so field engineers can debug VM1412void Method::print_short_name(outputStream* st) {1413ResourceMark rm;1414#ifdef PRODUCT1415st->print(" %s::", method_holder()->external_name());1416#else1417st->print(" %s::", method_holder()->internal_name());1418#endif1419name()->print_symbol_on(st);1420if (WizardMode) signature()->print_symbol_on(st);1421else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))1422MethodHandles::print_as_basic_type_signature_on(st, signature(), true);1423}14241425// Comparer for sorting an object array containing1426// Method*s.1427static int method_comparator(Method* a, Method* b) {1428return a->name()->fast_compare(b->name());1429}14301431// This is only done during class loading, so it is OK to assume method_idnum matches the methods() array1432// default_methods also uses this without the ordering for fast find_method1433void Method::sort_methods(Array<Method*>* methods, bool idempotent, bool set_idnums) {1434int length = methods->length();1435if (length > 1) {1436{1437No_Safepoint_Verifier nsv;1438QuickSort::sort<Method*>(methods->data(), length, method_comparator, idempotent);1439}1440// Reset method ordering1441if (set_idnums) {1442for (int i = 0; i < length; i++) {1443Method* m = 
methods->at(i);1444m->set_method_idnum(i);1445m->set_orig_method_idnum(i);1446}1447}1448}1449}14501451//-----------------------------------------------------------------------------------1452// Non-product code unless JVM/TI needs it14531454#if !defined(PRODUCT) || INCLUDE_JVMTI1455class SignatureTypePrinter : public SignatureTypeNames {1456private:1457outputStream* _st;1458bool _use_separator;14591460void type_name(const char* name) {1461if (_use_separator) _st->print(", ");1462_st->print("%s", name);1463_use_separator = true;1464}14651466public:1467SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {1468_st = st;1469_use_separator = false;1470}14711472void print_parameters() { _use_separator = false; iterate_parameters(); }1473void print_returntype() { _use_separator = false; iterate_returntype(); }1474};147514761477void Method::print_name(outputStream* st) {1478Thread *thread = Thread::current();1479ResourceMark rm(thread);1480SignatureTypePrinter sig(signature(), st);1481st->print("%s ", is_static() ? "static" : "virtual");1482sig.print_returntype();1483st->print(" %s.", method_holder()->internal_name());1484name()->print_symbol_on(st);1485st->print("(");1486sig.print_parameters();1487st->print(")");1488}1489#endif // !PRODUCT || INCLUDE_JVMTI149014911492//-----------------------------------------------------------------------------------1493// Non-product code14941495#ifndef PRODUCT1496void Method::print_codes_on(outputStream* st) const {1497print_codes_on(0, code_size(), st);1498}14991500void Method::print_codes_on(int from, int to, outputStream* st) const {1501Thread *thread = Thread::current();1502ResourceMark rm(thread);1503methodHandle mh (thread, (Method*)this);1504BytecodeStream s(mh);1505s.set_interval(from, to);1506BytecodeTracer::set_closure(BytecodeTracer::std_closure());1507while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);1508}1509#endif // not PRODUCT151015111512// Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas1513// between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)1514// we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 
0x0 is used1515// as end-of-stream terminator.15161517void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {1518// bci and line number does not compress into single byte.1519// Write out escape character and use regular compression for bci and line number.1520write_byte((jubyte)0xFF);1521write_signed_int(bci_delta);1522write_signed_int(line_delta);1523}15241525// See comment in method.hpp which explains why this exists.1526#if defined(_M_AMD64) && _MSC_VER >= 14001527#pragma optimize("", off)1528void CompressedLineNumberWriteStream::write_pair(int bci, int line) {1529write_pair_inline(bci, line);1530}1531#pragma optimize("", on)1532#endif15331534CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {1535_bci = 0;1536_line = 0;1537};153815391540bool CompressedLineNumberReadStream::read_pair() {1541jubyte next = read_byte();1542// Check for terminator1543if (next == 0) return false;1544if (next == 0xFF) {1545// Escape character, regular compression used1546_bci += read_signed_int();1547_line += read_signed_int();1548} else {1549// Single byte compression used1550_bci += next >> 3;1551_line += next & 0x7;1552}1553return true;1554}155515561557Bytecodes::Code Method::orig_bytecode_at(int bci) const {1558BreakpointInfo* bp = method_holder()->breakpoints();1559for (; bp != NULL; bp = bp->next()) {1560if (bp->match(this, bci)) {1561return bp->orig_bytecode();1562}1563}1564{1565ResourceMark rm;1566fatal(err_msg("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci));1567}1568return Bytecodes::_shouldnotreachhere;1569}15701571void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {1572assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");1573BreakpointInfo* bp = method_holder()->breakpoints();1574for (; bp != NULL; bp = bp->next()) {1575if (bp->match(this, bci)) {1576bp->set_orig_bytecode(code);1577// and continue, in case there is more than one1578}1579}1580}15811582void Method::set_breakpoint(int bci) {1583InstanceKlass* ik = method_holder();1584BreakpointInfo *bp = new BreakpointInfo(this, bci);1585bp->set_next(ik->breakpoints());1586ik->set_breakpoints(bp);1587// do this last:1588bp->set(this);1589}15901591static void clear_matches(Method* m, int bci) {1592InstanceKlass* ik = m->method_holder();1593BreakpointInfo* prev_bp = NULL;1594BreakpointInfo* next_bp;1595for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {1596next_bp = bp->next();1597// bci value of -1 is used to delete all breakpoints in method m (ex: clear_all_breakpoint).1598if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {1599// do this first:1600bp->clear(m);1601// unhook it1602if (prev_bp != NULL)1603prev_bp->set_next(next_bp);1604else1605ik->set_breakpoints(next_bp);1606delete bp;1607// When class is redefined JVMTI sets breakpoint in all versions of EMCP methods1608// at same location. So we have multiple matching (method_index and bci)1609// BreakpointInfo nodes in BreakpointInfo list. We should just delete one1610// breakpoint for clear_breakpoint request and keep all other method versions1611// BreakpointInfo for future clear_breakpoint request.1612// bcivalue of -1 is used to clear all breakpoints (see clear_all_breakpoints)1613// which is being called when class is unloaded. We delete all the Breakpoint1614// information for all versions of method. We may not correctly restore the original1615// bytecode in all method versions, but that is ok. 

void Method::clear_breakpoint(int bci) {
  assert(bci >= 0, "");
  clear_matches(this, bci);
}

void Method::clear_all_breakpoints() {
  clear_matches(this, -1);
}


int Method::invocation_count() {
  MethodCounters *mcs = method_counters();
  if (TieredCompilation) {
    MethodData* const mdo = method_data();
    if (((mcs != NULL) ? mcs->invocation_counter()->carry() : false) ||
        ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
      return InvocationCounter::count_limit;
    } else {
      return ((mcs != NULL) ? mcs->invocation_counter()->count() : 0) +
             ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
    }
  } else {
    return (mcs == NULL) ? 0 : mcs->invocation_counter()->count();
  }
}

int Method::backedge_count() {
  MethodCounters *mcs = method_counters();
  if (TieredCompilation) {
    MethodData* const mdo = method_data();
    if (((mcs != NULL) ? mcs->backedge_counter()->carry() : false) ||
        ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
      return InvocationCounter::count_limit;
    } else {
      return ((mcs != NULL) ? mcs->backedge_counter()->count() : 0) +
             ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
    }
  } else {
    return (mcs == NULL) ? 0 : mcs->backedge_counter()->count();
  }
}

int Method::highest_comp_level() const {
  const MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    return mcs->highest_comp_level();
  } else {
    return CompLevel_none;
  }
}

int Method::highest_osr_comp_level() const {
  const MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    return mcs->highest_osr_comp_level();
  } else {
    return CompLevel_none;
  }
}

void Method::set_highest_comp_level(int level) {
  MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    mcs->set_highest_comp_level(level);
  }
}

void Method::set_highest_osr_comp_level(int level) {
  MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    mcs->set_highest_osr_comp_level(level);
  }
}
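
// Note on the counter accessors above: with TieredCompilation the counts live partly
// in the MethodCounters and partly in the MethodData, so invocation_count() and
// backedge_count() report the sum of the two and saturate at
// InvocationCounter::count_limit as soon as either counter has its carry bit set.
// For example (hypothetical counts): mcs invocation count 1500 plus mdo invocation
// count 2600, with neither carry set, reports 4100.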

BreakpointInfo::BreakpointInfo(Method* m, int bci) {
  _bci = bci;
  _name_index = m->name_index();
  _signature_index = m->signature_index();
  _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
  if (_orig_bytecode == Bytecodes::_breakpoint)
    _orig_bytecode = m->orig_bytecode_at(_bci);
  _next = NULL;
}

void BreakpointInfo::set(Method* method) {
#ifdef ASSERT
  {
    Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
    if (code == Bytecodes::_breakpoint)
      code = method->orig_bytecode_at(_bci);
    assert(orig_bytecode() == code, "original bytecode must be the same");
  }
#endif
  Thread *thread = Thread::current();
  *method->bcp_from(_bci) = Bytecodes::_breakpoint;
  method->incr_number_of_breakpoints(thread);
  SystemDictionary::notice_modification();
  {
    // Deoptimize all dependents on this method
    HandleMark hm(thread);
    methodHandle mh(thread, method);
    Universe::flush_dependents_on_method(mh);
  }
}

void BreakpointInfo::clear(Method* method) {
  *method->bcp_from(_bci) = orig_bytecode();
  assert(method->number_of_breakpoints() > 0, "must not go negative");
  method->decr_number_of_breakpoints(Thread::current());
}

// jmethodID handling

// This is a block allocating object, sort of like JNIHandleBlock, only a
// lot simpler. There aren't many of these, they aren't long, they are rarely
// deleted and so we can do some suboptimal things.
// It's allocated on the CHeap because once we allocate a jmethodID, we can
// never get rid of it.
// It would be nice to be able to parameterize the number of methods for
// the null_class_loader but then we'd have to turn this and ClassLoaderData
// into templates.

// I feel like this brain dead class should exist somewhere in the STL

class JNIMethodBlock : public CHeapObj<mtClass> {
  enum { number_of_methods = 8 };

  Method*         _methods[number_of_methods];
  int             _top;
  JNIMethodBlock* _next;
 public:
  static Method* const _free_method;

  JNIMethodBlock() : _next(NULL), _top(0) {
    for (int i = 0; i < number_of_methods; i++) _methods[i] = _free_method;
  }

  Method** add_method(Method* m) {
    if (_top < number_of_methods) {
      // top points to the next free entry.
      int i = _top;
      _methods[i] = m;
      _top++;
      return &_methods[i];
    } else if (_top == number_of_methods) {
      // if the next free entry ran off the block see if there's a free entry
      for (int i = 0; i < number_of_methods; i++) {
        if (_methods[i] == _free_method) {
          _methods[i] = m;
          return &_methods[i];
        }
      }
      // Only check each block once for frees. They're very unlikely.
      // Increment top past the end of the block.
      _top++;
    }
    // need to allocate a next block.
    if (_next == NULL) {
      _next = new JNIMethodBlock();
    }
    return _next->add_method(m);
  }

  bool contains(Method** m) {
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        if (&(b->_methods[i]) == m) {
          return true;
        }
      }
    }
    return false;  // not found
  }

  // Doesn't really destroy it, just marks it as free so it can be reused.
  void destroy_method(Method** m) {
#ifdef ASSERT
    assert(contains(m), "should be a methodID");
#endif // ASSERT
    *m = _free_method;
  }

  void clear_method(Method* m) {
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        if (b->_methods[i] == m) {
          b->_methods[i] = NULL;
          return;
        }
      }
    }
    // not found
  }

  // During class unloading the methods are cleared, which is different
  // than freed.
  void clear_all_methods() {
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        b->_methods[i] = NULL;
      }
    }
  }

#ifndef PRODUCT
  int count_methods() {
    // count all allocated methods
    int count = 0;
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        if (b->_methods[i] != _free_method) count++;
      }
    }
    return count;
  }
#endif // PRODUCT
};

// Something that can't be mistaken for an address or a markOop
Method* const JNIMethodBlock::_free_method = (Method*)55;
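
// In this scheme a jmethodID is simply a pointer to one of the Method* slots above:
// make_jmethod_id() below hands out &_methods[i] cast to jmethodID, so resolving an
// id is a plain load, and class redefinition can retarget an existing id in place
// (see change_method_associated_with_jmethod_id()). An illustrative sketch, with
// hypothetical variable names:
//
//   jmethodID mid = Method::make_jmethod_id(cld, m);  // mid points into a JNIMethodBlock
//   Method*   m2  = *(Method**)mid;                   // what resolve_jmethod_id() amounts to
//   assert(m2 == m, "same method until it is redefined or cleared");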

// Add a method id to the jmethod_ids
jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) {
  ClassLoaderData* cld = loader_data;

  if (!SafepointSynchronize::is_at_safepoint()) {
    // Have to add jmethod_ids() to class loader data thread-safely.
    // Also have to add the method to the list safely, which the cld lock
    // protects as well.
    MutexLockerEx ml(cld->metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (cld->jmethod_ids() == NULL) {
      cld->set_jmethod_ids(new JNIMethodBlock());
    }
    // jmethodID is a pointer to Method*
    return (jmethodID)cld->jmethod_ids()->add_method(m);
  } else {
    // At safepoint, we are single threaded and can set this.
    if (cld->jmethod_ids() == NULL) {
      cld->set_jmethod_ids(new JNIMethodBlock());
    }
    // jmethodID is a pointer to Method*
    return (jmethodID)cld->jmethod_ids()->add_method(m);
  }
}

// Mark a jmethodID as free. This is called when there is a data race in
// InstanceKlass while creating the jmethodID cache.
void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
  ClassLoaderData* cld = loader_data;
  Method** ptr = (Method**)m;
  assert(cld->jmethod_ids() != NULL, "should have method handles");
  cld->jmethod_ids()->destroy_method(ptr);
}

void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
  // Can't assert the method_holder is the same because the new method has the
  // scratch method holder.
  assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
           == new_method->method_holder()->class_loader(),
         "changing to a different class loader");
  // Just change the method in place, jmethodID pointer doesn't change.
  *((Method**)jmid) = new_method;
}

bool Method::is_method_id(jmethodID mid) {
  Method* m = resolve_jmethod_id(mid);
  if (m == NULL) {
    return false;
  }
  InstanceKlass* ik = m->method_holder();
  if (ik == NULL) {
    return false;
  }
  ClassLoaderData* cld = ik->class_loader_data();
  if (cld->jmethod_ids() == NULL) return false;
  return (cld->jmethod_ids()->contains((Method**)mid));
}

Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
  if (mid == NULL) return NULL;
  if (!Method::is_method_id(mid)) {
    return NULL;
  }
  Method* o = resolve_jmethod_id(mid);
  if (o == NULL || o == JNIMethodBlock::_free_method || !((Metadata*)o)->is_method()) {
    return NULL;
  }
  return o;
};
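
// checked_resolve_jmethod_id() is the defensive counterpart of resolve_jmethod_id():
// it screens out NULL ids, ids that do not point into the owning loader's
// JNIMethodBlock, slots already marked _free_method, and slots whose contents no
// longer look like a Method. A sketch of the intended calling pattern (variable
// names are hypothetical):
//
//   Method* m = Method::checked_resolve_jmethod_id(mid);
//   if (m == NULL) {
//     // stale or bogus jmethodID - the caller must not touch it
//   }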

void Method::set_on_stack(const bool value) {
  // Set both the method itself and its constant pool.  The constant pool
  // on stack means some method referring to it is also on the stack.
  constants()->set_on_stack(value);

  bool succeeded = _access_flags.set_on_stack(value);
  if (value && succeeded) {
    MetadataOnStackMark::record(this, Thread::current());
  }
}

void Method::clear_jmethod_id(ClassLoaderData* loader_data) {
  loader_data->jmethod_ids()->clear_method(this);
}

// Called when the class loader is unloaded to make all methods weak.
void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
  loader_data->jmethod_ids()->clear_all_methods();
}

bool Method::has_method_vptr(const void* ptr) {
  Method m;
  // This assumes that the vtbl pointer is the first word of a C++ object.
  // This assumption is also in universe.cpp patch_klass_vtble
  void* vtbl2 = dereference_vptr((const void*)&m);
  void* this_vtbl = dereference_vptr(ptr);
  return vtbl2 == this_vtbl;
}

// Check that this pointer is valid by checking that the vtbl pointer matches
bool Method::is_valid_method() const {
  if (this == NULL) {
    return false;
  } else if (!is_metaspace_object()) {
    return false;
  } else {
    return has_method_vptr((const void*)this);
  }
}

#ifndef PRODUCT
void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
  out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
}
#endif // PRODUCT


// Printing

#ifndef PRODUCT

void Method::print_on(outputStream* st) const {
  ResourceMark rm;
  assert(is_method(), "must be method");
  st->print_cr("%s", internal_name());
  // get the effect of PrintOopAddress, always, for methods:
  st->print_cr(" - this oop: " INTPTR_FORMAT, (intptr_t)this);
  st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
  st->print (" - constants: " INTPTR_FORMAT " ", (address)constants());
  constants()->print_value_on(st); st->cr();
  st->print (" - access: 0x%x ", access_flags().as_int()); access_flags().print_on(st); st->cr();
  st->print (" - name: "); name()->print_value_on(st); st->cr();
  st->print (" - signature: "); signature()->print_value_on(st); st->cr();
  st->print_cr(" - max stack: %d", max_stack());
  st->print_cr(" - max locals: %d", max_locals());
  st->print_cr(" - size of params: %d", size_of_parameters());
  st->print_cr(" - method size: %d", method_size());
  if (intrinsic_id() != vmIntrinsics::_none)
    st->print_cr(" - intrinsic id: %d %s", intrinsic_id(), vmIntrinsics::name_at(intrinsic_id()));
  if (highest_comp_level() != CompLevel_none)
    st->print_cr(" - highest level: %d", highest_comp_level());
  st->print_cr(" - vtable index: %d", _vtable_index);
  st->print_cr(" - i2i entry: " INTPTR_FORMAT, interpreter_entry());
  st->print( " - adapters: ");
  AdapterHandlerEntry* a = ((Method*)this)->adapter();
  if (a == NULL)
    st->print_cr(INTPTR_FORMAT, a);
  else
    a->print_adapter_on(st);
  st->print_cr(" - compiled entry " INTPTR_FORMAT, from_compiled_entry());
  st->print_cr(" - code size: %d", code_size());
  if (code_size() != 0) {
    st->print_cr(" - code start: " INTPTR_FORMAT, code_base());
    st->print_cr(" - code end (excl): " INTPTR_FORMAT, code_base() + code_size());
  }
  if (method_data() != NULL) {
    st->print_cr(" - method data: " INTPTR_FORMAT, (address)method_data());
  }
  st->print_cr(" - checked ex length: %d", checked_exceptions_length());
  if (checked_exceptions_length() > 0) {
    CheckedExceptionElement* table = checked_exceptions_start();
    st->print_cr(" - checked ex start: " INTPTR_FORMAT, table);
    if (Verbose) {
      for (int i = 0; i < checked_exceptions_length(); i++) {
        st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
      }
    }
  }
  if (has_linenumber_table()) {
    u_char* table = compressed_linenumber_table();
    st->print_cr(" - linenumber start: " INTPTR_FORMAT, table);
    if (Verbose) {
      CompressedLineNumberReadStream stream(table);
      while (stream.read_pair()) {
        st->print_cr(" - line %d: %d", stream.line(), stream.bci());
      }
    }
  }
  st->print_cr(" - localvar length: %d", localvariable_table_length());
  if (localvariable_table_length() > 0) {
    LocalVariableTableElement* table = localvariable_table_start();
    st->print_cr(" - localvar start: " INTPTR_FORMAT, table);
    if (Verbose) {
      for (int i = 0; i < localvariable_table_length(); i++) {
        int bci = table[i].start_bci;
        int len = table[i].length;
        const char* name = constants()->printable_name_at(table[i].name_cp_index);
        const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
        int slot = table[i].slot;
        st->print_cr(" - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
      }
    }
  }
  if (code() != NULL) {
    st->print (" - compiled code: ");
    code()->print_value_on(st);
  }
  if (is_native()) {
    st->print_cr(" - native function: " INTPTR_FORMAT, native_function());
    st->print_cr(" - signature handler: " INTPTR_FORMAT, signature_handler());
  }
}

#endif //PRODUCT

void Method::print_value_on(outputStream* st) const {
  assert(is_method(), "must be method");
  st->print("%s", internal_name());
  print_address_on(st);
  st->print(" ");
  name()->print_value_on(st);
  st->print(" ");
  signature()->print_value_on(st);
  st->print(" in ");
  method_holder()->print_value_on(st);
  if (WizardMode) st->print("#%d", _vtable_index);
  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
  if (WizardMode && code() != NULL) st->print(" ((nmethod*)%p)", code());
}

#if INCLUDE_SERVICES
// Size Statistics
void Method::collect_statistics(KlassSizeStats *sz) const {
  int mysize = sz->count(this);
  sz->_method_bytes += mysize;
  sz->_method_all_bytes += mysize;
  sz->_rw_bytes += mysize;

  if (constMethod()) {
    constMethod()->collect_statistics(sz);
  }
  if (method_data()) {
    method_data()->collect_statistics(sz);
  }
}
#endif // INCLUDE_SERVICES

// Verification

void Method::verify_on(outputStream* st) {
  guarantee(is_method(), "object must be method");
  guarantee(constants()->is_constantPool(), "should be constant pool");
  guarantee(constMethod()->is_constMethod(), "should be ConstMethod*");
  MethodData* md = method_data();
  guarantee(md == NULL ||
            md->is_methodData(), "should be method data");
}
