Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/oops/method.hpp
32285 views
/*1* Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#ifndef SHARE_VM_OOPS_METHODOOP_HPP25#define SHARE_VM_OOPS_METHODOOP_HPP2627#include "classfile/vmSymbols.hpp"28#include "code/compressedStream.hpp"29#include "compiler/oopMap.hpp"30#include "interpreter/invocationCounter.hpp"31#include "oops/annotations.hpp"32#include "oops/constantPool.hpp"33#include "oops/methodCounters.hpp"34#include "oops/instanceKlass.hpp"35#include "oops/oop.hpp"36#include "oops/typeArrayOop.hpp"37#include "utilities/accessFlags.hpp"38#include "utilities/growableArray.hpp"39#include "utilities/macros.hpp"40#if INCLUDE_JFR41#include "jfr/support/jfrTraceIdExtension.hpp"42#endif4344// A Method* represents a Java method.45//46// Memory layout (each line represents a word). 
Note that most applications load thousands of methods,47// so keeping the size of this structure small has a big impact on footprint.48//49// We put all oops and method_size first for better gc cache locality.50//51// The actual bytecodes are inlined after the end of the Method struct.52//53// There are bits in the access_flags telling whether inlined tables are present.54// Note that accessing the line number and local variable tables is not performance critical at all.55// Accessing the checked exceptions table is used by reflection, so we put that last to make access56// to it fast.57//58// The line number table is compressed and inlined following the byte codes. It is found as the first59// byte following the byte codes. The checked exceptions table and the local variable table are inlined60// after the line number table, and indexed from the end of the method. We do not compress the checked61// exceptions table since the average length is less than 2, and do not bother to compress the local62// variable table either since it is mostly absent.63//64// Note that native_function and signature_handler has to be at fixed offsets (required by the interpreter)65//66// |------------------------------------------------------|67// | header |68// | klass |69// |------------------------------------------------------|70// | ConstMethod* (oop) |71// |------------------------------------------------------|72// | methodData (oop) |73// | methodCounters |74// |------------------------------------------------------|75// | access_flags |76// | vtable_index |77// |------------------------------------------------------|78// | result_index (C++ interpreter only) |79// |------------------------------------------------------|80// | method_size | intrinsic_id| flags |81// |------------------------------------------------------|82// | code (pointer) |83// | i2i (pointer) |84// | adapter (pointer) |85// | from_compiled_entry (pointer) |86// | from_interpreted_entry (pointer) |87// 
|------------------------------------------------------|88// | native_function (present only if native) |89// | signature_handler (present only if native) |90// |------------------------------------------------------|919293class CheckedExceptionElement;94class LocalVariableTableElement;95class AdapterHandlerEntry;96class MethodData;97class MethodCounters;98class ConstMethod;99class InlineTableSizes;100class KlassSizeStats;101102class Method : public Metadata {103friend class VMStructs;104private:105ConstMethod* _constMethod; // Method read-only data.106MethodData* _method_data;107MethodCounters* _method_counters;108AccessFlags _access_flags; // Access flags109int _vtable_index; // vtable index of this method (see VtableIndexFlag)110// note: can have vtables with >2**16 elements (because of inheritance)111u2 _method_size; // size of this object112u1 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)113u1 _jfr_towrite : 1, // Flags114_caller_sensitive : 1,115_force_inline : 1,116_hidden : 1,117_running_emcp : 1,118_dont_inline : 1,119_has_injected_profile : 1,120: 2;121122JFR_ONLY(DEFINE_TRACE_FLAG;)123124#ifndef PRODUCT125int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)126#endif127// Entry point for calling both from and to the interpreter.128address _i2i_entry; // All-args-on-stack calling convention129// Adapter blob (i2c/c2i) for this Method*. Set once when method is linked.130AdapterHandlerEntry* _adapter;131// Entry point for calling from compiled code, to compiled code if it exists132// or else the interpreter.133volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()134// The entry point for calling both from and to compiled code is135// "_code->entry_point()". Because of tiered compilation and de-opt, this136// field can come and go. It can transition from NULL to not-null at any137// time (whenever a compile completes). 
It can transition from not-null to138// NULL only at safepoints (because of a de-opt).139nmethod* volatile _code; // Points to the corresponding piece of native code140volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry141142// Constructor143Method(ConstMethod* xconst, AccessFlags access_flags, int size);144public:145146static Method* allocate(ClassLoaderData* loader_data,147int byte_code_size,148AccessFlags access_flags,149InlineTableSizes* sizes,150ConstMethod::MethodType method_type,151TRAPS);152153// CDS and vtbl checking can create an empty Method to get vtbl pointer.154Method(){}155156// The Method vtable is restored by this call when the Method is in the157// shared archive. See patch_klass_vtables() in metaspaceShared.cpp for158// all the gory details. SA, dtrace and pstack helpers distinguish metadata159// by their vtable.160void restore_vtable() { guarantee(is_method(), "vtable restored by this call"); }161bool is_method() const volatile { return true; }162163void restore_unshareable_info(TRAPS);164165// accessors for instance variables166167ConstMethod* constMethod() const { return _constMethod; }168void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }169170171static address make_adapters(methodHandle mh, TRAPS);172volatile address from_compiled_entry() const { return (address)OrderAccess::load_ptr_acquire(&_from_compiled_entry); }173volatile address from_interpreted_entry() const{ return (address)OrderAccess::load_ptr_acquire(&_from_interpreted_entry); }174175// access flag176AccessFlags access_flags() const { return _access_flags; }177void set_access_flags(AccessFlags flags) { _access_flags = flags; }178179// name180Symbol* name() const { return constants()->symbol_at(name_index()); }181int name_index() const { return constMethod()->name_index(); }182void set_name_index(int index) { constMethod()->set_name_index(index); }183184// signature185Symbol* signature() const { return 
constants()->symbol_at(signature_index()); }186int signature_index() const { return constMethod()->signature_index(); }187void set_signature_index(int index) { constMethod()->set_signature_index(index); }188189// generics support190Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }191int generic_signature_index() const { return constMethod()->generic_signature_index(); }192void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }193194// annotations support195AnnotationArray* annotations() const {196return constMethod()->method_annotations();197}198AnnotationArray* parameter_annotations() const {199return constMethod()->parameter_annotations();200}201AnnotationArray* annotation_default() const {202return constMethod()->default_annotations();203}204AnnotationArray* type_annotations() const {205return constMethod()->type_annotations();206}207208// Helper routine: get klass name + "." + method name + signature as209// C string, for the purpose of providing more useful NoSuchMethodErrors210// and fatal error handling. 
The string is allocated in resource211// area if a buffer is not provided by the caller.212char* name_and_sig_as_C_string() const;213char* name_and_sig_as_C_string(char* buf, int size) const;214215// Static routine in the situations we don't have a Method*216static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature);217static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size);218219Bytecodes::Code java_code_at(int bci) const {220return Bytecodes::java_code_at(this, bcp_from(bci));221}222Bytecodes::Code code_at(int bci) const {223return Bytecodes::code_at(this, bcp_from(bci));224}225226// JVMTI breakpoints227Bytecodes::Code orig_bytecode_at(int bci) const;228void set_orig_bytecode_at(int bci, Bytecodes::Code code);229void set_breakpoint(int bci);230void clear_breakpoint(int bci);231void clear_all_breakpoints();232// Tracking number of breakpoints, for fullspeed debugging.233// Only mutated by VM thread.234u2 number_of_breakpoints() const {235MethodCounters* mcs = method_counters();236if (mcs == NULL) {237return 0;238} else {239return mcs->number_of_breakpoints();240}241}242void incr_number_of_breakpoints(TRAPS) {243MethodCounters* mcs = get_method_counters(CHECK);244if (mcs != NULL) {245mcs->incr_number_of_breakpoints();246}247}248void decr_number_of_breakpoints(TRAPS) {249MethodCounters* mcs = get_method_counters(CHECK);250if (mcs != NULL) {251mcs->decr_number_of_breakpoints();252}253}254// Initialization only255void clear_number_of_breakpoints() {256MethodCounters* mcs = method_counters();257if (mcs != NULL) {258mcs->clear_number_of_breakpoints();259}260}261262// index into InstanceKlass methods() array263// note: also used by jfr264u2 method_idnum() const { return constMethod()->method_idnum(); }265void set_method_idnum(u2 idnum) { constMethod()->set_method_idnum(idnum); }266267u2 orig_method_idnum() const { return constMethod()->orig_method_idnum(); }268void 
set_orig_method_idnum(u2 idnum) { constMethod()->set_orig_method_idnum(idnum); }269270// code size271int code_size() const { return constMethod()->code_size(); }272273// method size274int method_size() const { return _method_size; }275void set_method_size(int size) {276assert(0 <= size && size < (1 << 16), "invalid method size");277_method_size = size;278}279280// constant pool for Klass* holding this method281ConstantPool* constants() const { return constMethod()->constants(); }282void set_constants(ConstantPool* c) { constMethod()->set_constants(c); }283284// max stack285// return original max stack size for method verification286int verifier_max_stack() const { return constMethod()->max_stack(); }287int max_stack() const { return constMethod()->max_stack() + extra_stack_entries(); }288void set_max_stack(int size) { constMethod()->set_max_stack(size); }289290// max locals291int max_locals() const { return constMethod()->max_locals(); }292void set_max_locals(int size) { constMethod()->set_max_locals(size); }293294int highest_comp_level() const;295void set_highest_comp_level(int level);296int highest_osr_comp_level() const;297void set_highest_osr_comp_level(int level);298299// Count of times method was exited via exception while interpreting300void interpreter_throwout_increment(TRAPS) {301MethodCounters* mcs = get_method_counters(CHECK);302if (mcs != NULL) {303mcs->interpreter_throwout_increment();304}305}306307int interpreter_throwout_count() const {308MethodCounters* mcs = method_counters();309if (mcs == NULL) {310return 0;311} else {312return mcs->interpreter_throwout_count();313}314}315316// size of parameters317int size_of_parameters() const { return constMethod()->size_of_parameters(); }318void set_size_of_parameters(int size) { constMethod()->set_size_of_parameters(size); }319320bool has_stackmap_table() const {321return constMethod()->has_stackmap_table();322}323324Array<u1>* stackmap_data() const {325return constMethod()->stackmap_data();326}327328void 
set_stackmap_data(Array<u1>* sd) {329constMethod()->set_stackmap_data(sd);330}331332// exception handler table333bool has_exception_handler() const334{ return constMethod()->has_exception_handler(); }335int exception_table_length() const336{ return constMethod()->exception_table_length(); }337ExceptionTableElement* exception_table_start() const338{ return constMethod()->exception_table_start(); }339340// Finds the first entry point bci of an exception handler for an341// exception of klass ex_klass thrown at throw_bci. A value of NULL342// for ex_klass indicates that the exception klass is not known; in343// this case it matches any constraint class. Returns -1 if the344// exception cannot be handled in this method. The handler345// constraint classes are loaded if necessary. Note that this may346// throw an exception if loading of the constraint classes causes347// an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.348// If an exception is thrown, returns the bci of the349// exception handler which caused the exception to be thrown, which350// is needed for proper retries. See, for example,351// InterpreterRuntime::exception_handler_for_exception.352static int fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS);353354// method data access355MethodData* method_data() const {356return _method_data;357}358359void set_method_data(MethodData* data) {360// The store into method must be released. 
On platforms without361// total store order (TSO) the reference may become visible before362// the initialization of data otherwise.363OrderAccess::release_store_ptr((volatile void *)&_method_data, data);364}365366MethodCounters* method_counters() const {367return _method_counters;368}369370void clear_method_counters() {371_method_counters = NULL;372}373374bool init_method_counters(MethodCounters* counters) {375// Try to install a pointer to MethodCounters, return true on success.376return Atomic::cmpxchg_ptr(counters, (volatile void*)&_method_counters, NULL) == NULL;377}378379#ifdef TIERED380// We are reusing interpreter_invocation_count as a holder for the previous event count!381// We can do that since interpreter_invocation_count is not used in tiered.382int prev_event_count() const {383if (method_counters() == NULL) {384return 0;385} else {386return method_counters()->interpreter_invocation_count();387}388}389void set_prev_event_count(int count) {390MethodCounters* mcs = method_counters();391if (mcs != NULL) {392mcs->set_interpreter_invocation_count(count);393}394}395jlong prev_time() const {396MethodCounters* mcs = method_counters();397return mcs == NULL ? 0 : mcs->prev_time();398}399void set_prev_time(jlong time) {400MethodCounters* mcs = method_counters();401if (mcs != NULL) {402mcs->set_prev_time(time);403}404}405float rate() const {406MethodCounters* mcs = method_counters();407return mcs == NULL ? 
0 : mcs->rate();408}409void set_rate(float rate) {410MethodCounters* mcs = method_counters();411if (mcs != NULL) {412mcs->set_rate(rate);413}414}415#endif416417int invocation_count();418int backedge_count();419420bool was_executed_more_than(int n);421bool was_never_executed() { return !was_executed_more_than(0); }422423static void build_interpreter_method_data(methodHandle method, TRAPS);424425static MethodCounters* build_method_counters(Method* m, TRAPS);426427int interpreter_invocation_count() {428if (TieredCompilation) {429return invocation_count();430} else {431MethodCounters* mcs = method_counters();432return (mcs == NULL) ? 0 : mcs->interpreter_invocation_count();433}434}435int increment_interpreter_invocation_count(TRAPS) {436if (TieredCompilation) ShouldNotReachHere();437MethodCounters* mcs = get_method_counters(CHECK_0);438return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();439}440441#ifndef PRODUCT442int compiled_invocation_count() const { return _compiled_invocation_count; }443void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }444#endif // not PRODUCT445446// Clear (non-shared space) pointers which could not be relevant447// if this (shared) method were mapped into another JVM.448void remove_unshareable_info();449450// nmethod/verified compiler entry451address verified_code_entry();452bool check_code() const; // Not inline to avoid circular ref453nmethod* volatile code() const { assert( check_code(), "" ); return (nmethod *)OrderAccess::load_ptr_acquire(&_code); }454void clear_code(bool acquire_lock = true); // Clear out any compiled code455static void set_code(methodHandle mh, nmethod* code);456void set_adapter_entry(AdapterHandlerEntry* adapter) { _adapter = adapter; }457address get_i2c_entry();458address get_c2i_entry();459address get_c2i_unverified_entry();460AdapterHandlerEntry* adapter() { return _adapter; }461// setup entry points462void link_method(methodHandle method, TRAPS);463// clear 
entry points. Used by sharing code464void unlink_method();465466// vtable index467enum VtableIndexFlag {468// Valid vtable indexes are non-negative (>= 0).469// These few negative values are used as sentinels.470itable_index_max = -10, // first itable index, growing downward471pending_itable_index = -9, // itable index will be assigned472invalid_vtable_index = -4, // distinct from any valid vtable index473garbage_vtable_index = -3, // not yet linked; no vtable layout yet474nonvirtual_vtable_index = -2 // there is no need for vtable dispatch475// 6330203 Note: Do not use -1, which was overloaded with many meanings.476};477DEBUG_ONLY(bool valid_vtable_index() const { return _vtable_index >= nonvirtual_vtable_index; })478bool has_vtable_index() const { return _vtable_index >= 0; }479int vtable_index() const { return _vtable_index; }480void set_vtable_index(int index);481DEBUG_ONLY(bool valid_itable_index() const { return _vtable_index <= pending_itable_index; })482bool has_itable_index() const { return _vtable_index <= itable_index_max; }483int itable_index() const { assert(valid_itable_index(), "");484return itable_index_max - _vtable_index; }485void set_itable_index(int index);486487// interpreter entry488address interpreter_entry() const { return _i2i_entry; }489// Only used when first initialize so we can set _i2i_entry and _from_interpreted_entry490void set_interpreter_entry(address entry) { _i2i_entry = entry; _from_interpreted_entry = entry; }491492// native function (used for native methods only)493enum {494native_bind_event_is_interesting = true495};496address native_function() const { return *(native_function_addr()); }497address critical_native_function();498499// Must specify a real function (not NULL).500// Use clear_native_function() to unregister.501void set_native_function(address function, bool post_event_flag);502bool has_native_function() const;503void clear_native_function();504505// signature handler (used for native methods only)506address 
signature_handler() const { return *(signature_handler_addr()); }507void set_signature_handler(address handler);508509// Interpreter oopmap support510void mask_for(int bci, InterpreterOopMap* mask);511512#ifndef PRODUCT513// operations on invocation counter514void print_invocation_count();515#endif516517// byte codes518void set_code(address code) { return constMethod()->set_code(code); }519address code_base() const { return constMethod()->code_base(); }520bool contains(address bcp) const { return constMethod()->contains(bcp); }521522// prints byte codes523void print_codes() const { print_codes_on(tty); }524void print_codes_on(outputStream* st) const PRODUCT_RETURN;525void print_codes_on(int from, int to, outputStream* st) const PRODUCT_RETURN;526527// method parameters528bool has_method_parameters() const529{ return constMethod()->has_method_parameters(); }530int method_parameters_length() const531{ return constMethod()->method_parameters_length(); }532MethodParametersElement* method_parameters_start() const533{ return constMethod()->method_parameters_start(); }534535// checked exceptions536int checked_exceptions_length() const537{ return constMethod()->checked_exceptions_length(); }538CheckedExceptionElement* checked_exceptions_start() const539{ return constMethod()->checked_exceptions_start(); }540541// localvariable table542bool has_localvariable_table() const543{ return constMethod()->has_localvariable_table(); }544int localvariable_table_length() const545{ return constMethod()->localvariable_table_length(); }546LocalVariableTableElement* localvariable_table_start() const547{ return constMethod()->localvariable_table_start(); }548549bool has_linenumber_table() const550{ return constMethod()->has_linenumber_table(); }551u_char* compressed_linenumber_table() const552{ return constMethod()->compressed_linenumber_table(); }553554// method holder (the Klass* holding this method)555InstanceKlass* method_holder() const { return constants()->pool_holder(); }556557void 
compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)558Symbol* klass_name() const; // returns the name of the method holder559BasicType result_type() const; // type of the method result560bool is_returning_oop() const { BasicType r = result_type(); return (r == T_OBJECT || r == T_ARRAY); }561bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }562563// Checked exceptions thrown by this method (resolved to mirrors)564objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }565566// Access flags567bool is_public() const { return access_flags().is_public(); }568bool is_private() const { return access_flags().is_private(); }569bool is_protected() const { return access_flags().is_protected(); }570bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }571bool is_static() const { return access_flags().is_static(); }572bool is_final() const { return access_flags().is_final(); }573bool is_synchronized() const { return access_flags().is_synchronized();}574bool is_native() const { return access_flags().is_native(); }575bool is_abstract() const { return access_flags().is_abstract(); }576bool is_strict() const { return access_flags().is_strict(); }577bool is_synthetic() const { return access_flags().is_synthetic(); }578579// returns true if contains only return operation580bool is_empty_method() const;581582// returns true if this is a vanilla constructor583bool is_vanilla_constructor() const;584585// checks method and its method holder586bool is_final_method() const;587bool is_final_method(AccessFlags class_access_flags) const;588bool is_default_method() const;589590// true if method needs no dynamic dispatch (final and/or no vtable entry)591bool can_be_statically_bound() const;592bool can_be_statically_bound(AccessFlags class_access_flags) const;593594// returns true if the method has any 
backward branches.595bool has_loops() {596return access_flags().loops_flag_init() ? access_flags().has_loops() : compute_has_loops_flag();597};598599bool compute_has_loops_flag();600601bool has_jsrs() {602return access_flags().has_jsrs();603};604void set_has_jsrs() {605_access_flags.set_has_jsrs();606}607608// returns true if the method has any monitors.609bool has_monitors() const { return is_synchronized() || access_flags().has_monitor_bytecodes(); }610bool has_monitor_bytecodes() const { return access_flags().has_monitor_bytecodes(); }611612void set_has_monitor_bytecodes() { _access_flags.set_has_monitor_bytecodes(); }613614// monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes615// propererly nest in the method. It might return false, even though they actually nest properly, since the info.616// has not been computed yet.617bool guaranteed_monitor_matching() const { return access_flags().is_monitor_matching(); }618void set_guaranteed_monitor_matching() { _access_flags.set_monitor_matching(); }619620// returns true if the method is an accessor function (setter/getter).621bool is_accessor() const;622623// returns true if the method does nothing but return a constant of primitive type624bool is_constant_getter() const;625626// returns true if the method is an initializer (<init> or <clinit>).627bool is_initializer() const;628629// returns true if the method is static OR if the classfile version < 51630bool has_valid_initializer_flags() const;631632// returns true if the method name is <clinit> and the method has633// valid static initializer flags.634bool is_static_initializer() const;635636// returns true if the method name is <init>637bool is_object_initializer() const;638639// compiled code support640// NOTE: code() is inherently racy as deopt can be clearing code641// simultaneously. 
Use with caution.642bool has_compiled_code() const { return code() != NULL; }643644// sizing645static int header_size() { return sizeof(Method)/HeapWordSize; }646static int size(bool is_native);647int size() const { return method_size(); }648#if INCLUDE_SERVICES649void collect_statistics(KlassSizeStats *sz) const;650#endif651652// interpreter support653static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }654static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }655static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }656static ByteSize code_offset() { return byte_offset_of(Method, _code); }657static ByteSize method_data_offset() {658return byte_offset_of(Method, _method_data);659}660static ByteSize method_counters_offset() {661return byte_offset_of(Method, _method_counters);662}663#ifndef PRODUCT664static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }665#endif // not PRODUCT666static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }667static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }668static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }669static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }670static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }671672// for code generation673static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); }674static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); }675static int intrinsic_id_size_in_bytes() { return sizeof(u1); }676677// Static methods that are used to implement member methods where an exposed this pointer678// is needed due to possible GCs679static objArrayHandle resolved_checked_exceptions_impl(Method* this_oop, 
TRAPS);680681// Returns the byte code index from the byte code pointer682int bci_from(address bcp) const;683address bcp_from(int bci) const;684int validate_bci_from_bcx(intptr_t bcx) const;685686// Returns the line number for a bci if debugging information for the method is prowided,687// -1 is returned otherwise.688int line_number_from_bci(int bci) const;689690// Reflection support691bool is_overridden_in(Klass* k) const;692693// Stack walking support694bool is_ignored_by_security_stack_walk() const;695696// JSR 292 support697bool is_method_handle_intrinsic() const; // MethodHandles::is_signature_polymorphic_intrinsic(intrinsic_id)698bool is_compiled_lambda_form() const; // intrinsic_id() == vmIntrinsics::_compiledLambdaForm699bool has_member_arg() const; // intrinsic_id() == vmIntrinsics::_linkToSpecial, etc.700static methodHandle make_method_handle_intrinsic(vmIntrinsics::ID iid, // _invokeBasic, _linkToVirtual701Symbol* signature, //anything at all702TRAPS);703static Klass* check_non_bcp_klass(Klass* klass);704705// How many extra stack entries for invokedynamic when it's enabled706static const int extra_stack_entries_for_jsr292 = 1;707708// this operates only on invoke methods:709// presize interpreter frames for extra interpreter stack entries, if needed710// Account for the extra appendix argument for invokehandle/invokedynamic711static int extra_stack_entries() { return EnableInvokeDynamic ? 
extra_stack_entries_for_jsr292 : 0; }
  static int extra_stack_words();  // = extra_stack_entries() * Interpreter::stackElementSize

  // RedefineClasses() support:
  bool is_old() const                               { return access_flags().is_old(); }
  void set_is_old()                                 { _access_flags.set_is_old(); }
  bool is_obsolete() const                          { return access_flags().is_obsolete(); }
  void set_is_obsolete()                            { _access_flags.set_is_obsolete(); }
  bool is_deleted() const                           { return access_flags().is_deleted(); }
  void set_is_deleted()                             { _access_flags.set_is_deleted(); }

  bool is_running_emcp() const {
    // EMCP methods are old but not obsolete or deleted. Equivalent
    // Modulo Constant Pool means the method is equivalent except
    // the constant pool and instructions that access the constant
    // pool might be different.
    // If a breakpoint is set in a redefined method, its EMCP methods that are
    // still running must have a breakpoint also.
    return _running_emcp;
  }

  void set_running_emcp(bool x) {
    _running_emcp = x;
  }

  bool on_stack() const                             { return access_flags().on_stack(); }
  void set_on_stack(const bool value);

  // see the definition in Method*.cpp for the gory details
  bool should_not_be_cached() const;

  // JVMTI Native method prefixing support:
  bool is_prefixed_native() const                   { return access_flags().is_prefixed_native(); }
  void set_is_prefixed_native()                     { _access_flags.set_is_prefixed_native(); }

  // Rewriting support
  static methodHandle clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
                                          u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS);

  // jmethodID handling
  // Because the useful life-span of a jmethodID cannot be determined,
  // once created they are never reclaimed.  The methods to which they refer,
  // however, can be GC'ed away if the class is unloaded or if the method is
  // made obsolete or deleted -- in these cases, the jmethodID
  // refers to NULL (as is the case for any weak reference).
  static jmethodID make_jmethod_id(ClassLoaderData* loader_data, Method* mh);
  static void destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID mid);

  // Use resolve_jmethod_id() in situations where the caller is expected
  // to provide a valid jmethodID; the only sanity checks are in asserts;
  // result guaranteed not to be NULL.
  inline static Method* resolve_jmethod_id(jmethodID mid) {
    assert(mid != NULL, "JNI method id should not be null");
    return *((Method**)mid);
  }

  // Use checked_resolve_jmethod_id() in situations where the caller
  // should provide a valid jmethodID, but might not. NULL is returned
  // when the jmethodID does not refer to a valid method.
  static Method* checked_resolve_jmethod_id(jmethodID mid);

  static void change_method_associated_with_jmethod_id(jmethodID old_jmid_ptr, Method* new_method);
  static bool is_method_id(jmethodID mid);

  // Clear methods
  static void clear_jmethod_ids(ClassLoaderData* loader_data);
  static void print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) PRODUCT_RETURN;

  // Get this method's jmethodID -- allocate if it doesn't exist
  jmethodID jmethod_id()                            { methodHandle this_h(this);
                                                      return InstanceKlass::get_jmethod_id(method_holder(), this_h); }

  // Lookup the jmethodID for this method.  Return NULL if not found.
  // NOTE that this function can be called from a signal handler
  // (see AsyncGetCallTrace support for Forte Analyzer) and this
  // needs to be async-safe. No allocation should be done and
  // so handles are not used to avoid deadlock.
  jmethodID find_jmethod_id_or_null()               { return method_holder()->jmethod_id_or_null(this); }

  // Support for inlining of intrinsic methods
  vmIntrinsics::ID intrinsic_id() const             { return (vmIntrinsics::ID) _intrinsic_id; }
  void     set_intrinsic_id(vmIntrinsics::ID id)    {           _intrinsic_id = (u1) id; }

  // Helper routines for intrinsic_id() and vmIntrinsics::method().
  void init_intrinsic_id();     // updates from _none if a match
  void clear_jmethod_id(ClassLoaderData* loader_data);

  static vmSymbols::SID klass_id_for_intrinsics(Klass* holder);

  // Compiler/JFR hint flags (backed by dedicated bit fields, not access_flags).
  bool     jfr_towrite()                 { return _jfr_towrite; }
  void set_jfr_towrite(bool x)           { _jfr_towrite = x; }
  bool     caller_sensitive()            { return _caller_sensitive; }
  void set_caller_sensitive(bool x)      { _caller_sensitive = x; }
  bool     force_inline()                { return _force_inline; }
  void set_force_inline(bool x)          { _force_inline = x; }
  bool     dont_inline()                 { return _dont_inline; }
  void set_dont_inline(bool x)           { _dont_inline = x; }
  bool  is_hidden()                      { return _hidden; }
  void set_hidden(bool x)                { _hidden = x; }
  bool     has_injected_profile()        { return _has_injected_profile; }
  void set_has_injected_profile(bool x)  { _has_injected_profile = x; }

  JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)

  ConstMethod::MethodType method_type() const {
    return _constMethod->method_type();
  }
  bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }

  // On-stack replacement support
  bool has_osr_nmethod(int level, bool match_level) {
   return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
  }

  int mark_osr_nmethods() {
    return method_holder()->mark_osr_nmethods(this);
  }

  nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
  }

  // Inline cache support
  void cleanup_inline_caches();

  // Find if klass for method is loaded
  bool is_klass_loaded_by_klass_index(int klass_index) const;
  bool is_klass_loaded(int refinfo_index, bool must_be_resolved = false) const;

  // Indicates whether compilation failed earlier for this method, or
  // whether it is not compilable for another reason like having a
  // breakpoint set in it.
  bool is_not_compilable(int comp_level = CompLevel_any) const;
  void set_not_compilable(int comp_level = CompLevel_all, bool report = true, const char* reason = NULL);
  void set_not_compilable_quietly(int comp_level = CompLevel_all) {
    set_not_compilable(comp_level, false);
  }
  bool is_not_osr_compilable(int comp_level = CompLevel_any) const;
  void set_not_osr_compilable(int comp_level = CompLevel_all, bool report = true, const char* reason = NULL);
  void set_not_osr_compilable_quietly(int comp_level = CompLevel_all) {
    set_not_osr_compilable(comp_level, false);
  }
  bool is_always_compilable() const;

 private:
  void print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason);

 public:
  // Lazily allocates the MethodCounters on first use; may throw (TRAPS).
  MethodCounters* get_method_counters(TRAPS) {
    if (_method_counters == NULL) {
      build_method_counters(this, CHECK_AND_CLEAR_NULL);
    }
    return _method_counters;
  }

  bool   is_not_c1_compilable() const         { return access_flags().is_not_c1_compilable(); }
  void  set_not_c1_compilable()               {       _access_flags.set_not_c1_compilable();  }
  void clear_not_c1_compilable()              {       _access_flags.clear_not_c1_compilable(); }
  bool   is_not_c2_compilable() const         { return access_flags().is_not_c2_compilable(); }
  void  set_not_c2_compilable()               {       _access_flags.set_not_c2_compilable();  }
  void clear_not_c2_compilable()              {       _access_flags.clear_not_c2_compilable(); }

  bool   is_not_c1_osr_compilable() const     { return is_not_c1_compilable(); }  // don't waste an accessFlags bit
  void  set_not_c1_osr_compilable()           {       set_not_c1_compilable();  }  // don't waste an accessFlags bit
  void clear_not_c1_osr_compilable()          {     clear_not_c1_compilable(); }  // don't waste an accessFlags bit
  bool   is_not_c2_osr_compilable() const     { return access_flags().is_not_c2_osr_compilable(); }
  void  set_not_c2_osr_compilable()           {       _access_flags.set_not_c2_osr_compilable();  }
  void clear_not_c2_osr_compilable()          {       _access_flags.clear_not_c2_osr_compilable(); }

  // Background compilation support
  bool queued_for_compilation() const  { return access_flags().queued_for_compilation(); }
  void set_queued_for_compilation()    { _access_flags.set_queued_for_compilation();     }
  void clear_queued_for_compilation()  { _access_flags.clear_queued_for_compilation();   }

  // Resolve all classes in signature, return 'true' if successful
  static bool load_signature_classes(methodHandle m, TRAPS);

  // Return true if not all classes referenced in the signature, including the
  // return type, have been loaded
  static bool has_unloaded_classes_in_signature(methodHandle m, TRAPS);

  // Printing
  void print_short_name(outputStream* st = tty); // prints as klassname::methodname; Exposed so field engineers can debug VM
#if INCLUDE_JVMTI
  void print_name(outputStream* st = tty); // prints as "virtual void foo(int)"; exposed for TraceRedefineClasses
#else
  void print_name(outputStream* st = tty) PRODUCT_RETURN; // prints as "virtual void foo(int)"
#endif

  // Helper routine used for method sorting
  static void sort_methods(Array<Method*>* methods, bool idempotent = false, bool set_idnums = true);

  // Deallocation function for redefine classes or if an error occurs
  void deallocate_contents(ClassLoaderData* loader_data);

  // Printing
#ifndef PRODUCT
  void print_on(outputStream* st) const;
#endif
  void print_value_on(outputStream* st) const;

  const char* internal_name() const { return "{method}"; }

  // Check for valid method pointer
  static bool has_method_vptr(const void* ptr);
  bool is_valid_method() const;

  // Verify
  void verify() { verify_on(tty); }
  void verify_on(outputStream* st);

 private:

  // Inlined elements
  address* native_function_addr() const        { assert(is_native(), "must be native"); return (address*) (this+1); }
  address* signature_handler_addr() const      { return native_function_addr() + 1; }
};


// Utility class for compressing line number tables

class CompressedLineNumberWriteStream: public CompressedWriteStream {
 private:
  int _bci;
  int _line;
 public:
  // Constructor
  CompressedLineNumberWriteStream(int initial_size) : CompressedWriteStream(initial_size), _bci(0), _line(0) {}
  CompressedLineNumberWriteStream(u_char* buffer, int initial_size) : CompressedWriteStream(buffer, initial_size), _bci(0), _line(0) {}

  // Write (bci, line number) pair to stream
  void write_pair_regular(int bci_delta, int line_delta);

  // Encodes the pair as deltas against the previously written pair; small
  // deltas (bci 5-bit, line 3-bit) pack into a single byte, anything else
  // falls back to the escape-prefixed regular encoding.
  inline void write_pair_inline(int bci, int line) {
    int bci_delta = bci - _bci;
    int line_delta = line - _line;
    _bci = bci;
    _line = line;
    // Skip (0,0) deltas - they do not add information and conflict with terminator.
    if (bci_delta == 0 && line_delta == 0) return;
    // Check if bci is 5-bit and line number 3-bit unsigned.
    if (((bci_delta & ~0x1F) == 0) && ((line_delta & ~0x7) == 0)) {
      // Compress into single byte.
      jubyte value = ((jubyte) bci_delta << 3) | (jubyte) line_delta;
      // Check that value doesn't match escape character.
      if (value != 0xFF) {
        write_byte(value);
        return;
      }
    }
    write_pair_regular(bci_delta, line_delta);
  }

// Windows AMD64 + Apr 2005 PSDK with /O2 generates bad code for write_pair.
// Disabling optimization doesn't work for methods in header files
// so we force it to call through the non-optimized version in the .cpp.
// It's gross, but it's the only way we can ensure that all callers are
// fixed.  _MSC_VER is defined by the windows compiler
#if defined(_M_AMD64) && _MSC_VER >= 1400
  void write_pair(int bci, int line);
#else
  void write_pair(int bci, int line) { write_pair_inline(bci, line); }
#endif

  // Write end-of-stream marker
  void write_terminator() { write_byte(0); }
};


// Utility class for decompressing line number tables

class CompressedLineNumberReadStream: public CompressedReadStream {
 private:
  int _bci;
  int _line;
 public:
  // Constructor
  CompressedLineNumberReadStream(u_char* buffer);
  // Read (bci, line number) pair from stream. Returns false at end-of-stream.
  bool read_pair();
  // Accessing bci and line number (after calling read_pair)
  int bci() const                               { return _bci; }
  int line() const                              { return _line; }
};


/// Fast Breakpoints.

// If this structure gets more complicated (because bpts get numerous),
// move it into its own header.

// There is presently no provision for concurrent access
// to breakpoint lists, which is only OK for JVMTI because
// breakpoints are written only at safepoints, and are read
// concurrently only outside of safepoints.

class BreakpointInfo : public CHeapObj<mtClass> {
  friend class VMStructs;
 private:
  Bytecodes::Code  _orig_bytecode; // bytecode replaced by the breakpoint opcode
  int              _bci;
  u2               _name_index;       // of method
  u2               _signature_index;  // of method
  BreakpointInfo*  _next;             // simple storage allocation

 public:
  BreakpointInfo(Method* m, int bci);

  // accessors
  Bytecodes::Code orig_bytecode()                     { return _orig_bytecode; }
  void        set_orig_bytecode(Bytecodes::Code code) { _orig_bytecode = code; }
  int         bci()                                   { return _bci; }

  BreakpointInfo*          next() const               { return _next; }
  void                 set_next(BreakpointInfo* n)    { _next = n; }

  // helps for searchers
  bool match(const Method* m, int bci) {
    return bci == _bci && match(m);
  }

  // Methods are matched by name/signature index rather than by pointer, so a
  // breakpoint survives across RedefineClasses' method replacement.
  bool match(const Method* m) {
    return _name_index == m->name_index() &&
      _signature_index == m->signature_index();
  }

  void set(Method* method);
  void clear(Method* method);
};

// Utility class for access exception handlers
// Thin, stack-allocated view over a Method's inlined exception table;
// degenerates to an empty (NULL, 0) table when the method has no handlers.
class ExceptionTable : public StackObj {
 private:
  ExceptionTableElement* _table;
  u2  _length;

 public:
  ExceptionTable(const Method* m) {
    if (m->has_exception_handler()) {
      _table = m->exception_table_start();
      _length = m->exception_table_length();
    } else {
      _table = NULL;
      _length = 0;
    }
  }

  int length() const {
    return _length;
  }

  u2 start_pc(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].start_pc;
  }

  void set_start_pc(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].start_pc = value;
  }

  u2 end_pc(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].end_pc;
  }

  void set_end_pc(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].end_pc = value;
  }

  u2 handler_pc(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].handler_pc;
  }

  void set_handler_pc(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].handler_pc = value;
  }

  u2 catch_type_index(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].catch_type_index;
  }

  void set_catch_type_index(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].catch_type_index = value;
  }
};

#endif // SHARE_VM_OOPS_METHODOOP_HPP