Path: blob/master/src/hotspot/share/code/compiledIC.cpp
/*
 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "code/codeBehaviours.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "code/vtableStubs.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/icache.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/events.hpp"


// Every time a compiled IC is changed or its type is being accessed,
// either the CompiledIC_lock must be set or we must be at a safe point.

CompiledICLocker::CompiledICLocker(CompiledMethod* method)
  : _method(method),
    _behaviour(CompiledICProtectionBehaviour::current()),
    _locked(_behaviour->lock(_method)) {
}

CompiledICLocker::~CompiledICLocker() {
  if (_locked) {
    _behaviour->unlock(_method);
  }
}

bool CompiledICLocker::is_safe(CompiledMethod* method) {
  return CompiledICProtectionBehaviour::current()->is_safe(method);
}

bool CompiledICLocker::is_safe(address code) {
  CodeBlob* cb = CodeCache::find_blob_unsafe(code);
  assert(cb != NULL && cb->is_compiled(), "must be compiled");
  CompiledMethod* cm = cb->as_compiled_method();
  return CompiledICProtectionBehaviour::current()->is_safe(cm);
}

//-----------------------------------------------------------------------------
// Low-level access to an inline cache. Private, since they might not be
// MT-safe to use.

void* CompiledIC::cached_value() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized(), "an optimized virtual call does not have a cached metadata");

  if (!is_in_transition_state()) {
    void* data = get_data();
    // If we let the metadata value here be initialized to zero...
    assert(data != NULL || Universe::non_oop_word() == NULL,
           "no raw nulls in CompiledIC metadatas, because of patching races");
    return (data == (void*)Universe::non_oop_word()) ? NULL : data;
  } else {
    return InlineCacheBuffer::cached_value_for((CompiledIC *)this);
  }
}
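
// Informal sketch of the invariant asserted above (derived from this file,
// not authoritative documentation): an "empty" cache slot is published as the
// non_oop_word() sentinel rather than a raw NULL, so a reader racing with a
// patcher can distinguish a cleared slot from a half-written one:
//
//   set_data((intptr_t)Universe::non_oop_word());  // publish "no metadata"
//   void* v = cached_value();                      // maps the sentinel back to NULL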

void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
  assert(entry_point != NULL, "must set legal entry point");
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
  assert(cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");

  assert(!is_icholder || is_icholder_entry(entry_point), "must be");

  // Don't use ic_destination for this test since that forwards
  // through ICBuffer instead of returning the actual current state of
  // the CompiledIC.
  if (is_icholder_entry(_call->destination())) {
    // When patching for the ICStub case the cached value isn't
    // overwritten until the ICStub is copied into the CompiledIC during
    // the next safepoint. Make sure that the CompiledICHolder* is
    // marked for release at this point since it won't be identifiable
    // once the entry point is overwritten.
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)get_data());
  }

  if (TraceCompiledIC) {
    tty->print(" ");
    print_compiled_ic();
    tty->print(" changing destination to " INTPTR_FORMAT, p2i(entry_point));
    if (!is_optimized()) {
      tty->print(" changing cached %s to " INTPTR_FORMAT, is_icholder ? "icholder" : "metadata", p2i((address)cache));
    }
    if (is_icstub) {
      tty->print(" (icstub)");
    }
    tty->cr();
  }

  {
    CodeBlob* cb = CodeCache::find_blob_unsafe(_call->instruction_address());
    assert(cb != NULL && cb->is_compiled(), "must be compiled");
    _call->set_destination_mt_safe(entry_point);
  }

  if (is_optimized() || is_icstub) {
    // Optimized call sites don't have a cache value and ICStub call
    // sites only change the entry point. Changing the value in that
    // case could lead to MT safety issues.
    assert(cache == NULL, "must be null");
    return;
  }

  if (cache == NULL) cache = Universe::non_oop_word();

  set_data((intptr_t)cache);
}


void CompiledIC::set_ic_destination(ICStub* stub) {
  internal_set_ic_destination(stub->code_begin(), true, NULL, false);
}


address CompiledIC::ic_destination() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  if (!is_in_transition_state()) {
    return _call->destination();
  } else {
    return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
  }
}


bool CompiledIC::is_in_transition_state() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  return InlineCacheBuffer::contains(_call->destination());
}


bool CompiledIC::is_icholder_call() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  return !_is_optimized && is_icholder_entry(ic_destination());
}

// Returns native address of 'call' instruction in inline-cache. Used by
// the InlineCacheBuffer when it needs to find the stub.
address CompiledIC::stub_address() const {
  assert(is_in_transition_state(), "should only be called when we are in a transition state");
  return _call->destination();
}

// Clears the IC stub if the compiled IC is in transition state
void CompiledIC::clear_ic_stub() {
  if (is_in_transition_state()) {
    ICStub* stub = ICStub_from_destination_address(stub_address());
    stub->clear();
  }
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

void CompiledIC::initialize_from_iter(RelocIterator* iter) {
  assert(iter->addr() == _call->instruction_address(), "must find ic_call");

  if (iter->type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter->virtual_call_reloc();
    _is_optimized = false;
    _value = _call->get_load_instruction(r);
  } else {
    assert(iter->type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

CompiledIC::CompiledIC(CompiledMethod* cm, NativeCall* call)
  : _method(cm)
{
  _call = _method->call_wrapper_at((address) call);
  address ic_call = _call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(cm != NULL, "must pass compiled method");
  assert(cm->contains(ic_call), "must be in compiled method");

  // Search for the ic_call at the given address.
  RelocIterator iter(cm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");

  initialize_from_iter(&iter);
}

CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code())
{
  _call = _method->call_wrapper_at(iter->addr());
  address ic_call = _call->instruction_address();

  CompiledMethod* nm = iter->code();
  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass compiled method");
  assert(nm->contains(ic_call), "must be in compiled method");

  initialize_from_iter(iter);
}
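
// Typical construction (an illustrative sketch): CompiledICs are materialized
// while walking an nmethod's relocations under a CompiledICLocker, roughly:
//
//   CompiledICLocker ml(cm);
//   RelocIterator iter(cm);
//   while (iter.next()) {
//     if (iter.type() == relocInfo::virtual_call_type ||
//         iter.type() == relocInfo::opt_virtual_call_type) {
//       CompiledIC ic(&iter);  // dispatches to initialize_from_iter() above
//     }
//   }
//
// (Callers normally go through the CompiledIC_at() helpers declared in
// compiledIC.hpp rather than constructing instances directly.)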
// This function may fail for two reasons: either due to running out of vtable
// stubs, or due to running out of IC stubs in an attempted transition to a
// transitional state. The needs_ic_stub_refill value will be set if the failure
// was due to running out of IC stubs, in which case the caller will refill IC
// stubs and retry.
bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
                                    bool& needs_ic_stub_refill, TRAPS) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
  assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");

  address entry;
  if (call_info->call_kind() == CallInfo::itable_call) {
    assert(bytecode == Bytecodes::_invokeinterface, "");
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index);
    if (entry == NULL) {
      return false;
    }
#ifdef ASSERT
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
    CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
                                                    call_info->resolved_klass(), false);
    holder->claim();
    if (!InlineCacheBuffer::create_transition_stub(this, holder, entry)) {
      delete holder;
      needs_ic_stub_refill = true;
      return false;
    }
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
    // Can be different than selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index);
    if (entry == NULL) {
      return false;
    }
    if (!InlineCacheBuffer::create_transition_stub(this, NULL, entry)) {
      needs_ic_stub_refill = true;
      return false;
    }
  }

  if (TraceICs) {
    ResourceMark rm;
    assert(call_info->selected_method() != NULL, "Unexpected null selected method");
    tty->print_cr("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                  p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
  }

  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_megamorphic(), "sanity check");
  return true;
}


// true if destination is megamorphic stub
bool CompiledIC::is_megamorphic() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized(), "an optimized call cannot be megamorphic");

  // Cannot rely on cached_value. It is either an interface or a method.
  return VtableStubs::entry_point(ic_destination()) != NULL;
}
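
// Caller-side contract, sketched (illustrative, not the exact runtime code):
// a false return with needs_ic_stub_refill set means the ICBuffer was
// exhausted, and the caller is expected to refill and retry, roughly:
//
//   bool needs_refill = false;
//   while (!ic->set_to_megamorphic(&call_info, bc, needs_refill, CHECK_false)) {
//     if (!needs_refill) break;              // out of vtable/itable stubs instead
//     InlineCacheBuffer::refill_ic_stubs();  // may safepoint to drain the buffer
//   }
//
// Compare the retry loops on the SharedRuntime IC-miss paths.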
bool CompiledIC::is_call_to_compiled() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");

  // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
  // method is guaranteed to still exist, since we only remove methods after all inline caches
  // have been cleaned up
  CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
  bool is_monomorphic = (cb != NULL && cb->is_compiled());
  // Check that the cached_value is a klass for non-optimized monomorphic calls
  // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
  // for calling directly to vep without using the inline cache (i.e., cached_value == NULL).
  // For JVMCI this occurs because CHA is only used to improve inlining so call sites which could be optimized
  // virtuals because there are no currently loaded subclasses of a type are left as virtual call sites.
#ifdef ASSERT
  CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
  bool is_c1_or_jvmci_method = caller->is_compiled_by_c1() || caller->is_compiled_by_jvmci();
  assert(is_c1_or_jvmci_method ||
         !is_monomorphic ||
         is_optimized() ||
         !caller->is_alive() ||
         (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
#endif // ASSERT
  return is_monomorphic;
}


bool CompiledIC::is_call_to_interpreted() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  // Call to interpreter if destination is either calling to a stub (if it
  // is optimized), or calling to an I2C blob
  bool is_call_to_interpreted = false;
  if (!is_optimized()) {
    // must use unsafe because the destination can be a zombie (and we're cleaning)
    // and the print_compiled_ic code wants to know if the site (in the non-zombie)
    // is to the interpreter.
    CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
    is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
    assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");
  } else {
    // Check if we are calling into our own codeblob (i.e., to a stub)
    address dest = ic_destination();
#ifdef ASSERT
    {
      _call->verify_resolve_call(dest);
    }
#endif /* ASSERT */
    is_call_to_interpreted = _call->is_call_to_interpreted(dest);
  }
  return is_call_to_interpreted;
}
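
// Informal summary of the call-site states these predicates distinguish,
// derived from the checks above (not an exhaustive specification):
//
//   destination                          state
//   -----------------------------------  --------------------------------------
//   resolve stub (SharedRuntime)         clean
//   compiled entry point / c2i adapter   monomorphic (compiled / interpreted)
//   vtable or itable stub (VtableStubs)  megamorphic
//   ICStub in the InlineCacheBuffer      in transition (is_in_transition_state)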
bool CompiledIC::set_to_clean(bool in_use) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  if (TraceInlineCacheClearing || TraceICs) {
    tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
    print();
  }

  address entry = _call->get_resolve_call_stub(is_optimized());

  // A zombie transition will always be safe, since the metadata has already been set to NULL, so
  // we only need to patch the destination
  bool safe_transition = _call->is_safe_for_patching() || !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();

  if (safe_transition) {
    // Kill any leftover stub we might have too
    clear_ic_stub();
    if (is_optimized()) {
      set_ic_destination(entry);
    } else {
      set_ic_destination_and_value(entry, (void*)NULL);
    }
  } else {
    // Unsafe transition - create stub.
    if (!InlineCacheBuffer::create_transition_stub(this, NULL, entry)) {
      return false;
    }
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_clean(), "sanity check");
  return true;
}

bool CompiledIC::is_clean() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  bool is_clean = false;
  address dest = ic_destination();
  is_clean = dest == _call->get_resolve_call_stub(is_optimized());
  assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check");
  return is_clean;
}
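
// Why set_to_clean() has two paths (a sketch of the reasoning): updating the
// destination and the cached value is two separate stores, so when the site
// may be executing concurrently (!safe_transition) the new state is staged in
// an ICStub and committed by the InlineCacheBuffer at the next safepoint;
// only that staged path can fail and request a refill. When the site cannot
// be mid-execution (at a safepoint, not in_use, or optimized with no value to
// update), we simply patch in place.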
bool CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if the cache entry gets invalid - we just clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache
  //
  // The above is no longer true. SharedRuntime::fixup_callers_callsite will change optimized
  // callsites. In addition ic_miss code will update a site to monomorphic if it determines
  // that a monomorphic call to the interpreter can now be monomorphic to compiled code.
  //
  // In both of these cases the only thing being modified is the jump/call target and these
  // transitions are mt_safe

  Thread *thread = Thread::current();
  if (info.to_interpreter()) {
    // Call to interpreter
    if (info.is_optimized() && is_optimized()) {
      assert(is_clean(), "unsafe IC path");
      // the call analysis (callee structure) specifies that the call is optimized
      // (either because of CHA or the static target is final)
      // At code generation time, this call has been emitted as static call
      // Call via stub
      assert(info.cached_metadata() != NULL && info.cached_metadata()->is_method(), "sanity check");
      methodHandle method(thread, (Method*)info.cached_metadata());
      _call->set_to_interpreted(method, info);

      if (TraceICs) {
        ResourceMark rm(thread);
        tty->print_cr("IC@" INTPTR_FORMAT ": monomorphic to interpreter: %s",
                      p2i(instruction_address()),
                      method->print_value_string());
      }
    } else {
      // Call via method-klass-holder
      CompiledICHolder* holder = info.claim_cached_icholder();
      if (!InlineCacheBuffer::create_transition_stub(this, holder, info.entry())) {
        delete holder;
        return false;
      }
      if (TraceICs) {
        ResourceMark rm(thread);
        tty->print_cr("IC@" INTPTR_FORMAT ": monomorphic to interpreter via icholder ", p2i(instruction_address()));
      }
    }
  } else {
    // Call to compiled code
    bool static_bound = info.is_optimized() || (info.cached_metadata() == NULL);
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(info.entry());
    assert(cb != NULL && cb->is_compiled(), "must be compiled!");
#endif /* ASSERT */

    // This is MT safe if we come from a clean-cache and go through a
    // non-verified entry point
    bool safe = SafepointSynchronize::is_at_safepoint() ||
                (!is_in_transition_state() && (info.is_optimized() || static_bound || is_clean()));

    if (!safe) {
      if (!InlineCacheBuffer::create_transition_stub(this, info.cached_metadata(), info.entry())) {
        return false;
      }
    } else {
      if (is_optimized()) {
        set_ic_destination(info.entry());
      } else {
        set_ic_destination_and_value(info.entry(), info.cached_metadata());
      }
    }

    if (TraceICs) {
      ResourceMark rm(thread);
      assert(info.cached_metadata() == NULL || info.cached_metadata()->is_klass(), "must be");
      tty->print_cr("IC@" INTPTR_FORMAT ": monomorphic to compiled (rcvr klass = %s) %s",
                    p2i(instruction_address()),
                    (info.cached_metadata() != NULL) ? ((Klass*)info.cached_metadata())->print_value_string() : "NULL",
                    (safe) ? "" : " via stub");
    }
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
  return true;
}
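
// Call-flow sketch (illustrative): this transition is normally driven from
// the IC-miss machinery, roughly:
//
//   1. Compiled code calls through a clean IC, reaching the resolve stub,
//      which traps into the VM.
//   2. The call is resolved and a CompiledICInfo is filled in by
//      compute_monomorphic_entry() below.
//   3. set_to_monomorphic(info) patches the site, going through an ICStub
//      whenever the "safe" test above fails.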

// is_optimized: Compiler has generated an optimized call (i.e. fixed, no inline cache)
// static_bound: The call can be static bound. If it isn't also optimized, the property
// wasn't provable at time of compilation. An optimized call will have any necessary
// null check, while a static_bound won't. A static_bound (but not optimized) must
// therefore use the unverified entry point.
void CompiledIC::compute_monomorphic_entry(const methodHandle& method,
                                           Klass* receiver_klass,
                                           bool is_optimized,
                                           bool static_bound,
                                           bool caller_is_nmethod,
                                           CompiledICInfo& info,
                                           TRAPS) {
  CompiledMethod* method_code = method->code();

  address entry = NULL;
  if (method_code != NULL && method_code->is_in_use()) {
    assert(method_code->is_compiled(), "must be compiled");
    // Call to compiled code
    //
    // Note: the following problem exists with Compiler1:
    // - at compile time we may or may not know if the destination is final
    // - if we know that the destination is final (is_optimized), we will emit
    //   an optimized virtual call (no inline cache), and need a Method* to make
    //   a call to the interpreter
    // - if we don't know if the destination is final, we emit a standard
    //   virtual call, and use CompiledICHolder to call interpreted code
    //   (no static call stub has been generated)
    // - In the case that we here notice the call is static bound we
    //   convert the call into what looks to be an optimized virtual call,
    //   but we must use the unverified entry point (since there will be no
    //   null check on a call when the target isn't loaded).
    //   This causes problems when verifying the IC because
    //   it looks vanilla but is optimized. Code in is_call_to_interpreted
    //   is aware of this and weakens its asserts.
    if (is_optimized) {
      entry = method_code->verified_entry_point();
    } else {
      entry = method_code->entry_point();
    }
  }
  if (entry != NULL) {
    // Call to near compiled code.
    info.set_compiled_entry(entry, is_optimized ? NULL : receiver_klass, is_optimized);
  } else {
    if (is_optimized) {
      // Use stub entry
      info.set_interpreter_entry(method()->get_c2i_entry(), method());
    } else {
      // Use icholder entry
      assert(method_code == NULL || method_code->is_compiled(), "must be compiled");
      CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
      info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
    }
  }
  assert(info.is_optimized() == is_optimized, "must agree");
}
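
// Entry selection above, summarized informally:
//
//   callee state        is_optimized              !is_optimized
//   ------------------  ------------------------  ---------------------------------
//   compiled, in use    verified_entry_point()    entry_point() + receiver klass
//   otherwise           c2i entry + Method*       c2i unverified + CompiledICHolder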

bool CompiledIC::is_icholder_entry(address entry) {
  CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
  if (cb != NULL && cb->is_adapter_blob()) {
    return true;
  }
  // itable stubs also use CompiledICHolder
  if (cb != NULL && cb->is_vtable_blob()) {
    VtableStub* s = VtableStubs::entry_point(entry);
    return (s != NULL) && s->is_itable_stub();
  }

  return false;
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
  // This call site might have become stale so inspect it carefully.
  address dest = cm->call_wrapper_at(call_site->addr())->destination();
  return is_icholder_entry(dest);
}

// ----------------------------------------------------------------------------

bool CompiledStaticCall::set_to_clean(bool in_use) {
  // in_use is unused but needed to match template function in CompiledMethod
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Reset call site
  set_destination_mt_safe(resolve_call_stub());

  // Do not reset stub here: It is too expensive to call find_stub.
  // Instead, rely on caller (nmethod::clear_inline_caches) to clear
  // both the call and its stub.
  return true;
}

bool CompiledStaticCall::is_clean() const {
  return destination() == resolve_call_stub();
}

bool CompiledStaticCall::is_call_to_compiled() const {
  return CodeCache::contains(destination());
}

bool CompiledDirectStaticCall::is_call_to_interpreted() const {
  // It is a call to interpreted code if it calls to a stub. Hence, the destination
  // must be in the stub part of the nmethod that contains the call
  CompiledMethod* cm = CodeCache::find_compiled(instruction_address());
  return cm->stub_contains(destination());
}

void CompiledStaticCall::set_to_compiled(address entry) {
  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("%s@" INTPTR_FORMAT ": set_to_compiled " INTPTR_FORMAT,
                  name(),
                  p2i(instruction_address()),
                  p2i(entry));
  }
  // Call to compiled code
  assert(CodeCache::contains(entry), "wrong entry point");
  set_destination_mt_safe(entry);
}

void CompiledStaticCall::set(const StaticCallInfo& info) {
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if the cache entry gets invalid - we just clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache
  assert(is_clean(), "do not update a call entry - use clean");

  if (info._to_interpreter) {
    // Call to interpreted code
    set_to_interpreted(info.callee(), info.entry());
  } else {
    set_to_compiled(info.entry());
  }
}

// Compute settings for a CompiledStaticCall. Since we might have to set
// the stub when calling to the interpreter, we need to return arguments.
void CompiledStaticCall::compute_entry(const methodHandle& m, bool caller_is_nmethod, StaticCallInfo& info) {
  CompiledMethod* m_code = m->code();
  info._callee = m;
  if (m_code != NULL && m_code->is_in_use()) {
    info._to_interpreter = false;
    info._entry = m_code->verified_entry_point();
  } else {
    // Callee is interpreted code. In any case entering the interpreter
    // puts a converter-frame on the stack to save arguments.
    assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
    info._to_interpreter = true;
    info._entry = m()->get_c2i_entry();
  }
}
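
// Static-call patching flow, sketched (illustrative): resolution computes a
// StaticCallInfo via compute_entry() above and then applies it, roughly:
//
//   StaticCallInfo info;
//   CompiledStaticCall::compute_entry(callee, caller_is_nmethod, info);
//   if (csc->is_clean()) csc->set(info);   // csc: a CompiledStaticCall*
//
// The interpreted case (set_to_interpreted(), platform-specific) also patches
// the static stub that find_stub_for() below locates from relocation info.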

address CompiledDirectStaticCall::find_stub_for(address instruction) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)NULL, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub();
        case relocInfo::poll_type:
        case relocInfo::poll_return_type: // A safepoint can't overlap a call.
        default:
          ShouldNotReachHere();
      }
    }
  }
  return NULL;
}

address CompiledDirectStaticCall::find_stub() {
  return CompiledDirectStaticCall::find_stub_for(instruction_address());
}

address CompiledDirectStaticCall::resolve_call_stub() const {
  return SharedRuntime::get_resolve_static_call_stub();
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledIC::verify() {
  _call->verify();
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
         || is_optimized() || is_megamorphic(), "sanity check");
}

void CompiledIC::print() {
  print_compiled_ic();
  tty->cr();
}

void CompiledIC::print_compiled_ic() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
             p2i(instruction_address()), is_call_to_interpreted() ? "interpreted " : "", p2i(ic_destination()), p2i(is_optimized() ? NULL : cached_value()));
}

void CompiledDirectStaticCall::print() {
  tty->print("static call at " INTPTR_FORMAT " -> ", p2i(instruction_address()));
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}

void CompiledDirectStaticCall::verify_mt_safe(const methodHandle& callee, address entry,
                                              NativeMovConstReg* method_holder,
                                              NativeJump* jump) {
  // A generated lambda form might be deleted from the LambdaForm
  // cache in MethodTypeForm. If a JIT-compiled LambdaForm method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == NULL || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(), // may be a race patching a deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry
         || old_method == NULL || !old_method->method_holder()->is_loader_alive() // may have a race due to class unloading.
         || old_method->is_old(), // may be a race patching a deoptimized nmethod due to redefinition.
         "b) MT-unsafe modification of inline cache");
}
#endif // !PRODUCT