Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/code/compiledIC.hpp
/*
 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_CODE_COMPILEDIC_HPP
#define SHARE_VM_CODE_COMPILEDIC_HPP

#include "interpreter/linkResolver.hpp"
#include "oops/compiledICHolder.hpp"
#ifdef TARGET_ARCH_x86
# include "nativeInst_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch32
# include "nativeInst_aarch32.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "nativeInst_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "nativeInst_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "nativeInst_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "nativeInst_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "nativeInst_ppc.hpp"
#endif

//-----------------------------------------------------------------------------
// The CompiledIC represents a compiled inline cache.
//
// In order to make patching of the inline cache MT-safe, we only allow the following
// transitions (when not at a safepoint):
//
//
//          [1] --<--  Clean  -->--- [1]
//         /           (null)         \
//        /                            \     /-<-\
//       /        [2]                   \   /     \
//    Interpreted  --------->  Monomorphic |  [3]
//   (CompiledICHolder*)        (Klass*)   |
//         \                  /       \    /
//      [4] \                / [4]     \->-/
//           \->-  Megamorphic  -<-/
//             (CompiledICHolder*)
//
// The text in parentheses () refers to the value of the inline cache receiver (mov instruction)
//
// The numbers in square brackets refer to the kind of transition:
// [1]: Initial fixup. Receiver is found from debug information
// [2]: Compilation of a method
// [3]: Recompilation of a method (note: only entry is changed. The Klass* must stay the same)
// [4]: Inline cache miss. We go directly to megamorphic call.
//
// The class automatically inserts transition stubs (using the InlineCacheBuffer) when an MT-unsafe
// transition is made to a stub.
//
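
// The sketch below maps the diagram states onto the state queries that
// CompiledIC declares later in this file. It is illustrative only and is
// excluded from compilation with #if 0: the helper name is made up, and it
// assumes VM-internal context where the caller holds CompiledIC_lock (or is
// at a safepoint), as the patching notes on CompiledIC require.
#if 0
static void print_ic_states(nmethod* nm) {
  ResourceMark rm;                       // CompiledIC* below is a ResourceObj
  RelocIterator iter(nm);
  while (iter.next()) {
    // Only virtual call sites carry inline caches.
    if (iter.type() != relocInfo::virtual_call_type &&
        iter.type() != relocInfo::opt_virtual_call_type) {
      continue;
    }
    CompiledIC* ic = CompiledIC_at(&iter);
    const char* state = ic->is_clean()               ? "Clean (null)"
                      : ic->is_megamorphic()         ? "Megamorphic (CompiledICHolder*)"
                      : ic->is_call_to_interpreted() ? "Interpreted (CompiledICHolder*)"
                      : ic->is_call_to_compiled()    ? "Monomorphic (Klass*)"
                      :                                "in transition";
    tty->print_cr("IC @ " PTR_FORMAT ": %s", p2i(ic->instruction_address()), state);
  }
}
#endif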

class CompiledIC;
class ICStub;

class CompiledICInfo : public StackObj {
 private:
  address _entry;            // entry point for call
  void*   _cached_value;     // Value of cached_value (either in stub or inline cache)
  bool    _is_icholder;      // Is the cached value a CompiledICHolder*
  bool    _is_optimized;     // it is an optimized virtual call (i.e., can be statically bound)
  bool    _to_interpreter;   // Call is to interpreter
  bool    _release_icholder;
 public:
  address entry() const { return _entry; }
  Metadata* cached_metadata() const { assert(!_is_icholder, ""); return (Metadata*)_cached_value; }
  CompiledICHolder* claim_cached_icholder() {
    assert(_is_icholder, "");
    assert(_cached_value != NULL, "must be non-NULL");
    _release_icholder = false;
    CompiledICHolder* icholder = (CompiledICHolder*)_cached_value;
    icholder->claim();
    return icholder;
  }
  bool is_optimized() const   { return _is_optimized; }
  bool to_interpreter() const { return _to_interpreter; }

  void set_compiled_entry(address entry, Klass* klass, bool is_optimized) {
    _entry = entry;
    _cached_value = (void*)klass;
    _to_interpreter = false;
    _is_icholder = false;
    _is_optimized = is_optimized;
    _release_icholder = false;
  }

  void set_interpreter_entry(address entry, Method* method) {
    _entry = entry;
    _cached_value = (void*)method;
    _to_interpreter = true;
    _is_icholder = false;
    _is_optimized = true;
    _release_icholder = false;
  }

  void set_icholder_entry(address entry, CompiledICHolder* icholder) {
    _entry = entry;
    _cached_value = (void*)icholder;
    _to_interpreter = true;
    _is_icholder = true;
    _is_optimized = false;
    _release_icholder = true;
  }

  CompiledICInfo(): _entry(NULL), _cached_value(NULL), _is_icholder(false),
                    _to_interpreter(false), _is_optimized(false), _release_icholder(false) {
  }
  ~CompiledICInfo() {
    // In rare cases the info is computed but not used, so release any
    // CompiledICHolder* that was created
    if (_release_icholder) {
      assert(_is_icholder, "must be");
      CompiledICHolder* icholder = (CompiledICHolder*)_cached_value;
      icholder->claim();
      delete icholder;
    }
  }
};
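
// A usage sketch for CompiledICInfo, excluded from compilation with #if 0.
// The function name and all arguments are placeholders; the point is which
// setter encodes which kind of callee (each setter re-populates the whole
// struct), and that an icholder entry is deleted by the destructor unless
// claim_cached_icholder() transfers it to the caller.
#if 0
static CompiledICHolder* fill_ic_info(address compiled_entry, Klass* receiver_klass,
                                      address interp_entry, Method* callee,
                                      CompiledICHolder* holder) {
  CompiledICInfo info;

  // Monomorphic call into compiled code: the cached value is the receiver Klass*.
  info.set_compiled_entry(compiled_entry, receiver_klass, /*is_optimized*/ false);

  // Statically bound (optimized) call into the interpreter: the cached value is the Method*.
  info.set_interpreter_entry(interp_entry, callee);

  // Non-optimized call into the interpreter: the cached value is a CompiledICHolder*.
  info.set_icholder_entry(interp_entry, holder);

  // Take ownership, so ~CompiledICInfo() will not delete the holder.
  return info.claim_cached_icholder();
}
#endif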

class CompiledIC: public ResourceObj {
  friend class InlineCacheBuffer;
  friend class ICStub;

 private:
  NativeCall*        _ic_call;       // the call instruction
  NativeMovConstReg* _value;         // patchable value cell for this IC
  bool               _is_optimized;  // an optimized virtual call (i.e., no compiled IC)

  CompiledIC(nmethod* nm, NativeCall* ic_call);
  CompiledIC(RelocIterator* iter);

  void initialize_from_iter(RelocIterator* iter);

  static bool is_icholder_entry(address entry);

  // Low-level inline-cache manipulation. Cannot be accessed directly, since it might not be MT-safe
  // to change an inline cache. These change the underlying inline cache directly. They *never* make
  // changes to a transition stub.
  void internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder);
  void set_ic_destination(ICStub* stub);
  void set_ic_destination(address entry_point) {
    assert(_is_optimized, "use set_ic_destination_and_value instead");
    internal_set_ic_destination(entry_point, false, NULL, false);
  }
  // This is only for use by ICStubs, where the type of the value isn't known
  void set_ic_destination_and_value(address entry_point, void* value) {
    internal_set_ic_destination(entry_point, false, value, is_icholder_entry(entry_point));
  }
  void set_ic_destination_and_value(address entry_point, Metadata* value) {
    internal_set_ic_destination(entry_point, false, value, false);
  }
  void set_ic_destination_and_value(address entry_point, CompiledICHolder* value) {
    internal_set_ic_destination(entry_point, false, value, true);
  }

  // Reads the location of the transition stub. This will fail with an assertion if no transition stub
  // is associated with the inline cache.
  address stub_address() const;
  bool is_in_transition_state() const;  // Use InlineCacheBuffer

 public:
  // conversion (machine PC to CompiledIC*)
  friend CompiledIC* CompiledIC_before(nmethod* nm, address return_addr);
  friend CompiledIC* CompiledIC_at(nmethod* nm, address call_site);
  friend CompiledIC* CompiledIC_at(Relocation* call_site);
  friend CompiledIC* CompiledIC_at(RelocIterator* reloc_iter);

  // This is used to release CompiledICHolder*s from nmethods that
  // are about to be freed. The call site might contain other stale
  // values of other kinds, so it must be careful.
  static void cleanup_call_site(virtual_call_Relocation* call_site);
  static bool is_icholder_call_site(virtual_call_Relocation* call_site);

  // Return the cached_metadata/destination associated with this inline cache. If the cache currently points
  // to a transition stub, it will read the values from the transition stub.
  void* cached_value() const;
  CompiledICHolder* cached_icholder() const {
    assert(is_icholder_call(), "must be");
    return (CompiledICHolder*) cached_value();
  }
  Metadata* cached_metadata() const {
    assert(!is_icholder_call(), "must be");
    return (Metadata*) cached_value();
  }

  address ic_destination() const;

  bool is_optimized() const { return _is_optimized; }

  // State
  bool is_clean() const;
  bool is_megamorphic() const;
  bool is_call_to_compiled() const;
  bool is_call_to_interpreted() const;

  bool is_icholder_call() const;

  address end_of_call() { return _ic_call->return_address(); }

  // MT-safe patching of inline caches. Note: it is only safe to call is_xxx when holding the
  // CompiledIC_lock, so you are guaranteed that no patching takes place. The same goes for verify.
  //
  // Note: We do not provide any direct access to the stub code, to prevent parts of the code
  // from manipulating the inline cache in MT-unsafe ways.
  //
  // They all take a TRAPS argument, since they can cause a GC if the inline-cache buffer is full.
  // (A usage sketch follows this class.)
  //
  void set_to_clean(bool in_use = true);
  void set_to_monomorphic(CompiledICInfo& info);
  void clear_ic_stub();

  // Returns true if successful and false otherwise. The call can fail if memory
  // allocation in the code cache fails.
  bool set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS);

  static void compute_monomorphic_entry(methodHandle method, KlassHandle receiver_klass,
                                        bool is_optimized, bool static_bound, CompiledICInfo& info, TRAPS);

  // Location
  address instruction_address() const { return _ic_call->instruction_address(); }

  // Misc
  void print()             PRODUCT_RETURN;
  void print_compiled_ic() PRODUCT_RETURN;
  void verify()            PRODUCT_RETURN;
};
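
// A sketch of the compute-then-patch sequence behind transitions [1]/[2] in
// the diagram above, in the style of the runtime's call resolution. Excluded
// from compilation with #if 0; the helper name is made up, and it assumes the
// caller holds CompiledIC_lock and already has the callee and receiver klass.
#if 0
static void make_monomorphic(CompiledIC* ic, methodHandle callee,
                             KlassHandle receiver_klass, TRAPS) {
  assert(CompiledIC_lock->is_locked(), "mt-safety");
  if (ic->is_clean()) {
    CompiledICInfo info;
    // Step 1: compute entry point and cached value for this callee/receiver pair.
    CompiledIC::compute_monomorphic_entry(callee, receiver_klass,
                                          ic->is_optimized(), /*static_bound*/ false,
                                          info, CHECK);
    // Step 2: patch the call site.
    ic->set_to_monomorphic(info);
  }
}
#endif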

inline CompiledIC* CompiledIC_before(nmethod* nm, address return_addr) {
  CompiledIC* c_ic = new CompiledIC(nm, nativeCall_before(return_addr));
  c_ic->verify();
  return c_ic;
}

inline CompiledIC* CompiledIC_at(nmethod* nm, address call_site) {
  CompiledIC* c_ic = new CompiledIC(nm, nativeCall_at(call_site));
  c_ic->verify();
  return c_ic;
}

inline CompiledIC* CompiledIC_at(Relocation* call_site) {
  assert(call_site->type() == relocInfo::virtual_call_type ||
         call_site->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
  CompiledIC* c_ic = new CompiledIC(call_site->code(), nativeCall_at(call_site->addr()));
  c_ic->verify();
  return c_ic;
}

inline CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
  assert(reloc_iter->type() == relocInfo::virtual_call_type ||
         reloc_iter->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
  CompiledIC* c_ic = new CompiledIC(reloc_iter);
  c_ic->verify();
  return c_ic;
}

//-----------------------------------------------------------------------------
// The CompiledStaticCall represents a call to a static method in compiled code.
//
// The transition diagram of a static call site is somewhat simpler than for an inline cache:
//
//
//           -----<----- Clean ----->-----
//          /                             \
//         /                               \
//    compiled code <------------> interpreted code
//
//  Clean:            Calls directly to runtime method for fixup
//  Compiled code:    Calls directly to compiled code
//  Interpreted code: Calls to stub that sets the Method* reference
//
//
class CompiledStaticCall;

class StaticCallInfo {
 private:
  address      _entry;           // Entrypoint
  methodHandle _callee;          // Callee (used when calling interpreter)
  bool         _to_interpreter;  // call to interpreted method (otherwise compiled)

  friend class CompiledStaticCall;
 public:
  address      entry() const  { return _entry;  }
  methodHandle callee() const { return _callee; }
};


class CompiledStaticCall: public NativeCall {
  friend class CompiledIC;

  // Also used by CompiledIC
  void set_to_interpreted(methodHandle callee, address entry);
  bool is_optimized_virtual();

 public:
  friend CompiledStaticCall* compiledStaticCall_before(address return_addr);
  friend CompiledStaticCall* compiledStaticCall_at(address native_call);
  friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);

  // Code
#if defined(AARCH64) && !defined(ZERO)
  static address emit_to_interp_stub(CodeBuffer &cbuf, address mark);
#else
  static address emit_to_interp_stub(CodeBuffer &cbuf);
#endif
  static int to_interp_stub_size();
  static int reloc_to_interp_stub();

  // State
  bool is_clean() const;
  bool is_call_to_compiled() const;
  bool is_call_to_interpreted() const;

  // Clean static call (will force resolving on next use)
  void set_to_clean();

  // Set state. The entry must be the same as computed by compute_entry.
  // Computation and setting are split up, since the actions are separate during
  // an OptoRuntime::resolve_xxx. (A usage sketch follows this class.)
  void set(const StaticCallInfo& info);

  // Compute entry point given a method
  static void compute_entry(methodHandle m, StaticCallInfo& info);

  // Stub support
  address find_stub();
  static void set_stub_to_clean(static_stub_Relocation* static_stub);

  // Misc.
  void print()  PRODUCT_RETURN;
  void verify() PRODUCT_RETURN;
};
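
// A sketch of the compute_entry/set split noted above, excluded from
// compilation with #if 0. The helper name is made up; it assumes the same
// CompiledIC_lock discipline that applies to inline-cache patching.
#if 0
static void resolve_static_call_site(CompiledStaticCall* csc, methodHandle callee) {
  assert(CompiledIC_lock->is_locked(), "mt-safety");
  if (csc->is_clean()) {
    StaticCallInfo info;
    CompiledStaticCall::compute_entry(callee, info);  // compute the destination...
    csc->set(info);                                   // ...then patch the call site
  }
  // csc->set_to_clean() would force re-resolution on the next call.
}
#endif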

inline CompiledStaticCall* compiledStaticCall_before(address return_addr) {
  CompiledStaticCall* st = (CompiledStaticCall*)nativeCall_before(return_addr);
  st->verify();
  return st;
}

inline CompiledStaticCall* compiledStaticCall_at(address native_call) {
  CompiledStaticCall* st = (CompiledStaticCall*)native_call;
  st->verify();
  return st;
}

inline CompiledStaticCall* compiledStaticCall_at(Relocation* call_site) {
  return compiledStaticCall_at(call_site->addr());
}

#endif // SHARE_VM_CODE_COMPILEDIC_HPP