Path: src/hotspot/share/opto/callnode.hpp
/*
 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CALLNODE_HPP
#define SHARE_OPTO_CALLNODE_HPP

#include "opto/connode.hpp"
#include "opto/mulnode.hpp"
#include "opto/multnode.hpp"
#include "opto/opcodes.hpp"
#include "opto/phaseX.hpp"
#include "opto/replacednodes.hpp"
#include "opto/type.hpp"
#include "utilities/growableArray.hpp"

// Portions of code courtesy of Clifford Click

// Optimization - Graph Style

class NamedCounter;
class MultiNode;
class SafePointNode;
class CallNode;
class CallJavaNode;
class CallStaticJavaNode;
class CallDynamicJavaNode;
class CallRuntimeNode;
class CallLeafNode;
class CallLeafNoFPNode;
class CallLeafVectorNode;
class CallNativeNode;
class AllocateNode;
class AllocateArrayNode;
class AbstractLockNode;
class LockNode;
class UnlockNode;
class FastLockNode;

//------------------------------StartNode--------------------------------------
// The method start node
class StartNode : public MultiNode {
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  const TypeTuple *_domain;
  StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) {
    init_class_id(Class_Start);
    init_req(0,this);
    init_req(1,root);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; };
  virtual const Type *bottom_type() const;
  virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const;
  virtual const RegMask &in_RegMask(uint) const;
  virtual Node *match( const ProjNode *proj, const Matcher *m );
  virtual uint ideal_reg() const { return 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------StartOSRNode-----------------------------------
// The method start node for on stack replacement code
class StartOSRNode : public StartNode {
public:
  StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {}
  virtual int Opcode() const;
  static const TypeTuple *osr_domain();
};


//------------------------------ParmNode---------------------------------------
// Incoming parameters
class ParmNode : public ProjNode {
  static const char * const names[TypeFunc::Parms+1];
public:
  ParmNode( StartNode *src, uint con ) : ProjNode(src,con) {
    init_class_id(Class_Parm);
  }
  virtual int Opcode() const;
  virtual bool is_CFG() const { return (_con == TypeFunc::Control); }
  virtual uint ideal_reg() const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
  virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
#endif
};


//------------------------------ReturnNode-------------------------------------
// Return from subroutine node
class ReturnNode : public Node {
public:
  ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr );
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint ideal_reg() const { return NotAMachineReg; }
  virtual uint match_edge(uint idx) const;
#ifndef PRODUCT
  virtual void dump_req(outputStream *st = tty) const;
#endif
};


//------------------------------RethrowNode------------------------------------
// Rethrow of exception at call site. Ends a procedure before rethrowing;
// ends the current basic block like a ReturnNode. Restores registers and
// unwinds stack. Rethrow happens in the caller's method.
class RethrowNode : public Node {
public:
  RethrowNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *ret_adr, Node *exception );
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint match_edge(uint idx) const;
  virtual uint ideal_reg() const { return NotAMachineReg; }
#ifndef PRODUCT
  virtual void dump_req(outputStream *st = tty) const;
#endif
};


//------------------------------TailCallNode-----------------------------------
// Pop stack frame and jump indirect
class TailCallNode : public ReturnNode {
public:
  TailCallNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr, Node *target, Node *moop )
    : ReturnNode( TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, retadr ) {
    init_req(TypeFunc::Parms, target);
    init_req(TypeFunc::Parms+1, moop);
  }

  virtual int Opcode() const;
  virtual uint match_edge(uint idx) const;
};

//------------------------------TailJumpNode-----------------------------------
// Pop stack frame and jump indirect
class TailJumpNode : public ReturnNode {
public:
  TailJumpNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *target, Node *ex_oop)
    : ReturnNode(TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, Compile::current()->top()) {
    init_req(TypeFunc::Parms, target);
    init_req(TypeFunc::Parms+1, ex_oop);
  }

  virtual int Opcode() const;
  virtual uint match_edge(uint idx) const;
};

//-------------------------------JVMState-------------------------------------
// A linked list of JVMState nodes captures the whole interpreter state,
// plus GC roots, for all active calls at some call site in this compilation
// unit. (If there is no inlining, then the list has exactly one link.)
// This provides a way to map the optimized program back into the interpreter,
// or to let the GC mark the stack.
class JVMState : public ResourceObj {
  friend class VMStructs;
public:
  typedef enum {
    Reexecute_Undefined = -1, // not defined -- will be translated into false later
    Reexecute_False     =  0, // false -- do not reexecute
    Reexecute_True      =  1  // true -- reexecute the bytecode
  } ReexecuteState; // Reexecute State

private:
  JVMState*      _caller;    // List pointer for forming scope chains
  uint           _depth;     // One more than caller depth, or one.
  uint           _locoff;    // Offset to locals in input edge mapping
  uint           _stkoff;    // Offset to stack in input edge mapping
  uint           _monoff;    // Offset to monitors in input edge mapping
  uint           _scloff;    // Offset to fields of scalar objs in input edge mapping
  uint           _endoff;    // Offset to end of input edge mapping
  uint           _sp;        // Java Expression Stack Pointer for this state
  int            _bci;       // Byte Code Index of this JVM point
  ReexecuteState _reexecute; // Whether this bytecode needs to be re-executed
  ciMethod*      _method;    // Method Pointer
  SafePointNode* _map;       // Map node associated with this scope
public:
  friend class Compile;
  friend class PreserveReexecuteState;

  // Because JVMState objects live over the entire lifetime of the
  // Compile object, they are allocated into the comp_arena, which
  // does not get resource marked or reset during the compile process
  void *operator new( size_t x, Compile* C ) throw() { return C->comp_arena()->Amalloc(x); }
  void operator delete( void * ) { } // fast deallocation

  // Create a new JVMState, ready for abstract interpretation.
  JVMState(ciMethod* method, JVMState* caller);
  JVMState(int stack_size); // root state; has a null method

  // Access functions for the JVM
  //    ... --|--- loc ---|--- stk ---|--- arg ---|--- mon ---|--- scl ---|
  //          \ locoff      \ stkoff    \ argoff    \ monoff    \ scloff    \ endoff
  uint locoff() const { return _locoff; }
  uint stkoff() const { return _stkoff; }
  uint argoff() const { return _stkoff + _sp; }
  uint monoff() const { return _monoff; }
  uint scloff() const { return _scloff; }
  uint endoff() const { return _endoff; }
  uint oopoff() const { return debug_end(); }

  int loc_size() const { return stkoff() - locoff(); }
  int stk_size() const { return monoff() - stkoff(); }
  int mon_size() const { return scloff() - monoff(); }
  int scl_size() const { return endoff() - scloff(); }

  bool is_loc(uint i) const { return locoff() <= i && i < stkoff(); }
  bool is_stk(uint i) const { return stkoff() <= i && i < monoff(); }
  bool is_mon(uint i) const { return monoff() <= i && i < scloff(); }
  bool is_scl(uint i) const { return scloff() <= i && i < endoff(); }

  uint sp()  const { return _sp; }
  int  bci() const { return _bci; }
  bool should_reexecute() const { return _reexecute == Reexecute_True; }
  bool is_reexecute_undefined() const { return _reexecute == Reexecute_Undefined; }
  bool has_method() const { return _method != NULL; }
  ciMethod* method() const { assert(has_method(), ""); return _method; }
  JVMState* caller() const { return _caller; }
  SafePointNode* map() const { return _map; }
  uint depth() const { return _depth; }
  uint debug_start() const; // returns locoff of root caller
  uint debug_end() const;   // returns endoff of self
  uint debug_size() const {
    return loc_size() + sp() + mon_size() + scl_size();
  }
  uint debug_depth() const; // returns sum of debug_size values at all depths

  // Returns the JVM state at the desired depth (1 == root).
  JVMState* of_depth(int d) const;

  // Tells if two JVM states have the same call chain (depth, methods, & bcis).
  bool same_calls_as(const JVMState* that) const;

  // Monitors (monitors are stored as (boxNode, objNode) pairs
  enum { logMonitorEdges = 1 };
  int nof_monitors()              const { return mon_size() >> logMonitorEdges; }
  int monitor_depth()             const { return nof_monitors() + (caller() ? caller()->monitor_depth() : 0); }
  int monitor_box_offset(int idx) const { return monoff() + (idx << logMonitorEdges) + 0; }
  int monitor_obj_offset(int idx) const { return monoff() + (idx << logMonitorEdges) + 1; }
  bool is_monitor_box(uint off)   const {
    assert(is_mon(off), "should be called only for monitor edge");
    return (0 == bitfield(off - monoff(), 0, logMonitorEdges));
  }
  bool is_monitor_use(uint off)   const { return (is_mon(off)
                                                  && is_monitor_box(off))
                                                 || (caller() && caller()->is_monitor_use(off)); }

  // Initialization functions for the JVM
  void set_locoff(uint off) { _locoff = off; }
  void set_stkoff(uint off) { _stkoff = off; }
  void set_monoff(uint off) { _monoff = off; }
  void set_scloff(uint off) { _scloff = off; }
  void set_endoff(uint off) { _endoff = off; }
  void set_offsets(uint off) {
    _locoff = _stkoff = _monoff = _scloff = _endoff = off;
  }
  void set_map(SafePointNode* map) { _map = map; }
  void bind_map(SafePointNode* map); // set_map() and set_jvms() for the SafePointNode
  void set_sp(uint sp) { _sp = sp; }
  // _reexecute is initialized to "undefined" for a new bci
  void set_bci(int bci) { if (_bci != bci) _reexecute = Reexecute_Undefined; _bci = bci; }
  void set_should_reexecute(bool reexec) { _reexecute = reexec ? Reexecute_True : Reexecute_False; }

  // Miscellaneous utility functions
  JVMState* clone_deep(Compile* C) const;    // recursively clones caller chain
  JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
  void set_map_deep(SafePointNode *map);     // reset map for all callers
  void adapt_position(int delta);            // Adapt offsets in in-array after adding an edge.
  int interpreter_frame_size() const;

#ifndef PRODUCT
  void print_method_with_lineno(outputStream* st, bool show_name) const;
  void format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
  void dump_spec(outputStream *st) const;
  void dump_on(outputStream* st) const;
  void dump() const {
    dump_on(tty);
  }
#endif
};

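// Worked example (illustrative only, not from the original header): for a
// youngest, non-inlined JVMState with max_locals == 3, max_stack == 4,
// sp() == 2 and one monitor, the input-edge offsets relate as follows:
//
//   locoff() == TypeFunc::Parms      // debug info starts after the fixed inputs
//   stkoff() == locoff() + 3         // loc_size() == max_locals
//   argoff() == stkoff() + sp()      // only the live part of the expression stack
//   monoff() == stkoff() + 4         // stk_size() == max_stack
//   scloff() == monoff() + 2         // one (box, obj) monitor pair
//   endoff() == scloff()             // no scalar-replaced fields in scope
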
//------------------------------SafePointNode----------------------------------
// A SafePointNode is a subclass of a MultiNode for convenience (and
// potential code sharing) only - conceptually it is independent of
// the Node semantics.
class SafePointNode : public MultiNode {
  friend JVMState;
  friend class GraphKit;
  friend class VMStructs;

  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger

protected:
  JVMState* const _jvms;      // Pointer to list of JVM State objects
  // Many calls take *all* of memory as input,
  // but some produce a limited subset of that memory as output.
  // The adr_type reports the call's behavior as a store, not a load.
  const TypePtr*  _adr_type;  // What type of memory does this node produce?
  ReplacedNodes   _replaced_nodes; // During parsing: list of pair of nodes from calls to GraphKit::replace_in_map()
  bool            _has_ea_local_in_scope; // NoEscape or ArgEscape objects in JVM States

  void set_jvms(JVMState* s) {
    assert(s != nullptr, "assign NULL value to _jvms");
    *(JVMState**)&_jvms = s;  // override const attribute in the accessor
  }
public:
  SafePointNode(uint edges, JVMState* jvms,
                // A plain safepoint advertises no memory effects (NULL):
                const TypePtr* adr_type = NULL)
    : MultiNode( edges ),
      _jvms(jvms),
      _adr_type(adr_type),
      _has_ea_local_in_scope(false)
  {
    init_class_id(Class_SafePoint);
  }

  JVMState* jvms() const { return _jvms; }
  virtual bool needs_deep_clone_jvms(Compile* C) { return false; }
  void clone_jvms(Compile* C) {
    if (jvms() != NULL) {
      if (needs_deep_clone_jvms(C)) {
        set_jvms(jvms()->clone_deep(C));
        jvms()->set_map_deep(this);
      } else {
        jvms()->clone_shallow(C)->bind_map(this);
      }
    }
  }

private:
  void verify_input(JVMState* jvms, uint idx) const {
    assert(verify_jvms(jvms), "jvms must match");
    Node* n = in(idx);
    assert((!n->bottom_type()->isa_long() && !n->bottom_type()->isa_double()) ||
           in(idx + 1)->is_top(), "2nd half of long/double");
  }

public:
  // Functionality from old debug nodes which has changed
  Node *local(JVMState* jvms, uint idx) const {
    verify_input(jvms, jvms->locoff() + idx);
    return in(jvms->locoff() + idx);
  }
  Node *stack(JVMState* jvms, uint idx) const {
    verify_input(jvms, jvms->stkoff() + idx);
    return in(jvms->stkoff() + idx);
  }
  Node *argument(JVMState* jvms, uint idx) const {
    verify_input(jvms, jvms->argoff() + idx);
    return in(jvms->argoff() + idx);
  }
  Node *monitor_box(JVMState* jvms, uint idx) const {
    assert(verify_jvms(jvms), "jvms must match");
    return in(jvms->monitor_box_offset(idx));
  }
  Node *monitor_obj(JVMState* jvms, uint idx) const {
    assert(verify_jvms(jvms), "jvms must match");
    return in(jvms->monitor_obj_offset(idx));
  }

  void set_local(JVMState* jvms, uint idx, Node *c);

  void set_stack(JVMState* jvms, uint idx, Node *c) {
    assert(verify_jvms(jvms), "jvms must match");
    set_req(jvms->stkoff() + idx, c);
  }
  void set_argument(JVMState* jvms, uint idx, Node *c) {
    assert(verify_jvms(jvms), "jvms must match");
    set_req(jvms->argoff() + idx, c);
  }
  void ensure_stack(JVMState* jvms, uint stk_size) {
    assert(verify_jvms(jvms), "jvms must match");
    int grow_by = (int)stk_size - (int)jvms->stk_size();
    if (grow_by > 0) grow_stack(jvms, grow_by);
  }
  void grow_stack(JVMState* jvms, uint grow_by);
  // Handle monitor stack
  void push_monitor( const FastLockNode *lock );
  void pop_monitor ();
  Node *peek_monitor_box() const;
  Node *peek_monitor_obj() const;

  // Access functions for the JVM
  Node *control () const { return in(TypeFunc::Control  ); }
  Node *i_o     () const { return in(TypeFunc::I_O      ); }
  Node *memory  () const { return in(TypeFunc::Memory   ); }
  Node *returnadr() const { return in(TypeFunc::ReturnAdr); }
  Node *frameptr () const { return in(TypeFunc::FramePtr ); }

  void set_control ( Node *c ) { set_req(TypeFunc::Control,c); }
  void set_i_o     ( Node *c ) { set_req(TypeFunc::I_O    ,c); }
  void set_memory  ( Node *c ) { set_req(TypeFunc::Memory ,c); }

  MergeMemNode* merged_memory() const {
    return in(TypeFunc::Memory)->as_MergeMem();
  }

  // The parser marks useless maps as dead when it's done with them:
  bool is_killed() { return in(TypeFunc::Control) == NULL; }

  // Exception states bubbling out of subgraphs such as inlined calls
  // are recorded here. (There might be more than one, hence the "next".)
  // This feature is used only for safepoints which serve as "maps"
  // for JVM states during parsing, intrinsic expansion, etc.
  SafePointNode* next_exception() const;
  void set_next_exception(SafePointNode* n);
  bool has_exceptions() const { return next_exception() != NULL; }

  // Helper methods to operate on replaced nodes
  ReplacedNodes replaced_nodes() const {
    return _replaced_nodes;
  }

  void set_replaced_nodes(ReplacedNodes replaced_nodes) {
    _replaced_nodes = replaced_nodes;
  }

  void clone_replaced_nodes() {
    _replaced_nodes.clone();
  }
  void record_replaced_node(Node* initial, Node* improved) {
    _replaced_nodes.record(initial, improved);
  }
  void transfer_replaced_nodes_from(SafePointNode* sfpt, uint idx = 0) {
    _replaced_nodes.transfer_from(sfpt->_replaced_nodes, idx);
  }
  void delete_replaced_nodes() {
    _replaced_nodes.reset();
  }
  void apply_replaced_nodes(uint idx) {
    _replaced_nodes.apply(this, idx);
  }
  void merge_replaced_nodes_with(SafePointNode* sfpt) {
    _replaced_nodes.merge_with(sfpt->_replaced_nodes);
  }
  bool has_replaced_nodes() const {
    return !_replaced_nodes.is_empty();
  }
  void set_has_ea_local_in_scope(bool b) {
    _has_ea_local_in_scope = b;
  }
  bool has_ea_local_in_scope() const {
    return _has_ea_local_in_scope;
  }

  void disconnect_from_root(PhaseIterGVN *igvn);

  // Standard Node stuff
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const TypePtr* adr_type() const { return _adr_type; }
  void set_adr_type(const TypePtr* adr_type) { _adr_type = adr_type; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual Node* Identity(PhaseGVN* phase);
  virtual uint ideal_reg() const { return 0; }
  virtual const RegMask &in_RegMask(uint) const;
  virtual const RegMask &out_RegMask() const;
  virtual uint match_edge(uint idx) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
#endif
};

//------------------------------SafePointScalarObjectNode----------------------
// A SafePointScalarObjectNode represents the state of a scalarized object
// at a safepoint.

class SafePointScalarObjectNode: public TypeNode {
  uint _first_index; // First input edge relative index of a SafePoint node where
                     // states of the scalarized object fields are collected.
                     // It is relative to the last (youngest) jvms->_scloff.
  uint _n_fields;    // Number of non-static fields of the scalarized object.
  bool _is_auto_box; // True if the scalarized object is an auto box.
  DEBUG_ONLY(Node* _alloc;)

  virtual uint hash() const ; // { return NO_HASH; }
  virtual bool cmp( const Node &n ) const;

  uint first_index() const { return _first_index; }

public:
  SafePointScalarObjectNode(const TypeOopPtr* tp,
#ifdef ASSERT
                            Node* alloc,
#endif
                            uint first_index, uint n_fields, bool is_auto_box = false);
  virtual int Opcode() const;
  virtual uint ideal_reg() const;
  virtual const RegMask &in_RegMask(uint) const;
  virtual const RegMask &out_RegMask() const;
  virtual uint match_edge(uint idx) const;

  uint first_index(JVMState* jvms) const {
    assert(jvms != NULL, "missed JVMS");
    return jvms->scloff() + _first_index;
  }
  uint n_fields() const { return _n_fields; }

  bool is_auto_box() const { return _is_auto_box; }
#ifdef ASSERT
  Node* alloc() const { return _alloc; }
#endif

  virtual uint size_of() const { return sizeof(*this); }

  // Assumes that "this" is an argument to a safepoint node "s", and that
  // "new_call" is being created to correspond to "s". But the difference
  // between the start index of the jvmstates of "new_call" and "s" is
  // "jvms_adj". Produce and return a SafePointScalarObjectNode that
  // corresponds appropriately to "this" in "new_call". Assumes that
  // "sosn_map" is a map, specific to the translation of "s" to "new_call",
  // mapping old SafePointScalarObjectNodes to new, to avoid multiple copies.
  SafePointScalarObjectNode* clone(Dict* sosn_map, bool& new_node) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};


// Simple container for the outgoing projections of a call. Useful
// for serious surgery on calls.
class CallProjections : public StackObj {
public:
  Node* fallthrough_proj;
  Node* fallthrough_catchproj;
  Node* fallthrough_memproj;
  Node* fallthrough_ioproj;
  Node* catchall_catchproj;
  Node* catchall_memproj;
  Node* catchall_ioproj;
  Node* resproj;
  Node* exobj;
};
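
// Example usage (illustrative sketch, not part of the original header; `igvn`
// and `new_ctrl` are stand-ins): passes that rewrite a call typically fill this
// container via CallNode::extract_projections() and then rewire each projection:
//
//   CallProjections projs;
//   call->extract_projections(&projs, /*separate_io_proj=*/true);
//   if (projs.fallthrough_catchproj != NULL) {
//     igvn->replace_node(projs.fallthrough_catchproj, new_ctrl);
//   }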

class CallGenerator;

//------------------------------CallNode---------------------------------------
// Call nodes now subsume the function of debug nodes at callsites, so they
// contain the functionality of a full scope chain of debug nodes.
class CallNode : public SafePointNode {
  friend class VMStructs;

protected:
  bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr* t_oop, PhaseTransform* phase);

public:
  const TypeFunc* _tf;          // Function type
  address         _entry_point; // Address of method being called
  float           _cnt;         // Estimate of number of times called
  CallGenerator*  _generator;   // corresponding CallGenerator for some late inline calls
  const char*     _name;        // Printable name, if _method is NULL

  CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type, JVMState* jvms = nullptr)
    : SafePointNode(tf->domain()->cnt(), jvms, adr_type),
      _tf(tf),
      _entry_point(addr),
      _cnt(COUNT_UNKNOWN),
      _generator(NULL),
      _name(NULL)
  {
    init_class_id(Class_Call);
  }

  const TypeFunc* tf()          const { return _tf; }
  const address entry_point()   const { return _entry_point; }
  const float cnt()             const { return _cnt; }
  CallGenerator* generator()    const { return _generator; }

  void set_tf(const TypeFunc* tf)       { _tf = tf; }
  void set_entry_point(address p)       { _entry_point = p; }
  void set_cnt(float c)                 { _cnt = c; }
  void set_generator(CallGenerator* cg) { _generator = cg; }

  virtual const Type* bottom_type() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  virtual Node* Identity(PhaseGVN* phase) { return this; }
  virtual bool cmp(const Node &n) const;
  virtual uint size_of() const = 0;
  virtual void calling_convention(BasicType* sig_bt, VMRegPair* parm_regs, uint argcnt) const;
  virtual Node* match(const ProjNode* proj, const Matcher* m);
  virtual uint ideal_reg() const { return NotAMachineReg; }
  // Are we guaranteed that this node is a safepoint? Not true for leaf calls and
  // for some macro nodes whose expansion does not have a safepoint on the fast path.
  virtual bool guaranteed_safepoint() { return true; }
  // For macro nodes, the JVMState gets modified during expansion. If calls
  // use MachConstantBase, it gets modified during matching. So when cloning
  // the node the JVMState must be deep cloned. Default is to shallow clone.
  virtual bool needs_deep_clone_jvms(Compile* C) { return C->needs_deep_clone_jvms(); }

  // Returns true if the call may modify n
  virtual bool may_modify(const TypeOopPtr* t_oop, PhaseTransform* phase);
  // Does this node have a use of n other than in debug information?
  bool has_non_debug_use(Node* n);
  // Returns the unique CheckCastPP of a call
  // or result projection if there are several CheckCastPPs,
  // or returns NULL if there is none.
  Node* result_cast();
  // Does this node return a pointer?
  bool returns_pointer() const {
    const TypeTuple* r = tf()->range();
    return (r->cnt() > TypeFunc::Parms &&
            r->field_at(TypeFunc::Parms)->isa_ptr());
  }

  // Collect all the interesting edges from a call for use in
  // replacing the call by something else. Used by macro expansion
  // and the late inlining support.
  void extract_projections(CallProjections* projs, bool separate_io_proj, bool do_asserts = true);

  virtual uint match_edge(uint idx) const;

  bool is_call_to_arraycopystub() const;

  virtual void copy_call_debug_info(PhaseIterGVN* phase, SafePointNode* sfpt) {}

#ifndef PRODUCT
  virtual void dump_req(outputStream* st = tty) const;
  virtual void dump_spec(outputStream* st) const;
#endif
};


//------------------------------CallJavaNode-----------------------------------
// Make a static or dynamic subroutine call node using Java calling
// convention. (The "Java" calling convention is the compiler's calling
// convention, as opposed to the interpreter's or that of native C.)
class CallJavaNode : public CallNode {
  friend class VMStructs;
protected:
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger

  bool      _optimized_virtual;
  bool      _method_handle_invoke;
  bool      _override_symbolic_info; // Override symbolic call site info from bytecode
  ciMethod* _method;                 // Method being direct called
  bool      _arg_escape;             // ArgEscape in parameter list
public:
  CallJavaNode(const TypeFunc* tf, address addr, ciMethod* method)
    : CallNode(tf, addr, TypePtr::BOTTOM),
      _optimized_virtual(false),
      _method_handle_invoke(false),
      _override_symbolic_info(false),
      _method(method),
      _arg_escape(false)
  {
    init_class_id(Class_CallJava);
  }

  virtual int Opcode() const;
  ciMethod* method() const                 { return _method; }
  void  set_method(ciMethod *m)            { _method = m; }
  void  set_optimized_virtual(bool f)      { _optimized_virtual = f; }
  bool  is_optimized_virtual() const       { return _optimized_virtual; }
  void  set_method_handle_invoke(bool f)   { _method_handle_invoke = f; }
  bool  is_method_handle_invoke() const    { return _method_handle_invoke; }
  void  set_override_symbolic_info(bool f) { _override_symbolic_info = f; }
  bool  override_symbolic_info() const     { return _override_symbolic_info; }
  void  set_arg_escape(bool f)             { _arg_escape = f; }
  bool  arg_escape() const                 { return _arg_escape; }
  void copy_call_debug_info(PhaseIterGVN* phase, SafePointNode *sfpt);

  DEBUG_ONLY( bool validate_symbolic_info() const; )

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------CallStaticJavaNode-----------------------------
// Make a direct subroutine call using Java calling convention (for static
// calls and optimized virtual calls, plus calls to wrappers for run-time
// routines); generates static stub.
class CallStaticJavaNode : public CallJavaNode {
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method)
    : CallJavaNode(tf, addr, method) {
    init_class_id(Class_CallStaticJava);
    if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
      init_flags(Flag_is_macro);
      C->add_macro_node(this);
    }
  }
  CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, const TypePtr* adr_type)
    : CallJavaNode(tf, addr, NULL) {
    init_class_id(Class_CallStaticJava);
    // This node calls a runtime stub, which often has narrow memory effects.
    _adr_type = adr_type;
    _name = name;
  }

  // If this is an uncommon trap, return the request code, else zero.
  int uncommon_trap_request() const;
  static int extract_uncommon_trap_request(const Node* call);

  bool is_boxing_method() const {
    return is_macro() && (method() != NULL) && method()->is_boxing_method();
  }
  // Late inlining modifies the JVMState, so we need to deep clone it
  // when the call node is cloned (because it is a macro node).
  virtual bool needs_deep_clone_jvms(Compile* C) {
    return is_boxing_method() || CallNode::needs_deep_clone_jvms(C);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------CallDynamicJavaNode----------------------------
// Make a dispatched call using Java calling convention.
class CallDynamicJavaNode : public CallJavaNode {
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallDynamicJavaNode(const TypeFunc* tf, address addr, ciMethod* method, int vtable_index)
    : CallJavaNode(tf,addr,method), _vtable_index(vtable_index) {
    init_class_id(Class_CallDynamicJava);
  }

  // Late inlining modifies the JVMState, so we need to deep clone it
  // when the call node is cloned.
  virtual bool needs_deep_clone_jvms(Compile* C) {
    return IncrementalInlineVirtual || CallNode::needs_deep_clone_jvms(C);
  }

  int _vtable_index;
  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};

//------------------------------CallRuntimeNode--------------------------------
// Make a direct subroutine call node into compiled C++ code.
class CallRuntimeNode : public CallNode {
protected:
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallRuntimeNode(const TypeFunc* tf, address addr, const char* name,
                  const TypePtr* adr_type, JVMState* jvms = nullptr)
    : CallNode(tf, addr, adr_type, jvms)
  {
    init_class_id(Class_CallRuntime);
    _name = name;
  }

  virtual int Opcode() const;
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};

//------------------------------CallLeafNode-----------------------------------
// Make a direct subroutine call node into compiled C++ code, without
// safepoints
class CallLeafNode : public CallRuntimeNode {
public:
  CallLeafNode(const TypeFunc* tf, address addr, const char* name,
               const TypePtr* adr_type)
    : CallRuntimeNode(tf, addr, name, adr_type)
  {
    init_class_id(Class_CallLeaf);
  }
  virtual int Opcode() const;
  virtual bool guaranteed_safepoint() { return false; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};

//------------------------------CallNativeNode-----------------------------------
// Make a direct call into a foreign function with an arbitrary ABI
// safepoints
class CallNativeNode : public CallNode {
  friend class MachCallNativeNode;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const;
  static void print_regs(const GrowableArray<VMReg>& regs, outputStream* st);
public:
  GrowableArray<VMReg> _arg_regs;
  GrowableArray<VMReg> _ret_regs;
  const int _shadow_space_bytes;
  const bool _need_transition;

  CallNativeNode(const TypeFunc* tf, address addr, const char* name,
                 const TypePtr* adr_type,
                 const GrowableArray<VMReg>& arg_regs,
                 const GrowableArray<VMReg>& ret_regs,
                 int shadow_space_bytes,
                 bool need_transition)
    : CallNode(tf, addr, adr_type), _arg_regs(arg_regs),
      _ret_regs(ret_regs), _shadow_space_bytes(shadow_space_bytes),
      _need_transition(need_transition)
  {
    init_class_id(Class_CallNative);
    _name = name;
  }
  virtual int Opcode() const;
  virtual bool guaranteed_safepoint() { return _need_transition; }
  virtual Node* match(const ProjNode *proj, const Matcher *m);
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};

//------------------------------CallLeafNoFPNode-------------------------------
// CallLeafNode, not using floating point or using it in the same manner as
// the generated code
class CallLeafNoFPNode : public CallLeafNode {
public:
  CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name,
                   const TypePtr* adr_type)
    : CallLeafNode(tf, addr, name, adr_type)
  {
    init_class_id(Class_CallLeafNoFP);
  }
  virtual int Opcode() const;
};

//------------------------------CallLeafVectorNode-------------------------------
// CallLeafNode but calling with vector calling convention instead.
class CallLeafVectorNode : public CallLeafNode {
private:
  uint _num_bits;
protected:
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallLeafVectorNode(const TypeFunc* tf, address addr, const char* name,
                     const TypePtr* adr_type, uint num_bits)
    : CallLeafNode(tf, addr, name, adr_type), _num_bits(num_bits)
  {
  }
  virtual int Opcode() const;
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;
};


//------------------------------Allocate---------------------------------------
// High-level memory allocation
//
// AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
// get expanded into a code sequence containing a call. Unlike other CallNodes,
// they have 2 memory projections and 2 i_o projections (which are distinguished by
// the _is_io_use flag in the projection.) This is needed when expanding the node in
// order to differentiate the uses of the projection on the normal control path from
// those on the exception return path.
//
class AllocateNode : public CallNode {
public:
  enum {
    // Output:
    RawAddress  = TypeFunc::Parms,    // the newly-allocated raw address
    // Inputs:
    AllocSize   = TypeFunc::Parms,    // size (in bytes) of the new object
    KlassNode,                        // type (maybe dynamic) of the obj.
    InitialTest,                      // slow-path test (may be constant)
    ALength,                          // array length (or TOP if none)
    ParmLimit
  };

  static const TypeFunc* alloc_type(const Type* t) {
    const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
    fields[AllocSize]   = TypeInt::POS;
    fields[KlassNode]   = TypeInstPtr::NOTNULL;
    fields[InitialTest] = TypeInt::BOOL;
    fields[ALength]     = t;  // length (can be a bad length)

    const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);

    // create result type (range)
    fields = TypeTuple::fields(1);
    fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop

    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);

    return TypeFunc::make(domain, range);
  }

  // Result of Escape Analysis
  bool _is_scalar_replaceable;
  bool _is_non_escaping;
  // True when MemBar for new is redundant with MemBar at initializer exit
  bool _is_allocation_MemBar_redundant;

  virtual uint size_of() const; // Size is bigger
  AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
               Node *size, Node *klass_node, Node *initial_test);
  // Expansion modifies the JVMState, so we need to deep clone it
  virtual bool needs_deep_clone_jvms(Compile* C) { return true; }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual bool guaranteed_safepoint() { return false; }

  // allocations do not modify their arguments
  virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}

  // Pattern-match a possible usage of AllocateNode.
  // Return null if no allocation is recognized.
  // The operand is the pointer produced by the (possible) allocation.
  // It must be a projection of the Allocate or its subsequent CastPP.
  // (Note: This function is defined in file graphKit.cpp, near
  // GraphKit::new_instance/new_array, whose output it recognizes.)
  // The 'ptr' may not have an offset unless the 'offset' argument is given.
  static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase);

  // Fancy version which uses AddPNode::Ideal_base_and_offset to strip
  // an offset, which is reported back to the caller.
  // (Note: AllocateNode::Ideal_allocation is defined in graphKit.cpp.)
  static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase,
                                        intptr_t& offset);

  // Dig the klass operand out of a (possible) allocation site.
  static Node* Ideal_klass(Node* ptr, PhaseTransform* phase) {
    AllocateNode* allo = Ideal_allocation(ptr, phase);
    return (allo == NULL) ? NULL : allo->in(KlassNode);
  }

  // Conservatively small estimate of offset of first non-header byte.
  int minimum_header_size() {
    return is_AllocateArray() ? arrayOopDesc::base_offset_in_bytes(T_BYTE) :
                                instanceOopDesc::base_offset_in_bytes();
  }

  // Return the corresponding initialization barrier (or null if none).
  // Walks out edges to find it...
  // (Note: Both InitializeNode::allocation and AllocateNode::initialization
  // are defined in graphKit.cpp, which sets up the bidirectional relation.)
  InitializeNode* initialization();

  // Convenience for initialization->maybe_set_complete(phase)
  bool maybe_set_complete(PhaseGVN* phase);

  // Return true if the allocation doesn't escape the thread, i.e. its escape state
  // needs to be NoEscape or ArgEscape. InitializeNode._does_not_escape
  // is true when its allocation's escape state is NoEscape or
  // ArgEscape. In case the allocation's InitializeNode is NULL, check the
  // AllocateNode._is_non_escaping flag.
  // AllocateNode._is_non_escaping is true when its escape state is
  // NoEscape.
  bool does_not_escape_thread() {
    InitializeNode* init = NULL;
    return _is_non_escaping || (((init = initialization()) != NULL) && init->does_not_escape());
  }

  // If the object doesn't escape in its <.init> method and there is a memory barrier
  // inserted at the exit of its <.init>, the memory barrier for new is not necessary.
  // Invoke this method when the MemBar at the exit of the initializer post-dominates
  // the allocation node.
  void compute_MemBar_redundancy(ciMethod* initializer);
  bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }

  Node* make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem);
};

//------------------------------AllocateArray---------------------------------
//
// High-level array allocation
//
class AllocateArrayNode : public AllocateNode {
public:
  AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
                    Node* size, Node* klass_node, Node* initial_test,
                    Node* count_val
                    )
    : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
                   initial_test)
  {
    init_class_id(Class_AllocateArray);
    set_req(AllocateNode::ALength, count_val);
  }
  virtual int Opcode() const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Dig the length operand out of an array allocation site.
  Node* Ideal_length() {
    return in(AllocateNode::ALength);
  }

  // Dig the length operand out of an array allocation site and narrow the
  // type with a CastII, if necessary
  Node* make_ideal_length(const TypeOopPtr* ary_type, PhaseTransform *phase, bool can_create = true);

  // Pattern-match a possible usage of AllocateArrayNode.
  // Return null if no allocation is recognized.
  static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) {
    AllocateNode* allo = Ideal_allocation(ptr, phase);
    return (allo == NULL || !allo->is_AllocateArray())
           ? NULL : allo->as_AllocateArray();
  }
};

//------------------------------AbstractLockNode-----------------------------------
class AbstractLockNode: public CallNode {
private:
  enum {
    Regular = 0,  // Normal lock
    NonEscObj,    // Lock is used for non escaping object
    Coarsened,    // Lock was coarsened
    Nested        // Nested lock
  } _kind;
#ifndef PRODUCT
  NamedCounter* _counter;
  static const char* _kind_names[Nested+1];
#endif

protected:
  // helper functions for lock elimination
  //

  bool find_matching_unlock(const Node* ctrl, LockNode* lock,
                            GrowableArray<AbstractLockNode*> &lock_ops);
  bool find_lock_and_unlock_through_if(Node* node, LockNode* lock,
                                       GrowableArray<AbstractLockNode*> &lock_ops);
  bool find_unlocks_for_region(const RegionNode* region, LockNode* lock,
                               GrowableArray<AbstractLockNode*> &lock_ops);
  LockNode *find_matching_lock(UnlockNode* unlock);

  // Update the counter to indicate that this lock was eliminated.
  void set_eliminated_lock_counter() PRODUCT_RETURN;

public:
  AbstractLockNode(const TypeFunc *tf)
    : CallNode(tf, NULL, TypeRawPtr::BOTTOM),
      _kind(Regular)
  {
#ifndef PRODUCT
    _counter = NULL;
#endif
  }
  virtual int Opcode() const = 0;
  Node *   obj_node() const       {return in(TypeFunc::Parms + 0); }
  Node *   box_node() const       {return in(TypeFunc::Parms + 1); }
  Node *   fastlock_node() const  {return in(TypeFunc::Parms + 2); }
  void     set_box_node(Node* box) { set_req(TypeFunc::Parms + 1, box); }

  const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}

  virtual uint size_of() const { return sizeof(*this); }

  bool is_eliminated()  const { return (_kind != Regular); }
  bool is_non_esc_obj() const { return (_kind == NonEscObj); }
  bool is_coarsened()   const { return (_kind == Coarsened); }
  bool is_nested()      const { return (_kind == Nested); }

  const char * kind_as_string() const;
  void log_lock_optimization(Compile* c, const char * tag) const;

  void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
  void set_coarsened()   { _kind = Coarsened; set_eliminated_lock_counter(); }
  void set_nested()      { _kind = Nested;    set_eliminated_lock_counter(); }

  // locking does not modify its arguments
  virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase){ return false;}

#ifndef PRODUCT
  void create_lock_counter(JVMState* s);
  NamedCounter* counter() const { return _counter; }
  virtual void dump_spec(outputStream* st) const;
  virtual void dump_compact_spec(outputStream* st) const;
  virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
#endif
};

//------------------------------Lock---------------------------------------
// High-level lock operation
//
// This is a subclass of CallNode because it is a macro node which gets expanded
// into a code sequence containing a call. This node takes 3 "parameters":
//    0  -  object to lock
//    1 -   a BoxLockNode
//    2 -   a FastLockNode
//
class LockNode : public AbstractLockNode {
public:

  static const TypeFunc *lock_type() {
    // create input type (domain)
    const Type **fields = TypeTuple::fields(3);
    fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
    fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock
    fields[TypeFunc::Parms+2] = TypeInt::BOOL;         // FastLock
    const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields);

    // create result type (range)
    fields = TypeTuple::fields(0);

    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);

    return TypeFunc::make(domain,range);
  }

  virtual int Opcode() const;
  virtual uint size_of() const; // Size is bigger
  LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
    init_class_id(Class_Lock);
    init_flags(Flag_is_macro);
    C->add_macro_node(this);
  }
  virtual bool guaranteed_safepoint() { return false; }

  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // Expansion modifies the JVMState, so we need to deep clone it
  virtual bool needs_deep_clone_jvms(Compile* C) { return true; }

  bool is_nested_lock_region(); // Is this Lock nested?
  bool is_nested_lock_region(Compile * c); // Why isn't this Lock nested?
};

//------------------------------Unlock---------------------------------------
// High-level unlock operation
class UnlockNode : public AbstractLockNode {
private:
#ifdef ASSERT
  JVMState* const _dbg_jvms;      // Pointer to list of JVM State objects
#endif
public:
  virtual int Opcode() const;
  virtual uint size_of() const; // Size is bigger
  UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf )
#ifdef ASSERT
    , _dbg_jvms(NULL)
#endif
  {
    init_class_id(Class_Unlock);
    init_flags(Flag_is_macro);
    C->add_macro_node(this);
  }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // unlock is never a safepoint
  virtual bool guaranteed_safepoint() { return false; }
#ifdef ASSERT
  void set_dbg_jvms(JVMState* s) {
    *(JVMState**)&_dbg_jvms = s;  // override const attribute in the accessor
  }
  JVMState* dbg_jvms() const { return _dbg_jvms; }
#else
  JVMState* dbg_jvms() const { return NULL; }
#endif
};
#endif // SHARE_OPTO_CALLNODE_HPP