Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/opto/bytecodeInfo.cpp
32285 views
/*
 * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciReplay.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "jfr/jfrEvents.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"

//=============================================================================
//------------------------------InlineTree-------------------------------------
// Constructs one node of the inline tree: records the callee, the call-site
// frequency ratio, and the remaining inline-depth budget, then propagates this
// subtree's bytecode count up through every ancestor so that hierarchical
// size limits (count_inline_bcs) stay accurate.
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       float site_invoke_ratio, int max_inline_level) :
  C(c),
  _caller_jvms(caller_jvms),
  _caller_tree((InlineTree*) caller_tree),
  _method(callee),
  _site_invoke_ratio(site_invoke_ratio),
  _max_inline_level(max_inline_level),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _subtrees(c->comp_arena(), 2, 0, NULL),
  _msg(NULL)
{
#ifndef PRODUCT
  _count_inlines = 0;
  _forced_inline = false;
#endif
  if (_caller_jvms != NULL) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  }
  assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts, count_inline_bcs() and count_inlines()
  InlineTree *caller = (InlineTree *)caller_tree;
  for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}

/**
 * Return true when EA is ON and a java constructor is called or
 * a super constructor is called from an inlined java constructor.
 * Also return true for boxing methods.
 */
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_initializer()) {
    return true; // constructor
  }
  if (caller_method->is_initializer() &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  return false;
}

/**
 * Force inlining unboxing accessor.
 */
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}

// positive filter: should callee be inlined?
// Returns true when some positive heuristic (CompilerOracle/annotation/ciReplay
// force-inline, many-throws, or call-site frequency) says the callee is worth
// inlining; on a false return, set_msg() records the reason for logging.
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, ciCallProfile& profile,
                               WarmCallInfo* wci_result) {
  // Allows targeted inlining
  if (callee_method->should_inline()) {
    *wci_result = *(WarmCallInfo::always_hot());
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method is hot: ");
    }
    set_msg("force inline by CompilerOracle");
    _forced_inline = true;
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    _forced_inline = true;
    return true;
  }

#ifndef PRODUCT
  int inline_depth = inline_level()+1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("force inline by ciReplay");
    _forced_inline = true;
    return true;
  }
#endif

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
     size < InlineThrowMaxSize ) {
    wci_result->set_profit(wci_result->profit() * 100);
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size  = InlineSmallCode / 4;
  int max_inline_size         = default_max_inline_size;

  int call_site_count = method()->scale_count(profile.count());
  int invoke_count    = method()->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  int freq = call_site_count / invoke_count;

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      (call_site_count >= InlineFrequencyCount) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot. Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}


// negative filter: should callee NOT be inlined?
// Returns true when a correctness restriction (abstract/native/unloaded) or a
// negative heuristic (big nmethod, exception-class code, low execution count)
// vetoes inlining; set_msg() records the reason either way.
bool InlineTree::should_not_inline(ciMethod *callee_method,
                                   ciMethod* caller_method,
                                   JVMState* jvms,
                                   WarmCallInfo* wci_result) {

  const char* fail_msg = NULL;

  // First check all inlining restrictions which are required for correctness
  if ( callee_method->is_abstract()) {
    fail_msg = "abstract method"; // // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized()) {
    fail_msg = "method holder not initialized";
  } else if ( callee_method->is_native()) {
    fail_msg = "native method";
  } else if ( callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  // one more inlining restriction
  if (fail_msg == NULL && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != NULL) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (callee_method->should_inline()) {
    set_msg("force inline by CompilerOracle");
    return false;
  }

  if (callee_method->should_not_inline()) {
    set_msg("disallowed by CompilerOracle");
    return true;
  }

#ifndef PRODUCT
  int caller_bci = jvms->bci();
  int inline_depth = inline_level()+1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("force inline by ciReplay");
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }
#endif

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    return false;
  }

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  if (callee_method->has_compiled_code() &&
      callee_method->instructions_size() > InlineSmallCode) {
    set_msg("already compiled into a big method");
    return true;
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != NULL &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != NULL) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp or CTW
  if (UseInterpreter && !CompileTheWorld) {

    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
                                                           CompileThreshold >> 1))) {
      set_msg("executed < MinInliningThreshold times");
      return true;
    }
  }

  return false;
}

//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
// Combines the positive and negative filters above with global budget checks
// (DesiredMethodLimit, node-count cutoff, inline depth, recursion depth).
// May set should_delay=true to postpone the decision for incremental inlining.
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               WarmCallInfo* wci_result, bool& should_delay) {

  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset
  if (!should_inline(callee_method, caller_method, caller_bci, profile,
                     wci_result)) {
    return false;
  }
  if (should_not_inline(callee_method, caller_method, jvms, wci_result)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if ((!UseInterpreter || CompileTheWorld) &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis stress testing when running Xcomp or CTW:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompilerOracle, ciReplay or annotation
    } else if (profile.count() == 0) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
  {
    // count the current method and the callee
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    int inline_level = 0;
    if (!is_compiled_lambda_form) {
      if (method() == callee_method) {
        inline_level++;
      }
    }
    // count callers of current method and callee
    Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : NULL;
    for (JVMState* j = jvms->caller(); j != NULL && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        if (is_compiled_lambda_form) {
          // Since compiled lambda forms are heavily reused we allow recursive inlining. If it is truly
          // a recursion (using the same "receiver") we limit inlining otherwise we can easily blow the
          // compiler stack.
          Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
          if (caller_argument0 == callee_argument0) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}

//------------------------------pass_initial_checks----------------------------
// Cheap preconditions that must hold before inlining is even considered:
// a resolved callee whose holder klass is loaded and initialized, and (under
// -Xcomp/CTW, where no profile exists) a resolvable constant-pool call site.
bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
  // Check if a callee_method was suggested
  if( callee_method == NULL )            return false;
  // Check if klass of callee_method is loaded
  if( !callee_holder->is_loaded() )      return false;
  if( !callee_holder->is_initialized() ) return false;
  if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
    // Checks that constant pool's call site has been visited
    // stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2_cpcache();
      if (!caller_method->is_klass_loaded(index, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
        return false;
      }
    }
  }
  // We will attempt to see if a class/field/etc got properly loaded. If it
  // did not, it may attempt to throw an exception during our probing. Catch
  // and ignore such exceptions and do not attempt to compile the method.
  if( callee_method->should_exclude() )  return false;

  return true;
}

//------------------------------check_can_parse--------------------------------
// Returns NULL if the callee can be parsed by C2, otherwise a short reason
// string describing why parsing is impossible.
const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native())                    return "native method";
  if ( callee->is_abstract())                  return "abstract method";
  if (!callee->can_be_compiled())              return "not compilable (disabled)";
  if (!callee->has_balanced_monitors())        return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
  return NULL;
}

// Emit a JFR CompilerInlining event recording this inlining decision
// (callee identity, success flag, reason message, and call-site bci).
static void post_inlining_event(int compile_id,const char* msg, bool success, int bci, ciMethod* caller, ciMethod* callee) {
  assert(caller != NULL, "invariant");
  assert(callee != NULL, "invariant");
  EventCompilerInlining event;
  if (event.should_commit()) {
    JfrStructCalleeMethod callee_struct;
    callee_struct.set_type(callee->holder()->name()->as_utf8());
    callee_struct.set_name(callee->name()->as_utf8());
    callee_struct.set_descriptor(callee->signature()->as_symbol()->as_utf8());
    event.set_compileId(compile_id);
    event.set_message(msg);
    event.set_succeeded(success);
    event.set_bci(bci);
    event.set_caller(caller->get_Method());
    event.set_callee(callee_struct);
    event.commit();
  }
}

//------------------------------print_inlining---------------------------------
// Report the decision stored in msg() to the compile log, the -XX:+PrintInlining
// stream, and JFR.
void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
                                ciMethod* caller_method, bool success) const {
  const char* inline_msg = msg();
  assert(inline_msg != NULL, "just checking");
  if (C->log() != NULL) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  if (C->print_inlining()) {
    C->print_inlining(callee_method, inline_level(), caller_bci, inline_msg);
    if (callee_method == NULL) tty->print(" callee not monotonic or profiled");
    if (Verbose && callee_method) {
      const InlineTree *top = this;
      while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
      //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
    }
  }
  post_inlining_event(C->compile_id(), inline_msg, success, caller_bci, caller_method, callee_method);
}

//------------------------------ok_to_inline-----------------------------------
// Top-level entry for the parser: decide whether to inline callee_method at
// this call site. Returns a non-NULL WarmCallInfo to inline (always-hot, or a
// heap copy for warm calls under InlineWarmCalls), or NULL to refuse; also
// registers the callee as a subtree on success.
WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci, bool& should_delay) {
  assert(callee_method != NULL, "caller checks for optimized virtual!");
  assert(!should_delay, "should be initialized to false");
#ifdef ASSERT
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == NULL) {
    assert(_caller_jvms == NULL, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int caller_bci = jvms->bci();
  ciMethod* caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return NULL;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != NULL) {
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return NULL;
  }

  // Check if inlining policy says no.
  WarmCallInfo wci = *(initial_wci);
  bool success = try_to_inline(callee_method, caller_method, caller_bci,
                               jvms, profile, &wci, should_delay);

#ifndef PRODUCT
  if (InlineWarmCalls && (PrintOpto || C->print_inlining())) {
    bool cold = wci.is_cold();
    bool hot  = !cold && wci.is_hot();
    bool old_cold = !success;
    if (old_cold != cold || (Verbose || WizardMode)) {
      if (msg() == NULL) {
        set_msg("OK");
      }
      tty->print(" OldInlining= %4s : %s\n WCI=",
                 old_cold ? "cold" : "hot", msg());
      wci.print();
    }
  }
#endif
  if (success) {
    wci = *(WarmCallInfo::always_hot());
  } else {
    wci = *(WarmCallInfo::always_cold());
  }

  if (!InlineWarmCalls) {
    if (!wci.is_cold() && !wci.is_hot()) {
      // Do not inline the warm calls.
      wci = *(WarmCallInfo::always_cold());
    }
  }

  if (!wci.is_cold()) {
    // Inline!
    if (msg() == NULL) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, caller_bci, caller_method, true /* success */);
    build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (InlineWarmCalls && !wci.is_hot())
      return new (C) WarmCallInfo(wci); // copy to heap
    return WarmCallInfo::always_hot();
  }

  // Do not inline
  if (msg() == NULL) {
    set_msg("too cold to inline");
  }
  print_inlining(callee_method, caller_bci, caller_method, false /* !success */ );
  return NULL;
}

//------------------------------compute_callee_frequency-----------------------
float InlineTree::compute_callee_frequency( int caller_bci ) const {
  int count  = method()->interpreter_call_site_count(caller_bci);
  int invcnt = method()->interpreter_invocation_count();
  float freq = (float)count/(float)invcnt;
  // Call-site count / interpreter invocation count, scaled recursively.
  // Always between 0.0 and 1.0. Represents the percentage of the method's
  // total execution time used at this call site.

  return freq;
}

//------------------------------build_inline_tree_for_callee-------------------
// Create (or reuse) the InlineTree node for an inlined callee at caller_bci.
// Method-handle adapter frames (compiled lambda forms, MH intrinsics) get a
// +1 depth allowance so java.lang.invoke plumbing doesn't eat the budget.
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != NULL) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != NULL) {
    if (caller_jvms->method()->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames
    } else if (callee_method->is_method_handle_intrinsic() ||
               callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  InlineTree* ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


//---------------------------------------callee_at-----------------------------
// Linear search of the direct subtrees for an already-recorded inline of
// `callee` at call-site `bci`; NULL if none.
InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return NULL;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree *InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
  InlineTree* ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, MaxInlineLevel);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == NULL) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != NULL, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}

// Count number of nodes in this subtree
int InlineTree::count() const {
  int result = 1;
  for (int i = 0 ; i < _subtrees.length(); i++) {
    result += _subtrees.at(i)->count();
  }
  return result;
}

// Recursively write this tree's structure (depth, bci, method name) in the
// format consumed by ciReplay.
void InlineTree::dump_replay_data(outputStream* out) {
  out->print(" %d %d ", inline_level(), caller_bci());
  method()->dump_name_as_ascii(out);
  for (int i = 0 ; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out);
  }
}


#ifndef PRODUCT
// Debug printing: one line per tree node, children indented by two columns.
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0 ; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif