// Source: hotspot/src/share/vm/c1/c1_GraphBuilder.cpp (aarch64-shenandoah-jdk8u272-b10)
/*1* Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#include "precompiled.hpp"25#include "c1/c1_CFGPrinter.hpp"26#include "c1/c1_Canonicalizer.hpp"27#include "c1/c1_Compilation.hpp"28#include "c1/c1_GraphBuilder.hpp"29#include "c1/c1_InstructionPrinter.hpp"30#include "ci/ciCallSite.hpp"31#include "ci/ciField.hpp"32#include "ci/ciKlass.hpp"33#include "ci/ciMemberName.hpp"34#include "compiler/compileBroker.hpp"35#include "interpreter/bytecode.hpp"36#include "jfr/jfrEvents.hpp"37#include "runtime/sharedRuntime.hpp"38#include "runtime/compilationPolicy.hpp"39#include "utilities/bitMap.inline.hpp"4041class BlockListBuilder VALUE_OBJ_CLASS_SPEC {42private:43Compilation* _compilation;44IRScope* _scope;4546BlockList _blocks; // internal list of all blocks47BlockList* _bci2block; // mapping from bci to blocks for GraphBuilder4849// fields used by mark_loops50BitMap _active; // for iteration of control flow graph51BitMap _visited; // for iteration of control flow graph52intArray _loop_map; // 
caches the information if a block is contained in a loop53int _next_loop_index; // next free loop number54int _next_block_number; // for reverse postorder numbering of blocks5556// accessors57Compilation* compilation() const { return _compilation; }58IRScope* scope() const { return _scope; }59ciMethod* method() const { return scope()->method(); }60XHandlers* xhandlers() const { return scope()->xhandlers(); }6162// unified bailout support63void bailout(const char* msg) const { compilation()->bailout(msg); }64bool bailed_out() const { return compilation()->bailed_out(); }6566// helper functions67BlockBegin* make_block_at(int bci, BlockBegin* predecessor);68void handle_exceptions(BlockBegin* current, int cur_bci);69void handle_jsr(BlockBegin* current, int sr_bci, int next_bci);70void store_one(BlockBegin* current, int local);71void store_two(BlockBegin* current, int local);72void set_entries(int osr_bci);73void set_leaders();7475void make_loop_header(BlockBegin* block);76void mark_loops();77int mark_loops(BlockBegin* b, bool in_subroutine);7879// debugging80#ifndef PRODUCT81void print();82#endif8384public:85// creation86BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci);8788// accessors for GraphBuilder89BlockList* bci2block() const { return _bci2block; }90};919293// Implementation of BlockListBuilder9495BlockListBuilder::BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci)96: _compilation(compilation)97, _scope(scope)98, _blocks(16)99, _bci2block(new BlockList(scope->method()->code_size(), NULL))100, _next_block_number(0)101, _active() // size not known yet102, _visited() // size not known yet103, _next_loop_index(0)104, _loop_map() // size not known yet105{106set_entries(osr_bci);107set_leaders();108CHECK_BAILOUT();109110mark_loops();111NOT_PRODUCT(if (PrintInitialBlockList) print());112113#ifndef PRODUCT114if (PrintCFGToFile) {115stringStream title;116title.print("BlockListBuilder 
");117scope->method()->print_name(&title);118CFGPrinter::print_cfg(_bci2block, title.as_string(), false, false);119}120#endif121}122123124void BlockListBuilder::set_entries(int osr_bci) {125// generate start blocks126BlockBegin* std_entry = make_block_at(0, NULL);127if (scope()->caller() == NULL) {128std_entry->set(BlockBegin::std_entry_flag);129}130if (osr_bci != -1) {131BlockBegin* osr_entry = make_block_at(osr_bci, NULL);132osr_entry->set(BlockBegin::osr_entry_flag);133}134135// generate exception entry blocks136XHandlers* list = xhandlers();137const int n = list->length();138for (int i = 0; i < n; i++) {139XHandler* h = list->handler_at(i);140BlockBegin* entry = make_block_at(h->handler_bci(), NULL);141entry->set(BlockBegin::exception_entry_flag);142h->set_entry_block(entry);143}144}145146147BlockBegin* BlockListBuilder::make_block_at(int cur_bci, BlockBegin* predecessor) {148assert(method()->bci_block_start().at(cur_bci), "wrong block starts of MethodLivenessAnalyzer");149150BlockBegin* block = _bci2block->at(cur_bci);151if (block == NULL) {152block = new BlockBegin(cur_bci);153block->init_stores_to_locals(method()->max_locals());154_bci2block->at_put(cur_bci, block);155_blocks.append(block);156157assert(predecessor == NULL || predecessor->bci() < cur_bci, "targets for backward branches must already exist");158}159160if (predecessor != NULL) {161if (block->is_set(BlockBegin::exception_entry_flag)) {162BAILOUT_("Exception handler can be reached by both normal and exceptional control flow", block);163}164165predecessor->add_successor(block);166block->increment_total_preds();167}168169return block;170}171172173inline void BlockListBuilder::store_one(BlockBegin* current, int local) {174current->stores_to_locals().set_bit(local);175}176inline void BlockListBuilder::store_two(BlockBegin* current, int local) {177store_one(current, local);178store_one(current, local + 1);179}180181182void BlockListBuilder::handle_exceptions(BlockBegin* current, int cur_bci) {183// 
Draws edges from a block to its exception handlers184XHandlers* list = xhandlers();185const int n = list->length();186187for (int i = 0; i < n; i++) {188XHandler* h = list->handler_at(i);189190if (h->covers(cur_bci)) {191BlockBegin* entry = h->entry_block();192assert(entry != NULL && entry == _bci2block->at(h->handler_bci()), "entry must be set");193assert(entry->is_set(BlockBegin::exception_entry_flag), "flag must be set");194195// add each exception handler only once196if (!current->is_successor(entry)) {197current->add_successor(entry);198entry->increment_total_preds();199}200201// stop when reaching catchall202if (h->catch_type() == 0) break;203}204}205}206207void BlockListBuilder::handle_jsr(BlockBegin* current, int sr_bci, int next_bci) {208// start a new block after jsr-bytecode and link this block into cfg209make_block_at(next_bci, current);210211// start a new block at the subroutine entry at mark it with special flag212BlockBegin* sr_block = make_block_at(sr_bci, current);213if (!sr_block->is_set(BlockBegin::subroutine_entry_flag)) {214sr_block->set(BlockBegin::subroutine_entry_flag);215}216}217218219void BlockListBuilder::set_leaders() {220bool has_xhandlers = xhandlers()->has_handlers();221BlockBegin* current = NULL;222223// The information which bci starts a new block simplifies the analysis224// Without it, backward branches could jump to a bci where no block was created225// during bytecode iteration. 
This would require the creation of a new block at the226// branch target and a modification of the successor lists.227BitMap bci_block_start = method()->bci_block_start();228229ciBytecodeStream s(method());230while (s.next() != ciBytecodeStream::EOBC()) {231int cur_bci = s.cur_bci();232233if (bci_block_start.at(cur_bci)) {234current = make_block_at(cur_bci, current);235}236assert(current != NULL, "must have current block");237238if (has_xhandlers && GraphBuilder::can_trap(method(), s.cur_bc())) {239handle_exceptions(current, cur_bci);240}241242switch (s.cur_bc()) {243// track stores to local variables for selective creation of phi functions244case Bytecodes::_iinc: store_one(current, s.get_index()); break;245case Bytecodes::_istore: store_one(current, s.get_index()); break;246case Bytecodes::_lstore: store_two(current, s.get_index()); break;247case Bytecodes::_fstore: store_one(current, s.get_index()); break;248case Bytecodes::_dstore: store_two(current, s.get_index()); break;249case Bytecodes::_astore: store_one(current, s.get_index()); break;250case Bytecodes::_istore_0: store_one(current, 0); break;251case Bytecodes::_istore_1: store_one(current, 1); break;252case Bytecodes::_istore_2: store_one(current, 2); break;253case Bytecodes::_istore_3: store_one(current, 3); break;254case Bytecodes::_lstore_0: store_two(current, 0); break;255case Bytecodes::_lstore_1: store_two(current, 1); break;256case Bytecodes::_lstore_2: store_two(current, 2); break;257case Bytecodes::_lstore_3: store_two(current, 3); break;258case Bytecodes::_fstore_0: store_one(current, 0); break;259case Bytecodes::_fstore_1: store_one(current, 1); break;260case Bytecodes::_fstore_2: store_one(current, 2); break;261case Bytecodes::_fstore_3: store_one(current, 3); break;262case Bytecodes::_dstore_0: store_two(current, 0); break;263case Bytecodes::_dstore_1: store_two(current, 1); break;264case Bytecodes::_dstore_2: store_two(current, 2); break;265case Bytecodes::_dstore_3: store_two(current, 3); 
break;266case Bytecodes::_astore_0: store_one(current, 0); break;267case Bytecodes::_astore_1: store_one(current, 1); break;268case Bytecodes::_astore_2: store_one(current, 2); break;269case Bytecodes::_astore_3: store_one(current, 3); break;270271// track bytecodes that affect the control flow272case Bytecodes::_athrow: // fall through273case Bytecodes::_ret: // fall through274case Bytecodes::_ireturn: // fall through275case Bytecodes::_lreturn: // fall through276case Bytecodes::_freturn: // fall through277case Bytecodes::_dreturn: // fall through278case Bytecodes::_areturn: // fall through279case Bytecodes::_return:280current = NULL;281break;282283case Bytecodes::_ifeq: // fall through284case Bytecodes::_ifne: // fall through285case Bytecodes::_iflt: // fall through286case Bytecodes::_ifge: // fall through287case Bytecodes::_ifgt: // fall through288case Bytecodes::_ifle: // fall through289case Bytecodes::_if_icmpeq: // fall through290case Bytecodes::_if_icmpne: // fall through291case Bytecodes::_if_icmplt: // fall through292case Bytecodes::_if_icmpge: // fall through293case Bytecodes::_if_icmpgt: // fall through294case Bytecodes::_if_icmple: // fall through295case Bytecodes::_if_acmpeq: // fall through296case Bytecodes::_if_acmpne: // fall through297case Bytecodes::_ifnull: // fall through298case Bytecodes::_ifnonnull:299make_block_at(s.next_bci(), current);300make_block_at(s.get_dest(), current);301current = NULL;302break;303304case Bytecodes::_goto:305make_block_at(s.get_dest(), current);306current = NULL;307break;308309case Bytecodes::_goto_w:310make_block_at(s.get_far_dest(), current);311current = NULL;312break;313314case Bytecodes::_jsr:315handle_jsr(current, s.get_dest(), s.next_bci());316current = NULL;317break;318319case Bytecodes::_jsr_w:320handle_jsr(current, s.get_far_dest(), s.next_bci());321current = NULL;322break;323324case Bytecodes::_tableswitch: {325// set block for each case326Bytecode_tableswitch sw(&s);327int l = sw.length();328for (int i = 0; 
i < l; i++) {329make_block_at(cur_bci + sw.dest_offset_at(i), current);330}331make_block_at(cur_bci + sw.default_offset(), current);332current = NULL;333break;334}335336case Bytecodes::_lookupswitch: {337// set block for each case338Bytecode_lookupswitch sw(&s);339int l = sw.number_of_pairs();340for (int i = 0; i < l; i++) {341make_block_at(cur_bci + sw.pair_at(i).offset(), current);342}343make_block_at(cur_bci + sw.default_offset(), current);344current = NULL;345break;346}347}348}349}350351352void BlockListBuilder::mark_loops() {353ResourceMark rm;354355_active = BitMap(BlockBegin::number_of_blocks()); _active.clear();356_visited = BitMap(BlockBegin::number_of_blocks()); _visited.clear();357_loop_map = intArray(BlockBegin::number_of_blocks(), 0);358_next_loop_index = 0;359_next_block_number = _blocks.length();360361// recursively iterate the control flow graph362mark_loops(_bci2block->at(0), false);363assert(_next_block_number >= 0, "invalid block numbers");364}365366void BlockListBuilder::make_loop_header(BlockBegin* block) {367if (block->is_set(BlockBegin::exception_entry_flag)) {368// exception edges may look like loops but don't mark them as such369// since it screws up block ordering.370return;371}372if (!block->is_set(BlockBegin::parser_loop_header_flag)) {373block->set(BlockBegin::parser_loop_header_flag);374375assert(_loop_map.at(block->block_id()) == 0, "must not be set yet");376assert(0 <= _next_loop_index && _next_loop_index < BitsPerInt, "_next_loop_index is used as a bit-index in integer");377_loop_map.at_put(block->block_id(), 1 << _next_loop_index);378if (_next_loop_index < 31) _next_loop_index++;379} else {380// block already marked as loop header381assert(is_power_of_2((unsigned int)_loop_map.at(block->block_id())), "exactly one bit must be set");382}383}384385int BlockListBuilder::mark_loops(BlockBegin* block, bool in_subroutine) {386int block_id = block->block_id();387388if (_visited.at(block_id)) {389if (_active.at(block_id)) {390// reached 
block via backward branch391make_loop_header(block);392}393// return cached loop information for this block394return _loop_map.at(block_id);395}396397if (block->is_set(BlockBegin::subroutine_entry_flag)) {398in_subroutine = true;399}400401// set active and visited bits before successors are processed402_visited.set_bit(block_id);403_active.set_bit(block_id);404405intptr_t loop_state = 0;406for (int i = block->number_of_sux() - 1; i >= 0; i--) {407// recursively process all successors408loop_state |= mark_loops(block->sux_at(i), in_subroutine);409}410411// clear active-bit after all successors are processed412_active.clear_bit(block_id);413414// reverse-post-order numbering of all blocks415block->set_depth_first_number(_next_block_number);416_next_block_number--;417418if (loop_state != 0 || in_subroutine ) {419// block is contained at least in one loop, so phi functions are necessary420// phi functions are also necessary for all locals stored in a subroutine421scope()->requires_phi_function().set_union(block->stores_to_locals());422}423424if (block->is_set(BlockBegin::parser_loop_header_flag)) {425int header_loop_state = _loop_map.at(block_id);426assert(is_power_of_2((unsigned)header_loop_state), "exactly one bit must be set");427428// If the highest bit is set (i.e. when integer value is negative), the method429// has 32 or more loops. 
This bit is never cleared because it is used for multiple loops430if (header_loop_state >= 0) {431clear_bits(loop_state, header_loop_state);432}433}434435// cache and return loop information for this block436_loop_map.at_put(block_id, loop_state);437return loop_state;438}439440441#ifndef PRODUCT442443int compare_depth_first(BlockBegin** a, BlockBegin** b) {444return (*a)->depth_first_number() - (*b)->depth_first_number();445}446447void BlockListBuilder::print() {448tty->print("----- initial block list of BlockListBuilder for method ");449method()->print_short_name();450tty->cr();451452// better readability if blocks are sorted in processing order453_blocks.sort(compare_depth_first);454455for (int i = 0; i < _blocks.length(); i++) {456BlockBegin* cur = _blocks.at(i);457tty->print("%4d: B%-4d bci: %-4d preds: %-4d ", cur->depth_first_number(), cur->block_id(), cur->bci(), cur->total_preds());458459tty->print(cur->is_set(BlockBegin::std_entry_flag) ? " std" : " ");460tty->print(cur->is_set(BlockBegin::osr_entry_flag) ? " osr" : " ");461tty->print(cur->is_set(BlockBegin::exception_entry_flag) ? " ex" : " ");462tty->print(cur->is_set(BlockBegin::subroutine_entry_flag) ? " sr" : " ");463tty->print(cur->is_set(BlockBegin::parser_loop_header_flag) ? 
" lh" : " ");464465if (cur->number_of_sux() > 0) {466tty->print(" sux: ");467for (int j = 0; j < cur->number_of_sux(); j++) {468BlockBegin* sux = cur->sux_at(j);469tty->print("B%d ", sux->block_id());470}471}472tty->cr();473}474}475476#endif477478479// A simple growable array of Values indexed by ciFields480class FieldBuffer: public CompilationResourceObj {481private:482GrowableArray<Value> _values;483484public:485FieldBuffer() {}486487void kill() {488_values.trunc_to(0);489}490491Value at(ciField* field) {492assert(field->holder()->is_loaded(), "must be a loaded field");493int offset = field->offset();494if (offset < _values.length()) {495return _values.at(offset);496} else {497return NULL;498}499}500501void at_put(ciField* field, Value value) {502assert(field->holder()->is_loaded(), "must be a loaded field");503int offset = field->offset();504_values.at_put_grow(offset, value, NULL);505}506507};508509510// MemoryBuffer is fairly simple model of the current state of memory.511// It partitions memory into several pieces. The first piece is512// generic memory where little is known about the owner of the memory.513// This is conceptually represented by the tuple <O, F, V> which says514// that the field F of object O has value V. This is flattened so515// that F is represented by the offset of the field and the parallel516// arrays _objects and _values are used for O and V. Loads of O.F can517// simply use V. Newly allocated objects are kept in a separate list518// along with a parallel array for each object which represents the519// current value of its fields. Stores of the default value to fields520// which have never been stored to before are eliminated since they521// are redundant. 
Once newly allocated objects are stored into522// another object or they are passed out of the current compile they523// are treated like generic memory.524525class MemoryBuffer: public CompilationResourceObj {526private:527FieldBuffer _values;528GrowableArray<Value> _objects;529GrowableArray<Value> _newobjects;530GrowableArray<FieldBuffer*> _fields;531532public:533MemoryBuffer() {}534535StoreField* store(StoreField* st) {536if (!EliminateFieldAccess) {537return st;538}539540Value object = st->obj();541Value value = st->value();542ciField* field = st->field();543if (field->holder()->is_loaded()) {544int offset = field->offset();545int index = _newobjects.find(object);546if (index != -1) {547// newly allocated object with no other stores performed on this field548FieldBuffer* buf = _fields.at(index);549if (buf->at(field) == NULL && is_default_value(value)) {550#ifndef PRODUCT551if (PrintIRDuringConstruction && Verbose) {552tty->print_cr("Eliminated store for object %d:", index);553st->print_line();554}555#endif556return NULL;557} else {558buf->at_put(field, value);559}560} else {561_objects.at_put_grow(offset, object, NULL);562_values.at_put(field, value);563}564565store_value(value);566} else {567// if we held onto field names we could alias based on names but568// we don't know what's being stored to so kill it all.569kill();570}571return st;572}573574575// return true if this value correspond to the default value of a field.576bool is_default_value(Value value) {577Constant* con = value->as_Constant();578if (con) {579switch (con->type()->tag()) {580case intTag: return con->type()->as_IntConstant()->value() == 0;581case longTag: return con->type()->as_LongConstant()->value() == 0;582case floatTag: return jint_cast(con->type()->as_FloatConstant()->value()) == 0;583case doubleTag: return jlong_cast(con->type()->as_DoubleConstant()->value()) == jlong_cast(0);584case objectTag: return con->type() == objectNull;585default: ShouldNotReachHere();586}587}588return 
false;589}590591592// return either the actual value of a load or the load itself593Value load(LoadField* load) {594if (!EliminateFieldAccess) {595return load;596}597598if (RoundFPResults && UseSSE < 2 && load->type()->is_float_kind()) {599// can't skip load since value might get rounded as a side effect600return load;601}602603ciField* field = load->field();604Value object = load->obj();605if (field->holder()->is_loaded() && !field->is_volatile()) {606int offset = field->offset();607Value result = NULL;608int index = _newobjects.find(object);609if (index != -1) {610result = _fields.at(index)->at(field);611} else if (_objects.at_grow(offset, NULL) == object) {612result = _values.at(field);613}614if (result != NULL) {615#ifndef PRODUCT616if (PrintIRDuringConstruction && Verbose) {617tty->print_cr("Eliminated load: ");618load->print_line();619}620#endif621assert(result->type()->tag() == load->type()->tag(), "wrong types");622return result;623}624}625return load;626}627628// Record this newly allocated object629void new_instance(NewInstance* object) {630int index = _newobjects.length();631_newobjects.append(object);632if (_fields.at_grow(index, NULL) == NULL) {633_fields.at_put(index, new FieldBuffer());634} else {635_fields.at(index)->kill();636}637}638639void store_value(Value value) {640int index = _newobjects.find(value);641if (index != -1) {642// stored a newly allocated object into another object.643// Assume we've lost track of it as separate slice of memory.644// We could do better by keeping track of whether individual645// fields could alias each other.646_newobjects.remove_at(index);647// pull out the field info and store it at the end up the list648// of field info list to be reused later.649_fields.append(_fields.at(index));650_fields.remove_at(index);651}652}653654void kill() {655_newobjects.trunc_to(0);656_objects.trunc_to(0);657_values.kill();658}659};660661662// Implementation of GraphBuilder's 
ScopeData663664GraphBuilder::ScopeData::ScopeData(ScopeData* parent)665: _parent(parent)666, _bci2block(NULL)667, _scope(NULL)668, _has_handler(false)669, _stream(NULL)670, _work_list(NULL)671, _parsing_jsr(false)672, _jsr_xhandlers(NULL)673, _caller_stack_size(-1)674, _continuation(NULL)675, _num_returns(0)676, _cleanup_block(NULL)677, _cleanup_return_prev(NULL)678, _cleanup_state(NULL)679{680if (parent != NULL) {681_max_inline_size = (intx) ((float) NestedInliningSizeRatio * (float) parent->max_inline_size() / 100.0f);682} else {683_max_inline_size = MaxInlineSize;684}685if (_max_inline_size < MaxTrivialSize) {686_max_inline_size = MaxTrivialSize;687}688}689690691void GraphBuilder::kill_all() {692if (UseLocalValueNumbering) {693vmap()->kill_all();694}695_memory->kill();696}697698699BlockBegin* GraphBuilder::ScopeData::block_at(int bci) {700if (parsing_jsr()) {701// It is necessary to clone all blocks associated with a702// subroutine, including those for exception handlers in the scope703// of the method containing the jsr (because those exception704// handlers may contain ret instructions in some cases).705BlockBegin* block = bci2block()->at(bci);706if (block != NULL && block == parent()->bci2block()->at(bci)) {707BlockBegin* new_block = new BlockBegin(block->bci());708#ifndef PRODUCT709if (PrintInitialBlockList) {710tty->print_cr("CFG: cloned block %d (bci %d) as block %d for jsr",711block->block_id(), block->bci(), new_block->block_id());712}713#endif714// copy data from cloned blocked715new_block->set_depth_first_number(block->depth_first_number());716if (block->is_set(BlockBegin::parser_loop_header_flag)) new_block->set(BlockBegin::parser_loop_header_flag);717// Preserve certain flags for assertion checking718if (block->is_set(BlockBegin::subroutine_entry_flag)) new_block->set(BlockBegin::subroutine_entry_flag);719if (block->is_set(BlockBegin::exception_entry_flag)) new_block->set(BlockBegin::exception_entry_flag);720721// copy was_visited_flag to allow 
early detection of bailouts722// if a block that is used in a jsr has already been visited before,723// it is shared between the normal control flow and a subroutine724// BlockBegin::try_merge returns false when the flag is set, this leads725// to a compilation bailout726if (block->is_set(BlockBegin::was_visited_flag)) new_block->set(BlockBegin::was_visited_flag);727728bci2block()->at_put(bci, new_block);729block = new_block;730}731return block;732} else {733return bci2block()->at(bci);734}735}736737738XHandlers* GraphBuilder::ScopeData::xhandlers() const {739if (_jsr_xhandlers == NULL) {740assert(!parsing_jsr(), "");741return scope()->xhandlers();742}743assert(parsing_jsr(), "");744return _jsr_xhandlers;745}746747748void GraphBuilder::ScopeData::set_scope(IRScope* scope) {749_scope = scope;750bool parent_has_handler = false;751if (parent() != NULL) {752parent_has_handler = parent()->has_handler();753}754_has_handler = parent_has_handler || scope->xhandlers()->has_handlers();755}756757758void GraphBuilder::ScopeData::set_inline_cleanup_info(BlockBegin* block,759Instruction* return_prev,760ValueStack* return_state) {761_cleanup_block = block;762_cleanup_return_prev = return_prev;763_cleanup_state = return_state;764}765766767void GraphBuilder::ScopeData::add_to_work_list(BlockBegin* block) {768if (_work_list == NULL) {769_work_list = new BlockList();770}771772if (!block->is_set(BlockBegin::is_on_work_list_flag)) {773// Do not start parsing the continuation block while in a774// sub-scope775if (parsing_jsr()) {776if (block == jsr_continuation()) {777return;778}779} else {780if (block == continuation()) {781return;782}783}784block->set(BlockBegin::is_on_work_list_flag);785_work_list->push(block);786787sort_top_into_worklist(_work_list, block);788}789}790791792void GraphBuilder::sort_top_into_worklist(BlockList* worklist, BlockBegin* top) {793assert(worklist->top() == top, "");794// sort block descending into work list795const int dfn = 
top->depth_first_number();796assert(dfn != -1, "unknown depth first number");797int i = worklist->length()-2;798while (i >= 0) {799BlockBegin* b = worklist->at(i);800if (b->depth_first_number() < dfn) {801worklist->at_put(i+1, b);802} else {803break;804}805i --;806}807if (i >= -1) worklist->at_put(i + 1, top);808}809810811BlockBegin* GraphBuilder::ScopeData::remove_from_work_list() {812if (is_work_list_empty()) {813return NULL;814}815return _work_list->pop();816}817818819bool GraphBuilder::ScopeData::is_work_list_empty() const {820return (_work_list == NULL || _work_list->length() == 0);821}822823824void GraphBuilder::ScopeData::setup_jsr_xhandlers() {825assert(parsing_jsr(), "");826// clone all the exception handlers from the scope827XHandlers* handlers = new XHandlers(scope()->xhandlers());828const int n = handlers->length();829for (int i = 0; i < n; i++) {830// The XHandlers need to be adjusted to dispatch to the cloned831// handler block instead of the default one but the synthetic832// unlocker needs to be handled specially. 
The synthetic unlocker833// should be left alone since there can be only one and all code834// should dispatch to the same one.835XHandler* h = handlers->handler_at(i);836assert(h->handler_bci() != SynchronizationEntryBCI, "must be real");837h->set_entry_block(block_at(h->handler_bci()));838}839_jsr_xhandlers = handlers;840}841842843int GraphBuilder::ScopeData::num_returns() {844if (parsing_jsr()) {845return parent()->num_returns();846}847return _num_returns;848}849850851void GraphBuilder::ScopeData::incr_num_returns() {852if (parsing_jsr()) {853parent()->incr_num_returns();854} else {855++_num_returns;856}857}858859860// Implementation of GraphBuilder861862#define INLINE_BAILOUT(msg) { inline_bailout(msg); return false; }863864865void GraphBuilder::load_constant() {866ciConstant con = stream()->get_constant();867if (con.basic_type() == T_ILLEGAL) {868BAILOUT("could not resolve a constant");869} else {870ValueType* t = illegalType;871ValueStack* patch_state = NULL;872switch (con.basic_type()) {873case T_BOOLEAN: t = new IntConstant (con.as_boolean()); break;874case T_BYTE : t = new IntConstant (con.as_byte ()); break;875case T_CHAR : t = new IntConstant (con.as_char ()); break;876case T_SHORT : t = new IntConstant (con.as_short ()); break;877case T_INT : t = new IntConstant (con.as_int ()); break;878case T_LONG : t = new LongConstant (con.as_long ()); break;879case T_FLOAT : t = new FloatConstant (con.as_float ()); break;880case T_DOUBLE : t = new DoubleConstant (con.as_double ()); break;881case T_ARRAY : t = new ArrayConstant (con.as_object ()->as_array ()); break;882case T_OBJECT :883{884ciObject* obj = con.as_object();885if (!obj->is_loaded()886|| (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {887patch_state = copy_state_before();888t = new ObjectConstant(obj);889} else {890assert(obj->is_instance(), "must be java_mirror of klass");891t = new InstanceConstant(obj->as_instance());892}893break;894}895default : ShouldNotReachHere();896}897Value 
x;898if (patch_state != NULL) {899x = new Constant(t, patch_state);900} else {901x = new Constant(t);902}903push(t, append(x));904}905}906907908void GraphBuilder::load_local(ValueType* type, int index) {909Value x = state()->local_at(index);910assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");911push(type, x);912}913914915void GraphBuilder::store_local(ValueType* type, int index) {916Value x = pop(type);917store_local(state(), x, index);918}919920921void GraphBuilder::store_local(ValueStack* state, Value x, int index) {922if (parsing_jsr()) {923// We need to do additional tracking of the location of the return924// address for jsrs since we don't handle arbitrary jsr/ret925// constructs. Here we are figuring out in which circumstances we926// need to bail out.927if (x->type()->is_address()) {928scope_data()->set_jsr_return_address_local(index);929930// Also check parent jsrs (if any) at this time to see whether931// they are using this local. We don't handle skipping over a932// ret.933for (ScopeData* cur_scope_data = scope_data()->parent();934cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();935cur_scope_data = cur_scope_data->parent()) {936if (cur_scope_data->jsr_return_address_local() == index) {937BAILOUT("subroutine overwrites return address from previous subroutine");938}939}940} else if (index == scope_data()->jsr_return_address_local()) {941scope_data()->set_jsr_return_address_local(-1);942}943}944945state->store_local(index, round_fp(x));946}947948949void GraphBuilder::load_indexed(BasicType type) {950// In case of in block code motion in range check elimination951ValueStack* state_before = copy_state_indexed_access();952compilation()->set_has_access_indexed(true);953Value index = ipop();954Value array = apop();955Value length = NULL;956if (CSEArrayLength ||957(array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||958(array->as_NewArray() && 
array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {959length = append(new ArrayLength(array, state_before));960}961push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));962}963964965void GraphBuilder::store_indexed(BasicType type) {966// In case of in block code motion in range check elimination967ValueStack* state_before = copy_state_indexed_access();968compilation()->set_has_access_indexed(true);969Value value = pop(as_ValueType(type));970Value index = ipop();971Value array = apop();972Value length = NULL;973if (CSEArrayLength ||974(array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||975(array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {976length = append(new ArrayLength(array, state_before));977}978ciType* array_type = array->declared_type();979bool check_boolean = false;980if (array_type != NULL) {981if (array_type->is_loaded() &&982array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {983assert(type == T_BYTE, "boolean store uses bastore");984Value mask = append(new Constant(new IntConstant(1)));985value = append(new LogicOp(Bytecodes::_iand, value, mask));986}987} else if (type == T_BYTE) {988check_boolean = true;989}990StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);991append(result);992_memory->store_value(value);993994if (type == T_OBJECT && is_profiling()) {995// Note that we'd collect profile data in this method if we wanted it.996compilation()->set_would_profile(true);997998if (profile_checkcasts()) {999result->set_profiled_method(method());1000result->set_profiled_bci(bci());1001result->set_should_profile(true);1002}1003}1004}100510061007void GraphBuilder::stack_op(Bytecodes::Code code) {1008switch (code) {1009case Bytecodes::_pop:1010{ state()->raw_pop();1011}1012break;1013case Bytecodes::_pop2:1014{ 
        state()->raw_pop();
        state()->raw_pop();
      }
      break;
    case Bytecodes::_dup:
      { Value w = state()->raw_pop();
        state()->raw_push(w);
        state()->raw_push(w);
      }
      break;
    case Bytecodes::_dup_x1:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        state()->raw_push(w1);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup_x2:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        Value w3 = state()->raw_pop();
        state()->raw_push(w1);
        state()->raw_push(w3);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup2:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        state()->raw_push(w2);
        state()->raw_push(w1);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup2_x1:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        Value w3 = state()->raw_pop();
        state()->raw_push(w2);
        state()->raw_push(w1);
        state()->raw_push(w3);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup2_x2:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        Value w3 = state()->raw_pop();
        Value w4 = state()->raw_pop();
        state()->raw_push(w2);
        state()->raw_push(w1);
        state()->raw_push(w4);
        state()->raw_push(w3);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_swap:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        state()->raw_push(w1);
        state()->raw_push(w2);
      }
      break;
    default:
      ShouldNotReachHere();
      break;
  }
}


// Pop two operands of 'type', append the arithmetic operation, and push
// the (possibly rounded) result.
void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) {
  Value y = pop(type);
  Value x = pop(type);
  // NOTE: strictfp can be queried from current method since we don't
  // inline methods with differing strictfp bits
  Value res = new ArithmeticOp(code, x, y,
method()->is_strict(), state_before);
  // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level
  res = append(res);
  if (method()->is_strict()) {
    res = round_fp(res);
  }
  push(type, res);
}


// Pop one operand and push its negation.
void GraphBuilder::negate_op(ValueType* type) {
  push(type, append(new NegateOp(pop(type))));
}


// Pop shift amount and operand, emit a ShiftOp; peephole-simplifies the
// common (a << s) >>> s masking idiom when canonicalization is enabled.
void GraphBuilder::shift_op(ValueType* type, Bytecodes::Code code) {
  Value s = ipop();
  Value x = pop(type);
  // try to simplify
  // Note: This code should go into the canonicalizer as soon as it can
  // can handle canonicalized forms that contain more than one node.
  if (CanonicalizeNodes && code == Bytecodes::_iushr) {
    // pattern: x >>> s
    IntConstant* s1 = s->type()->as_IntConstant();
    if (s1 != NULL) {
      // pattern: x >>> s1, with s1 constant
      ShiftOp* l = x->as_ShiftOp();
      if (l != NULL && l->op() == Bytecodes::_ishl) {
        // pattern: (a << b) >>> s1
        IntConstant* s0 = l->y()->type()->as_IntConstant();
        if (s0 != NULL) {
          // pattern: (a << s0) >>> s1
          const int s0c = s0->value() & 0x1F; // only the low 5 bits are significant for shifts
          const int s1c = s1->value() & 0x1F; // only the low 5 bits are significant for shifts
          if (s0c == s1c) {
            if (s0c == 0) {
              // pattern: (a << 0) >>> 0 => simplify to: a
              ipush(l->x());
            } else {
              // pattern: (a << s0c) >>> s0c => simplify to: a & m, with m constant
              assert(0 < s0c && s0c < BitsPerInt, "adjust code below to handle corner cases");
              const int m = (1 << (BitsPerInt - s0c)) - 1;
              Value s = append(new Constant(new IntConstant(m)));
              ipush(append(new LogicOp(Bytecodes::_iand, l->x(), s)));
            }
            return;
          }
        }
      }
    }
  }
  // could not simplify
  push(type, append(new ShiftOp(code, x, s)));
}


// Pop two operands and push the result of the bitwise logic operation.
void GraphBuilder::logic_op(ValueType* type, Bytecodes::Code code) {
  Value y = pop(type);
  Value x = pop(type);
  push(type, append(new LogicOp(code, x,
y)));
}


// lcmp/fcmpl/fcmpg/dcmpl/dcmpg: pop two operands, push the int comparison result.
void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) {
  ValueStack* state_before = copy_state_before();
  Value y = pop(type);
  Value x = pop(type);
  ipush(append(new CompareOp(code, x, y, state_before)));
}


// Primitive conversion bytecodes (i2l, f2d, ...): pop 'from', push 'to'.
void GraphBuilder::convert(Bytecodes::Code op, BasicType from, BasicType to) {
  push(as_ValueType(to), append(new Convert(op, pop(as_ValueType(from)), as_ValueType(to))));
}


// iinc (including the wide form): load local, add the immediate delta, store back.
void GraphBuilder::increment() {
  int index = stream()->get_index();
  // Wide iinc carries a 16-bit signed delta at offset 4; the narrow form an
  // 8-bit signed delta at offset 2.
  int delta = stream()->is_wide() ? (signed short)Bytes::get_Java_u2(stream()->cur_bcp() + 4) : (signed char)(stream()->cur_bcp()[2]);
  load_local(intType, index);
  ipush(append(new Constant(new IntConstant(delta))));
  arithmetic_op(intType, Bytecodes::_iadd);
  store_local(intType, index);
}


// Unconditional goto; a backward branch (to_bci <= from_bci) is a safepoint.
void GraphBuilder::_goto(int from_bci, int to_bci) {
  Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
  if (is_profiling()) {
    compilation()->set_would_profile(true);
    x->set_profiled_bci(bci());
    if (profile_branches()) {
      x->set_profiled_method(method());
      x->set_should_profile(true);
    }
  }
  append(x);
}


// Common tail of all conditional branches: append an If on (x cond y) with
// the true/false successors taken from the bytecode stream, then attach
// profiling info to whatever the canonicalizer turned the If into.
void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
  BlockBegin* tsux = block_at(stream()->get_dest());
  BlockBegin* fsux = block_at(stream()->next_bci());
  // is_bb: at least one successor is a backward branch => safepoint needed.
  bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
  // In case of loop invariant code motion or predicate insertion
  // before the body of a loop the state is needed
  Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ?
state_before : NULL, is_bb));

  assert(i->as_Goto() == NULL ||
         (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
         (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
         "safepoint state of Goto returned by canonicalizer incorrect");

  if (is_profiling()) {
    If* if_node = i->as_If();
    if (if_node != NULL) {
      // Note that we'd collect profile data in this method if we wanted it.
      compilation()->set_would_profile(true);
      // At level 2 we need the proper bci to count backedges
      if_node->set_profiled_bci(bci());
      if (profile_branches()) {
        // Successors can be rotated by the canonicalizer, check for this case.
        if_node->set_profiled_method(method());
        if_node->set_should_profile(true);
        if (if_node->tsux() == fsux) {
          if_node->set_swapped(true);
        }
      }
      return;
    }

    // Check if this If was reduced to Goto.
    Goto *goto_node = i->as_Goto();
    if (goto_node != NULL) {
      compilation()->set_would_profile(true);
      goto_node->set_profiled_bci(bci());
      if (profile_branches()) {
        goto_node->set_profiled_method(method());
        goto_node->set_should_profile(true);
        // Find out which successor is used.
        if (goto_node->default_sux() == tsux) {
          goto_node->set_direction(Goto::taken);
        } else if (goto_node->default_sux() == fsux) {
          goto_node->set_direction(Goto::not_taken);
        } else {
          ShouldNotReachHere();
        }
      }
      return;
    }
  }
}


// ifeq/ifne/iflt/... : compare the popped int against zero.
void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
  Value y = append(new Constant(intZero));
  ValueStack* state_before = copy_state_before();
  Value x = ipop();
  if_node(x, cond, y, state_before);
}


// ifnull/ifnonnull : compare the popped reference against null.
void GraphBuilder::if_null(ValueType* type, If::Condition cond) {
  Value y = append(new Constant(objectNull));
  ValueStack* state_before = copy_state_before();
  Value x = apop();
  if_node(x, cond, y,
state_before);
}


// if_icmpXX/if_acmpXX : compare the two popped operands against each other.
void GraphBuilder::if_same(ValueType* type, If::Condition cond) {
  ValueStack* state_before = copy_state_before();
  Value y = pop(type);
  Value x = pop(type);
  if_node(x, cond, y, state_before);
}


// jsr bytecode: push the return address and inline the subroutine body.
void GraphBuilder::jsr(int dest) {
  // We only handle well-formed jsrs (those which are "block-structured").
  // If the bytecodes are strange (jumping out of a jsr block) then we
  // might end up trying to re-parse a block containing a jsr which
  // has already been activated. Watch for this case and bail out.
  for (ScopeData* cur_scope_data = scope_data();
       cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
       cur_scope_data = cur_scope_data->parent()) {
    if (cur_scope_data->jsr_entry_bci() == dest) {
      BAILOUT("too-complicated jsr/ret structure");
    }
  }

  push(addressType, append(new Constant(new AddressConstant(next_bci()))));
  if (!try_inline_jsr(dest)) {
    return; // bailed out while parsing and inlining subroutine
  }
}


// ret bytecode: only valid for the tracked return-address local of the
// innermost subroutine being parsed; becomes a goto to the jsr continuation.
void GraphBuilder::ret(int local_index) {
  if (!parsing_jsr()) BAILOUT("ret encountered while not parsing subroutine");

  if (local_index != scope_data()->jsr_return_address_local()) {
    BAILOUT("can not handle complicated jsr/ret constructs");
  }

  // Rets simply become (NON-SAFEPOINT) gotos to the jsr continuation
  append(new Goto(scope_data()->jsr_continuation(), false));
}


// tableswitch bytecode: a one-entry table degenerates to an If, otherwise
// build a TableSwitch with one successor per entry plus the default.
void GraphBuilder::table_switch() {
  Bytecode_tableswitch sw(stream());
  const int l = sw.length();
  if (CanonicalizeNodes && l == 1) {
    // total of 2 successors => use If instead of switch
    // Note: This code should go into the canonicalizer as soon as it can
    // can handle canonicalized forms that contain more than one node.
    Value key = append(new Constant(new IntConstant(sw.low_key())));
    BlockBegin* tsux = block_at(bci() + sw.dest_offset_at(0));
    BlockBegin* fsux = block_at(bci() +
sw.default_offset());
    // Negative offsets are backward branches and therefore safepoints.
    bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
    // In case of loop invariant code motion or predicate insertion
    // before the body of a loop the state is needed
    ValueStack* state_before = copy_state_if_bb(is_bb);
    append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
  } else {
    // collect successors
    BlockList* sux = new BlockList(l + 1, NULL);
    int i;
    bool has_bb = false;
    for (i = 0; i < l; i++) {
      sux->at_put(i, block_at(bci() + sw.dest_offset_at(i)));
      if (sw.dest_offset_at(i) < 0) has_bb = true;
    }
    // add default successor
    if (sw.default_offset() < 0) has_bb = true;
    sux->at_put(i, block_at(bci() + sw.default_offset()));
    // In case of loop invariant code motion or predicate insertion
    // before the body of a loop the state is needed
    ValueStack* state_before = copy_state_if_bb(has_bb);
    Instruction* res = append(new TableSwitch(ipop(), sux, sw.low_key(), state_before, has_bb));
#ifdef ASSERT
    if (res->as_Goto()) {
      // If the switch was canonicalized to a Goto, its safepoint flag must
      // agree with the direction of the surviving successor.
      for (i = 0; i < l; i++) {
        if (sux->at(i) == res->as_Goto()->sux_at(0)) {
          assert(res->as_Goto()->is_safepoint() == sw.dest_offset_at(i) < 0, "safepoint state of Goto returned by canonicalizer incorrect");
        }
      }
    }
#endif
  }
}


// lookupswitch bytecode: a one-pair switch degenerates to an If, otherwise
// build a LookupSwitch with parallel successor and key arrays.
void GraphBuilder::lookup_switch() {
  Bytecode_lookupswitch sw(stream());
  const int l = sw.number_of_pairs();
  if (CanonicalizeNodes && l == 1) {
    // total of 2 successors => use If instead of switch
    // Note: This code should go into the canonicalizer as soon as it can
    // can handle canonicalized forms that contain more than one node.
    // simplify to If
    LookupswitchPair pair = sw.pair_at(0);
    Value key = append(new Constant(new IntConstant(pair.match())));
    BlockBegin* tsux = block_at(bci() + pair.offset());
    BlockBegin* fsux = block_at(bci() + sw.default_offset());
    bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
    // In case of loop invariant code
motion or predicate insertion1362// before the body of a loop the state is needed1363ValueStack* state_before = copy_state_if_bb(is_bb);;1364append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));1365} else {1366// collect successors & keys1367BlockList* sux = new BlockList(l + 1, NULL);1368intArray* keys = new intArray(l, 0);1369int i;1370bool has_bb = false;1371for (i = 0; i < l; i++) {1372LookupswitchPair pair = sw.pair_at(i);1373if (pair.offset() < 0) has_bb = true;1374sux->at_put(i, block_at(bci() + pair.offset()));1375keys->at_put(i, pair.match());1376}1377// add default successor1378if (sw.default_offset() < 0) has_bb = true;1379sux->at_put(i, block_at(bci() + sw.default_offset()));1380// In case of loop invariant code motion or predicate insertion1381// before the body of a loop the state is needed1382ValueStack* state_before = copy_state_if_bb(has_bb);1383Instruction* res = append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb));1384#ifdef ASSERT1385if (res->as_Goto()) {1386for (i = 0; i < l; i++) {1387if (sux->at(i) == res->as_Goto()->sux_at(0)) {1388assert(res->as_Goto()->is_safepoint() == sw.pair_at(i).offset() < 0, "safepoint state of Goto returned by canonicalizer incorrect");1389}1390}1391}1392#endif1393}1394}13951396void GraphBuilder::call_register_finalizer() {1397// If the receiver requires finalization then emit code to perform1398// the registration on return.13991400// Gather some type information about the receiver1401Value receiver = state()->local_at(0);1402assert(receiver != NULL, "must have a receiver");1403ciType* declared_type = receiver->declared_type();1404ciType* exact_type = receiver->exact_type();1405if (exact_type == NULL &&1406receiver->as_Local() &&1407receiver->as_Local()->java_index() == 0) {1408ciInstanceKlass* ik = compilation()->method()->holder();1409if (ik->is_final()) {1410exact_type = ik;1411} else if (UseCHA && !(ik->has_subklass() || ik->is_interface())) {1412// test class is leaf 
// class
      compilation()->dependency_recorder()->assert_leaf_type(ik);
      exact_type = ik;
    } else {
      declared_type = ik;
    }
  }

  // see if we know statically that registration isn't required
  bool needs_check = true;
  if (exact_type != NULL) {
    needs_check = exact_type->as_instance_klass()->has_finalizer();
  } else if (declared_type != NULL) {
    ciInstanceKlass* ik = declared_type->as_instance_klass();
    if (!Dependencies::has_finalizable_subclass(ik)) {
      compilation()->dependency_recorder()->assert_has_no_finalizable_subclasses(ik);
      needs_check = false;
    }
  }

  if (needs_check) {
    // Perform the registration of finalizable objects.
    ValueStack* state_before = copy_state_for_exception();
    load_local(objectType, 0);
    append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
                               state()->pop_arguments(1),
                               true, state_before, true));
  }
}


// Handle the *return bytecodes: narrow sub-int return values, register
// finalizers for Object.<init>, and either emit a Return or (when inlining)
// a Goto to the continuation block.
void GraphBuilder::method_return(Value x) {
  if (RegisterFinalizersAtInit &&
      method()->intrinsic_id() == vmIntrinsics::_Object_init) {
    call_register_finalizer();
  }

  // A constructor that wrote a final field needs a store-store barrier
  // before the caller can publish the object.
  bool need_mem_bar = false;
  if (method()->name() == ciSymbol::object_initializer_name() &&
      scope()->wrote_final()) {
    need_mem_bar = true;
  }

  // Sub-int return values live on the stack as ints; narrow them to the
  // declared return type (sign-extend byte/short, mask char/boolean).
  BasicType bt = method()->return_type()->basic_type();
  switch (bt) {
    case T_BYTE:
    {
      Value shift = append(new Constant(new IntConstant(24)));
      x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
      x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
      break;
    }
    case T_SHORT:
    {
      Value shift = append(new Constant(new IntConstant(16)));
      x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
      x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
      break;
    }
    case T_CHAR:
    {
      Value mask = append(new Constant(new IntConstant(0xFFFF)));
      x = append(new LogicOp(Bytecodes::_iand, x, mask));
      break;
    }
    case T_BOOLEAN:
    {
      Value mask = append(new Constant(new IntConstant(1)));
      x = append(new
LogicOp(Bytecodes::_iand, x, mask));
      break;
    }
  }

  // Check to see whether we are inlining. If so, Return
  // instructions become Gotos to the continuation point.
  if (continuation() != NULL) {

    int invoke_bci = state()->caller_state()->bci();

    if (x != NULL) {
      // Calls made through invokehandle/invokedynamic may return a value whose
      // static type is weaker than the declared signature; insert a TypeCast
      // so the inlinee's result carries the declared return type.
      ciMethod* caller = state()->scope()->caller()->method();
      Bytecodes::Code invoke_raw_bc = caller->raw_code_at_bci(invoke_bci);
      if (invoke_raw_bc == Bytecodes::_invokehandle || invoke_raw_bc == Bytecodes::_invokedynamic) {
        ciType* declared_ret_type = caller->get_declared_signature_at_bci(invoke_bci)->return_type();
        if (declared_ret_type->is_klass() && x->exact_type() == NULL &&
            x->declared_type() != declared_ret_type && declared_ret_type != compilation()->env()->Object_klass()) {
          x = append(new TypeCast(declared_ret_type->as_klass(), x, copy_state_before()));
        }
      }
    }

    assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet");

    if (compilation()->env()->dtrace_method_probes()) {
      // Report exit from inline methods
      Values* args = new Values(1);
      args->push(append(new Constant(new MethodConstant(method()))));
      append(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args));
    }

    // If the inlined method is synchronized, the monitor must be
    // released before we jump to the continuation block.
    if (method()->is_synchronized()) {
      assert(state()->locks_size() == 1, "receiver must be locked here");
      monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
    }

    if (need_mem_bar) {
      append(new MemBar(lir_membar_storestore));
    }

    // State at end of inlined method is the state of the caller
    // without the method parameters on stack, including the
    // return value, if any, of the inlined method on operand stack.
    set_state(state()->caller_state()->copy_for_parsing());
    if (x != NULL)
{
      state()->push(x->type(), x);
      // Collect return-type profile for the caller's call site, if requested.
      if (profile_return() && x->type()->is_object_kind()) {
        ciMethod* caller = state()->scope()->method();
        ciMethodData* md = caller->method_data_or_null();
        ciProfileData* data = md->bci_to_data(invoke_bci);
        if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
          bool has_return = data->is_CallTypeData() ? ((ciCallTypeData*)data)->has_return() : ((ciVirtualCallTypeData*)data)->has_return();
          // May not be true in case of an inlined call through a method handle intrinsic.
          if (has_return) {
            profile_return_type(x, method(), caller, invoke_bci);
          }
        }
      }
    }
    Goto* goto_callee = new Goto(continuation(), false);

    // See whether this is the first return; if so, store off some
    // of the state for later examination
    if (num_returns() == 0) {
      set_inline_cleanup_info();
    }

    // The current bci() is in the wrong scope, so use the bci() of
    // the continuation point.
    append_with_bci(goto_callee, scope_data()->continuation()->bci());
    incr_num_returns();
    return;
  }

  // Not inlining: emit a real Return, unlocking first if synchronized.
  state()->truncate_stack(0);
  if (method()->is_synchronized()) {
    // perform the unlocking before exiting the method
    Value receiver;
    if (!method()->is_static()) {
      receiver = _initial_state->local_at(0);
    } else {
      receiver = append(new Constant(new ClassConstant(method()->holder())));
    }
    append_split(new MonitorExit(receiver, state()->unlock()));
  }

  if (need_mem_bar) {
    append(new MemBar(lir_membar_storestore));
  }

  append(new Return(x));
}


// Handle getstatic/putstatic/getfield/putfield: resolve the field, decide
// whether runtime patching is needed, fold compile-time-constant loads, and
// emit the LoadField/StoreField node.
void GraphBuilder::access_field(Bytecodes::Code code) {
  bool will_link;
  ciField* field = stream()->get_field(will_link);
  ciInstanceKlass* holder = field->holder();
  BasicType field_type = field->type()->basic_type();
  ValueType* type = as_ValueType(field_type);
  // call will_link again to determine if the field is valid.
  const bool needs_patching = !holder->is_loaded()
||
                              !field->will_link(method()->holder(), code) ||
                              PatchALot;

  ValueStack* state_before = NULL;
  if (!holder->is_initialized() || needs_patching) {
    // save state before instruction for debug info when
    // deoptimization happens during patching
    state_before = copy_state_before();
  }

  Value obj = NULL;
  if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
    // Static accesses go through the holder's java mirror.
    if (state_before != NULL) {
      // build a patching constant
      obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
    } else {
      obj = new Constant(new InstanceConstant(holder->java_mirror()));
    }
  }

  if (field->is_final() && (code == Bytecodes::_putfield)) {
    // Remember that this scope wrote a final field (drives the membar
    // emitted in method_return for constructors).
    scope()->set_wrote_final();
  }

  const int offset = !needs_patching ? field->offset() : -1;
  switch (code) {
    case Bytecodes::_getstatic: {
      // check for compile-time constants, i.e., initialized static final fields
      Instruction* constant = NULL;
      if (field->is_constant() && !PatchALot) {
        ciConstant field_val = field->constant_value();
        BasicType field_type = field_val.basic_type();
        switch (field_type) {
        case T_ARRAY:
        case T_OBJECT:
          if (field_val.as_object()->should_be_constant()) {
            constant = new Constant(as_ValueType(field_val));
          }
          break;

        default:
          constant = new Constant(as_ValueType(field_val));
        }
        // Stable static fields are checked for non-default values in ciField::initialize_from().
      }
      if (constant != NULL) {
        push(type, append(constant));
      } else {
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
        push(type, append(new LoadField(append(obj), offset, field, true,
                                        state_before, needs_patching)));
      }
      break;
    }
    case Bytecodes::_putstatic:
      { Value val = pop(type);
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
        if (field->type()->basic_type() == T_BOOLEAN) {
          // Normalize boolean stores to 0/1.
          Value mask = append(new Constant(new
IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
        append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
      }
      break;
    case Bytecodes::_getfield: {
      // Check for compile-time constants, i.e., trusted final non-static fields.
      Instruction* constant = NULL;
      obj = apop();
      ObjectType* obj_type = obj->type()->as_ObjectType();
      if (obj_type->is_constant() && !PatchALot) {
        // Receiver is a known constant object: try to fold the field load.
        ciObject* const_oop = obj_type->constant_value();
        if (!const_oop->is_null_object() && const_oop->is_loaded()) {
          if (field->is_constant()) {
            ciConstant field_val = field->constant_value_of(const_oop);
            BasicType field_type = field_val.basic_type();
            switch (field_type) {
            case T_ARRAY:
            case T_OBJECT:
              if (field_val.as_object()->should_be_constant()) {
                constant = new Constant(as_ValueType(field_val));
              }
              break;
            default:
              constant = new Constant(as_ValueType(field_val));
            }
            if (FoldStableValues && field->is_stable() && field_val.is_null_or_zero()) {
              // Stable field with default value can't be constant.
              constant = NULL;
            }
          } else {
            // For CallSite objects treat the target field as a compile time constant.
            if (const_oop->is_call_site()) {
              ciCallSite* call_site = const_oop->as_call_site();
              if (field->is_call_site_target()) {
                ciMethodHandle* target = call_site->get_target();
                if (target != NULL) {  // just in case
                  ciConstant field_val(T_OBJECT, target);
                  constant = new Constant(as_ValueType(field_val));
                  // Add a dependence for invalidation of the optimization.
                  if (!call_site->is_constant_call_site()) {
                    dependency_recorder()->assert_call_site_target_value(call_site, target);
                  }
                }
              }
            }
          }
        }
      }
      if (constant != NULL) {
        push(type, append(constant));
      } else {
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
        LoadField* load = new LoadField(obj, offset, field, false, state_before,
needs_patching);
      // Try load elimination through the local value map; only legal when no
      // runtime patching is required.
      Value replacement = !needs_patching ? _memory->load(load) : load;
      if (replacement != load) {
        assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
        // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
        // conversion. Emit an explicit conversion here to get the correct field value after the write.
        BasicType bt = field->type()->basic_type();
        switch (bt) {
        case T_BOOLEAN:
        case T_BYTE:
          replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
          break;
        case T_CHAR:
          replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
          break;
        case T_SHORT:
          replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
          break;
        default:
          break;
        }
        push(type, replacement);
      } else {
        push(type, append(load));
      }
      }
      break;
    }
    case Bytecodes::_putfield: {
      Value val = pop(type);
      obj = apop();
      if (state_before == NULL) {
        state_before = copy_state_for_exception();
      }
      if (field->type()->basic_type() == T_BOOLEAN) {
        // Normalize boolean stores to 0/1.
        Value mask = append(new Constant(new IntConstant(1)));
        val = append(new LogicOp(Bytecodes::_iand, val, mask));
      }
      StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
      // Redundant-store elimination may drop the store entirely (returns NULL).
      if (!needs_patching) store = _memory->store(store);
      if (store != NULL) {
        append(store);
      }
      break;
    }
    default:
      ShouldNotReachHere();
      break;
  }
}


// Dependency recording is only meaningful when deoptimization is available.
Dependencies* GraphBuilder::dependency_recorder() const {
  assert(DeoptC1, "need debug information");
  return compilation()->dependency_recorder();
}

// How many arguments do we want to profile?
Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
  int n = 0;
  bool has_receiver = may_have_receiver &&
Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
  // 'start' is returned to the caller: index of the first actual argument
  // (skipping the receiver if present).
  start = has_receiver ? 1 : 0;
  if (profile_arguments()) {
    ciProfileData* data = method()->method_data()->bci_to_data(bci());
    if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
      n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
    }
  }
  // If we are inlining then we need to collect arguments to profile parameters for the target
  if (profile_parameters() && target != NULL) {
    if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {
      // The receiver is profiled on method entry so it's included in
      // the number of parameters but here we're only interested in
      // actual arguments.
      n = MAX2(n, target->method_data()->parameters_type_data()->number_of_parameters() - start);
    }
  }
  if (n > 0) {
    return new Values(n);
  }
  return NULL;
}

// Debug-only sanity check: the number of collected profiling arguments must
// match expectations unless the call goes through a method handle intrinsic.
void GraphBuilder::check_args_for_profiling(Values* obj_args, int expected) {
#ifdef ASSERT
  bool ignored_will_link;
  ciSignature* declared_signature = NULL;
  ciMethod* real_target = method()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
  assert(expected == obj_args->length() || real_target->is_method_handle_intrinsic(), "missed on arg?");
#endif
}

// Collect arguments that we want to profile in a list
Values* GraphBuilder::collect_args_for_profiling(Values* args, ciMethod* target, bool may_have_receiver) {
  int start = 0;
  Values* obj_args = args_list_for_profiling(target, start, may_have_receiver);
  if (obj_args == NULL) {
    return NULL;
  }
  int s = obj_args->size();
  // if called through method handle invoke, some arguments may have been popped
  for (int i = start, j = 0; j < s && i < args->length(); i++) {
    // Only object-kind arguments carry type profiles.
    if (args->at(i)->type()->is_object_kind())
{
      obj_args->push(args->at(i));
      j++;
    }
  }
  check_args_for_profiling(obj_args, s);
  return obj_args;
}


// Handle all invoke* bytecodes: resolve the target, validate linkage,
// attempt devirtualization (CHA, exact receiver types), and finally emit
// the Invoke node or inline the callee.
// NOTE(review): this function continues beyond the visible portion of the
// file; only its head is documented here.
void GraphBuilder::invoke(Bytecodes::Code code) {
  bool will_link;
  ciSignature* declared_signature = NULL;
  ciMethod* target = stream()->get_method(will_link, &declared_signature);
  ciKlass* holder = stream()->get_declared_method_holder();
  const Bytecodes::Code bc_raw = stream()->cur_bc_raw();
  assert(declared_signature != NULL, "cannot be null");

  if (!C1PatchInvokeDynamic && Bytecodes::has_optional_appendix(bc_raw) && !will_link) {
    BAILOUT("unlinked call site (C1PatchInvokeDynamic is off)");
  }

  // we have to make sure the argument size (incl. the receiver)
  // is correct for compilation (the call would fail later during
  // linkage anyway) - was bug (gri 7/28/99)
  {
    // Use raw to get rewritten bytecode.
    const bool is_invokestatic = bc_raw == Bytecodes::_invokestatic;
    const bool allow_static =
          is_invokestatic ||
          bc_raw == Bytecodes::_invokehandle ||
          bc_raw == Bytecodes::_invokedynamic;
    if (target->is_loaded()) {
      if (( target->is_static() && !allow_static) ||
          (!target->is_static() &&  is_invokestatic)) {
        BAILOUT("will cause link error");
      }
    }
  }
  ciInstanceKlass* klass = target->holder();

  // check if CHA possible: if so, change the code to invoke_special
  ciInstanceKlass* calling_klass = method()->holder();
  ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
  ciInstanceKlass* actual_recv = callee_holder;

  CompileLog* log = compilation()->log();
  if (log != NULL)
      log->elem("call method='%d' instr='%s'",
                log->identify(target),
                Bytecodes::name(code));

  // invoke-special-super
  if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer()) {
    // For an anonymous class, linkage checks are performed against the host class.
    ciInstanceKlass* sender_klass =
          calling_klass->is_anonymous() ?
calling_klass->host_klass() :1861calling_klass;1862if (sender_klass->is_interface()) {1863int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);1864Value receiver = state()->stack_at(index);1865CheckCast* c = new CheckCast(sender_klass, receiver, copy_state_before());1866c->set_invokespecial_receiver_check();1867state()->stack_at_put(index, append_split(c));1868}1869}18701871// Some methods are obviously bindable without any type checks so1872// convert them directly to an invokespecial or invokestatic.1873if (target->is_loaded() && !target->is_abstract() && target->can_be_statically_bound()) {1874switch (bc_raw) {1875case Bytecodes::_invokevirtual:1876code = Bytecodes::_invokespecial;1877break;1878case Bytecodes::_invokehandle:1879code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;1880break;1881}1882} else {1883if (bc_raw == Bytecodes::_invokehandle) {1884assert(!will_link, "should come here only for unlinked call");1885code = Bytecodes::_invokespecial;1886}1887}18881889// Push appendix argument (MethodType, CallSite, etc.), if one.1890bool patch_for_appendix = false;1891int patching_appendix_arg = 0;1892if (C1PatchInvokeDynamic &&1893(Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot))) {1894Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));1895apush(arg);1896patch_for_appendix = true;1897patching_appendix_arg = (will_link && stream()->has_appendix()) ? 0 : 1;1898} else if (stream()->has_appendix()) {1899ciObject* appendix = stream()->get_appendix();1900Value arg = append(new Constant(new ObjectConstant(appendix)));1901apush(arg);1902}19031904// NEEDS_CLEANUP1905// I've added the target->is_loaded() test below but I don't really understand1906// how klass->is_loaded() can be true and yet target->is_loaded() is false.1907// this happened while running the JCK invokevirtual tests under doit. 
TKR1908ciMethod* cha_monomorphic_target = NULL;1909ciMethod* exact_target = NULL;1910Value better_receiver = NULL;1911if (UseCHA && DeoptC1 && klass->is_loaded() && target->is_loaded() &&1912!(// %%% FIXME: Are both of these relevant?1913target->is_method_handle_intrinsic() ||1914target->is_compiled_lambda_form()) &&1915!patch_for_appendix) {1916Value receiver = NULL;1917ciInstanceKlass* receiver_klass = NULL;1918bool type_is_exact = false;1919// try to find a precise receiver type1920if (will_link && !target->is_static()) {1921int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);1922receiver = state()->stack_at(index);1923ciType* type = receiver->exact_type();1924if (type != NULL && type->is_loaded() &&1925type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {1926receiver_klass = (ciInstanceKlass*) type;1927type_is_exact = true;1928}1929if (type == NULL) {1930type = receiver->declared_type();1931if (type != NULL && type->is_loaded() &&1932type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {1933receiver_klass = (ciInstanceKlass*) type;1934if (receiver_klass->is_leaf_type() && !receiver_klass->is_final()) {1935// Insert a dependency on this type since1936// find_monomorphic_target may assume it's already done.1937dependency_recorder()->assert_leaf_type(receiver_klass);1938type_is_exact = true;1939}1940}1941}1942}1943if (receiver_klass != NULL && type_is_exact &&1944receiver_klass->is_loaded() && code != Bytecodes::_invokespecial) {1945// If we have the exact receiver type we can bind directly to1946// the method to call.1947exact_target = target->resolve_invoke(calling_klass, receiver_klass);1948if (exact_target != NULL) {1949target = exact_target;1950code = Bytecodes::_invokespecial;1951}1952}1953if (receiver_klass != NULL &&1954receiver_klass->is_subtype_of(actual_recv) &&1955actual_recv->is_initialized()) {1956actual_recv = receiver_klass;1957}19581959if ((code == Bytecodes::_invokevirtual && 
callee_holder->is_initialized()) ||1960(code == Bytecodes::_invokeinterface && callee_holder->is_initialized() && !actual_recv->is_interface())) {1961// Use CHA on the receiver to select a more precise method.1962cha_monomorphic_target = target->find_monomorphic_target(calling_klass, callee_holder, actual_recv);1963} else if (code == Bytecodes::_invokeinterface && callee_holder->is_loaded() && receiver != NULL) {1964// if there is only one implementor of this interface then we1965// may be able bind this invoke directly to the implementing1966// klass but we need both a dependence on the single interface1967// and on the method we bind to. Additionally since all we know1968// about the receiver type is the it's supposed to implement the1969// interface we have to insert a check that it's the class we1970// expect. Interface types are not checked by the verifier so1971// they are roughly equivalent to Object.1972ciInstanceKlass* singleton = NULL;1973if (target->holder()->nof_implementors() == 1) {1974singleton = target->holder()->implementor();1975assert(singleton != NULL && singleton != target->holder(),1976"just checking");19771978assert(holder->is_interface(), "invokeinterface to non interface?");1979ciInstanceKlass* decl_interface = (ciInstanceKlass*)holder;1980// the number of implementors for decl_interface is less or1981// equal to the number of implementors for target->holder() so1982// if number of implementors of target->holder() == 1 then1983// number of implementors for decl_interface is 0 or 1. 
If1984// it's 0 then no class implements decl_interface and there's1985// no point in inlining.1986if (!holder->is_loaded() || decl_interface->nof_implementors() != 1 || decl_interface->has_default_methods()) {1987singleton = NULL;1988}1989}1990if (singleton) {1991cha_monomorphic_target = target->find_monomorphic_target(calling_klass, target->holder(), singleton);1992if (cha_monomorphic_target != NULL) {1993// If CHA is able to bind this invoke then update the class1994// to match that class, otherwise klass will refer to the1995// interface.1996klass = cha_monomorphic_target->holder();1997actual_recv = target->holder();19981999// insert a check it's really the expected class.2000CheckCast* c = new CheckCast(klass, receiver, copy_state_for_exception());2001c->set_incompatible_class_change_check();2002c->set_direct_compare(klass->is_final());2003// pass the result of the checkcast so that the compiler has2004// more accurate type info in the inlinee2005better_receiver = append_split(c);2006}2007}2008}2009}20102011if (cha_monomorphic_target != NULL) {2012if (cha_monomorphic_target->is_abstract()) {2013// Do not optimize for abstract methods2014cha_monomorphic_target = NULL;2015}2016}20172018if (cha_monomorphic_target != NULL) {2019if (!(target->is_final_method())) {2020// If we inlined because CHA revealed only a single target method,2021// then we are dependent on that target method not getting overridden2022// by dynamic class loading. 
Be sure to test the "static" receiver2023// dest_method here, as opposed to the actual receiver, which may2024// falsely lead us to believe that the receiver is final or private.2025dependency_recorder()->assert_unique_concrete_method(actual_recv, cha_monomorphic_target);2026}2027code = Bytecodes::_invokespecial;2028}20292030// check if we could do inlining2031if (!PatchALot && Inline && klass->is_loaded() &&2032(klass->is_initialized() || klass->is_interface() && target->holder()->is_initialized())2033&& target->is_loaded()2034&& !patch_for_appendix) {2035// callee is known => check if we have static binding2036assert(target->is_loaded(), "callee must be known");2037if (code == Bytecodes::_invokestatic ||2038code == Bytecodes::_invokespecial ||2039code == Bytecodes::_invokevirtual && target->is_final_method() ||2040code == Bytecodes::_invokedynamic) {2041ciMethod* inline_target = (cha_monomorphic_target != NULL) ? cha_monomorphic_target : target;2042// static binding => check if callee is ok2043bool success = try_inline(inline_target, (cha_monomorphic_target != NULL) || (exact_target != NULL), code, better_receiver);20442045CHECK_BAILOUT();2046clear_inline_bailout();20472048if (success) {2049// Register dependence if JVMTI has either breakpoint2050// setting or hotswapping of methods capabilities since they may2051// cause deoptimization.2052if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {2053dependency_recorder()->assert_evol_method(inline_target);2054}2055return;2056}2057} else {2058print_inlining(target, "no static binding", /*success*/ false);2059}2060} else {2061print_inlining(target, "not inlineable", /*success*/ false);2062}20632064// If we attempted an inline which did not succeed because of a2065// bailout during construction of the callee graph, the entire2066// compilation has to be aborted. 
This is fairly rare and currently2067// seems to only occur for jasm-generated classes which contain2068// jsr/ret pairs which are not associated with finally clauses and2069// do not have exception handlers in the containing method, and are2070// therefore not caught early enough to abort the inlining without2071// corrupting the graph. (We currently bail out with a non-empty2072// stack at a ret in these situations.)2073CHECK_BAILOUT();20742075// inlining not successful => standard invoke2076bool is_loaded = target->is_loaded();2077ValueType* result_type = as_ValueType(declared_signature->return_type());2078ValueStack* state_before = copy_state_exhandling();20792080// The bytecode (code) might change in this method so we are checking this very late.2081const bool has_receiver =2082code == Bytecodes::_invokespecial ||2083code == Bytecodes::_invokevirtual ||2084code == Bytecodes::_invokeinterface;2085Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);2086Value recv = has_receiver ? apop() : NULL;2087int vtable_index = Method::invalid_vtable_index;20882089#ifdef SPARC2090// Currently only supported on Sparc.2091// The UseInlineCaches only controls dispatch to invokevirtuals for2092// loaded classes which we weren't able to statically bind.2093if (!UseInlineCaches && is_loaded && code == Bytecodes::_invokevirtual2094&& !target->can_be_statically_bound()) {2095// Find a vtable index if one is available2096// For arrays, callee_holder is Object. Resolving the call with2097// Object would allow an illegal call to finalize() on an2098// array. 
We use holder instead: illegal calls to finalize() won't2099// be compiled as vtable calls (IC call resolution will catch the2100// illegal call) and the few legal calls on array types won't be2101// either.2102vtable_index = target->resolve_vtable_index(calling_klass, holder);2103}2104#endif21052106if (recv != NULL &&2107(code == Bytecodes::_invokespecial ||2108!is_loaded || target->is_final())) {2109// invokespecial always needs a NULL check. invokevirtual where2110// the target is final or where it's not known that whether the2111// target is final requires a NULL check. Otherwise normal2112// invokevirtual will perform the null check during the lookup2113// logic or the unverified entry point. Profiling of calls2114// requires that the null check is performed in all cases.2115null_check(recv);2116}21172118if (is_profiling()) {2119if (recv != NULL && profile_calls()) {2120null_check(recv);2121}2122// Note that we'd collect profile data in this method if we wanted it.2123compilation()->set_would_profile(true);21242125if (profile_calls()) {2126assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");2127ciKlass* target_klass = NULL;2128if (cha_monomorphic_target != NULL) {2129target_klass = cha_monomorphic_target->holder();2130} else if (exact_target != NULL) {2131target_klass = exact_target->holder();2132}2133profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);2134}2135}21362137Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);2138// push result2139append_split(result);21402141if (result_type != voidType) {2142if (method()->is_strict()) {2143push(result_type, round_fp(result));2144} else {2145push(result_type, result);2146}2147}2148if (profile_return() && result_type->is_object_kind()) {2149profile_return_type(result, target);2150}2151}215221532154void GraphBuilder::new_instance(int klass_index) {2155ValueStack* state_before = 
copy_state_exhandling();
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  assert(klass->is_instance_klass(), "must be an instance klass");
  // The NewInstance node carries the unresolved-klass flag so the back end
  // can emit a patching stub when the klass was not resolved at parse time.
  NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
  // inform the memory buffer about the fresh allocation
  _memory->new_instance(new_instance);
  apush(append_split(new_instance));
}


// Builds the HIR for newarray (primitive array allocation); the element type
// is taken directly from the bytecode's index operand, the length from TOS.
void GraphBuilder::new_type_array() {
  ValueStack* state_before = copy_state_exhandling();
  apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
}


// Builds the HIR for anewarray. If the element klass is not loaded (or
// PatchALot forces it) the full state before the bytecode is kept so the
// allocation site can be patched.
void GraphBuilder::new_object_array() {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  NewArray* n = new NewObjectArray(klass, ipop(), state_before);
  apush(append_split(n));
}


// Returns true if a type check against klass k can be implemented as a single
// pointer compare: either k is a final class, or (with DeoptC1 + UseCHA) it is
// currently a leaf class, in which case a leaf-type dependency is recorded so
// the code is deoptimized if a subclass is loaded later.
bool GraphBuilder::direct_compare(ciKlass* k) {
  if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
    ciInstanceKlass* ik = k->as_instance_klass();
    if (ik->is_final()) {
      return true;
    } else {
      if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
        // test class is leaf class
        dependency_recorder()->assert_leaf_type(ik);
        return true;
      }
    }
  }
  return false;
}


// Builds the HIR for a checkcast bytecode and, when profiling is enabled,
// tags the node with method/bci so type profile data is collected at runtime.
void GraphBuilder::check_cast(int klass_index) {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
  CheckCast* c = new CheckCast(klass, apop(), state_before);
  apush(append_split(c));
  c->set_direct_compare(direct_compare(klass));

  if (is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      c->set_profiled_method(method());
      c->set_profiled_bci(bci());
      c->set_should_profile(true);
    }
  }
}


// Builds the HIR for an instanceof bytecode; mirrors check_cast() but pushes
// an int result instead of the (possibly cast) reference.
void GraphBuilder::instance_of(int klass_index) {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  InstanceOf* i = new InstanceOf(klass, apop(), state_before);
  ipush(append_split(i));
  i->set_direct_compare(direct_compare(klass));

  if (is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      i->set_profiled_method(method());
      i->set_profiled_bci(bci());
      i->set_should_profile(true);
    }
  }
}


// Appends a MonitorEnter for x, recording x in the lock stack of the current
// state, and kills cached values (the lock is a memory barrier for the
// optimizer).
void GraphBuilder::monitorenter(Value x, int bci) {
  // save state before locking in case of deoptimization after a NullPointerException
  ValueStack* state_before = copy_state_for_exception_with_bci(bci);
  append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
  kill_all();
}


// Appends a MonitorExit for x, popping the lock stack, and kills cached values.
void GraphBuilder::monitorexit(Value x, int bci) {
  append_with_bci(new MonitorExit(x, state()->unlock()), bci);
  kill_all();
}


// Builds the HIR for multianewarray with the given number of dimensions.
void GraphBuilder::new_multi_array(int dimensions) {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  ValueStack* state_before = !klass->is_loaded() || PatchALot ?
copy_state_before() : copy_state_exhandling();

  Values* dims = new Values(dimensions, NULL);
  // fill in all dimensions; they are popped innermost-first, so store from
  // the highest index down
  int i = dimensions;
  while (i-- > 0) dims->at_put(i, ipop());
  // create array
  NewArray* n = new NewMultiArray(klass, dims, state_before);
  apush(append_split(n));
}


// Builds the HIR for athrow.
void GraphBuilder::throw_op(int bci) {
  // We require that the debug info for a Throw be the "state before"
  // the Throw (i.e., exception oop is still on TOS)
  ValueStack* state_before = copy_state_before_with_bci(bci);
  Throw* t = new Throw(apop(), state_before);
  // operand stack not needed after a throw
  state()->truncate_stack(0);
  append_with_bci(t, bci);
}


// Inserts a RoundFP node for a double-width FP expression result when the
// target uses x87 arithmetic (UseSSE < 2); loads, constants and parameters
// do not need rounding. Returns the (possibly wrapped) value.
Value GraphBuilder::round_fp(Value fp_value) {
  // no rounding needed if SSE2 is used
  if (RoundFPResults && UseSSE < 2) {
    // Must currently insert rounding node for doubleword values that
    // are results of expressions (i.e., not loads from memory or
    // constants)
    if (fp_value->type()->tag() == doubleTag &&
        fp_value->as_Constant() == NULL &&
        fp_value->as_Local() == NULL &&       // method parameters need no rounding
        fp_value->as_RoundFP() == NULL) {
      return append(new RoundFP(fp_value));
    }
  }
  return fp_value;
}


// Canonicalizes instr, applies local value numbering, links the surviving
// instruction into the current block at the given bci, and wires up the
// state-after and exception handlers. Returns the instruction that actually
// represents the value — which may be a previously appended instruction
// rather than instr itself.
Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) {
  Canonicalizer canon(compilation(), instr, bci);
  Instruction* i1 = canon.canonical();
  if (i1->is_linked() || !i1->can_be_linked()) {
    // Canonicalizer returned an instruction which was already
    // appended so simply return it.
    return i1;
  }

  if (UseLocalValueNumbering) {
    // Lookup the instruction in the ValueMap and add it to the map if
    // it's not found.
    Instruction* i2 = vmap()->find_insert(i1);
    if (i2 != i1) {
      // found an entry in the value map, so just return it.
      assert(i2->is_linked(), "should already be linked");
      return i2;
    }
    // apply the new instruction's effects to the value map
    ValueNumberingEffects vne(vmap());
    i1->visit(&vne);
  }

  // i1 was not eliminated => append it
  assert(i1->next() == NULL, "shouldn't already be linked");
  _last = _last->set_next(i1, canon.bci());

  if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) {
    // set the bailout state but complete normal processing.  We
    // might do a little more work before noticing the bailout so we
    // want processing to continue normally until it's noticed.
    bailout("Method and/or inlining is too large");
  }

#ifndef PRODUCT
  if (PrintIRDuringConstruction) {
    InstructionPrinter ip;
    ip.print_line(i1);
    if (Verbose) {
      state()->print();
    }
  }
#endif

  // save state after modification of operand stack for StateSplit instructions
  StateSplit* s = i1->as_StateSplit();
  if (s != NULL) {
    if (EliminateFieldAccess) {
      Intrinsic* intrinsic = s->as_Intrinsic();
      // invokes (and non-state-preserving intrinsics) may write memory =>
      // cached field values become invalid
      if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
        _memory->kill();
      }
    }
    s->set_state(state()->copy(ValueStack::StateAfter, canon.bci()));
  }

  // set up exception handlers for this instruction if necessary
  if (i1->can_trap()) {
    i1->set_exception_handlers(handle_exception(i1));
    assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
  }
  return i1;
}


// Appends a non-StateSplit instruction (BlockEnds excepted) at the current bci.
Instruction* GraphBuilder::append(Instruction* instr) {
  assert(instr->as_StateSplit() == NULL || instr->as_BlockEnd() != NULL, "wrong append used");
  return append_with_bci(instr, bci());
}


// Appends a StateSplit instruction at the current bci.
Instruction* GraphBuilder::append_split(StateSplit* instr) {
  return append_with_bci(instr, bci());
}


// Appends an explicit NullCheck for value unless it is provably non-null:
// a fresh allocation, or a loaded non-null object constant.
void GraphBuilder::null_check(Value value) {
  if (value->as_NewArray() != NULL || value->as_NewInstance() != NULL) {
    return;
  } else {
    Constant* con = value->as_Constant();
    if (con) {
      ObjectType* c = con->type()->as_ObjectType();
      if (c &&
c->is_loaded()) {
        ObjectConstant* oc = c->as_ObjectConstant();
        if (!oc || !oc->value()->is_null_object()) {
          // loaded, non-null object constant => no check needed
          return;
        }
      }
    }
  }
  append(new NullCheck(value, copy_state_for_exception()));
}


// Computes the list of exception handlers that may cover 'instruction' and
// joins the current state into each handler's entry block, walking outward
// through the inlined (caller) scopes. Also sets the instruction's exception
// state. Returns the (possibly empty) XHandlers list; bails out when a join
// is not possible.
XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
  if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
    assert(instruction->exception_state() == NULL
           || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
           || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
           "exception_state should be of exception kind");
    // no handler anywhere and no exception state needed => nothing to join
    return new XHandlers();
  }

  XHandlers* exception_handlers = new XHandlers();
  ScopeData* cur_scope_data = scope_data();
  ValueStack* cur_state = instruction->state_before();
  ValueStack* prev_state = NULL;
  int scope_count = 0;

  assert(cur_state != NULL, "state_before must be set");
  do {
    int cur_bci = cur_state->bci();
    assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
    assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");

    // join with all potential exception handlers
    XHandlers* list = cur_scope_data->xhandlers();
    const int n = list->length();
    for (int i = 0; i < n; i++) {
      XHandler* h = list->handler_at(i);
      if (h->covers(cur_bci)) {
        // h is a potential exception handler => join it
        compilation()->set_has_exception_handlers(true);

        BlockBegin* entry = h->entry_block();
        if (entry == block()) {
          // It's acceptable for an exception handler to cover itself
          // but we don't handle that in the parser currently.  It's
          // very rare so we bailout instead of trying to handle it.
          BAILOUT_("exception handler covers itself", exception_handlers);
        }
        assert(entry->bci() == h->handler_bci(), "must match");
        assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");

        // previously this was a BAILOUT, but this is not necessary
        // now because asynchronous exceptions are not handled this way.
        assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match");

        // xhandler start with an empty expression stack
        if (cur_state->stack_size() != 0) {
          cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
        }
        if (instruction->exception_state() == NULL) {
          instruction->set_exception_state(cur_state);
        }

        // Note: Usually this join must work. However, very
        // complicated jsr-ret structures where we don't ret from
        // the subroutine can cause the objects on the monitor
        // stacks to not match because blocks can be parsed twice.
        // The only test case we've seen so far which exhibits this
        // problem is caught by the infinite recursion test in
        // GraphBuilder::jsr() if the join doesn't work.
        if (!entry->try_merge(cur_state)) {
          BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers);
        }

        // add current state for correct handling of phi functions at begin of xhandler
        int phi_operand = entry->add_exception_state(cur_state);

        // add entry to the list of xhandlers of this block
        _block->add_exception_handler(entry);

        // add back-edge from xhandler entry to this block
        if (!entry->is_predecessor(_block)) {
          entry->add_predecessor(_block);
        }

        // clone XHandler because phi_operand and scope_count can not be shared
        XHandler* new_xhandler = new XHandler(h);
        new_xhandler->set_phi_operand(phi_operand);
        new_xhandler->set_scope_count(scope_count);
        exception_handlers->append(new_xhandler);

        // fill in exception handler subgraph lazily
        assert(!entry->is_set(BlockBegin::was_visited_flag), "entry must not be visited yet");
        cur_scope_data->add_to_work_list(entry);

        // stop when reaching catchall
        if (h->catch_type() == 0) {
          return exception_handlers;
        }
      }
    }

    if (exception_handlers->length() == 0) {
      // This scope and all callees do not handle exceptions, so the local
      // variables of this scope are not needed.  However, the scope itself is
      // required for a correct exception stack trace -> clear out the locals.
      if (_compilation->env()->should_retain_local_variables()) {
        cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
      } else {
        cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci());
      }
      if (prev_state != NULL) {
        prev_state->set_caller_state(cur_state);
      }
      if (instruction->exception_state() == NULL) {
        instruction->set_exception_state(cur_state);
      }
    }

    // Set up iteration for next time.
    // If parsing a jsr, do not grab exception handlers from the
    // parent scopes for this method (already got them, and they
    // needed to be cloned)

    while (cur_scope_data->parsing_jsr()) {
      cur_scope_data = cur_scope_data->parent();
    }

    assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
    assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler");

    prev_state = cur_state;
    cur_state = cur_state->caller_state();
    cur_scope_data = cur_scope_data->parent();
    scope_count++;
  } while (cur_scope_data != NULL);

  return exception_handlers;
}


// Helper class for simplifying Phis.
class PhiSimplifier : public BlockClosure {
 private:
  bool _has_substitutions;   // set when at least one phi received a substitution
  Value simplify(Value v);

 public:
  // Visits all blocks reachable from start; if any phi was simplified, a
  // SubstitutionResolver pass rewrites all uses afterwards.
  PhiSimplifier(BlockBegin* start) : _has_substitutions(false) {
    start->iterate_preorder(this);
    if (_has_substitutions) {
      SubstitutionResolver sr(start);
    }
  }
  void block_do(BlockBegin* b);
  bool has_substitutions() const { return _has_substitutions; }
};


// Returns the simplified value for v: phis of the form x = [y, y] or
// x = [y, x] collapse to y, recursively. The visited flag breaks cycles
// among mutually referencing phis; cannot_simplify caches negative results.
Value PhiSimplifier::simplify(Value v) {
  Phi* phi = v->as_Phi();

  if (phi == NULL) {
    // no phi function
    return v;
  } else if (v->has_subst()) {
    // already substituted; subst can be phi itself -> simplify
    return simplify(v->subst());
  } else if (phi->is_set(Phi::cannot_simplify)) {
    // already tried to simplify phi before
    return phi;
  } else if (phi->is_set(Phi::visited)) {
    // break cycles in phi functions
    return phi;
  } else if (phi->type()->is_illegal()) {
    // illegal phi functions are ignored anyway
    return phi;

  } else {
    // mark phi function as processed to break cycles in phi functions
    phi->set(Phi::visited);

    // simplify x = [y, x] and x = [y, y] to y
    Value subst = NULL;
    int opd_count = phi->operand_count();
    for (int i = 0; i < opd_count; i++) {
      Value opd = phi->operand_at(i);
      assert(opd != NULL, "Operand must exist!");

      if (opd->type()->is_illegal()) {
        // if one operand is illegal, the entire phi function is illegal
        phi->make_illegal();
        phi->clear(Phi::visited);
        return phi;
      }

      Value new_opd = simplify(opd);
      assert(new_opd != NULL, "Simplified operand must exist!");

      if (new_opd != phi && new_opd != subst) {
        if (subst == NULL) {
          subst = new_opd;
        } else {
          // no simplification possible
          phi->set(Phi::cannot_simplify);
          phi->clear(Phi::visited);
          return phi;
        }
      }
    }

    // successfully simplified phi function
    assert(subst != NULL, "illegal phi function");
    _has_substitutions = true;
    phi->clear(Phi::visited);
    phi->set_subst(subst);

#ifndef PRODUCT
    if (PrintPhiFunctions) {
      tty->print_cr("simplified phi function %c%d to %c%d (Block B%d)", phi->type()->tchar(), phi->id(), subst->type()->tchar(), subst->id(), phi->block()->block_id());
    }
#endif

    return subst;
  }
}


// Tries to simplify every phi function of block b; in debug builds, verifies
// that no trivial simplification was missed and that the caller state holds
// no phi that belongs to b.
void PhiSimplifier::block_do(BlockBegin* b) {
  for_each_phi_fun(b, phi,
                   simplify(phi);
                   );

#ifdef ASSERT
  for_each_phi_fun(b, phi,
                   assert(phi->operand_count() != 1 || phi->subst() != phi, "missed trivial simplification");
                   );

  ValueStack* state = b->state()->caller_state();
  for_each_state_value(state, value,
                       Phi* phi = value->as_Phi();
                       assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
                       );
#endif
}


// This method is called after all blocks are filled with HIR instructions
// It eliminates all Phi functions of the form x = [y, y] and x = [y, x]
void GraphBuilder::eliminate_redundant_phis(BlockBegin* start) {
  PhiSimplifier simplifier(start);
}


// Makes beg the current block with a fresh parsing copy of its entry state
// and parses its bytecodes; kill_all() drops cached values first.
void GraphBuilder::connect_to_end(BlockBegin* beg) {
  // setup iteration
  kill_all();
  _block = beg;
  _state = beg->state()->copy_for_parsing();
  _last = beg;
  iterate_bytecodes_for_block(beg->bci());
}


BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
#ifndef PRODUCT
  if (PrintIRDuringConstruction) {
    tty->cr();
    InstructionPrinter ip;
    ip.print_instr(_block); tty->cr();
    ip.print_stack(_block->state()); tty->cr();
    ip.print_inline_level(_block);
    ip.print_head();
    tty->print_cr("locals size: %d stack size: %d", state()->locals_size(), state()->stack_size());
  }
#endif
  _skip_block = false;
  assert(state() != NULL, "ValueStack missing!");
  CompileLog* log = compilation()->log();
  ciBytecodeStream s(method());
  s.reset_to_bci(bci);
  int prev_bci = bci;
  scope_data()->set_stream(&s);
  // iterate
  Bytecodes::Code code = Bytecodes::_illegal;
  bool push_exception = false;

  if (block()->is_set(BlockBegin::exception_entry_flag) && block()->next() == NULL) {
    // first thing in the exception entry block should be the exception object.
    push_exception = true;
  }

  while (!bailed_out() && last()->as_BlockEnd() == NULL &&
         (code = stream()->next()) != ciBytecodeStream::EOBC() &&
         (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) {
    assert(state()->kind() == ValueStack::Parsing, "invalid state kind");

    if (log != NULL)
      log->set_context("bc code='%d' bci='%d'", (int)code, s.cur_bci());

    // Check for active jsr during OSR compilation
    if
(compilation()->is_osr_compile()2681&& scope()->is_top_scope()2682&& parsing_jsr()2683&& s.cur_bci() == compilation()->osr_bci()) {2684bailout("OSR not supported while a jsr is active");2685}26862687if (push_exception) {2688apush(append(new ExceptionObject()));2689push_exception = false;2690}26912692// handle bytecode2693switch (code) {2694case Bytecodes::_nop : /* nothing to do */ break;2695case Bytecodes::_aconst_null : apush(append(new Constant(objectNull ))); break;2696case Bytecodes::_iconst_m1 : ipush(append(new Constant(new IntConstant (-1)))); break;2697case Bytecodes::_iconst_0 : ipush(append(new Constant(intZero ))); break;2698case Bytecodes::_iconst_1 : ipush(append(new Constant(intOne ))); break;2699case Bytecodes::_iconst_2 : ipush(append(new Constant(new IntConstant ( 2)))); break;2700case Bytecodes::_iconst_3 : ipush(append(new Constant(new IntConstant ( 3)))); break;2701case Bytecodes::_iconst_4 : ipush(append(new Constant(new IntConstant ( 4)))); break;2702case Bytecodes::_iconst_5 : ipush(append(new Constant(new IntConstant ( 5)))); break;2703case Bytecodes::_lconst_0 : lpush(append(new Constant(new LongConstant ( 0)))); break;2704case Bytecodes::_lconst_1 : lpush(append(new Constant(new LongConstant ( 1)))); break;2705case Bytecodes::_fconst_0 : fpush(append(new Constant(new FloatConstant ( 0)))); break;2706case Bytecodes::_fconst_1 : fpush(append(new Constant(new FloatConstant ( 1)))); break;2707case Bytecodes::_fconst_2 : fpush(append(new Constant(new FloatConstant ( 2)))); break;2708case Bytecodes::_dconst_0 : dpush(append(new Constant(new DoubleConstant( 0)))); break;2709case Bytecodes::_dconst_1 : dpush(append(new Constant(new DoubleConstant( 1)))); break;2710case Bytecodes::_bipush : ipush(append(new Constant(new IntConstant(((signed char*)s.cur_bcp())[1])))); break;2711case Bytecodes::_sipush : ipush(append(new Constant(new IntConstant((short)Bytes::get_Java_u2(s.cur_bcp()+1))))); break;2712case Bytecodes::_ldc : // fall through2713case 
Bytecodes::_ldc_w : // fall through2714case Bytecodes::_ldc2_w : load_constant(); break;2715case Bytecodes::_iload : load_local(intType , s.get_index()); break;2716case Bytecodes::_lload : load_local(longType , s.get_index()); break;2717case Bytecodes::_fload : load_local(floatType , s.get_index()); break;2718case Bytecodes::_dload : load_local(doubleType , s.get_index()); break;2719case Bytecodes::_aload : load_local(instanceType, s.get_index()); break;2720case Bytecodes::_iload_0 : load_local(intType , 0); break;2721case Bytecodes::_iload_1 : load_local(intType , 1); break;2722case Bytecodes::_iload_2 : load_local(intType , 2); break;2723case Bytecodes::_iload_3 : load_local(intType , 3); break;2724case Bytecodes::_lload_0 : load_local(longType , 0); break;2725case Bytecodes::_lload_1 : load_local(longType , 1); break;2726case Bytecodes::_lload_2 : load_local(longType , 2); break;2727case Bytecodes::_lload_3 : load_local(longType , 3); break;2728case Bytecodes::_fload_0 : load_local(floatType , 0); break;2729case Bytecodes::_fload_1 : load_local(floatType , 1); break;2730case Bytecodes::_fload_2 : load_local(floatType , 2); break;2731case Bytecodes::_fload_3 : load_local(floatType , 3); break;2732case Bytecodes::_dload_0 : load_local(doubleType, 0); break;2733case Bytecodes::_dload_1 : load_local(doubleType, 1); break;2734case Bytecodes::_dload_2 : load_local(doubleType, 2); break;2735case Bytecodes::_dload_3 : load_local(doubleType, 3); break;2736case Bytecodes::_aload_0 : load_local(objectType, 0); break;2737case Bytecodes::_aload_1 : load_local(objectType, 1); break;2738case Bytecodes::_aload_2 : load_local(objectType, 2); break;2739case Bytecodes::_aload_3 : load_local(objectType, 3); break;2740case Bytecodes::_iaload : load_indexed(T_INT ); break;2741case Bytecodes::_laload : load_indexed(T_LONG ); break;2742case Bytecodes::_faload : load_indexed(T_FLOAT ); break;2743case Bytecodes::_daload : load_indexed(T_DOUBLE); break;2744case Bytecodes::_aaload : 
load_indexed(T_OBJECT); break;2745case Bytecodes::_baload : load_indexed(T_BYTE ); break;2746case Bytecodes::_caload : load_indexed(T_CHAR ); break;2747case Bytecodes::_saload : load_indexed(T_SHORT ); break;2748case Bytecodes::_istore : store_local(intType , s.get_index()); break;2749case Bytecodes::_lstore : store_local(longType , s.get_index()); break;2750case Bytecodes::_fstore : store_local(floatType , s.get_index()); break;2751case Bytecodes::_dstore : store_local(doubleType, s.get_index()); break;2752case Bytecodes::_astore : store_local(objectType, s.get_index()); break;2753case Bytecodes::_istore_0 : store_local(intType , 0); break;2754case Bytecodes::_istore_1 : store_local(intType , 1); break;2755case Bytecodes::_istore_2 : store_local(intType , 2); break;2756case Bytecodes::_istore_3 : store_local(intType , 3); break;2757case Bytecodes::_lstore_0 : store_local(longType , 0); break;2758case Bytecodes::_lstore_1 : store_local(longType , 1); break;2759case Bytecodes::_lstore_2 : store_local(longType , 2); break;2760case Bytecodes::_lstore_3 : store_local(longType , 3); break;2761case Bytecodes::_fstore_0 : store_local(floatType , 0); break;2762case Bytecodes::_fstore_1 : store_local(floatType , 1); break;2763case Bytecodes::_fstore_2 : store_local(floatType , 2); break;2764case Bytecodes::_fstore_3 : store_local(floatType , 3); break;2765case Bytecodes::_dstore_0 : store_local(doubleType, 0); break;2766case Bytecodes::_dstore_1 : store_local(doubleType, 1); break;2767case Bytecodes::_dstore_2 : store_local(doubleType, 2); break;2768case Bytecodes::_dstore_3 : store_local(doubleType, 3); break;2769case Bytecodes::_astore_0 : store_local(objectType, 0); break;2770case Bytecodes::_astore_1 : store_local(objectType, 1); break;2771case Bytecodes::_astore_2 : store_local(objectType, 2); break;2772case Bytecodes::_astore_3 : store_local(objectType, 3); break;2773case Bytecodes::_iastore : store_indexed(T_INT ); break;2774case Bytecodes::_lastore : 
store_indexed(T_LONG ); break;2775case Bytecodes::_fastore : store_indexed(T_FLOAT ); break;2776case Bytecodes::_dastore : store_indexed(T_DOUBLE); break;2777case Bytecodes::_aastore : store_indexed(T_OBJECT); break;2778case Bytecodes::_bastore : store_indexed(T_BYTE ); break;2779case Bytecodes::_castore : store_indexed(T_CHAR ); break;2780case Bytecodes::_sastore : store_indexed(T_SHORT ); break;2781case Bytecodes::_pop : // fall through2782case Bytecodes::_pop2 : // fall through2783case Bytecodes::_dup : // fall through2784case Bytecodes::_dup_x1 : // fall through2785case Bytecodes::_dup_x2 : // fall through2786case Bytecodes::_dup2 : // fall through2787case Bytecodes::_dup2_x1 : // fall through2788case Bytecodes::_dup2_x2 : // fall through2789case Bytecodes::_swap : stack_op(code); break;2790case Bytecodes::_iadd : arithmetic_op(intType , code); break;2791case Bytecodes::_ladd : arithmetic_op(longType , code); break;2792case Bytecodes::_fadd : arithmetic_op(floatType , code); break;2793case Bytecodes::_dadd : arithmetic_op(doubleType, code); break;2794case Bytecodes::_isub : arithmetic_op(intType , code); break;2795case Bytecodes::_lsub : arithmetic_op(longType , code); break;2796case Bytecodes::_fsub : arithmetic_op(floatType , code); break;2797case Bytecodes::_dsub : arithmetic_op(doubleType, code); break;2798case Bytecodes::_imul : arithmetic_op(intType , code); break;2799case Bytecodes::_lmul : arithmetic_op(longType , code); break;2800case Bytecodes::_fmul : arithmetic_op(floatType , code); break;2801case Bytecodes::_dmul : arithmetic_op(doubleType, code); break;2802case Bytecodes::_idiv : arithmetic_op(intType , code, copy_state_for_exception()); break;2803case Bytecodes::_ldiv : arithmetic_op(longType , code, copy_state_for_exception()); break;2804case Bytecodes::_fdiv : arithmetic_op(floatType , code); break;2805case Bytecodes::_ddiv : arithmetic_op(doubleType, code); break;2806case Bytecodes::_irem : arithmetic_op(intType , code, 
copy_state_for_exception()); break;2807case Bytecodes::_lrem : arithmetic_op(longType , code, copy_state_for_exception()); break;2808case Bytecodes::_frem : arithmetic_op(floatType , code); break;2809case Bytecodes::_drem : arithmetic_op(doubleType, code); break;2810case Bytecodes::_ineg : negate_op(intType ); break;2811case Bytecodes::_lneg : negate_op(longType ); break;2812case Bytecodes::_fneg : negate_op(floatType ); break;2813case Bytecodes::_dneg : negate_op(doubleType); break;2814case Bytecodes::_ishl : shift_op(intType , code); break;2815case Bytecodes::_lshl : shift_op(longType, code); break;2816case Bytecodes::_ishr : shift_op(intType , code); break;2817case Bytecodes::_lshr : shift_op(longType, code); break;2818case Bytecodes::_iushr : shift_op(intType , code); break;2819case Bytecodes::_lushr : shift_op(longType, code); break;2820case Bytecodes::_iand : logic_op(intType , code); break;2821case Bytecodes::_land : logic_op(longType, code); break;2822case Bytecodes::_ior : logic_op(intType , code); break;2823case Bytecodes::_lor : logic_op(longType, code); break;2824case Bytecodes::_ixor : logic_op(intType , code); break;2825case Bytecodes::_lxor : logic_op(longType, code); break;2826case Bytecodes::_iinc : increment(); break;2827case Bytecodes::_i2l : convert(code, T_INT , T_LONG ); break;2828case Bytecodes::_i2f : convert(code, T_INT , T_FLOAT ); break;2829case Bytecodes::_i2d : convert(code, T_INT , T_DOUBLE); break;2830case Bytecodes::_l2i : convert(code, T_LONG , T_INT ); break;2831case Bytecodes::_l2f : convert(code, T_LONG , T_FLOAT ); break;2832case Bytecodes::_l2d : convert(code, T_LONG , T_DOUBLE); break;2833case Bytecodes::_f2i : convert(code, T_FLOAT , T_INT ); break;2834case Bytecodes::_f2l : convert(code, T_FLOAT , T_LONG ); break;2835case Bytecodes::_f2d : convert(code, T_FLOAT , T_DOUBLE); break;2836case Bytecodes::_d2i : convert(code, T_DOUBLE, T_INT ); break;2837case Bytecodes::_d2l : convert(code, T_DOUBLE, T_LONG ); break;2838case 
Bytecodes::_d2f : convert(code, T_DOUBLE, T_FLOAT ); break;2839case Bytecodes::_i2b : convert(code, T_INT , T_BYTE ); break;2840case Bytecodes::_i2c : convert(code, T_INT , T_CHAR ); break;2841case Bytecodes::_i2s : convert(code, T_INT , T_SHORT ); break;2842case Bytecodes::_lcmp : compare_op(longType , code); break;2843case Bytecodes::_fcmpl : compare_op(floatType , code); break;2844case Bytecodes::_fcmpg : compare_op(floatType , code); break;2845case Bytecodes::_dcmpl : compare_op(doubleType, code); break;2846case Bytecodes::_dcmpg : compare_op(doubleType, code); break;2847case Bytecodes::_ifeq : if_zero(intType , If::eql); break;2848case Bytecodes::_ifne : if_zero(intType , If::neq); break;2849case Bytecodes::_iflt : if_zero(intType , If::lss); break;2850case Bytecodes::_ifge : if_zero(intType , If::geq); break;2851case Bytecodes::_ifgt : if_zero(intType , If::gtr); break;2852case Bytecodes::_ifle : if_zero(intType , If::leq); break;2853case Bytecodes::_if_icmpeq : if_same(intType , If::eql); break;2854case Bytecodes::_if_icmpne : if_same(intType , If::neq); break;2855case Bytecodes::_if_icmplt : if_same(intType , If::lss); break;2856case Bytecodes::_if_icmpge : if_same(intType , If::geq); break;2857case Bytecodes::_if_icmpgt : if_same(intType , If::gtr); break;2858case Bytecodes::_if_icmple : if_same(intType , If::leq); break;2859case Bytecodes::_if_acmpeq : if_same(objectType, If::eql); break;2860case Bytecodes::_if_acmpne : if_same(objectType, If::neq); break;2861case Bytecodes::_goto : _goto(s.cur_bci(), s.get_dest()); break;2862case Bytecodes::_jsr : jsr(s.get_dest()); break;2863case Bytecodes::_ret : ret(s.get_index()); break;2864case Bytecodes::_tableswitch : table_switch(); break;2865case Bytecodes::_lookupswitch : lookup_switch(); break;2866case Bytecodes::_ireturn : method_return(ipop()); break;2867case Bytecodes::_lreturn : method_return(lpop()); break;2868case Bytecodes::_freturn : method_return(fpop()); break;2869case Bytecodes::_dreturn : 
method_return(dpop()); break;2870case Bytecodes::_areturn : method_return(apop()); break;2871case Bytecodes::_return : method_return(NULL ); break;2872case Bytecodes::_getstatic : // fall through2873case Bytecodes::_putstatic : // fall through2874case Bytecodes::_getfield : // fall through2875case Bytecodes::_putfield : access_field(code); break;2876case Bytecodes::_invokevirtual : // fall through2877case Bytecodes::_invokespecial : // fall through2878case Bytecodes::_invokestatic : // fall through2879case Bytecodes::_invokedynamic : // fall through2880case Bytecodes::_invokeinterface: invoke(code); break;2881case Bytecodes::_new : new_instance(s.get_index_u2()); break;2882case Bytecodes::_newarray : new_type_array(); break;2883case Bytecodes::_anewarray : new_object_array(); break;2884case Bytecodes::_arraylength : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }2885case Bytecodes::_athrow : throw_op(s.cur_bci()); break;2886case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break;2887case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break;2888case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break;2889case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break;2890case Bytecodes::_wide : ShouldNotReachHere(); break;2891case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;2892case Bytecodes::_ifnull : if_null(objectType, If::eql); break;2893case Bytecodes::_ifnonnull : if_null(objectType, If::neq); break;2894case Bytecodes::_goto_w : _goto(s.cur_bci(), s.get_far_dest()); break;2895case Bytecodes::_jsr_w : jsr(s.get_far_dest()); break;2896case Bytecodes::_breakpoint : BAILOUT_("concurrent setting of breakpoint", NULL);2897default : ShouldNotReachHere(); break;2898}28992900if (log != NULL)2901log->clear_context(); // skip marker if nothing was printed29022903// save current bci to setup Goto at the end2904prev_bci = 
s.cur_bci();

  }
  CHECK_BAILOUT_(NULL);
  // stop processing of this block (see try_inline_full)
  if (_skip_block) {
    _skip_block = false;
    assert(_last && _last->as_BlockEnd(), "");
    return _last->as_BlockEnd();
  }
  // if there are any, check if last instruction is a BlockEnd instruction
  BlockEnd* end = last()->as_BlockEnd();
  if (end == NULL) {
    // all blocks must end with a BlockEnd instruction => add a Goto
    end = new Goto(block_at(s.cur_bci()), false);
    append(end);
  }
  assert(end == last()->as_BlockEnd(), "inconsistency");

  assert(end->state() != NULL, "state must already be present");
  // NOTE(review): this disjunction is satisfied whenever the end is not
  // simultaneously a Return and a Throw (which cannot happen), so the
  // stack-size clause is effectively unchecked here — matches upstream.
  assert(end->as_Return() == NULL || end->as_Throw() == NULL || end->state()->stack_size() == 0, "stack not needed for return and throw");

  // connect to begin & set state
  // NOTE that inlining may have changed the block we are parsing
  block()->set_end(end);
  // propagate state to all successors; merge failures indicate
  // inconsistent bytecodes and abort the compilation
  for (int i = end->number_of_sux() - 1; i >= 0; i--) {
    BlockBegin* sux = end->sux_at(i);
    assert(sux->is_predecessor(block()), "predecessor missing");
    // be careful, bailout if bytecodes are strange
    if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL);
    scope_data()->add_to_work_list(end->sux_at(i));
  }

  scope_data()->set_stream(NULL);

  // done
  return end;
}


// Drive parsing of all reachable blocks: repeatedly pop blocks from the
// scope's work list and connect them until the list is empty or the
// compilation bails out. When start_in_current_block_for_inlining is set,
// the first iteration parses bci 0 of the current (inlined) block instead
// of taking a block from the work list.
void GraphBuilder::iterate_all_blocks(bool start_in_current_block_for_inlining) {
  do {
    if (start_in_current_block_for_inlining && !bailed_out()) {
      iterate_bytecodes_for_block(0);
      start_in_current_block_for_inlining = false;
    } else {
      BlockBegin* b;
      while ((b = scope_data()->remove_from_work_list()) != NULL) {
        if (!b->is_set(BlockBegin::was_visited_flag)) {
          if (b->is_set(BlockBegin::osr_entry_flag)) {
            // we're about to parse the osr entry block, so make sure
            // we setup the OSR edge leading into this block so that
            // Phis get setup correctly.
            setup_osr_entry_block();
            // this is no longer the osr entry block, so clear it.
            b->clear(BlockBegin::osr_entry_flag);
          }
          b->set(BlockBegin::was_visited_flag);
          connect_to_end(b);
        }
      }
    }
  } while (!bailed_out() && !scope_data()->is_work_list_empty());
}


// Per-bytecode "may trap" table, indexed by Bytecodes::Code and filled in
// once by GraphBuilder::initialize().
bool GraphBuilder::_can_trap      [Bytecodes::number_of_java_codes];


// One-time setup of the _can_trap table.
void GraphBuilder::initialize() {
  // the following bytecodes are assumed to potentially
  // throw exceptions in compiled code - note that e.g.
  // monitorexit & the return bytecodes do not throw
  // exceptions since monitor pairing proved that they
  // succeed (if monitor pairing succeeded)
  Bytecodes::Code can_trap_list[] =
    { Bytecodes::_ldc
    , Bytecodes::_ldc_w
    , Bytecodes::_ldc2_w
    , Bytecodes::_iaload
    , Bytecodes::_laload
    , Bytecodes::_faload
    , Bytecodes::_daload
    , Bytecodes::_aaload
    , Bytecodes::_baload
    , Bytecodes::_caload
    , Bytecodes::_saload
    , Bytecodes::_iastore
    , Bytecodes::_lastore
    , Bytecodes::_fastore
    , Bytecodes::_dastore
    , Bytecodes::_aastore
    , Bytecodes::_bastore
    , Bytecodes::_castore
    , Bytecodes::_sastore
    , Bytecodes::_idiv
    , Bytecodes::_ldiv
    , Bytecodes::_irem
    , Bytecodes::_lrem
    , Bytecodes::_getstatic
    , Bytecodes::_putstatic
    , Bytecodes::_getfield
    , Bytecodes::_putfield
    , Bytecodes::_invokevirtual
    , Bytecodes::_invokespecial
    , Bytecodes::_invokestatic
    , Bytecodes::_invokedynamic
    , Bytecodes::_invokeinterface
    , Bytecodes::_new
    , Bytecodes::_newarray
    , Bytecodes::_anewarray
    , Bytecodes::_arraylength
    , Bytecodes::_athrow
    , Bytecodes::_checkcast
    , Bytecodes::_instanceof
    , Bytecodes::_monitorenter
    , Bytecodes::_multianewarray
    };

  // initialize trap tables: default everything to "cannot trap"
  for (int i = 0; i < Bytecodes::number_of_java_codes; i++) {
    _can_trap[i] = false;
  }
  // set standard trap info for the bytecodes listed above
  for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) {
    _can_trap[can_trap_list[j]] = true;
  }
}


BlockBegin*
GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
  assert(entry->is_set(f), "entry/flag mismatch");
  // create header block: an otherwise-empty block at the entry's bci that
  // does nothing but Goto to the real entry block
  BlockBegin* h = new BlockBegin(entry->bci());
  h->set_depth_first_number(0);

  Value l = h;
  BlockEnd* g = new Goto(entry, false);
  l->set_next(g, entry->bci());
  h->set_end(g);
  h->set(f);
  // setup header block end state
  ValueStack* s = state->copy(ValueStack::StateAfter, entry->bci()); // can use copy since stack is empty (=> no phis)
  assert(s->stack_is_empty(), "must have empty stack at entry point");
  g->set_state(s);
  return h;
}


// Build the root block of the IR graph (the Base node) and wire it to the
// standard entry (possibly via a fresh header block) and the OSR entry.
BlockBegin* GraphBuilder::setup_start_block(int osr_bci, BlockBegin* std_entry, BlockBegin* osr_entry, ValueStack* state) {
  BlockBegin* start = new BlockBegin(0);

  // This code eliminates the empty start block at the beginning of
  // each method.  Previously, each method started with the
  // start-block created below, and this block was followed by the
  // header block that was always empty.  This header block is only
  // necessary if std_entry is also a backward branch target because
  // then phi functions may be necessary in the header block.  It's
  // also necessary when profiling so that there's a single block that
  // can increment the interpreter_invocation_count.
  BlockBegin* new_header_block;
  if (std_entry->number_of_preds() > 0 || count_invocations() || count_backedges()) {
    new_header_block = header_block(std_entry, BlockBegin::std_entry_flag, state);
  } else {
    new_header_block = std_entry;
  }

  // setup start block (root for the IR graph)
  Base* base =
    new Base(
      new_header_block,
      osr_entry
    );
  start->set_next(base, 0);
  start->set_end(base);
  // create & setup state for start block
  start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
  base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));

  if (base->std_entry()->state() == NULL) {
    // setup states for header blocks
    base->std_entry()->merge(state);
  }

  assert(base->std_entry()->state() != NULL, "");
  return start;
}


// Create and populate the OSR entry block: it loads every live local out of
// the OSR buffer (via OsrEntry/UnsafeGetRaw) and then jumps to the block at
// the OSR bci. Only meaningful for OSR compiles.
void GraphBuilder::setup_osr_entry_block() {
  assert(compilation()->is_osr_compile(), "only for osrs");

  int osr_bci = compilation()->osr_bci();
  ciBytecodeStream s(method());
  s.reset_to_bci(osr_bci);
  s.next();
  scope_data()->set_stream(&s);

  // create a new block to be the osr setup code
  _osr_entry = new BlockBegin(osr_bci);
  _osr_entry->set(BlockBegin::osr_entry_flag);
  _osr_entry->set_depth_first_number(0);
  BlockBegin* target = bci2block()->at(osr_bci);
  assert(target != NULL && target->is_set(BlockBegin::osr_entry_flag), "must be there");
  // the osr entry has no values for locals
  ValueStack* state = target->state()->copy();
  _osr_entry->set_state(state);

  kill_all();
  _block = _osr_entry;
  _state = _osr_entry->state()->copy();
  assert(_state->bci() == osr_bci, "mismatch");
  _last  = _osr_entry;
  Value e = append(new OsrEntry());
  e->set_needs_null_check(false);

  // OSR buffer layout is
  //
  //   locals[nlocals-1..0]
  //   monitors[number_of_locks-1..0]
  //
  // locals is a direct copy of the interpreter frame, so the first slot
  // in the local array is the last local from the interpreter
  // and last slot is local[0] (receiver) from the interpreter
  //
  // Similarly with locks. The first lock slot in the osr buffer is the nth lock
  // from the interpreter frame, the nth lock slot in the osr buffer is 0th lock
  // in the interpreter frame (the method lock if a sync method)

  // Initialize monitors in the compiled activation.

  int index;
  Value local;

  // find all the locals that the interpreter thinks contain live oops
  const BitMap live_oops = method()->live_local_oops_at_bci(osr_bci);

  // compute the offset into the locals so that we can treat the buffer
  // as if the locals were still in the interpreter frame
  int locals_offset = BytesPerWord * (method()->max_locals() - 1);
  for_each_local_value(state, index, local) {
    int offset = locals_offset - (index + local->type()->size() - 1) * BytesPerWord;
    Value get;
    if (local->type()->is_object_kind() && !live_oops.at(index)) {
      // The interpreter thinks this local is dead but the compiler
      // doesn't so pretend that the interpreter passed in null.
      get = append(new Constant(objectNull));
    } else {
      get = append(new UnsafeGetRaw(as_BasicType(local->type()), e,
                                    append(new Constant(new IntConstant(offset))),
                                    0,
                                    true /*unaligned*/, true /*wide*/));
    }
    _state->store_local(index, get);
  }

  // the storage for the OSR buffer is freed manually in the LIRGenerator.

  assert(state->caller_state() == NULL, "should be top scope");
  state->clear_locals();
  Goto* g = new Goto(target, false);
  append(g);
  _osr_entry->set_end(g);
  target->merge(_osr_entry->end()->state());

  scope_data()->set_stream(NULL);
}


// Build the ValueStack describing locals (receiver, arguments) and locking
// state at method entry.
ValueStack* GraphBuilder::state_at_entry() {
  ValueStack* state = new ValueStack(scope(), NULL);
  // Set up locals for receiver
  int idx = 0;
  if (!method()->is_static()) {
    // we should always see the receiver
    state->store_local(idx, new Local(method()->holder(), objectType, idx));
    idx = 1;
  }

  // Set up locals for incoming arguments
  ciSignature* sig = method()->signature();
  for (int i = 0; i < sig->count(); i++) {
    ciType* type = sig->type_at(i);
    BasicType basic_type = type->basic_type();
    // don't allow T_ARRAY to propagate into locals types
    if (basic_type == T_ARRAY) basic_type = T_OBJECT;
    ValueType* vt = as_ValueType(basic_type);
    state->store_local(idx, new Local(type, vt, idx));
    idx += type->size();  // longs/doubles occupy two slots
  }

  // lock synchronized method
  if (method()->is_synchronized()) {
    state->lock(NULL);
  }

  return state;
}


// Construct the HIR for the given scope: build the bci->block mapping,
// parse all reachable blocks (or emit a single Intrinsic node when the
// root method itself is an intrinsic), and wire up the start/OSR blocks.
GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
  : _scope_data(NULL)
  , _instruction_count(0)
  , _osr_entry(NULL)
  , _memory(new MemoryBuffer())
  , _compilation(compilation)
  , _inline_bailout_msg(NULL)
{
  int osr_bci = compilation->osr_bci();

  // determine entry points and bci2block mapping
  BlockListBuilder blm(compilation, scope, osr_bci);
  CHECK_BAILOUT();

  BlockList* bci2block = blm.bci2block();
  BlockBegin* start_block = bci2block->at(0);

  push_root_scope(scope, bci2block, start_block);

  // setup state for std entry
  _initial_state = state_at_entry();
  start_block->merge(_initial_state);

  // complete graph
  _vmap        = new ValueMap();
  switch (scope->method()->intrinsic_id()) {
  case vmIntrinsics::_dabs          : // fall through
  case vmIntrinsics::_dsqrt         : // fall through
  case vmIntrinsics::_dsin          : // fall through
  case vmIntrinsics::_dcos          : // fall through
  case vmIntrinsics::_dtan          : // fall through
  case vmIntrinsics::_dlog          : // fall through
  case vmIntrinsics::_dlog10        : // fall through
  case vmIntrinsics::_dexp          : // fall through
  case vmIntrinsics::_dpow          : // fall through
    {
      // Compiles where the root method is an intrinsic need a special
      // compilation environment because the bytecodes for the method
      // shouldn't be parsed during the compilation, only the special
      // Intrinsic node should be emitted.  If this isn't done the
      // code for the inlined version will be different than the root
      // compiled version which could lead to monotonicity problems on
      // intel.

      // Set up a stream so that appending instructions works properly.
      ciBytecodeStream s(scope->method());
      s.reset_to_bci(0);
      scope_data()->set_stream(&s);
      s.next();

      // setup the initial block state
      _block = start_block;
      _state = start_block->state()->copy_for_parsing();
      _last  = start_block;
      load_local(doubleType, 0);
      // pow takes a second double argument (slot 2, since doubles are 2 slots)
      if (scope->method()->intrinsic_id() == vmIntrinsics::_dpow) {
        load_local(doubleType, 2);
      }

      // Emit the intrinsic node.
      bool result = try_inline_intrinsics(scope->method());
      if (!result) BAILOUT("failed to inline intrinsic");
      method_return(dpop());

      // connect the begin and end blocks and we're all done.
      BlockEnd* end = last()->as_BlockEnd();
      block()->set_end(end);
      break;
    }

  case vmIntrinsics::_Reference_get:
    {
      {
        // With java.lang.ref.reference.get() we must go through the
        // intrinsic - when G1 is enabled - even when get() is the root
        // method of the compile so that, if necessary, the value in
        // the referent field of the reference object gets recorded by
        // the pre-barrier code.
        // Specifically, if G1 is enabled, the value in the referent
        // field is recorded by the G1 SATB pre barrier.  This will
        // result in the referent being marked live and the reference
        // object removed from the list of discovered references during
        // reference processing.

        // Also we need intrinsic to prevent commoning reads from this field
        // across safepoint since GC can change its value.

        // Set up a stream so that appending instructions works properly.
        ciBytecodeStream s(scope->method());
        s.reset_to_bci(0);
        scope_data()->set_stream(&s);
        s.next();

        // setup the initial block state
        _block = start_block;
        _state = start_block->state()->copy_for_parsing();
        _last  = start_block;
        load_local(objectType, 0);

        // Emit the intrinsic node.
        bool result = try_inline_intrinsics(scope->method());
        if (!result) BAILOUT("failed to inline intrinsic");
        method_return(apop());

        // connect the begin and end blocks and we're all done.
        BlockEnd* end = last()->as_BlockEnd();
        block()->set_end(end);
        break;
      }
      // Otherwise, fall thru
    }

  default:
    // normal case: parse all reachable bytecodes
    scope_data()->add_to_work_list(start_block);
    iterate_all_blocks();
    break;
  }
  CHECK_BAILOUT();

  _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);

  eliminate_redundant_phis(_start);

  NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
  // for osr compile, bailout if some requirements are not fulfilled
  if (osr_bci != -1) {
    BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
    if (!osr_block->is_set(BlockBegin::was_visited_flag)) {
      BAILOUT("osr entry must have been visited for osr compile");
    }

    // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
    if (!osr_block->state()->stack_is_empty()) {
      BAILOUT("stack not empty at OSR entry point");
    }
  }
#ifndef PRODUCT
  if (PrintCompilation && Verbose) tty->print_cr("Created %d Instructions", _instruction_count);
#endif
}


ValueStack*
GraphBuilder::copy_state_before() {3349return copy_state_before_with_bci(bci());3350}33513352ValueStack* GraphBuilder::copy_state_exhandling() {3353return copy_state_exhandling_with_bci(bci());3354}33553356ValueStack* GraphBuilder::copy_state_for_exception() {3357return copy_state_for_exception_with_bci(bci());3358}33593360ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) {3361return state()->copy(ValueStack::StateBefore, bci);3362}33633364ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) {3365if (!has_handler()) return NULL;3366return state()->copy(ValueStack::StateBefore, bci);3367}33683369ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) {3370ValueStack* s = copy_state_exhandling_with_bci(bci);3371if (s == NULL) {3372if (_compilation->env()->should_retain_local_variables()) {3373s = state()->copy(ValueStack::ExceptionState, bci);3374} else {3375s = state()->copy(ValueStack::EmptyExceptionState, bci);3376}3377}3378return s;3379}33803381int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {3382int recur_level = 0;3383for (IRScope* s = scope(); s != NULL; s = s->caller()) {3384if (s->method() == cur_callee) {3385++recur_level;3386}3387}3388return recur_level;3389}339033913392bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, Bytecodes::Code bc, Value receiver) {3393const char* msg = NULL;33943395// clear out any existing inline bailout condition3396clear_inline_bailout();33973398// exclude methods we don't want to inline3399msg = should_not_inline(callee);3400if (msg != NULL) {3401print_inlining(callee, msg, /*success*/ false);3402return false;3403}34043405// method handle invokes3406if (callee->is_method_handle_intrinsic()) {3407return try_method_handle_inline(callee);3408}34093410// handle intrinsics3411if (callee->intrinsic_id() != vmIntrinsics::_none) {3412if (try_inline_intrinsics(callee)) {3413print_inlining(callee, "intrinsic");3414return true;3415}3416// try normal 
inlining3417}34183419// certain methods cannot be parsed at all3420msg = check_can_parse(callee);3421if (msg != NULL) {3422print_inlining(callee, msg, /*success*/ false);3423return false;3424}34253426// If bytecode not set use the current one.3427if (bc == Bytecodes::_illegal) {3428bc = code();3429}3430if (try_inline_full(callee, holder_known, bc, receiver))3431return true;34323433// Entire compilation could fail during try_inline_full call.3434// In that case printing inlining decision info is useless.3435if (!bailed_out())3436print_inlining(callee, _inline_bailout_msg, /*success*/ false);34373438return false;3439}344034413442const char* GraphBuilder::check_can_parse(ciMethod* callee) const {3443// Certain methods cannot be parsed at all:3444if ( callee->is_native()) return "native method";3445if ( callee->is_abstract()) return "abstract method";3446if (!callee->can_be_compiled()) return "not compilable (disabled)";3447return NULL;3448}344934503451// negative filter: should callee NOT be inlined? 
returns NULL, ok to inline, or rejection msg3452const char* GraphBuilder::should_not_inline(ciMethod* callee) const {3453if ( callee->should_exclude()) return "excluded by CompilerOracle";3454if ( callee->should_not_inline()) return "disallowed by CompilerOracle";3455if ( callee->dont_inline()) return "don't inline by annotation";3456return NULL;3457}345834593460bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {3461if (callee->is_synchronized()) {3462// We don't currently support any synchronized intrinsics3463return false;3464}34653466// callee seems like a good candidate3467// determine id3468vmIntrinsics::ID id = callee->intrinsic_id();3469if (!InlineNatives && id != vmIntrinsics::_Reference_get) {3470// InlineNatives does not control Reference.get3471INLINE_BAILOUT("intrinsic method inlining disabled");3472}3473bool preserves_state = false;3474bool cantrap = true;3475switch (id) {3476case vmIntrinsics::_arraycopy:3477if (!InlineArrayCopy) return false;3478break;34793480#ifdef JFR_HAVE_INTRINSICS3481#if defined(_LP64) || !defined(TRACE_ID_CLASS_SHIFT)3482case vmIntrinsics::_getClassId:3483preserves_state = false;3484cantrap = false;3485break;3486#endif34873488case vmIntrinsics::_getEventWriter:3489preserves_state = false;3490cantrap = true;3491break;34923493case vmIntrinsics::_counterTime:3494preserves_state = true;3495cantrap = false;3496break;3497#endif34983499case vmIntrinsics::_currentTimeMillis:3500case vmIntrinsics::_nanoTime:3501preserves_state = true;3502cantrap = false;3503break;35043505case vmIntrinsics::_floatToRawIntBits :3506case vmIntrinsics::_intBitsToFloat :3507case vmIntrinsics::_doubleToRawLongBits :3508case vmIntrinsics::_longBitsToDouble :3509if (!InlineMathNatives) return false;3510preserves_state = true;3511cantrap = false;3512break;35133514case vmIntrinsics::_getClass :3515case vmIntrinsics::_isInstance :3516if (!InlineClassNatives) return false;3517preserves_state = true;3518break;35193520case vmIntrinsics::_currentThread 
:3521if (!InlineThreadNatives) return false;3522preserves_state = true;3523cantrap = false;3524break;35253526case vmIntrinsics::_dabs : // fall through3527case vmIntrinsics::_dsqrt : // fall through3528case vmIntrinsics::_dsin : // fall through3529case vmIntrinsics::_dcos : // fall through3530case vmIntrinsics::_dtan : // fall through3531case vmIntrinsics::_dlog : // fall through3532case vmIntrinsics::_dlog10 : // fall through3533case vmIntrinsics::_dexp : // fall through3534case vmIntrinsics::_dpow : // fall through3535if (!InlineMathNatives) return false;3536cantrap = false;3537preserves_state = true;3538break;35393540// Use special nodes for Unsafe instructions so we can more easily3541// perform an address-mode optimization on the raw variants3542case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT, false);3543case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);3544case vmIntrinsics::_getByte : return append_unsafe_get_obj(callee, T_BYTE, false);3545case vmIntrinsics::_getShort : return append_unsafe_get_obj(callee, T_SHORT, false);3546case vmIntrinsics::_getChar : return append_unsafe_get_obj(callee, T_CHAR, false);3547case vmIntrinsics::_getInt : return append_unsafe_get_obj(callee, T_INT, false);3548case vmIntrinsics::_getLong : return append_unsafe_get_obj(callee, T_LONG, false);3549case vmIntrinsics::_getFloat : return append_unsafe_get_obj(callee, T_FLOAT, false);3550case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE, false);35513552case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT, false);3553case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);3554case vmIntrinsics::_putByte : return append_unsafe_put_obj(callee, T_BYTE, false);3555case vmIntrinsics::_putShort : return append_unsafe_put_obj(callee, T_SHORT, false);3556case vmIntrinsics::_putChar : return append_unsafe_put_obj(callee, T_CHAR, false);3557case 
vmIntrinsics::_putInt : return append_unsafe_put_obj(callee, T_INT, false);3558case vmIntrinsics::_putLong : return append_unsafe_put_obj(callee, T_LONG, false);3559case vmIntrinsics::_putFloat : return append_unsafe_put_obj(callee, T_FLOAT, false);3560case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE, false);35613562case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT, true);3563case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);3564case vmIntrinsics::_getByteVolatile : return append_unsafe_get_obj(callee, T_BYTE, true);3565case vmIntrinsics::_getShortVolatile : return append_unsafe_get_obj(callee, T_SHORT, true);3566case vmIntrinsics::_getCharVolatile : return append_unsafe_get_obj(callee, T_CHAR, true);3567case vmIntrinsics::_getIntVolatile : return append_unsafe_get_obj(callee, T_INT, true);3568case vmIntrinsics::_getLongVolatile : return append_unsafe_get_obj(callee, T_LONG, true);3569case vmIntrinsics::_getFloatVolatile : return append_unsafe_get_obj(callee, T_FLOAT, true);3570case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE, true);35713572case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT, true);3573case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);3574case vmIntrinsics::_putByteVolatile : return append_unsafe_put_obj(callee, T_BYTE, true);3575case vmIntrinsics::_putShortVolatile : return append_unsafe_put_obj(callee, T_SHORT, true);3576case vmIntrinsics::_putCharVolatile : return append_unsafe_put_obj(callee, T_CHAR, true);3577case vmIntrinsics::_putIntVolatile : return append_unsafe_put_obj(callee, T_INT, true);3578case vmIntrinsics::_putLongVolatile : return append_unsafe_put_obj(callee, T_LONG, true);3579case vmIntrinsics::_putFloatVolatile : return append_unsafe_put_obj(callee, T_FLOAT, true);3580case vmIntrinsics::_putDoubleVolatile : 
return append_unsafe_put_obj(callee, T_DOUBLE, true);35813582case vmIntrinsics::_getByte_raw : return append_unsafe_get_raw(callee, T_BYTE);3583case vmIntrinsics::_getShort_raw : return append_unsafe_get_raw(callee, T_SHORT);3584case vmIntrinsics::_getChar_raw : return append_unsafe_get_raw(callee, T_CHAR);3585case vmIntrinsics::_getInt_raw : return append_unsafe_get_raw(callee, T_INT);3586case vmIntrinsics::_getLong_raw : return append_unsafe_get_raw(callee, T_LONG);3587case vmIntrinsics::_getFloat_raw : return append_unsafe_get_raw(callee, T_FLOAT);3588case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);35893590case vmIntrinsics::_putByte_raw : return append_unsafe_put_raw(callee, T_BYTE);3591case vmIntrinsics::_putShort_raw : return append_unsafe_put_raw(callee, T_SHORT);3592case vmIntrinsics::_putChar_raw : return append_unsafe_put_raw(callee, T_CHAR);3593case vmIntrinsics::_putInt_raw : return append_unsafe_put_raw(callee, T_INT);3594case vmIntrinsics::_putLong_raw : return append_unsafe_put_raw(callee, T_LONG);3595case vmIntrinsics::_putFloat_raw : return append_unsafe_put_raw(callee, T_FLOAT);3596case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);35973598case vmIntrinsics::_prefetchRead : return append_unsafe_prefetch(callee, false, false);3599case vmIntrinsics::_prefetchWrite : return append_unsafe_prefetch(callee, false, true);3600case vmIntrinsics::_prefetchReadStatic : return append_unsafe_prefetch(callee, true, false);3601case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true, true);36023603case vmIntrinsics::_checkIndex :3604if (!InlineNIOCheckIndex) return false;3605preserves_state = true;3606break;3607case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT, true);3608case vmIntrinsics::_putOrderedInt : return append_unsafe_put_obj(callee, T_INT, true);3609case vmIntrinsics::_putOrderedLong : return append_unsafe_put_obj(callee, 
T_LONG, true);36103611case vmIntrinsics::_compareAndSwapLong:3612if (!VM_Version::supports_cx8()) return false;3613// fall through3614case vmIntrinsics::_compareAndSwapInt:3615case vmIntrinsics::_compareAndSwapObject:3616append_unsafe_CAS(callee);3617return true;36183619case vmIntrinsics::_getAndAddInt:3620if (!VM_Version::supports_atomic_getadd4()) {3621return false;3622}3623return append_unsafe_get_and_set_obj(callee, true);3624case vmIntrinsics::_getAndAddLong:3625if (!VM_Version::supports_atomic_getadd8()) {3626return false;3627}3628return append_unsafe_get_and_set_obj(callee, true);3629case vmIntrinsics::_getAndSetInt:3630if (!VM_Version::supports_atomic_getset4()) {3631return false;3632}3633return append_unsafe_get_and_set_obj(callee, false);3634case vmIntrinsics::_getAndSetLong:3635if (!VM_Version::supports_atomic_getset8()) {3636return false;3637}3638return append_unsafe_get_and_set_obj(callee, false);3639case vmIntrinsics::_getAndSetObject:3640#ifdef _LP643641if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) {3642return false;3643}3644if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) {3645return false;3646}3647#else3648if (!VM_Version::supports_atomic_getset4()) {3649return false;3650}3651#endif3652return append_unsafe_get_and_set_obj(callee, false);36533654case vmIntrinsics::_Reference_get:3655// Use the intrinsic version of Reference.get() so that the value in3656// the referent field can be registered by the G1 pre-barrier code.3657// Also to prevent commoning reads from this field across safepoint3658// since GC can change its value.3659preserves_state = true;3660break;36613662case vmIntrinsics::_updateCRC32:3663case vmIntrinsics::_updateBytesCRC32:3664case vmIntrinsics::_updateByteBufferCRC32:3665if (!UseCRC32Intrinsics) return false;3666cantrap = false;3667preserves_state = true;3668break;36693670case vmIntrinsics::_loadFence :3671case vmIntrinsics::_storeFence:3672case vmIntrinsics::_fullFence 
:
      break;

    default                       : return false; // do not inline
  }
  // create intrinsic node
  const bool has_receiver = !callee->is_static();
  ValueType* result_type = as_ValueType(callee->return_type());
  ValueStack* state_before = copy_state_for_exception();

  Values* args = state()->pop_arguments(callee->arg_size());

  if (is_profiling()) {
    // Don't profile in the special case where the root method
    // is the intrinsic
    if (callee != method()) {
      // Note that we'd collect profile data in this method if we wanted it.
      compilation()->set_would_profile(true);
      if (profile_calls()) {
        Value recv = NULL;
        if (has_receiver) {
          recv = args->at(0);
          null_check(recv);
        }
        profile_call(callee, recv, NULL, collect_args_for_profiling(args, callee, true), true);
      }
    }
  }

  Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
                                    preserves_state, cantrap);
  // append instruction & push result
  Value value = append_split(result);
  if (result_type != voidType) push(result_type, value);

  if (callee != method() && profile_return() && result_type->is_object_kind()) {
    profile_return_type(result, callee);
  }

  // done
  return true;
}


// Parse a jsr subroutine in-line: parse the blocks reachable from the jsr
// target inside a dedicated jsr scope and queue the continuation (the block
// after the jsr) on the caller's work list.  Returns true on success; a
// bailout during subroutine parsing returns false via CHECK_BAILOUT_.
bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
  // Introduce a new callee continuation point - all Ret instructions
  // will be replaced with Gotos to this point.
  BlockBegin* cont = block_at(next_bci());
  assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr");

  // Note: can not assign state to continuation yet, as we have to
  // pick up the state from the Ret instructions.

  // Push callee scope
  push_scope_for_jsr(cont, jsr_dest_bci);

  // Temporarily set up bytecode stream so we can append instructions
  // (only using the bci of this stream)
  scope_data()->set_stream(scope_data()->parent()->stream());

  BlockBegin* jsr_start_block = block_at(jsr_dest_bci);
  assert(jsr_start_block != NULL, "jsr start block must exist");
  assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet");
  Goto* goto_sub = new Goto(jsr_start_block, false);
  // Must copy state to avoid wrong sharing when parsing bytecodes
  assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block");
  jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci));
  append(goto_sub);
  _block->set_end(goto_sub);
  _last = _block = jsr_start_block;

  // Clear out bytecode stream
  scope_data()->set_stream(NULL);

  scope_data()->add_to_work_list(jsr_start_block);

  // Ready to resume parsing in subroutine
  iterate_all_blocks();

  // If we bailed out during parsing, return immediately (this is bad news)
  CHECK_BAILOUT_(false);

  // Detect whether the continuation can actually be reached. If not,
  // it has not had state set by the join() operations in
  // iterate_bytecodes_for_block()/ret() and we should not touch the
  // iteration state. The calling activation of
  // iterate_bytecodes_for_block will then complete normally.
  if (cont->state() != NULL) {
    if (!cont->is_set(BlockBegin::was_visited_flag)) {
      // add continuation to work list instead of parsing it immediately
      scope_data()->parent()->add_to_work_list(cont);
    }
  }

  assert(jsr_continuation() == cont, "continuation must not have changed");
  assert(!jsr_continuation()->is_set(BlockBegin::was_visited_flag) ||
         jsr_continuation()->is_set(BlockBegin::parser_loop_header_flag),
         "continuation can only be visited in case of backward branches");
  assert(_last && _last->as_BlockEnd(), "block must have end");

  // continuation is in work list, so end iteration of current block
  _skip_block = true;
  pop_scope_for_jsr();

  return true;
}


// Inline the entry of a synchronized method as a monitor enter and
// register the exception handler which releases the monitor if an
// exception is thrown within the callee. Note that the monitor enter
// cannot throw an exception itself, because the receiver is
// guaranteed to be non-null by the explicit null check at the
// beginning of inlining.
void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) {
  assert(lock != NULL && sync_handler != NULL, "lock or handler missing");

  monitorenter(lock, SynchronizationEntryBCI);
  assert(_last->as_MonitorEnter() != NULL, "monitor enter expected");
  _last->set_needs_null_check(false);

  sync_handler->set(BlockBegin::exception_entry_flag);
  sync_handler->set(BlockBegin::is_on_work_list_flag);

  // Synthetic handler covering bci range [0, code_size): releases the monitor
  // for any exception escaping the inlined callee.
  ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
  XHandler* h = new XHandler(desc);
  h->set_entry_block(sync_handler);
  scope_data()->xhandlers()->append(h);
  scope_data()->set_has_handler();
}


// If an exception is thrown and not handled within an inlined
// synchronized method, the
monitor must be released before the3806// exception is rethrown in the outer scope. Generate the appropriate3807// instructions here.3808void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool default_handler) {3809BlockBegin* orig_block = _block;3810ValueStack* orig_state = _state;3811Instruction* orig_last = _last;3812_last = _block = sync_handler;3813_state = sync_handler->state()->copy();38143815assert(sync_handler != NULL, "handler missing");3816assert(!sync_handler->is_set(BlockBegin::was_visited_flag), "is visited here");38173818assert(lock != NULL || default_handler, "lock or handler missing");38193820XHandler* h = scope_data()->xhandlers()->remove_last();3821assert(h->entry_block() == sync_handler, "corrupt list of handlers");38223823block()->set(BlockBegin::was_visited_flag);3824Value exception = append_with_bci(new ExceptionObject(), SynchronizationEntryBCI);3825assert(exception->is_pinned(), "must be");38263827int bci = SynchronizationEntryBCI;3828if (compilation()->env()->dtrace_method_probes()) {3829// Report exit from inline methods. 
We don't have a stream here3830// so pass an explicit bci of SynchronizationEntryBCI.3831Values* args = new Values(1);3832args->push(append_with_bci(new Constant(new MethodConstant(method())), bci));3833append_with_bci(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args), bci);3834}38353836if (lock) {3837assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing");3838if (!lock->is_linked()) {3839lock = append_with_bci(lock, bci);3840}38413842// exit the monitor in the context of the synchronized method3843monitorexit(lock, bci);38443845// exit the context of the synchronized method3846if (!default_handler) {3847pop_scope();3848bci = _state->caller_state()->bci();3849_state = _state->caller_state()->copy_for_parsing();3850}3851}38523853// perform the throw as if at the the call site3854apush(exception);3855throw_op(bci);38563857BlockEnd* end = last()->as_BlockEnd();3858block()->set_end(end);38593860_block = orig_block;3861_state = orig_state;3862_last = orig_last;3863}386438653866bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known, Bytecodes::Code bc, Value receiver) {3867assert(!callee->is_native(), "callee must not be native");3868if (CompilationPolicy::policy()->should_not_inline(compilation()->env(), callee)) {3869INLINE_BAILOUT("inlining prohibited by policy");3870}3871// first perform tests of things it's not possible to inline3872if (callee->has_exception_handlers() &&3873!InlineMethodsWithExceptionHandlers) INLINE_BAILOUT("callee has exception handlers");3874if (callee->is_synchronized() &&3875!InlineSynchronizedMethods ) INLINE_BAILOUT("callee is synchronized");3876if (!callee->holder()->is_initialized()) INLINE_BAILOUT("callee's klass not initialized yet");3877if (!callee->has_balanced_monitors()) INLINE_BAILOUT("callee's monitors do not match");38783879// Proper inlining of methods with jsrs requires a little more work.3880if 
(callee->has_jsrs() ) INLINE_BAILOUT("jsrs not handled properly by inliner yet");38813882// When SSE2 is used on intel, then no special handling is needed3883// for strictfp because the enum-constant is fixed at compile time,3884// the check for UseSSE2 is needed here3885if (strict_fp_requires_explicit_rounding && UseSSE < 2 && method()->is_strict() != callee->is_strict()) {3886INLINE_BAILOUT("caller and callee have different strict fp requirements");3887}38883889if (is_profiling() && !callee->ensure_method_data()) {3890INLINE_BAILOUT("mdo allocation failed");3891}38923893// now perform tests that are based on flag settings3894if (callee->force_inline() || callee->should_inline()) {3895if (inline_level() > MaxForceInlineLevel ) INLINE_BAILOUT("MaxForceInlineLevel");3896if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");38973898const char* msg = "";3899if (callee->force_inline()) msg = "force inline by annotation";3900if (callee->should_inline()) msg = "force inline by CompileOracle";3901print_inlining(callee, msg);3902} else {3903// use heuristic controls on inlining3904if (inline_level() > MaxInlineLevel ) INLINE_BAILOUT("inlining too deep");3905if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");3906if (callee->code_size_for_inlining() > max_inline_size() ) INLINE_BAILOUT("callee is too large");39073908// don't inline throwable methods unless the inlining tree is rooted in a throwable class3909if (callee->name() == ciSymbol::object_initializer_name() &&3910callee->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {3911// Throwable constructor call3912IRScope* top = scope();3913while (top->caller() != NULL) {3914top = top->caller();3915}3916if (!top->method()->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {3917INLINE_BAILOUT("don't inline Throwable constructors");3918}3919}39203921if 
(compilation()->env()->num_inlined_bytecodes() > DesiredMethodLimit) {3922INLINE_BAILOUT("total inlining greater than DesiredMethodLimit");3923}3924// printing3925print_inlining(callee);3926}39273928// NOTE: Bailouts from this point on, which occur at the3929// GraphBuilder level, do not cause bailout just of the inlining but3930// in fact of the entire compilation.39313932BlockBegin* orig_block = block();39333934const bool is_invokedynamic = bc == Bytecodes::_invokedynamic;3935const bool has_receiver = (bc != Bytecodes::_invokestatic && !is_invokedynamic);39363937const int args_base = state()->stack_size() - callee->arg_size();3938assert(args_base >= 0, "stack underflow during inlining");39393940// Insert null check if necessary3941Value recv = NULL;3942if (has_receiver) {3943// note: null check must happen even if first instruction of callee does3944// an implicit null check since the callee is in a different scope3945// and we must make sure exception handling does the right thing3946assert(!callee->is_static(), "callee must not be static");3947assert(callee->arg_size() > 0, "must have at least a receiver");3948recv = state()->stack_at(args_base);3949null_check(recv);3950}39513952if (is_profiling()) {3953// Note that we'd collect profile data in this method if we wanted it.3954// this may be redundant here...3955compilation()->set_would_profile(true);39563957if (profile_calls()) {3958int start = 0;3959Values* obj_args = args_list_for_profiling(callee, start, has_receiver);3960if (obj_args != NULL) {3961int s = obj_args->size();3962// if called through method handle invoke, some arguments may have been popped3963for (int i = args_base+start, j = 0; j < obj_args->size() && i < state()->stack_size(); ) {3964Value v = state()->stack_at_inc(i);3965if (v->type()->is_object_kind()) {3966obj_args->push(v);3967j++;3968}3969}3970check_args_for_profiling(obj_args, s);3971}3972profile_call(callee, recv, holder_known ? 
callee->holder() : NULL, obj_args, true);3973}3974}39753976// Introduce a new callee continuation point - if the callee has3977// more than one return instruction or the return does not allow3978// fall-through of control flow, all return instructions of the3979// callee will need to be replaced by Goto's pointing to this3980// continuation point.3981BlockBegin* cont = block_at(next_bci());3982bool continuation_existed = true;3983if (cont == NULL) {3984cont = new BlockBegin(next_bci());3985// low number so that continuation gets parsed as early as possible3986cont->set_depth_first_number(0);3987#ifndef PRODUCT3988if (PrintInitialBlockList) {3989tty->print_cr("CFG: created block %d (bci %d) as continuation for inline at bci %d",3990cont->block_id(), cont->bci(), bci());3991}3992#endif3993continuation_existed = false;3994}3995// Record number of predecessors of continuation block before3996// inlining, to detect if inlined method has edges to its3997// continuation after inlining.3998int continuation_preds = cont->number_of_preds();39994000// Push callee scope4001push_scope(callee, cont);40024003// the BlockListBuilder for the callee could have bailed out4004if (bailed_out())4005return false;40064007// Temporarily set up bytecode stream so we can append instructions4008// (only using the bci of this stream)4009scope_data()->set_stream(scope_data()->parent()->stream());40104011// Pass parameters into callee state: add assignments4012// note: this will also ensure that all arguments are computed before being passed4013ValueStack* callee_state = state();4014ValueStack* caller_state = state()->caller_state();4015for (int i = args_base; i < caller_state->stack_size(); ) {4016const int arg_no = i - args_base;4017Value arg = caller_state->stack_at_inc(i);4018store_local(callee_state, arg, arg_no);4019}40204021// Remove args from stack.4022// Note that we preserve locals state in case we can use it later4023// (see use of pop_scope() 
below)4024caller_state->truncate_stack(args_base);4025assert(callee_state->stack_size() == 0, "callee stack must be empty");40264027Value lock = NULL;4028BlockBegin* sync_handler = NULL;40294030// Inline the locking of the receiver if the callee is synchronized4031if (callee->is_synchronized()) {4032lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror())))4033: state()->local_at(0);4034sync_handler = new BlockBegin(SynchronizationEntryBCI);4035inline_sync_entry(lock, sync_handler);4036}40374038if (compilation()->env()->dtrace_method_probes()) {4039Values* args = new Values(1);4040args->push(append(new Constant(new MethodConstant(method()))));4041append(new RuntimeCall(voidType, "dtrace_method_entry", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), args));4042}40434044if (profile_inlined_calls()) {4045profile_invocation(callee, copy_state_before_with_bci(SynchronizationEntryBCI));4046}40474048BlockBegin* callee_start_block = block_at(0);4049if (callee_start_block != NULL) {4050assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header");4051Goto* goto_callee = new Goto(callee_start_block, false);4052// The state for this goto is in the scope of the callee, so use4053// the entry bci for the callee instead of the call site bci.4054append_with_bci(goto_callee, 0);4055_block->set_end(goto_callee);4056callee_start_block->merge(callee_state);40574058_last = _block = callee_start_block;40594060scope_data()->add_to_work_list(callee_start_block);4061}40624063// Clear out bytecode stream4064scope_data()->set_stream(NULL);40654066CompileLog* log = compilation()->log();4067if (log != NULL) log->head("parse method='%d'", log->identify(callee));40684069// Ready to resume parsing in callee (either in the same block we4070// were in before or in the callee's start block)4071iterate_all_blocks(callee_start_block == NULL);40724073if (log != NULL) log->done("parse");40744075// If we bailed 
out during parsing, return immediately (this is bad news)4076if (bailed_out())4077return false;40784079// iterate_all_blocks theoretically traverses in random order; in4080// practice, we have only traversed the continuation if we are4081// inlining into a subroutine4082assert(continuation_existed ||4083!continuation()->is_set(BlockBegin::was_visited_flag),4084"continuation should not have been parsed yet if we created it");40854086// At this point we are almost ready to return and resume parsing of4087// the caller back in the GraphBuilder. The only thing we want to do4088// first is an optimization: during parsing of the callee we4089// generated at least one Goto to the continuation block. If we4090// generated exactly one, and if the inlined method spanned exactly4091// one block (and we didn't have to Goto its entry), then we snip4092// off the Goto to the continuation, allowing control to fall4093// through back into the caller block and effectively performing4094// block merging. This allows load elimination and CSE to take place4095// across multiple callee scopes if they are relatively simple, and4096// is currently essential to making inlining profitable.4097if (num_returns() == 14098&& block() == orig_block4099&& block() == inline_cleanup_block()) {4100_last = inline_cleanup_return_prev();4101_state = inline_cleanup_state();4102} else if (continuation_preds == cont->number_of_preds()) {4103// Inlining caused that the instructions after the invoke in the4104// caller are not reachable any more. 
So skip filling this block4105// with instructions!4106assert(cont == continuation(), "");4107assert(_last && _last->as_BlockEnd(), "");4108_skip_block = true;4109} else {4110// Resume parsing in continuation block unless it was already parsed.4111// Note that if we don't change _last here, iteration in4112// iterate_bytecodes_for_block will stop when we return.4113if (!continuation()->is_set(BlockBegin::was_visited_flag)) {4114// add continuation to work list instead of parsing it immediately4115assert(_last && _last->as_BlockEnd(), "");4116scope_data()->parent()->add_to_work_list(continuation());4117_skip_block = true;4118}4119}41204121// Fill the exception handler for synchronized methods with instructions4122if (callee->is_synchronized() && sync_handler->state() != NULL) {4123fill_sync_handler(lock, sync_handler);4124} else {4125pop_scope();4126}41274128compilation()->notice_inlined_method(callee);41294130return true;4131}413241334134bool GraphBuilder::try_method_handle_inline(ciMethod* callee) {4135ValueStack* state_before = state()->copy_for_parsing();4136vmIntrinsics::ID iid = callee->intrinsic_id();4137switch (iid) {4138case vmIntrinsics::_invokeBasic:4139{4140// get MethodHandle receiver4141const int args_base = state()->stack_size() - callee->arg_size();4142ValueType* type = state()->stack_at(args_base)->type();4143if (type->is_constant()) {4144ciMethod* target = type->as_ObjectType()->constant_value()->as_method_handle()->get_vmtarget();4145// We don't do CHA here so only inline static and statically bindable methods.4146if (target->is_static() || target->can_be_statically_bound()) {4147Bytecodes::Code bc = target->is_static() ? 
Bytecodes::_invokestatic : Bytecodes::_invokevirtual;
          if (try_inline(target, /*holder_known*/ true, bc)) {
            return true;
          }
        } else {
          print_inlining(target, "not static or statically bindable", /*success*/ false);
        }
      } else {
        print_inlining(callee, "receiver not constant", /*success*/ false);
      }
    }
    break;

  case vmIntrinsics::_linkToVirtual:
  case vmIntrinsics::_linkToStatic:
  case vmIntrinsics::_linkToSpecial:
  case vmIntrinsics::_linkToInterface:
    {
      // pop MemberName argument
      const int args_base = state()->stack_size() - callee->arg_size();
      ValueType* type = apop()->type();
      if (type->is_constant()) {
        ciMethod* target = type->as_ObjectType()->constant_value()->as_member_name()->get_vmtarget();
        // If the target is another method handle invoke, try to recursively get
        // a better target.
        if (target->is_method_handle_intrinsic()) {
          if (try_method_handle_inline(target)) {
            return true;
          }
        } else {
          ciSignature* signature = target->signature();
          const int receiver_skip = target->is_static() ? 0 : 1;
          // Cast receiver to its type.
          if (!target->is_static()) {
            ciKlass* tk = signature->accessing_klass();
            Value obj = state()->stack_at(args_base);
            if (obj->exact_type() == NULL &&
                obj->declared_type() != tk && tk != compilation()->env()->Object_klass()) {
              TypeCast* c = new TypeCast(tk, obj, state_before);
              append(c);
              state()->stack_at_put(args_base, c);
            }
          }
          // Cast reference arguments to its type.
          for (int i = 0, j = 0; i < signature->count(); i++) {
            ciType* t = signature->type_at(i);
            if (t->is_klass()) {
              ciKlass* tk = t->as_klass();
              Value obj = state()->stack_at(args_base + receiver_skip + j);
              if (obj->exact_type() == NULL &&
                  obj->declared_type() != tk && tk != compilation()->env()->Object_klass()) {
                TypeCast* c = new TypeCast(t, obj, state_before);
                append(c);
                state()->stack_at_put(args_base + receiver_skip + j, c);
              }
            }
            j += t->size(); // long and double take two slots
          }
          // We don't do CHA here so only inline static and statically bindable methods.
          if (target->is_static() || target->can_be_statically_bound()) {
            Bytecodes::Code bc = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokevirtual;
            if (try_inline(target, /*holder_known*/ true, bc)) {
              return true;
            }
          } else {
            print_inlining(target, "not static or statically bindable", /*success*/ false);
          }
        }
      } else {
        print_inlining(callee, "MemberName not constant", /*success*/ false);
      }
    }
    break;

  default:
    fatal(err_msg("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
    break;
  }
  // Inlining failed: restore the expression stack that was captured on entry.
  set_state(state_before);
  return false;
}


// Record the reason the current inlining attempt failed (consumed by the
// INLINE_BAILOUT machinery and inlining log output).
void GraphBuilder::inline_bailout(const char* msg) {
  assert(msg != NULL, "inline bailout msg must exist");
  _inline_bailout_msg = msg;
}


void GraphBuilder::clear_inline_bailout() {
  _inline_bailout_msg = NULL;
}


// Install the outermost (root) scope and position the parser at 'start'.
void GraphBuilder::push_root_scope(IRScope* scope, BlockList* bci2block, BlockBegin* start) {
  ScopeData* data = new ScopeData(NULL);
  data->set_scope(scope);
  data->set_bci2block(bci2block);
  _scope_data = data;
  _block = start;
}


// Enter a new callee scope for inlining 'callee': builds the callee's block
// list and records the continuation block to return to.
void GraphBuilder::push_scope(ciMethod* callee, BlockBegin* continuation) {
  IRScope* callee_scope = new IRScope(compilation(), scope(), bci(), callee, -1, false);
  scope()->add_callee(callee_scope);

  BlockListBuilder blb(compilation(), callee_scope, -1);
  CHECK_BAILOUT();

  if (!blb.bci2block()->at(0)->is_set(BlockBegin::parser_loop_header_flag)) {
    // this scope can be inlined directly into the caller so remove
    // the block at bci 0.
    blb.bci2block()->at_put(0, NULL);
  }

  set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci())));

  ScopeData* data = new ScopeData(scope_data());
  data->set_scope(callee_scope);
  data->set_bci2block(blb.bci2block());
  data->set_continuation(continuation);
  _scope_data = data;
}


// Enter a pseudo-scope for parsing a jsr subroutine of the current method.
void GraphBuilder::push_scope_for_jsr(BlockBegin* jsr_continuation, int jsr_dest_bci) {
  ScopeData* data = new
ScopeData(scope_data());4275data->set_parsing_jsr();4276data->set_jsr_entry_bci(jsr_dest_bci);4277data->set_jsr_return_address_local(-1);4278// Must clone bci2block list as we will be mutating it in order to4279// properly clone all blocks in jsr region as well as exception4280// handlers containing rets4281BlockList* new_bci2block = new BlockList(bci2block()->length());4282new_bci2block->push_all(bci2block());4283data->set_bci2block(new_bci2block);4284data->set_scope(scope());4285data->setup_jsr_xhandlers();4286data->set_continuation(continuation());4287data->set_jsr_continuation(jsr_continuation);4288_scope_data = data;4289}429042914292void GraphBuilder::pop_scope() {4293int number_of_locks = scope()->number_of_locks();4294_scope_data = scope_data()->parent();4295// accumulate minimum number of monitor slots to be reserved4296scope()->set_min_number_of_locks(number_of_locks);4297}429842994300void GraphBuilder::pop_scope_for_jsr() {4301_scope_data = scope_data()->parent();4302}43034304bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {4305if (InlineUnsafeOps) {4306Values* args = state()->pop_arguments(callee->arg_size());4307null_check(args->at(0));4308Instruction* offset = args->at(2);4309#ifndef _LP644310offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));4311#endif4312Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));4313push(op->type(), op);4314compilation()->set_has_unsafe_access(true);4315}4316return InlineUnsafeOps;4317}431843194320bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {4321if (InlineUnsafeOps) {4322Values* args = state()->pop_arguments(callee->arg_size());4323null_check(args->at(0));4324Instruction* offset = args->at(2);4325#ifndef _LP644326offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));4327#endif4328Value val = args->at(3);4329if (t == T_BOOLEAN) {4330Value mask = append(new 
Constant(new IntConstant(1)));4331val = append(new LogicOp(Bytecodes::_iand, val, mask));4332}4333Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, val, is_volatile));4334compilation()->set_has_unsafe_access(true);4335kill_all();4336}4337return InlineUnsafeOps;4338}433943404341bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {4342if (InlineUnsafeOps) {4343Values* args = state()->pop_arguments(callee->arg_size());4344null_check(args->at(0));4345Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));4346push(op->type(), op);4347compilation()->set_has_unsafe_access(true);4348}4349return InlineUnsafeOps;4350}435143524353bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {4354if (InlineUnsafeOps) {4355Values* args = state()->pop_arguments(callee->arg_size());4356null_check(args->at(0));4357Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));4358compilation()->set_has_unsafe_access(true);4359}4360return InlineUnsafeOps;4361}436243634364bool GraphBuilder::append_unsafe_prefetch(ciMethod* callee, bool is_static, bool is_store) {4365if (InlineUnsafeOps) {4366Values* args = state()->pop_arguments(callee->arg_size());4367int obj_arg_index = 1; // Assume non-static case4368if (is_static) {4369obj_arg_index = 0;4370} else {4371null_check(args->at(0));4372}4373Instruction* offset = args->at(obj_arg_index + 1);4374#ifndef _LP644375offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));4376#endif4377Instruction* op = is_store ? 
append(new UnsafePrefetchWrite(args->at(obj_arg_index), offset))4378: append(new UnsafePrefetchRead (args->at(obj_arg_index), offset));4379compilation()->set_has_unsafe_access(true);4380}4381return InlineUnsafeOps;4382}438343844385void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {4386ValueStack* state_before = copy_state_for_exception();4387ValueType* result_type = as_ValueType(callee->return_type());4388assert(result_type->is_int(), "int result");4389Values* args = state()->pop_arguments(callee->arg_size());43904391// Pop off some args to speically handle, then push back4392Value newval = args->pop();4393Value cmpval = args->pop();4394Value offset = args->pop();4395Value src = args->pop();4396Value unsafe_obj = args->pop();43974398// Separately handle the unsafe arg. It is not needed for code4399// generation, but must be null checked4400null_check(unsafe_obj);44014402#ifndef _LP644403offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));4404#endif44054406args->push(src);4407args->push(offset);4408args->push(cmpval);4409args->push(newval);44104411// An unsafe CAS can alias with other field accesses, but we don't4412// know which ones so mark the state as no preserved. 
This will4413// cause CSE to invalidate memory across it.4414bool preserves_state = false;4415Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state);4416append_split(result);4417push(result_type, result);4418compilation()->set_has_unsafe_access(true);4419}442044214422static void post_inlining_event(EventCompilerInlining* event,4423int compile_id,4424const char* msg,4425bool success,4426int bci,4427ciMethod* caller,4428ciMethod* callee) {4429assert(caller != NULL, "invariant");4430assert(callee != NULL, "invariant");4431assert(event != NULL, "invariant");4432assert(event->should_commit(), "invariant");4433JfrStructCalleeMethod callee_struct;4434callee_struct.set_type(callee->holder()->name()->as_utf8());4435callee_struct.set_name(callee->name()->as_utf8());4436callee_struct.set_descriptor(callee->signature()->as_symbol()->as_utf8());4437event->set_compileId(compile_id);4438event->set_message(msg);4439event->set_succeeded(success);4440event->set_bci(bci);4441event->set_caller(caller->get_Method());4442event->set_callee(callee_struct);4443event->commit();4444}44454446void GraphBuilder::print_inlining(ciMethod* callee, const char* msg, bool success) {4447CompileLog* log = compilation()->log();4448if (log != NULL) {4449if (success) {4450if (msg != NULL)4451log->inline_success(msg);4452else4453log->inline_success("receiver is statically known");4454} else {4455if (msg != NULL)4456log->inline_fail(msg);4457else4458log->inline_fail("reason unknown");4459}4460}44614462EventCompilerInlining event;4463if (event.should_commit()) {4464post_inlining_event(&event, compilation()->env()->task()->compile_id(), msg, success, bci(), method(), callee);4465}44664467if (!PrintInlining && !compilation()->method()->has_option("PrintInlining")) {4468return;4469}4470CompileTask::print_inlining(callee, scope()->level(), bci(), msg);4471if (success && CIPrintMethodCodes) {4472callee->print_codes();4473}4474}44754476bool 
GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {4477if (InlineUnsafeOps) {4478Values* args = state()->pop_arguments(callee->arg_size());4479BasicType t = callee->return_type()->basic_type();4480null_check(args->at(0));4481Instruction* offset = args->at(2);4482#ifndef _LP644483offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));4484#endif4485Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));4486compilation()->set_has_unsafe_access(true);4487kill_all();4488push(op->type(), op);4489}4490return InlineUnsafeOps;4491}44924493#ifndef PRODUCT4494void GraphBuilder::print_stats() {4495vmap()->print();4496}4497#endif // PRODUCT44984499void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {4500assert(known_holder == NULL || (known_holder->is_instance_klass() &&4501(!known_holder->is_interface() ||4502((ciInstanceKlass*)known_holder)->has_default_methods())), "should be default method");4503if (known_holder != NULL) {4504if (known_holder->exact_klass() == NULL) {4505known_holder = compilation()->cha_exact_type(known_holder);4506}4507}45084509append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));4510}45114512void GraphBuilder::profile_return_type(Value ret, ciMethod* callee, ciMethod* m, int invoke_bci) {4513assert((m == NULL) == (invoke_bci < 0), "invalid method and invalid bci together");4514if (m == NULL) {4515m = method();4516}4517if (invoke_bci < 0) {4518invoke_bci = bci();4519}4520ciMethodData* md = m->method_data_or_null();4521ciProfileData* data = md->bci_to_data(invoke_bci);4522if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {4523append(new ProfileReturnType(m , invoke_bci, callee, ret));4524}4525}45264527void GraphBuilder::profile_invocation(ciMethod* callee, ValueStack* state) {4528append(new ProfileInvoke(callee, state));4529}453045314532