Path: blob/master/src/hotspot/share/c1/c1_LIRAssembler.cpp
40930 views
/*1* Copyright (c) 2000, 2021, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#include "precompiled.hpp"25#include "asm/assembler.inline.hpp"26#include "c1/c1_Compilation.hpp"27#include "c1/c1_Instruction.hpp"28#include "c1/c1_InstructionPrinter.hpp"29#include "c1/c1_LIRAssembler.hpp"30#include "c1/c1_MacroAssembler.hpp"31#include "c1/c1_ValueStack.hpp"32#include "ci/ciInstance.hpp"33#include "compiler/oopMap.hpp"34#include "gc/shared/barrierSet.hpp"35#include "runtime/os.hpp"36#include "runtime/vm_version.hpp"3738void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {39// We must have enough patching space so that call can be inserted.40// We cannot use fat nops here, since the concurrent code rewrite may transiently41// create the illegal instruction sequence.42while ((intx) _masm->pc() - (intx) patch->pc_start() < NativeGeneralJump::instruction_size) {43_masm->nop();44}45patch->install(_masm, patch_code, obj, info);46append_code_stub(patch);4748#ifdef ASSERT49Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());50if (patch->id() == PatchingStub::access_field_id) {51switch (code) {52case Bytecodes::_putstatic:53case Bytecodes::_getstatic:54case Bytecodes::_putfield:55case Bytecodes::_getfield:56break;57default:58ShouldNotReachHere();59}60} else if (patch->id() == PatchingStub::load_klass_id) {61switch (code) {62case Bytecodes::_new:63case Bytecodes::_anewarray:64case Bytecodes::_multianewarray:65case Bytecodes::_instanceof:66case Bytecodes::_checkcast:67break;68default:69ShouldNotReachHere();70}71} else if (patch->id() == PatchingStub::load_mirror_id) {72switch (code) {73case Bytecodes::_putstatic:74case Bytecodes::_getstatic:75case Bytecodes::_ldc:76case Bytecodes::_ldc_w:77break;78default:79ShouldNotReachHere();80}81} else if (patch->id() == PatchingStub::load_appendix_id) {82Bytecodes::Code bc_raw = info->scope()->method()->raw_code_at_bci(info->stack()->bci());83assert(Bytecodes::has_optional_appendix(bc_raw), "unexpected appendix resolution");84} else {85ShouldNotReachHere();86}87#endif88}8990PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {91IRScope* scope = info->scope();92Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());93if (Bytecodes::has_optional_appendix(bc_raw)) {94return PatchingStub::load_appendix_id;95}96return PatchingStub::load_mirror_id;97}9899//---------------------------------------------------------------100101102LIR_Assembler::LIR_Assembler(Compilation* c):103_masm(c->masm())104, _bs(BarrierSet::barrier_set())105, _compilation(c)106, _frame_map(c->frame_map())107, _current_block(NULL)108, _pending_non_safepoint(NULL)109, _pending_non_safepoint_offset(0)110{111_slow_case_stubs = new CodeStubList();112}113114115LIR_Assembler::~LIR_Assembler() {116// The unwind handler label may be unnbound if this destructor is invoked because of a bail-out.117// Reset it here to avoid an assertion.118_unwind_handler_entry.reset();119}120121122void LIR_Assembler::check_codespace() {123CodeSection* cs = _masm->code_section();124if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {125BAILOUT("CodeBuffer overflow");126}127}128129130void LIR_Assembler::append_code_stub(CodeStub* stub) {131_slow_case_stubs->append(stub);132}133134void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {135for (int m = 0; m < stub_list->length(); m++) {136CodeStub* s = stub_list->at(m);137138check_codespace();139CHECK_BAILOUT();140141#ifndef PRODUCT142if (CommentedAssembly) {143stringStream st;144s->print_name(&st);145st.print(" slow case");146_masm->block_comment(st.as_string());147}148#endif149s->emit_code(this);150#ifdef ASSERT151s->assert_no_unbound_labels();152#endif153}154}155156157void LIR_Assembler::emit_slow_case_stubs() {158emit_stubs(_slow_case_stubs);159}160161162bool LIR_Assembler::needs_icache(ciMethod* method) const {163return !method->is_static();164}165166bool LIR_Assembler::needs_clinit_barrier_on_entry(ciMethod* method) const {167return VM_Version::supports_fast_class_init_checks() && method->needs_clinit_barrier();168}169170int LIR_Assembler::code_offset() const {171return _masm->offset();172}173174175address LIR_Assembler::pc() const {176return _masm->pc();177}178179// To bang the stack of this compiled method we use the stack size180// that the interpreter would need in case of a deoptimization. This181// removes the need to bang the stack in the deoptimization blob which182// in turn simplifies stack overflow handling.183int LIR_Assembler::bang_size_in_bytes() const {184return MAX2(initial_frame_size_in_bytes() + os::extra_bang_size_in_bytes(), _compilation->interpreter_frame_size());185}186187void LIR_Assembler::emit_exception_entries(ExceptionInfoList* info_list) {188for (int i = 0; i < info_list->length(); i++) {189XHandlers* handlers = info_list->at(i)->exception_handlers();190191for (int j = 0; j < handlers->length(); j++) {192XHandler* handler = handlers->handler_at(j);193assert(handler->lir_op_id() != -1, "handler not processed by LinearScan");194assert(handler->entry_code() == NULL ||195handler->entry_code()->instructions_list()->last()->code() == lir_branch ||196handler->entry_code()->instructions_list()->last()->code() == lir_delay_slot, "last operation must be branch");197198if (handler->entry_pco() == -1) {199// entry code not emitted yet200if (handler->entry_code() != NULL && handler->entry_code()->instructions_list()->length() > 1) {201handler->set_entry_pco(code_offset());202if (CommentedAssembly) {203_masm->block_comment("Exception adapter block");204}205emit_lir_list(handler->entry_code());206} else {207handler->set_entry_pco(handler->entry_block()->exception_handler_pco());208}209210assert(handler->entry_pco() != -1, "must be set now");211}212}213}214}215216217void LIR_Assembler::emit_code(BlockList* hir) {218if (PrintLIR) {219print_LIR(hir);220}221222int n = hir->length();223for (int i = 0; i < n; i++) {224emit_block(hir->at(i));225CHECK_BAILOUT();226}227228flush_debug_info(code_offset());229230DEBUG_ONLY(check_no_unbound_labels());231}232233234void LIR_Assembler::emit_block(BlockBegin* block) {235if (block->is_set(BlockBegin::backward_branch_target_flag)) {236align_backward_branch_target();237}238239// if this block is the start of an exception handler, record the240// PC offset of the first instruction for later construction of241// the ExceptionHandlerTable242if (block->is_set(BlockBegin::exception_entry_flag)) {243block->set_exception_handler_pco(code_offset());244}245246#ifndef PRODUCT247if (PrintLIRWithAssembly) {248// don't print Phi's249InstructionPrinter ip(false);250block->print(ip);251}252#endif /* PRODUCT */253254assert(block->lir() != NULL, "must have LIR");255X86_ONLY(assert(_masm->rsp_offset() == 0, "frame size should be fixed"));256257#ifndef PRODUCT258if (CommentedAssembly) {259stringStream st;260st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->printable_bci());261_masm->block_comment(st.as_string());262}263#endif264265emit_lir_list(block->lir());266267X86_ONLY(assert(_masm->rsp_offset() == 0, "frame size should be fixed"));268}269270271void LIR_Assembler::emit_lir_list(LIR_List* list) {272peephole(list);273274int n = list->length();275for (int i = 0; i < n; i++) {276LIR_Op* op = list->at(i);277278check_codespace();279CHECK_BAILOUT();280281#ifndef PRODUCT282if (CommentedAssembly) {283// Don't record out every op since that's too verbose. Print284// branches since they include block and stub names. Also print285// patching moves since they generate funny looking code.286if (op->code() == lir_branch ||287(op->code() == lir_move && op->as_Op1()->patch_code() != lir_patch_none) ||288(op->code() == lir_leal && op->as_Op1()->patch_code() != lir_patch_none)) {289stringStream st;290op->print_on(&st);291_masm->block_comment(st.as_string());292}293}294if (PrintLIRWithAssembly) {295// print out the LIR operation followed by the resulting assembly296list->at(i)->print(); tty->cr();297}298#endif /* PRODUCT */299300op->emit_code(this);301302if (compilation()->debug_info_recorder()->recording_non_safepoints()) {303process_debug_info(op);304}305306#ifndef PRODUCT307if (PrintLIRWithAssembly) {308_masm->code()->decode();309}310#endif /* PRODUCT */311}312}313314#ifdef ASSERT315void LIR_Assembler::check_no_unbound_labels() {316CHECK_BAILOUT();317318for (int i = 0; i < _branch_target_blocks.length() - 1; i++) {319if (!_branch_target_blocks.at(i)->label()->is_bound()) {320tty->print_cr("label of block B%d is not bound", _branch_target_blocks.at(i)->block_id());321assert(false, "unbound label");322}323}324}325#endif326327//----------------------------------debug info--------------------------------328329330void LIR_Assembler::add_debug_info_for_branch(CodeEmitInfo* info) {331int pc_offset = code_offset();332flush_debug_info(pc_offset);333info->record_debug_info(compilation()->debug_info_recorder(), pc_offset);334if (info->exception_handlers() != NULL) {335compilation()->add_exception_handlers_for_pco(pc_offset, info->exception_handlers());336}337}338339340void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) {341flush_debug_info(pc_offset);342cinfo->record_debug_info(compilation()->debug_info_recorder(), pc_offset);343if (cinfo->exception_handlers() != NULL) {344compilation()->add_exception_handlers_for_pco(pc_offset, cinfo->exception_handlers());345}346}347348static ValueStack* debug_info(Instruction* ins) {349StateSplit* ss = ins->as_StateSplit();350if (ss != NULL) return ss->state();351return ins->state_before();352}353354void LIR_Assembler::process_debug_info(LIR_Op* op) {355Instruction* src = op->source();356if (src == NULL) return;357int pc_offset = code_offset();358if (_pending_non_safepoint == src) {359_pending_non_safepoint_offset = pc_offset;360return;361}362ValueStack* vstack = debug_info(src);363if (vstack == NULL) return;364if (_pending_non_safepoint != NULL) {365// Got some old debug info. Get rid of it.366if (debug_info(_pending_non_safepoint) == vstack) {367_pending_non_safepoint_offset = pc_offset;368return;369}370if (_pending_non_safepoint_offset < pc_offset) {371record_non_safepoint_debug_info();372}373_pending_non_safepoint = NULL;374}375// Remember the debug info.376if (pc_offset > compilation()->debug_info_recorder()->last_pc_offset()) {377_pending_non_safepoint = src;378_pending_non_safepoint_offset = pc_offset;379}380}381382// Index caller states in s, where 0 is the oldest, 1 its callee, etc.383// Return NULL if n is too large.384// Returns the caller_bci for the next-younger state, also.385static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {386ValueStack* t = s;387for (int i = 0; i < n; i++) {388if (t == NULL) break;389t = t->caller_state();390}391if (t == NULL) return NULL;392for (;;) {393ValueStack* tc = t->caller_state();394if (tc == NULL) return s;395t = tc;396bci_result = tc->bci();397s = s->caller_state();398}399}400401void LIR_Assembler::record_non_safepoint_debug_info() {402int pc_offset = _pending_non_safepoint_offset;403ValueStack* vstack = debug_info(_pending_non_safepoint);404int bci = vstack->bci();405406DebugInformationRecorder* debug_info = compilation()->debug_info_recorder();407assert(debug_info->recording_non_safepoints(), "sanity");408409debug_info->add_non_safepoint(pc_offset);410411// Visit scopes from oldest to youngest.412for (int n = 0; ; n++) {413int s_bci = bci;414ValueStack* s = nth_oldest(vstack, n, s_bci);415if (s == NULL) break;416IRScope* scope = s->scope();417//Always pass false for reexecute since these ScopeDescs are never used for deopt418methodHandle null_mh;419debug_info->describe_scope(pc_offset, null_mh, scope->method(), s->bci(), false/*reexecute*/);420}421422debug_info->end_non_safepoint(pc_offset);423}424425426ImplicitNullCheckStub* LIR_Assembler::add_debug_info_for_null_check_here(CodeEmitInfo* cinfo) {427return add_debug_info_for_null_check(code_offset(), cinfo);428}429430ImplicitNullCheckStub* LIR_Assembler::add_debug_info_for_null_check(int pc_offset, CodeEmitInfo* cinfo) {431ImplicitNullCheckStub* stub = new ImplicitNullCheckStub(pc_offset, cinfo);432append_code_stub(stub);433return stub;434}435436void LIR_Assembler::add_debug_info_for_div0_here(CodeEmitInfo* info) {437add_debug_info_for_div0(code_offset(), info);438}439440void LIR_Assembler::add_debug_info_for_div0(int pc_offset, CodeEmitInfo* cinfo) {441DivByZeroStub* stub = new DivByZeroStub(pc_offset, cinfo);442append_code_stub(stub);443}444445void LIR_Assembler::emit_rtcall(LIR_OpRTCall* op) {446rt_call(op->result_opr(), op->addr(), op->arguments(), op->tmp(), op->info());447}448449450void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {451verify_oop_map(op->info());452453// must align calls sites, otherwise they can't be updated atomically454align_call(op->code());455456// emit the static call stub stuff out of line457emit_static_call_stub();458CHECK_BAILOUT();459460switch (op->code()) {461case lir_static_call:462case lir_dynamic_call:463call(op, relocInfo::static_call_type);464break;465case lir_optvirtual_call:466call(op, relocInfo::opt_virtual_call_type);467break;468case lir_icvirtual_call:469ic_call(op);470break;471default:472fatal("unexpected op code: %s", op->name());473break;474}475476// JSR 292477// Record if this method has MethodHandle invokes.478if (op->is_method_handle_invoke()) {479compilation()->set_has_method_handle_invokes(true);480}481482#if defined(IA32) && defined(COMPILER2)483// C2 leave fpu stack dirty clean it484if (UseSSE < 2 && !CompilerConfig::is_c1_only_no_jvmci()) {485int i;486for ( i = 1; i <= 7 ; i++ ) {487ffree(i);488}489if (!op->result_opr()->is_float_kind()) {490ffree(0);491}492}493#endif // IA32 && COMPILER2494}495496497void LIR_Assembler::emit_opLabel(LIR_OpLabel* op) {498_masm->bind (*(op->label()));499}500501502void LIR_Assembler::emit_op1(LIR_Op1* op) {503switch (op->code()) {504case lir_move:505if (op->move_kind() == lir_move_volatile) {506assert(op->patch_code() == lir_patch_none, "can't patch volatiles");507volatile_move_op(op->in_opr(), op->result_opr(), op->type(), op->info());508} else {509move_op(op->in_opr(), op->result_opr(), op->type(),510op->patch_code(), op->info(), op->pop_fpu_stack(),511op->move_kind() == lir_move_unaligned,512op->move_kind() == lir_move_wide);513}514break;515516case lir_roundfp: {517LIR_OpRoundFP* round_op = op->as_OpRoundFP();518roundfp_op(round_op->in_opr(), round_op->tmp(), round_op->result_opr(), round_op->pop_fpu_stack());519break;520}521522case lir_return: {523assert(op->as_OpReturn() != NULL, "sanity");524LIR_OpReturn *ret_op = (LIR_OpReturn*)op;525return_op(ret_op->in_opr(), ret_op->stub());526if (ret_op->stub() != NULL) {527append_code_stub(ret_op->stub());528}529break;530}531532case lir_safepoint:533if (compilation()->debug_info_recorder()->last_pc_offset() == code_offset()) {534_masm->nop();535}536safepoint_poll(op->in_opr(), op->info());537break;538539#ifdef IA32540case lir_fxch:541fxch(op->in_opr()->as_jint());542break;543544case lir_fld:545fld(op->in_opr()->as_jint());546break;547#endif // IA32548549case lir_branch:550break;551552case lir_push:553push(op->in_opr());554break;555556case lir_pop:557pop(op->in_opr());558break;559560case lir_leal:561leal(op->in_opr(), op->result_opr(), op->patch_code(), op->info());562break;563564case lir_null_check: {565ImplicitNullCheckStub* stub = add_debug_info_for_null_check_here(op->info());566567if (op->in_opr()->is_single_cpu()) {568_masm->null_check(op->in_opr()->as_register(), stub->entry());569} else {570Unimplemented();571}572break;573}574575case lir_monaddr:576monitor_address(op->in_opr()->as_constant_ptr()->as_jint(), op->result_opr());577break;578579case lir_unwind:580unwind_op(op->in_opr());581break;582583default:584Unimplemented();585break;586}587}588589590void LIR_Assembler::emit_op0(LIR_Op0* op) {591switch (op->code()) {592case lir_nop:593assert(op->info() == NULL, "not supported");594_masm->nop();595break;596597case lir_label:598Unimplemented();599break;600601case lir_std_entry:602// init offsets603offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());604_masm->align(CodeEntryAlignment);605if (needs_icache(compilation()->method())) {606check_icache();607}608offsets()->set_value(CodeOffsets::Verified_Entry, _masm->offset());609_masm->verified_entry();610if (needs_clinit_barrier_on_entry(compilation()->method())) {611clinit_barrier(compilation()->method());612}613build_frame();614offsets()->set_value(CodeOffsets::Frame_Complete, _masm->offset());615break;616617case lir_osr_entry:618offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());619osr_entry();620break;621622#ifdef IA32623case lir_fpop_raw:624fpop();625break;626#endif // IA32627628case lir_breakpoint:629breakpoint();630break;631632case lir_membar:633membar();634break;635636case lir_membar_acquire:637membar_acquire();638break;639640case lir_membar_release:641membar_release();642break;643644case lir_membar_loadload:645membar_loadload();646break;647648case lir_membar_storestore:649membar_storestore();650break;651652case lir_membar_loadstore:653membar_loadstore();654break;655656case lir_membar_storeload:657membar_storeload();658break;659660case lir_get_thread:661get_thread(op->result_opr());662break;663664case lir_on_spin_wait:665on_spin_wait();666break;667668default:669ShouldNotReachHere();670break;671}672}673674675void LIR_Assembler::emit_op2(LIR_Op2* op) {676switch (op->code()) {677case lir_cmp:678if (op->info() != NULL) {679assert(op->in_opr1()->is_address() || op->in_opr2()->is_address(),680"shouldn't be codeemitinfo for non-address operands");681add_debug_info_for_null_check_here(op->info()); // exception possible682}683comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);684break;685686case lir_cmp_l2i:687case lir_cmp_fd2i:688case lir_ucmp_fd2i:689comp_fl2i(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op);690break;691692case lir_cmove:693cmove(op->condition(), op->in_opr1(), op->in_opr2(), op->result_opr(), op->type());694break;695696case lir_shl:697case lir_shr:698case lir_ushr:699if (op->in_opr2()->is_constant()) {700shift_op(op->code(), op->in_opr1(), op->in_opr2()->as_constant_ptr()->as_jint(), op->result_opr());701} else {702shift_op(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op->tmp1_opr());703}704break;705706case lir_add:707case lir_sub:708case lir_mul:709case lir_div:710case lir_rem:711assert(op->fpu_pop_count() < 2, "");712arith_op(713op->code(),714op->in_opr1(),715op->in_opr2(),716op->result_opr(),717op->info(),718op->fpu_pop_count() == 1);719break;720721case lir_abs:722case lir_sqrt:723case lir_tan:724case lir_log10:725intrinsic_op(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op);726break;727728case lir_neg:729negate(op->in_opr1(), op->result_opr(), op->in_opr2());730break;731732case lir_logic_and:733case lir_logic_or:734case lir_logic_xor:735logic_op(736op->code(),737op->in_opr1(),738op->in_opr2(),739op->result_opr());740break;741742case lir_throw:743throw_op(op->in_opr1(), op->in_opr2(), op->info());744break;745746case lir_xadd:747case lir_xchg:748atomic_op(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op->tmp1_opr());749break;750751default:752Unimplemented();753break;754}755}756757758void LIR_Assembler::build_frame() {759_masm->build_frame(initial_frame_size_in_bytes(), bang_size_in_bytes());760}761762763void LIR_Assembler::roundfp_op(LIR_Opr src, LIR_Opr tmp, LIR_Opr dest, bool pop_fpu_stack) {764assert(strict_fp_requires_explicit_rounding, "not required");765assert((src->is_single_fpu() && dest->is_single_stack()) ||766(src->is_double_fpu() && dest->is_double_stack()),767"round_fp: rounds register -> stack location");768769reg2stack (src, dest, src->type(), pop_fpu_stack);770}771772773void LIR_Assembler::move_op(LIR_Opr src, LIR_Opr dest, BasicType type, LIR_PatchCode patch_code, CodeEmitInfo* info, bool pop_fpu_stack, bool unaligned, bool wide) {774if (src->is_register()) {775if (dest->is_register()) {776assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");777reg2reg(src, dest);778} else if (dest->is_stack()) {779assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");780reg2stack(src, dest, type, pop_fpu_stack);781} else if (dest->is_address()) {782reg2mem(src, dest, type, patch_code, info, pop_fpu_stack, wide, unaligned);783} else {784ShouldNotReachHere();785}786787} else if (src->is_stack()) {788assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");789if (dest->is_register()) {790stack2reg(src, dest, type);791} else if (dest->is_stack()) {792stack2stack(src, dest, type);793} else {794ShouldNotReachHere();795}796797} else if (src->is_constant()) {798if (dest->is_register()) {799const2reg(src, dest, patch_code, info); // patching is possible800} else if (dest->is_stack()) {801assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");802const2stack(src, dest);803} else if (dest->is_address()) {804assert(patch_code == lir_patch_none, "no patching allowed here");805const2mem(src, dest, type, info, wide);806} else {807ShouldNotReachHere();808}809810} else if (src->is_address()) {811mem2reg(src, dest, type, patch_code, info, wide, unaligned);812813} else {814ShouldNotReachHere();815}816}817818819void LIR_Assembler::verify_oop_map(CodeEmitInfo* info) {820#ifndef PRODUCT821if (VerifyOops) {822OopMapStream s(info->oop_map());823while (!s.is_done()) {824OopMapValue v = s.current();825if (v.is_oop()) {826VMReg r = v.reg();827if (!r->is_stack()) {828stringStream st;829st.print("bad oop %s at %d", r->as_Register()->name(), _masm->offset());830_masm->verify_oop(r->as_Register());831} else {832_masm->verify_stack_oop(r->reg2stack() * VMRegImpl::stack_slot_size);833}834}835check_codespace();836CHECK_BAILOUT();837838s.next();839}840}841#endif842}843844845