// Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/interpreter/bytecodeInterpreter.cpp
/*
 * Copyright (c) 2002, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// no precompiled headers
#include "classfile/vmSymbols.hpp"
#include "gc_interface/collectedHeap.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "interpreter/bytecodeInterpreter.hpp"
#include "interpreter/bytecodeInterpreter.inline.hpp"
#include "interpreter/bytecodeInterpreterProfiling.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "memory/resourceArea.hpp"
#include "oops/methodCounters.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.hpp"
#include "runtime/orderAccess.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/threadCritical.hpp"
#include "utilities/exceptions.hpp"

// no precompiled
headers49#ifdef CC_INTERP5051/*52* USELABELS - If using GCC, then use labels for the opcode dispatching53* rather -then a switch statement. This improves performance because it54* gives us the oportunity to have the instructions that calculate the55* next opcode to jump to be intermixed with the rest of the instructions56* that implement the opcode (see UPDATE_PC_AND_TOS_AND_CONTINUE macro).57*/58#undef USELABELS59#ifdef __GNUC__60/*61ASSERT signifies debugging. It is much easier to step thru bytecodes if we62don't use the computed goto approach.63*/64#ifndef ASSERT65#define USELABELS66#endif67#endif6869#undef CASE70#ifdef USELABELS71#define CASE(opcode) opc ## opcode72#define DEFAULT opc_default73#else74#define CASE(opcode) case Bytecodes:: opcode75#define DEFAULT default76#endif7778/*79* PREFETCH_OPCCODE - Some compilers do better if you prefetch the next80* opcode before going back to the top of the while loop, rather then having81* the top of the while loop handle it. This provides a better opportunity82* for instruction scheduling. Some compilers just do this prefetch83* automatically. Some actually end up with worse performance if you84* force the prefetch. Solaris gcc seems to do better, but cc does worse.85*/86#undef PREFETCH_OPCCODE87#define PREFETCH_OPCCODE8889/*90Interpreter safepoint: it is expected that the interpreter will have no live91handles of its own creation live at an interpreter safepoint. 
Therefore we92run a HandleMarkCleaner and trash all handles allocated in the call chain93since the JavaCalls::call_helper invocation that initiated the chain.94There really shouldn't be any handles remaining to trash but this is cheap95in relation to a safepoint.96*/97#define SAFEPOINT \98if ( SafepointSynchronize::is_synchronizing()) { \99{ \100/* zap freed handles rather than GC'ing them */ \101HandleMarkCleaner __hmc(THREAD); \102} \103CALL_VM(SafepointSynchronize::block(THREAD), handle_exception); \104}105106/*107* VM_JAVA_ERROR - Macro for throwing a java exception from108* the interpreter loop. Should really be a CALL_VM but there109* is no entry point to do the transition to vm so we just110* do it by hand here.111*/112#define VM_JAVA_ERROR_NO_JUMP(name, msg, note_a_trap) \113DECACHE_STATE(); \114SET_LAST_JAVA_FRAME(); \115{ \116InterpreterRuntime::note_a_trap(THREAD, istate->method(), BCI()); \117ThreadInVMfromJava trans(THREAD); \118Exceptions::_throw_msg(THREAD, __FILE__, __LINE__, name, msg); \119} \120RESET_LAST_JAVA_FRAME(); \121CACHE_STATE();122123// Normal throw of a java error.124#define VM_JAVA_ERROR(name, msg, note_a_trap) \125VM_JAVA_ERROR_NO_JUMP(name, msg, note_a_trap) \126goto handle_exception;127128#ifdef PRODUCT129#define DO_UPDATE_INSTRUCTION_COUNT(opcode)130#else131#define DO_UPDATE_INSTRUCTION_COUNT(opcode) \132{ \133BytecodeCounter::_counter_value++; \134BytecodeHistogram::_counters[(Bytecodes::Code)opcode]++; \135if (StopInterpreterAt && StopInterpreterAt == BytecodeCounter::_counter_value) os::breakpoint(); \136if (TraceBytecodes) { \137CALL_VM((void)SharedRuntime::trace_bytecode(THREAD, 0, \138topOfStack[Interpreter::expr_index_at(1)], \139topOfStack[Interpreter::expr_index_at(2)]), \140handle_exception); \141} \142}143#endif144145#undef DEBUGGER_SINGLE_STEP_NOTIFY146#ifdef VM_JVMTI147/* NOTE: (kbr) This macro must be called AFTER the PC has been148incremented. 
JvmtiExport::at_single_stepping_point() may cause a149breakpoint opcode to get inserted at the current PC to allow the150debugger to coalesce single-step events.151152As a result if we call at_single_stepping_point() we refetch opcode153to get the current opcode. This will override any other prefetching154that might have occurred.155*/156#define DEBUGGER_SINGLE_STEP_NOTIFY() \157{ \158if (_jvmti_interp_events) { \159if (JvmtiExport::should_post_single_step()) { \160DECACHE_STATE(); \161SET_LAST_JAVA_FRAME(); \162ThreadInVMfromJava trans(THREAD); \163JvmtiExport::at_single_stepping_point(THREAD, \164istate->method(), \165pc); \166RESET_LAST_JAVA_FRAME(); \167CACHE_STATE(); \168if (THREAD->pop_frame_pending() && \169!THREAD->pop_frame_in_process()) { \170goto handle_Pop_Frame; \171} \172if (THREAD->jvmti_thread_state() && \173THREAD->jvmti_thread_state()->is_earlyret_pending()) { \174goto handle_Early_Return; \175} \176opcode = *pc; \177} \178} \179}180#else181#define DEBUGGER_SINGLE_STEP_NOTIFY()182#endif183184/*185* CONTINUE - Macro for executing the next opcode.186*/187#undef CONTINUE188#ifdef USELABELS189// Have to do this dispatch this way in C++ because otherwise gcc complains about crossing an190// initialization (which is is the initialization of the table pointer...)191#define DISPATCH(opcode) goto *(void*)dispatch_table[opcode]192#define CONTINUE { \193opcode = *pc; \194DO_UPDATE_INSTRUCTION_COUNT(opcode); \195DEBUGGER_SINGLE_STEP_NOTIFY(); \196DISPATCH(opcode); \197}198#else199#ifdef PREFETCH_OPCCODE200#define CONTINUE { \201opcode = *pc; \202DO_UPDATE_INSTRUCTION_COUNT(opcode); \203DEBUGGER_SINGLE_STEP_NOTIFY(); \204continue; \205}206#else207#define CONTINUE { \208DO_UPDATE_INSTRUCTION_COUNT(opcode); \209DEBUGGER_SINGLE_STEP_NOTIFY(); \210continue; \211}212#endif213#endif214215216#define UPDATE_PC(opsize) {pc += opsize; }217/*218* UPDATE_PC_AND_TOS - Macro for updating the pc and topOfStack.219*/220#undef UPDATE_PC_AND_TOS221#define 
UPDATE_PC_AND_TOS(opsize, stack) \222{pc += opsize; MORE_STACK(stack); }223224/*225* UPDATE_PC_AND_TOS_AND_CONTINUE - Macro for updating the pc and topOfStack,226* and executing the next opcode. It's somewhat similar to the combination227* of UPDATE_PC_AND_TOS and CONTINUE, but with some minor optimizations.228*/229#undef UPDATE_PC_AND_TOS_AND_CONTINUE230#ifdef USELABELS231#define UPDATE_PC_AND_TOS_AND_CONTINUE(opsize, stack) { \232pc += opsize; opcode = *pc; MORE_STACK(stack); \233DO_UPDATE_INSTRUCTION_COUNT(opcode); \234DEBUGGER_SINGLE_STEP_NOTIFY(); \235DISPATCH(opcode); \236}237238#define UPDATE_PC_AND_CONTINUE(opsize) { \239pc += opsize; opcode = *pc; \240DO_UPDATE_INSTRUCTION_COUNT(opcode); \241DEBUGGER_SINGLE_STEP_NOTIFY(); \242DISPATCH(opcode); \243}244#else245#ifdef PREFETCH_OPCCODE246#define UPDATE_PC_AND_TOS_AND_CONTINUE(opsize, stack) { \247pc += opsize; opcode = *pc; MORE_STACK(stack); \248DO_UPDATE_INSTRUCTION_COUNT(opcode); \249DEBUGGER_SINGLE_STEP_NOTIFY(); \250goto do_continue; \251}252253#define UPDATE_PC_AND_CONTINUE(opsize) { \254pc += opsize; opcode = *pc; \255DO_UPDATE_INSTRUCTION_COUNT(opcode); \256DEBUGGER_SINGLE_STEP_NOTIFY(); \257goto do_continue; \258}259#else260#define UPDATE_PC_AND_TOS_AND_CONTINUE(opsize, stack) { \261pc += opsize; MORE_STACK(stack); \262DO_UPDATE_INSTRUCTION_COUNT(opcode); \263DEBUGGER_SINGLE_STEP_NOTIFY(); \264goto do_continue; \265}266267#define UPDATE_PC_AND_CONTINUE(opsize) { \268pc += opsize; \269DO_UPDATE_INSTRUCTION_COUNT(opcode); \270DEBUGGER_SINGLE_STEP_NOTIFY(); \271goto do_continue; \272}273#endif /* PREFETCH_OPCCODE */274#endif /* USELABELS */275276// About to call a new method, update the save the adjusted pc and return to frame manager277#define UPDATE_PC_AND_RETURN(opsize) \278DECACHE_TOS(); \279istate->set_bcp(pc+opsize); \280return;281282283#define METHOD istate->method()284#define GET_METHOD_COUNTERS(res) \285res = METHOD->method_counters(); \286if (res == NULL) { \287CALL_VM(res = 
InterpreterRuntime::build_method_counters(THREAD, METHOD), handle_exception); \288}289290#define OSR_REQUEST(res, branch_pc) \291CALL_VM(res=InterpreterRuntime::frequency_counter_overflow(THREAD, branch_pc), handle_exception);292/*293* For those opcodes that need to have a GC point on a backwards branch294*/295296// Backedge counting is kind of strange. The asm interpreter will increment297// the backedge counter as a separate counter but it does it's comparisons298// to the sum (scaled) of invocation counter and backedge count to make299// a decision. Seems kind of odd to sum them together like that300301// skip is delta from current bcp/bci for target, branch_pc is pre-branch bcp302303304#define DO_BACKEDGE_CHECKS(skip, branch_pc) \305if ((skip) <= 0) { \306MethodCounters* mcs; \307GET_METHOD_COUNTERS(mcs); \308if (UseLoopCounter) { \309bool do_OSR = UseOnStackReplacement; \310mcs->backedge_counter()->increment(); \311if (ProfileInterpreter) { \312BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception); \313/* Check for overflow against MDO count. */ \314do_OSR = do_OSR \315&& (mdo_last_branch_taken_count >= (uint)InvocationCounter::InterpreterBackwardBranchLimit)\316/* When ProfileInterpreter is on, the backedge_count comes */ \317/* from the methodDataOop, which value does not get reset on */ \318/* the call to frequency_counter_overflow(). To avoid */ \319/* excessive calls to the overflow routine while the method is */ \320/* being compiled, add a second test to make sure the overflow */ \321/* function is called only once every overflow_frequency. 
*/ \322&& (!(mdo_last_branch_taken_count & 1023)); \323} else { \324/* check for overflow of backedge counter */ \325do_OSR = do_OSR \326&& mcs->invocation_counter()->reached_InvocationLimit(mcs->backedge_counter()); \327} \328if (do_OSR) { \329nmethod* osr_nmethod; \330OSR_REQUEST(osr_nmethod, branch_pc); \331if (osr_nmethod != NULL && osr_nmethod->osr_entry_bci() != InvalidOSREntryBci) { \332intptr_t* buf; \333/* Call OSR migration with last java frame only, no checks. */ \334CALL_VM_NAKED_LJF(buf=SharedRuntime::OSR_migration_begin(THREAD)); \335istate->set_msg(do_osr); \336istate->set_osr_buf((address)buf); \337istate->set_osr_entry(osr_nmethod->osr_entry()); \338return; \339} \340} \341} /* UseCompiler ... */ \342SAFEPOINT; \343}344345/*346* For those opcodes that need to have a GC point on a backwards branch347*/348349/*350* Macros for caching and flushing the interpreter state. Some local351* variables need to be flushed out to the frame before we do certain352* things (like pushing frames or becomming gc safe) and some need to353* be recached later (like after popping a frame). 
We could use one354* macro to cache or decache everything, but this would be less then355* optimal because we don't always need to cache or decache everything356* because some things we know are already cached or decached.357*/358#undef DECACHE_TOS359#undef CACHE_TOS360#undef CACHE_PREV_TOS361#define DECACHE_TOS() istate->set_stack(topOfStack);362363#define CACHE_TOS() topOfStack = (intptr_t *)istate->stack();364365#undef DECACHE_PC366#undef CACHE_PC367#define DECACHE_PC() istate->set_bcp(pc);368#define CACHE_PC() pc = istate->bcp();369#define CACHE_CP() cp = istate->constants();370#define CACHE_LOCALS() locals = istate->locals();371#undef CACHE_FRAME372#define CACHE_FRAME()373374// BCI() returns the current bytecode-index.375#undef BCI376#define BCI() ((int)(intptr_t)(pc - (intptr_t)istate->method()->code_base()))377378/*379* CHECK_NULL - Macro for throwing a NullPointerException if the object380* passed is a null ref.381* On some architectures/platforms it should be possible to do this implicitly382*/383#undef CHECK_NULL384#define CHECK_NULL(obj_) \385if ((obj_) == NULL) { \386VM_JAVA_ERROR(vmSymbols::java_lang_NullPointerException(), NULL, note_nullCheck_trap); \387} \388VERIFY_OOP(obj_)389390#define VMdoubleConstZero() 0.0391#define VMdoubleConstOne() 1.0392#define VMlongConstZero() (max_jlong-max_jlong)393#define VMlongConstOne() ((max_jlong-max_jlong)+1)394395/*396* Alignment397*/398#define VMalignWordUp(val) (((uintptr_t)(val) + 3) & ~3)399400// Decache the interpreter state that interpreter modifies directly (i.e. GC is indirect mod)401#define DECACHE_STATE() DECACHE_PC(); DECACHE_TOS();402403// Reload interpreter state after calling the VM or a possible GC404#define CACHE_STATE() \405CACHE_TOS(); \406CACHE_PC(); \407CACHE_CP(); \408CACHE_LOCALS();409410// Call the VM with last java frame only.411#define CALL_VM_NAKED_LJF(func) \412DECACHE_STATE(); \413SET_LAST_JAVA_FRAME(); \414func; \415RESET_LAST_JAVA_FRAME(); \416CACHE_STATE();417418// Call the VM. 
Don't check for pending exceptions.419#define CALL_VM_NOCHECK(func) \420CALL_VM_NAKED_LJF(func) \421if (THREAD->pop_frame_pending() && \422!THREAD->pop_frame_in_process()) { \423goto handle_Pop_Frame; \424} \425if (THREAD->jvmti_thread_state() && \426THREAD->jvmti_thread_state()->is_earlyret_pending()) { \427goto handle_Early_Return; \428}429430// Call the VM and check for pending exceptions431#define CALL_VM(func, label) { \432CALL_VM_NOCHECK(func); \433if (THREAD->has_pending_exception()) goto label; \434}435436/*437* BytecodeInterpreter::run(interpreterState istate)438* BytecodeInterpreter::runWithChecks(interpreterState istate)439*440* The real deal. This is where byte codes actually get interpreted.441* Basically it's a big while loop that iterates until we return from442* the method passed in.443*444* The runWithChecks is used if JVMTI is enabled.445*446*/447#if defined(VM_JVMTI)448void449BytecodeInterpreter::runWithChecks(interpreterState istate) {450#else451void452BytecodeInterpreter::run(interpreterState istate) {453#endif454455// In order to simplify some tests based on switches set at runtime456// we invoke the interpreter a single time after switches are enabled457// and set simpler to to test variables rather than method calls or complex458// boolean expressions.459460static int initialized = 0;461static int checkit = 0;462static intptr_t* c_addr = NULL;463static intptr_t c_value;464465if (checkit && *c_addr != c_value) {466os::breakpoint();467}468#ifdef VM_JVMTI469static bool _jvmti_interp_events = 0;470#endif471472static int _compiling; // (UseCompiler || CountCompiledCalls)473474#ifdef ASSERT475if (istate->_msg != initialize) {476// We have a problem here if we are running with a pre-hsx24 JDK (for example during bootstrap)477// because in that case, EnableInvokeDynamic is true by default but will be later switched off478// if java_lang_invoke_MethodHandle::compute_offsets() detects that the JDK only has the classes479// for the old JSR292 
implementation.480// This leads to a situation where 'istate->_stack_limit' always accounts for481// methodOopDesc::extra_stack_entries() because it is computed in482// CppInterpreterGenerator::generate_compute_interpreter_state() which was generated while483// EnableInvokeDynamic was still true. On the other hand, istate->_method->max_stack() doesn't484// account for extra_stack_entries() anymore because at the time when it is called485// EnableInvokeDynamic was already set to false.486// So we have a second version of the assertion which handles the case where EnableInvokeDynamic was487// switched off because of the wrong classes.488if (EnableInvokeDynamic || FLAG_IS_CMDLINE(EnableInvokeDynamic)) {489assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + 1), "bad stack limit");490} else {491const int extra_stack_entries = Method::extra_stack_entries_for_jsr292;492assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + extra_stack_entries493+ 1), "bad stack limit");494}495#ifndef SHARK496IA32_ONLY(assert(istate->_stack_limit == istate->_thread->last_Java_sp() + 1, "wrong"));497#endif // !SHARK498}499// Verify linkages.500interpreterState l = istate;501do {502assert(l == l->_self_link, "bad link");503l = l->_prev_link;504} while (l != NULL);505// Screwups with stack management usually cause us to overwrite istate506// save a copy so we can verify it.507interpreterState orig = istate;508#endif509510register intptr_t* topOfStack = (intptr_t *)istate->stack(); /* access with STACK macros */511register address pc = istate->bcp();512register jubyte opcode;513register intptr_t* locals = istate->locals();514register ConstantPoolCache* cp = istate->constants(); // method()->constants()->cache()515#ifdef LOTS_OF_REGS516register JavaThread* THREAD = istate->thread();517#else518#undef THREAD519#define THREAD istate->thread()520#endif521522#ifdef USELABELS523const static void* const opclabels_data[256] = 
{524/* 0x00 */ &&opc_nop, &&opc_aconst_null,&&opc_iconst_m1,&&opc_iconst_0,525/* 0x04 */ &&opc_iconst_1,&&opc_iconst_2, &&opc_iconst_3, &&opc_iconst_4,526/* 0x08 */ &&opc_iconst_5,&&opc_lconst_0, &&opc_lconst_1, &&opc_fconst_0,527/* 0x0C */ &&opc_fconst_1,&&opc_fconst_2, &&opc_dconst_0, &&opc_dconst_1,528529/* 0x10 */ &&opc_bipush, &&opc_sipush, &&opc_ldc, &&opc_ldc_w,530/* 0x14 */ &&opc_ldc2_w, &&opc_iload, &&opc_lload, &&opc_fload,531/* 0x18 */ &&opc_dload, &&opc_aload, &&opc_iload_0,&&opc_iload_1,532/* 0x1C */ &&opc_iload_2,&&opc_iload_3,&&opc_lload_0,&&opc_lload_1,533534/* 0x20 */ &&opc_lload_2,&&opc_lload_3,&&opc_fload_0,&&opc_fload_1,535/* 0x24 */ &&opc_fload_2,&&opc_fload_3,&&opc_dload_0,&&opc_dload_1,536/* 0x28 */ &&opc_dload_2,&&opc_dload_3,&&opc_aload_0,&&opc_aload_1,537/* 0x2C */ &&opc_aload_2,&&opc_aload_3,&&opc_iaload, &&opc_laload,538539/* 0x30 */ &&opc_faload, &&opc_daload, &&opc_aaload, &&opc_baload,540/* 0x34 */ &&opc_caload, &&opc_saload, &&opc_istore, &&opc_lstore,541/* 0x38 */ &&opc_fstore, &&opc_dstore, &&opc_astore, &&opc_istore_0,542/* 0x3C */ &&opc_istore_1,&&opc_istore_2,&&opc_istore_3,&&opc_lstore_0,543544/* 0x40 */ &&opc_lstore_1,&&opc_lstore_2,&&opc_lstore_3,&&opc_fstore_0,545/* 0x44 */ &&opc_fstore_1,&&opc_fstore_2,&&opc_fstore_3,&&opc_dstore_0,546/* 0x48 */ &&opc_dstore_1,&&opc_dstore_2,&&opc_dstore_3,&&opc_astore_0,547/* 0x4C */ &&opc_astore_1,&&opc_astore_2,&&opc_astore_3,&&opc_iastore,548549/* 0x50 */ &&opc_lastore,&&opc_fastore,&&opc_dastore,&&opc_aastore,550/* 0x54 */ &&opc_bastore,&&opc_castore,&&opc_sastore,&&opc_pop,551/* 0x58 */ &&opc_pop2, &&opc_dup, &&opc_dup_x1, &&opc_dup_x2,552/* 0x5C */ &&opc_dup2, &&opc_dup2_x1,&&opc_dup2_x2,&&opc_swap,553554/* 0x60 */ &&opc_iadd,&&opc_ladd,&&opc_fadd,&&opc_dadd,555/* 0x64 */ &&opc_isub,&&opc_lsub,&&opc_fsub,&&opc_dsub,556/* 0x68 */ &&opc_imul,&&opc_lmul,&&opc_fmul,&&opc_dmul,557/* 0x6C */ &&opc_idiv,&&opc_ldiv,&&opc_fdiv,&&opc_ddiv,558559/* 0x70 */ &&opc_irem, &&opc_lrem, 
&&opc_frem,&&opc_drem,560/* 0x74 */ &&opc_ineg, &&opc_lneg, &&opc_fneg,&&opc_dneg,561/* 0x78 */ &&opc_ishl, &&opc_lshl, &&opc_ishr,&&opc_lshr,562/* 0x7C */ &&opc_iushr,&&opc_lushr,&&opc_iand,&&opc_land,563564/* 0x80 */ &&opc_ior, &&opc_lor,&&opc_ixor,&&opc_lxor,565/* 0x84 */ &&opc_iinc,&&opc_i2l,&&opc_i2f, &&opc_i2d,566/* 0x88 */ &&opc_l2i, &&opc_l2f,&&opc_l2d, &&opc_f2i,567/* 0x8C */ &&opc_f2l, &&opc_f2d,&&opc_d2i, &&opc_d2l,568569/* 0x90 */ &&opc_d2f, &&opc_i2b, &&opc_i2c, &&opc_i2s,570/* 0x94 */ &&opc_lcmp, &&opc_fcmpl,&&opc_fcmpg,&&opc_dcmpl,571/* 0x98 */ &&opc_dcmpg,&&opc_ifeq, &&opc_ifne, &&opc_iflt,572/* 0x9C */ &&opc_ifge, &&opc_ifgt, &&opc_ifle, &&opc_if_icmpeq,573574/* 0xA0 */ &&opc_if_icmpne,&&opc_if_icmplt,&&opc_if_icmpge, &&opc_if_icmpgt,575/* 0xA4 */ &&opc_if_icmple,&&opc_if_acmpeq,&&opc_if_acmpne, &&opc_goto,576/* 0xA8 */ &&opc_jsr, &&opc_ret, &&opc_tableswitch,&&opc_lookupswitch,577/* 0xAC */ &&opc_ireturn, &&opc_lreturn, &&opc_freturn, &&opc_dreturn,578579/* 0xB0 */ &&opc_areturn, &&opc_return, &&opc_getstatic, &&opc_putstatic,580/* 0xB4 */ &&opc_getfield, &&opc_putfield, &&opc_invokevirtual,&&opc_invokespecial,581/* 0xB8 */ &&opc_invokestatic,&&opc_invokeinterface,&&opc_invokedynamic,&&opc_new,582/* 0xBC */ &&opc_newarray, &&opc_anewarray, &&opc_arraylength, &&opc_athrow,583584/* 0xC0 */ &&opc_checkcast, &&opc_instanceof, &&opc_monitorenter, &&opc_monitorexit,585/* 0xC4 */ &&opc_wide, &&opc_multianewarray, &&opc_ifnull, &&opc_ifnonnull,586/* 0xC8 */ &&opc_goto_w, &&opc_jsr_w, &&opc_breakpoint, &&opc_default,587/* 0xCC */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,588589/* 0xD0 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,590/* 0xD4 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,591/* 0xD8 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,592/* 0xDC */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,593594/* 0xE0 */ &&opc_default, &&opc_default, &&opc_default, 
&&opc_default,595/* 0xE4 */ &&opc_default, &&opc_default, &&opc_fast_aldc, &&opc_fast_aldc_w,596/* 0xE8 */ &&opc_return_register_finalizer,597&&opc_invokehandle, &&opc_default, &&opc_default,598/* 0xEC */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,599600/* 0xF0 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,601/* 0xF4 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,602/* 0xF8 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default,603/* 0xFC */ &&opc_default, &&opc_default, &&opc_default, &&opc_default604};605register uintptr_t *dispatch_table = (uintptr_t*)&opclabels_data[0];606#endif /* USELABELS */607608#ifdef ASSERT609// this will trigger a VERIFY_OOP on entry610if (istate->msg() != initialize && ! METHOD->is_static()) {611oop rcvr = LOCALS_OBJECT(0);612VERIFY_OOP(rcvr);613}614#endif615// #define HACK616#ifdef HACK617bool interesting = false;618#endif // HACK619620/* QQQ this should be a stack method so we don't know actual direction */621guarantee(istate->msg() == initialize ||622topOfStack >= istate->stack_limit() &&623topOfStack < istate->stack_base(),624"Stack top out of range");625626#ifdef CC_INTERP_PROFILE627// MethodData's last branch taken count.628uint mdo_last_branch_taken_count = 0;629#else630const uint mdo_last_branch_taken_count = 0;631#endif632633switch (istate->msg()) {634case initialize: {635if (initialized++) ShouldNotReachHere(); // Only one initialize call.636_compiling = (UseCompiler || CountCompiledCalls);637#ifdef VM_JVMTI638_jvmti_interp_events = JvmtiExport::can_post_interpreter_events();639#endif640return;641}642break;643case method_entry: {644THREAD->set_do_not_unlock();645// count invocations646assert(initialized, "Interpreter not initialized");647if (_compiling) {648MethodCounters* mcs;649GET_METHOD_COUNTERS(mcs);650if (ProfileInterpreter) {651METHOD->increment_interpreter_invocation_count(THREAD);652}653mcs->invocation_counter()->increment();654if 
(mcs->invocation_counter()->reached_InvocationLimit(mcs->backedge_counter())) {655CALL_VM((void)InterpreterRuntime::frequency_counter_overflow(THREAD, NULL), handle_exception);656// We no longer retry on a counter overflow.657}658// Get or create profile data. Check for pending (async) exceptions.659BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);660SAFEPOINT;661}662663if ((istate->_stack_base - istate->_stack_limit) != istate->method()->max_stack() + 1) {664// initialize665os::breakpoint();666}667668#ifdef HACK669{670ResourceMark rm;671char *method_name = istate->method()->name_and_sig_as_C_string();672if (strstr(method_name, "runThese$TestRunner.run()V") != NULL) {673tty->print_cr("entering: depth %d bci: %d",674(istate->_stack_base - istate->_stack),675istate->_bcp - istate->_method->code_base());676interesting = true;677}678}679#endif // HACK680681// Lock method if synchronized.682if (METHOD->is_synchronized()) {683// oop rcvr = locals[0].j.r;684oop rcvr;685if (METHOD->is_static()) {686rcvr = METHOD->constants()->pool_holder()->java_mirror();687} else {688rcvr = LOCALS_OBJECT(0);689VERIFY_OOP(rcvr);690}691// The initial monitor is ours for the taking.692// Monitor not filled in frame manager any longer as this caused race condition with biased locking.693BasicObjectLock* mon = &istate->monitor_base()[-1];694mon->set_obj(rcvr);695bool success = false;696uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;697markOop mark = rcvr->mark();698intptr_t hash = (intptr_t) markOopDesc::no_hash;699// Implies UseBiasedLocking.700if (mark->has_bias_pattern()) {701uintptr_t thread_ident;702uintptr_t anticipated_bias_locking_value;703thread_ident = (uintptr_t)istate->thread();704anticipated_bias_locking_value =705(((uintptr_t)rcvr->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &706~((uintptr_t) markOopDesc::age_mask_in_place);707708if (anticipated_bias_locking_value == 0) {709// Already biased towards this thread, nothing to 
do.710if (PrintBiasedLockingStatistics) {711(* BiasedLocking::biased_lock_entry_count_addr())++;712}713success = true;714} else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {715// Try to revoke bias.716markOop header = rcvr->klass()->prototype_header();717if (hash != markOopDesc::no_hash) {718header = header->copy_set_hash(hash);719}720if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), mark) == mark) {721if (PrintBiasedLockingStatistics)722(*BiasedLocking::revoked_lock_entry_count_addr())++;723}724} else if ((anticipated_bias_locking_value & epoch_mask_in_place) != 0) {725// Try to rebias.726markOop new_header = (markOop) ( (intptr_t) rcvr->klass()->prototype_header() | thread_ident);727if (hash != markOopDesc::no_hash) {728new_header = new_header->copy_set_hash(hash);729}730if (Atomic::cmpxchg_ptr((void*)new_header, rcvr->mark_addr(), mark) == mark) {731if (PrintBiasedLockingStatistics) {732(* BiasedLocking::rebiased_lock_entry_count_addr())++;733}734} else {735CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);736}737success = true;738} else {739// Try to bias towards thread in case object is anonymously biased.740markOop header = (markOop) ((uintptr_t) mark &741((uintptr_t)markOopDesc::biased_lock_mask_in_place |742(uintptr_t)markOopDesc::age_mask_in_place | epoch_mask_in_place));743if (hash != markOopDesc::no_hash) {744header = header->copy_set_hash(hash);745}746markOop new_header = (markOop) ((uintptr_t) header | thread_ident);747// Debugging hint.748DEBUG_ONLY(mon->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)749if (Atomic::cmpxchg_ptr((void*)new_header, rcvr->mark_addr(), header) == header) {750if (PrintBiasedLockingStatistics) {751(* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;752}753} else {754CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);755}756success = true;757}758}759760// Traditional lightweight locking.761if (!success) 
{762markOop displaced = rcvr->mark()->set_unlocked();763mon->lock()->set_displaced_header(displaced);764bool call_vm = UseHeavyMonitors;765if (call_vm || Atomic::cmpxchg_ptr(mon, rcvr->mark_addr(), displaced) != displaced) {766// Is it simple recursive case?767if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {768mon->lock()->set_displaced_header(NULL);769} else {770CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);771}772}773}774}775THREAD->clr_do_not_unlock();776777// Notify jvmti778#ifdef VM_JVMTI779if (_jvmti_interp_events) {780// Whenever JVMTI puts a thread in interp_only_mode, method781// entry/exit events are sent for that thread to track stack depth.782if (THREAD->is_interp_only_mode()) {783CALL_VM(InterpreterRuntime::post_method_entry(THREAD),784handle_exception);785}786}787#endif /* VM_JVMTI */788789goto run;790}791792case popping_frame: {793// returned from a java call to pop the frame, restart the call794// clear the message so we don't confuse ourselves later795assert(THREAD->pop_frame_in_process(), "wrong frame pop state");796istate->set_msg(no_request);797if (_compiling) {798// Set MDX back to the ProfileData of the invoke bytecode that will be799// restarted.800SET_MDX(NULL);801BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);802}803THREAD->clr_pop_frame_in_process();804goto run;805}806807case method_resume: {808if ((istate->_stack_base - istate->_stack_limit) != istate->method()->max_stack() + 1) {809// resume810os::breakpoint();811}812#ifdef HACK813{814ResourceMark rm;815char *method_name = istate->method()->name_and_sig_as_C_string();816if (strstr(method_name, "runThese$TestRunner.run()V") != NULL) {817tty->print_cr("resume: depth %d bci: %d",818(istate->_stack_base - istate->_stack) ,819istate->_bcp - istate->_method->code_base());820interesting = true;821}822}823#endif // HACK824// returned from a java call, continue executing.825if (THREAD->pop_frame_pending() && 
!THREAD->pop_frame_in_process()) {826goto handle_Pop_Frame;827}828if (THREAD->jvmti_thread_state() &&829THREAD->jvmti_thread_state()->is_earlyret_pending()) {830goto handle_Early_Return;831}832833if (THREAD->has_pending_exception()) goto handle_exception;834// Update the pc by the saved amount of the invoke bytecode size835UPDATE_PC(istate->bcp_advance());836837if (_compiling) {838// Get or create profile data. Check for pending (async) exceptions.839BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);840}841goto run;842}843844case deopt_resume2: {845// Returned from an opcode that will reexecute. Deopt was846// a result of a PopFrame request.847//848849if (_compiling) {850// Get or create profile data. Check for pending (async) exceptions.851BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);852}853goto run;854}855856case deopt_resume: {857// Returned from an opcode that has completed. The stack has858// the result all we need to do is skip across the bytecode859// and continue (assuming there is no exception pending)860//861// compute continuation length862//863// Note: it is possible to deopt at a return_register_finalizer opcode864// because this requires entering the vm to do the registering. While the865// opcode is complete we can't advance because there are no more opcodes866// much like trying to deopt at a poll return. In that has we simply867// get out of here868//869if ( Bytecodes::code_at(METHOD, pc) == Bytecodes::_return_register_finalizer) {870// this will do the right thing even if an exception is pending.871goto handle_return;872}873UPDATE_PC(Bytecodes::length_at(METHOD, pc));874if (THREAD->has_pending_exception()) goto handle_exception;875876if (_compiling) {877// Get or create profile data. 
Check for pending (async) exceptions.878BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);879}880goto run;881}882case got_monitors: {883// continue locking now that we have a monitor to use884// we expect to find newly allocated monitor at the "top" of the monitor stack.885oop lockee = STACK_OBJECT(-1);886VERIFY_OOP(lockee);887// derefing's lockee ought to provoke implicit null check888// find a free monitor889BasicObjectLock* entry = (BasicObjectLock*) istate->stack_base();890assert(entry->obj() == NULL, "Frame manager didn't allocate the monitor");891entry->set_obj(lockee);892bool success = false;893uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;894895markOop mark = lockee->mark();896intptr_t hash = (intptr_t) markOopDesc::no_hash;897// implies UseBiasedLocking898if (mark->has_bias_pattern()) {899uintptr_t thread_ident;900uintptr_t anticipated_bias_locking_value;901thread_ident = (uintptr_t)istate->thread();902anticipated_bias_locking_value =903(((uintptr_t)lockee->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &904~((uintptr_t) markOopDesc::age_mask_in_place);905906if (anticipated_bias_locking_value == 0) {907// already biased towards this thread, nothing to do908if (PrintBiasedLockingStatistics) {909(* BiasedLocking::biased_lock_entry_count_addr())++;910}911success = true;912} else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {913// try revoke bias914markOop header = lockee->klass()->prototype_header();915if (hash != markOopDesc::no_hash) {916header = header->copy_set_hash(hash);917}918if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), mark) == mark) {919if (PrintBiasedLockingStatistics) {920(*BiasedLocking::revoked_lock_entry_count_addr())++;921}922}923} else if ((anticipated_bias_locking_value & epoch_mask_in_place) !=0) {924// try rebias925markOop new_header = (markOop) ( (intptr_t) lockee->klass()->prototype_header() | thread_ident);926if (hash != 
markOopDesc::no_hash) {927new_header = new_header->copy_set_hash(hash);928}929if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), mark) == mark) {930if (PrintBiasedLockingStatistics) {931(* BiasedLocking::rebiased_lock_entry_count_addr())++;932}933} else {934CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);935}936success = true;937} else {938// try to bias towards thread in case object is anonymously biased939markOop header = (markOop) ((uintptr_t) mark & ((uintptr_t)markOopDesc::biased_lock_mask_in_place |940(uintptr_t)markOopDesc::age_mask_in_place | epoch_mask_in_place));941if (hash != markOopDesc::no_hash) {942header = header->copy_set_hash(hash);943}944markOop new_header = (markOop) ((uintptr_t) header | thread_ident);945// debugging hint946DEBUG_ONLY(entry->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)947if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), header) == header) {948if (PrintBiasedLockingStatistics) {949(* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;950}951} else {952CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);953}954success = true;955}956}957958// traditional lightweight locking959if (!success) {960markOop displaced = lockee->mark()->set_unlocked();961entry->lock()->set_displaced_header(displaced);962bool call_vm = UseHeavyMonitors;963if (call_vm || Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {964// Is it simple recursive case?965if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {966entry->lock()->set_displaced_header(NULL);967} else {968CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);969}970}971}972UPDATE_PC_AND_TOS(1, -1);973goto run;974}975default: {976fatal("Unexpected message from frame manager");977}978}979980run:981982DO_UPDATE_INSTRUCTION_COUNT(*pc)983DEBUGGER_SINGLE_STEP_NOTIFY();984#ifdef PREFETCH_OPCCODE985opcode = *pc; /* prefetch first 
opcode */986#endif987988#ifndef USELABELS989while (1)990#endif991{992#ifndef PREFETCH_OPCCODE993opcode = *pc;994#endif995// Seems like this happens twice per opcode. At worst this is only996// need at entry to the loop.997// DEBUGGER_SINGLE_STEP_NOTIFY();998/* Using this labels avoids double breakpoints when quickening and999* when returing from transition frames.1000*/1001opcode_switch:1002assert(istate == orig, "Corrupted istate");1003/* QQQ Hmm this has knowledge of direction, ought to be a stack method */1004assert(topOfStack >= istate->stack_limit(), "Stack overrun");1005assert(topOfStack < istate->stack_base(), "Stack underrun");10061007#ifdef USELABELS1008DISPATCH(opcode);1009#else1010switch (opcode)1011#endif1012{1013CASE(_nop):1014UPDATE_PC_AND_CONTINUE(1);10151016/* Push miscellaneous constants onto the stack. */10171018CASE(_aconst_null):1019SET_STACK_OBJECT(NULL, 0);1020UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);10211022#undef OPC_CONST_n1023#define OPC_CONST_n(opcode, const_type, value) \1024CASE(opcode): \1025SET_STACK_ ## const_type(value, 0); \1026UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);10271028OPC_CONST_n(_iconst_m1, INT, -1);1029OPC_CONST_n(_iconst_0, INT, 0);1030OPC_CONST_n(_iconst_1, INT, 1);1031OPC_CONST_n(_iconst_2, INT, 2);1032OPC_CONST_n(_iconst_3, INT, 3);1033OPC_CONST_n(_iconst_4, INT, 4);1034OPC_CONST_n(_iconst_5, INT, 5);1035OPC_CONST_n(_fconst_0, FLOAT, 0.0);1036OPC_CONST_n(_fconst_1, FLOAT, 1.0);1037OPC_CONST_n(_fconst_2, FLOAT, 2.0);10381039#undef OPC_CONST2_n1040#define OPC_CONST2_n(opcname, value, key, kind) \1041CASE(_##opcname): \1042{ \1043SET_STACK_ ## kind(VM##key##Const##value(), 1); \1044UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2); \1045}1046OPC_CONST2_n(dconst_0, Zero, double, DOUBLE);1047OPC_CONST2_n(dconst_1, One, double, DOUBLE);1048OPC_CONST2_n(lconst_0, Zero, long, LONG);1049OPC_CONST2_n(lconst_1, One, long, LONG);10501051/* Load constant from constant pool: */10521053/* Push a 1-byte signed integer value onto the stack. 
*/1054CASE(_bipush):1055SET_STACK_INT((jbyte)(pc[1]), 0);1056UPDATE_PC_AND_TOS_AND_CONTINUE(2, 1);10571058/* Push a 2-byte signed integer constant onto the stack. */1059CASE(_sipush):1060SET_STACK_INT((int16_t)Bytes::get_Java_u2(pc + 1), 0);1061UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);10621063/* load from local variable */10641065CASE(_aload):1066VERIFY_OOP(LOCALS_OBJECT(pc[1]));1067SET_STACK_OBJECT(LOCALS_OBJECT(pc[1]), 0);1068UPDATE_PC_AND_TOS_AND_CONTINUE(2, 1);10691070CASE(_iload):1071CASE(_fload):1072SET_STACK_SLOT(LOCALS_SLOT(pc[1]), 0);1073UPDATE_PC_AND_TOS_AND_CONTINUE(2, 1);10741075CASE(_lload):1076SET_STACK_LONG_FROM_ADDR(LOCALS_LONG_AT(pc[1]), 1);1077UPDATE_PC_AND_TOS_AND_CONTINUE(2, 2);10781079CASE(_dload):1080SET_STACK_DOUBLE_FROM_ADDR(LOCALS_DOUBLE_AT(pc[1]), 1);1081UPDATE_PC_AND_TOS_AND_CONTINUE(2, 2);10821083#undef OPC_LOAD_n1084#define OPC_LOAD_n(num) \1085CASE(_aload_##num): \1086VERIFY_OOP(LOCALS_OBJECT(num)); \1087SET_STACK_OBJECT(LOCALS_OBJECT(num), 0); \1088UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1); \1089\1090CASE(_iload_##num): \1091CASE(_fload_##num): \1092SET_STACK_SLOT(LOCALS_SLOT(num), 0); \1093UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1); \1094\1095CASE(_lload_##num): \1096SET_STACK_LONG_FROM_ADDR(LOCALS_LONG_AT(num), 1); \1097UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2); \1098CASE(_dload_##num): \1099SET_STACK_DOUBLE_FROM_ADDR(LOCALS_DOUBLE_AT(num), 1); \1100UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);11011102OPC_LOAD_n(0);1103OPC_LOAD_n(1);1104OPC_LOAD_n(2);1105OPC_LOAD_n(3);11061107/* store to a local variable */11081109CASE(_astore):1110astore(topOfStack, -1, locals, pc[1]);1111UPDATE_PC_AND_TOS_AND_CONTINUE(2, -1);11121113CASE(_istore):1114CASE(_fstore):1115SET_LOCALS_SLOT(STACK_SLOT(-1), pc[1]);1116UPDATE_PC_AND_TOS_AND_CONTINUE(2, -1);11171118CASE(_lstore):1119SET_LOCALS_LONG(STACK_LONG(-1), pc[1]);1120UPDATE_PC_AND_TOS_AND_CONTINUE(2, -2);11211122CASE(_dstore):1123SET_LOCALS_DOUBLE(STACK_DOUBLE(-1), pc[1]);1124UPDATE_PC_AND_TOS_AND_CONTINUE(2, 
-2);11251126CASE(_wide): {1127uint16_t reg = Bytes::get_Java_u2(pc + 2);11281129opcode = pc[1];11301131// Wide and it's sub-bytecode are counted as separate instructions. If we1132// don't account for this here, the bytecode trace skips the next bytecode.1133DO_UPDATE_INSTRUCTION_COUNT(opcode);11341135switch(opcode) {1136case Bytecodes::_aload:1137VERIFY_OOP(LOCALS_OBJECT(reg));1138SET_STACK_OBJECT(LOCALS_OBJECT(reg), 0);1139UPDATE_PC_AND_TOS_AND_CONTINUE(4, 1);11401141case Bytecodes::_iload:1142case Bytecodes::_fload:1143SET_STACK_SLOT(LOCALS_SLOT(reg), 0);1144UPDATE_PC_AND_TOS_AND_CONTINUE(4, 1);11451146case Bytecodes::_lload:1147SET_STACK_LONG_FROM_ADDR(LOCALS_LONG_AT(reg), 1);1148UPDATE_PC_AND_TOS_AND_CONTINUE(4, 2);11491150case Bytecodes::_dload:1151SET_STACK_DOUBLE_FROM_ADDR(LOCALS_LONG_AT(reg), 1);1152UPDATE_PC_AND_TOS_AND_CONTINUE(4, 2);11531154case Bytecodes::_astore:1155astore(topOfStack, -1, locals, reg);1156UPDATE_PC_AND_TOS_AND_CONTINUE(4, -1);11571158case Bytecodes::_istore:1159case Bytecodes::_fstore:1160SET_LOCALS_SLOT(STACK_SLOT(-1), reg);1161UPDATE_PC_AND_TOS_AND_CONTINUE(4, -1);11621163case Bytecodes::_lstore:1164SET_LOCALS_LONG(STACK_LONG(-1), reg);1165UPDATE_PC_AND_TOS_AND_CONTINUE(4, -2);11661167case Bytecodes::_dstore:1168SET_LOCALS_DOUBLE(STACK_DOUBLE(-1), reg);1169UPDATE_PC_AND_TOS_AND_CONTINUE(4, -2);11701171case Bytecodes::_iinc: {1172int16_t offset = (int16_t)Bytes::get_Java_u2(pc+4);1173// Be nice to see what this generates.... 
QQQ1174SET_LOCALS_INT(LOCALS_INT(reg) + offset, reg);1175UPDATE_PC_AND_CONTINUE(6);1176}1177case Bytecodes::_ret:1178// Profile ret.1179BI_PROFILE_UPDATE_RET(/*bci=*/((int)(intptr_t)(LOCALS_ADDR(reg))));1180// Now, update the pc.1181pc = istate->method()->code_base() + (intptr_t)(LOCALS_ADDR(reg));1182UPDATE_PC_AND_CONTINUE(0);1183default:1184VM_JAVA_ERROR(vmSymbols::java_lang_InternalError(), "undefined opcode", note_no_trap);1185}1186}118711881189#undef OPC_STORE_n1190#define OPC_STORE_n(num) \1191CASE(_astore_##num): \1192astore(topOfStack, -1, locals, num); \1193UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \1194CASE(_istore_##num): \1195CASE(_fstore_##num): \1196SET_LOCALS_SLOT(STACK_SLOT(-1), num); \1197UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);11981199OPC_STORE_n(0);1200OPC_STORE_n(1);1201OPC_STORE_n(2);1202OPC_STORE_n(3);12031204#undef OPC_DSTORE_n1205#define OPC_DSTORE_n(num) \1206CASE(_dstore_##num): \1207SET_LOCALS_DOUBLE(STACK_DOUBLE(-1), num); \1208UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2); \1209CASE(_lstore_##num): \1210SET_LOCALS_LONG(STACK_LONG(-1), num); \1211UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2);12121213OPC_DSTORE_n(0);1214OPC_DSTORE_n(1);1215OPC_DSTORE_n(2);1216OPC_DSTORE_n(3);12171218/* stack pop, dup, and insert opcodes */121912201221CASE(_pop): /* Discard the top item on the stack */1222UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);122312241225CASE(_pop2): /* Discard the top 2 items on the stack */1226UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2);122712281229CASE(_dup): /* Duplicate the top item on the stack */1230dup(topOfStack);1231UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);12321233CASE(_dup2): /* Duplicate the top 2 items on the stack */1234dup2(topOfStack);1235UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);12361237CASE(_dup_x1): /* insert top word two down */1238dup_x1(topOfStack);1239UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);12401241CASE(_dup_x2): /* insert top word three down */1242dup_x2(topOfStack);1243UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);12441245CASE(_dup2_x1): /* insert top 2 slots 
three down */1246dup2_x1(topOfStack);1247UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);12481249CASE(_dup2_x2): /* insert top 2 slots four down */1250dup2_x2(topOfStack);1251UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);12521253CASE(_swap): { /* swap top two elements on the stack */1254swap(topOfStack);1255UPDATE_PC_AND_CONTINUE(1);1256}12571258/* Perform various binary integer operations */12591260#undef OPC_INT_BINARY1261#define OPC_INT_BINARY(opcname, opname, test) \1262CASE(_i##opcname): \1263if (test && (STACK_INT(-1) == 0)) { \1264VM_JAVA_ERROR(vmSymbols::java_lang_ArithmeticException(), \1265"/ by zero", note_div0Check_trap); \1266} \1267SET_STACK_INT(VMint##opname(STACK_INT(-2), \1268STACK_INT(-1)), \1269-2); \1270UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \1271CASE(_l##opcname): \1272{ \1273if (test) { \1274jlong l1 = STACK_LONG(-1); \1275if (VMlongEqz(l1)) { \1276VM_JAVA_ERROR(vmSymbols::java_lang_ArithmeticException(), \1277"/ by long zero", note_div0Check_trap); \1278} \1279} \1280/* First long at (-1,-2) next long at (-3,-4) */ \1281SET_STACK_LONG(VMlong##opname(STACK_LONG(-3), \1282STACK_LONG(-1)), \1283-3); \1284UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2); \1285}12861287OPC_INT_BINARY(add, Add, 0);1288OPC_INT_BINARY(sub, Sub, 0);1289OPC_INT_BINARY(mul, Mul, 0);1290OPC_INT_BINARY(and, And, 0);1291OPC_INT_BINARY(or, Or, 0);1292OPC_INT_BINARY(xor, Xor, 0);1293OPC_INT_BINARY(div, Div, 1);1294OPC_INT_BINARY(rem, Rem, 1);129512961297/* Perform various binary floating number operations */1298/* On some machine/platforms/compilers div zero check can be implicit */12991300#undef OPC_FLOAT_BINARY1301#define OPC_FLOAT_BINARY(opcname, opname) \1302CASE(_d##opcname): { \1303SET_STACK_DOUBLE(VMdouble##opname(STACK_DOUBLE(-3), \1304STACK_DOUBLE(-1)), \1305-3); \1306UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2); \1307} \1308CASE(_f##opcname): \1309SET_STACK_FLOAT(VMfloat##opname(STACK_FLOAT(-2), \1310STACK_FLOAT(-1)), \1311-2); \1312UPDATE_PC_AND_TOS_AND_CONTINUE(1, 
-1);131313141315OPC_FLOAT_BINARY(add, Add);1316OPC_FLOAT_BINARY(sub, Sub);1317OPC_FLOAT_BINARY(mul, Mul);1318OPC_FLOAT_BINARY(div, Div);1319OPC_FLOAT_BINARY(rem, Rem);13201321/* Shift operations1322* Shift left int and long: ishl, lshl1323* Logical shift right int and long w/zero extension: iushr, lushr1324* Arithmetic shift right int and long w/sign extension: ishr, lshr1325*/13261327#undef OPC_SHIFT_BINARY1328#define OPC_SHIFT_BINARY(opcname, opname) \1329CASE(_i##opcname): \1330SET_STACK_INT(VMint##opname(STACK_INT(-2), \1331STACK_INT(-1)), \1332-2); \1333UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \1334CASE(_l##opcname): \1335{ \1336SET_STACK_LONG(VMlong##opname(STACK_LONG(-2), \1337STACK_INT(-1)), \1338-2); \1339UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \1340}13411342OPC_SHIFT_BINARY(shl, Shl);1343OPC_SHIFT_BINARY(shr, Shr);1344OPC_SHIFT_BINARY(ushr, Ushr);13451346/* Increment local variable by constant */1347CASE(_iinc):1348{1349// locals[pc[1]].j.i += (jbyte)(pc[2]);1350SET_LOCALS_INT(LOCALS_INT(pc[1]) + (jbyte)(pc[2]), pc[1]);1351UPDATE_PC_AND_CONTINUE(3);1352}13531354/* negate the value on the top of the stack */13551356CASE(_ineg):1357SET_STACK_INT(VMintNeg(STACK_INT(-1)), -1);1358UPDATE_PC_AND_CONTINUE(1);13591360CASE(_fneg):1361SET_STACK_FLOAT(VMfloatNeg(STACK_FLOAT(-1)), -1);1362UPDATE_PC_AND_CONTINUE(1);13631364CASE(_lneg):1365{1366SET_STACK_LONG(VMlongNeg(STACK_LONG(-1)), -1);1367UPDATE_PC_AND_CONTINUE(1);1368}13691370CASE(_dneg):1371{1372SET_STACK_DOUBLE(VMdoubleNeg(STACK_DOUBLE(-1)), -1);1373UPDATE_PC_AND_CONTINUE(1);1374}13751376/* Conversion operations */13771378CASE(_i2f): /* convert top of stack int to float */1379SET_STACK_FLOAT(VMint2Float(STACK_INT(-1)), -1);1380UPDATE_PC_AND_CONTINUE(1);13811382CASE(_i2l): /* convert top of stack int to long */1383{1384// this is ugly QQQ1385jlong r = VMint2Long(STACK_INT(-1));1386MORE_STACK(-1); // Pop1387SET_STACK_LONG(r, 1);13881389UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);1390}13911392CASE(_i2d): /* convert top of 
stack int to double */1393{1394// this is ugly QQQ (why cast to jlong?? )1395jdouble r = (jlong)STACK_INT(-1);1396MORE_STACK(-1); // Pop1397SET_STACK_DOUBLE(r, 1);13981399UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);1400}14011402CASE(_l2i): /* convert top of stack long to int */1403{1404jint r = VMlong2Int(STACK_LONG(-1));1405MORE_STACK(-2); // Pop1406SET_STACK_INT(r, 0);1407UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);1408}14091410CASE(_l2f): /* convert top of stack long to float */1411{1412jlong r = STACK_LONG(-1);1413MORE_STACK(-2); // Pop1414SET_STACK_FLOAT(VMlong2Float(r), 0);1415UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);1416}14171418CASE(_l2d): /* convert top of stack long to double */1419{1420jlong r = STACK_LONG(-1);1421MORE_STACK(-2); // Pop1422SET_STACK_DOUBLE(VMlong2Double(r), 1);1423UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);1424}14251426CASE(_f2i): /* Convert top of stack float to int */1427SET_STACK_INT(SharedRuntime::f2i(STACK_FLOAT(-1)), -1);1428UPDATE_PC_AND_CONTINUE(1);14291430CASE(_f2l): /* convert top of stack float to long */1431{1432jlong r = SharedRuntime::f2l(STACK_FLOAT(-1));1433MORE_STACK(-1); // POP1434SET_STACK_LONG(r, 1);1435UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);1436}14371438CASE(_f2d): /* convert top of stack float to double */1439{1440jfloat f;1441jdouble r;1442f = STACK_FLOAT(-1);1443r = (jdouble) f;1444MORE_STACK(-1); // POP1445SET_STACK_DOUBLE(r, 1);1446UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);1447}14481449CASE(_d2i): /* convert top of stack double to int */1450{1451jint r1 = SharedRuntime::d2i(STACK_DOUBLE(-1));1452MORE_STACK(-2);1453SET_STACK_INT(r1, 0);1454UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);1455}14561457CASE(_d2f): /* convert top of stack double to float */1458{1459jfloat r1 = VMdouble2Float(STACK_DOUBLE(-1));1460MORE_STACK(-2);1461SET_STACK_FLOAT(r1, 0);1462UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);1463}14641465CASE(_d2l): /* convert top of stack double to long */1466{1467jlong r1 = SharedRuntime::d2l(STACK_DOUBLE(-1));1468MORE_STACK(-2);1469SET_STACK_LONG(r1, 
1);1470UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);1471}14721473CASE(_i2b):1474SET_STACK_INT(VMint2Byte(STACK_INT(-1)), -1);1475UPDATE_PC_AND_CONTINUE(1);14761477CASE(_i2c):1478SET_STACK_INT(VMint2Char(STACK_INT(-1)), -1);1479UPDATE_PC_AND_CONTINUE(1);14801481CASE(_i2s):1482SET_STACK_INT(VMint2Short(STACK_INT(-1)), -1);1483UPDATE_PC_AND_CONTINUE(1);14841485/* comparison operators */148614871488#define COMPARISON_OP(name, comparison) \1489CASE(_if_icmp##name): { \1490const bool cmp = (STACK_INT(-2) comparison STACK_INT(-1)); \1491int skip = cmp \1492? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \1493address branch_pc = pc; \1494/* Profile branch. */ \1495BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \1496UPDATE_PC_AND_TOS(skip, -2); \1497DO_BACKEDGE_CHECKS(skip, branch_pc); \1498CONTINUE; \1499} \1500CASE(_if##name): { \1501const bool cmp = (STACK_INT(-1) comparison 0); \1502int skip = cmp \1503? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \1504address branch_pc = pc; \1505/* Profile branch. */ \1506BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \1507UPDATE_PC_AND_TOS(skip, -1); \1508DO_BACKEDGE_CHECKS(skip, branch_pc); \1509CONTINUE; \1510}15111512#define COMPARISON_OP2(name, comparison) \1513COMPARISON_OP(name, comparison) \1514CASE(_if_acmp##name): { \1515const bool cmp = (STACK_OBJECT(-2) comparison STACK_OBJECT(-1)); \1516int skip = cmp \1517? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \1518address branch_pc = pc; \1519/* Profile branch. */ \1520BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \1521UPDATE_PC_AND_TOS(skip, -2); \1522DO_BACKEDGE_CHECKS(skip, branch_pc); \1523CONTINUE; \1524}15251526#define NULL_COMPARISON_NOT_OP(name) \1527CASE(_if##name): { \1528const bool cmp = (!(STACK_OBJECT(-1) == NULL)); \1529int skip = cmp \1530? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \1531address branch_pc = pc; \1532/* Profile branch. 
*/ \1533BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \1534UPDATE_PC_AND_TOS(skip, -1); \1535DO_BACKEDGE_CHECKS(skip, branch_pc); \1536CONTINUE; \1537}15381539#define NULL_COMPARISON_OP(name) \1540CASE(_if##name): { \1541const bool cmp = ((STACK_OBJECT(-1) == NULL)); \1542int skip = cmp \1543? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \1544address branch_pc = pc; \1545/* Profile branch. */ \1546BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \1547UPDATE_PC_AND_TOS(skip, -1); \1548DO_BACKEDGE_CHECKS(skip, branch_pc); \1549CONTINUE; \1550}1551COMPARISON_OP(lt, <);1552COMPARISON_OP(gt, >);1553COMPARISON_OP(le, <=);1554COMPARISON_OP(ge, >=);1555COMPARISON_OP2(eq, ==); /* include ref comparison */1556COMPARISON_OP2(ne, !=); /* include ref comparison */1557NULL_COMPARISON_OP(null);1558NULL_COMPARISON_NOT_OP(nonnull);15591560/* Goto pc at specified offset in switch table. */15611562CASE(_tableswitch): {1563jint* lpc = (jint*)VMalignWordUp(pc+1);1564int32_t key = STACK_INT(-1);1565int32_t low = Bytes::get_Java_u4((address)&lpc[1]);1566int32_t high = Bytes::get_Java_u4((address)&lpc[2]);1567int32_t skip;1568key -= low;1569if (((uint32_t) key > (uint32_t)(high - low))) {1570key = -1;1571skip = Bytes::get_Java_u4((address)&lpc[0]);1572} else {1573skip = Bytes::get_Java_u4((address)&lpc[key + 3]);1574}1575// Profile switch.1576BI_PROFILE_UPDATE_SWITCH(/*switch_index=*/key);1577// Does this really need a full backedge check (osr)?1578address branch_pc = pc;1579UPDATE_PC_AND_TOS(skip, -1);1580DO_BACKEDGE_CHECKS(skip, branch_pc);1581CONTINUE;1582}15831584/* Goto pc whose table entry matches specified key. 
*/15851586CASE(_lookupswitch): {1587jint* lpc = (jint*)VMalignWordUp(pc+1);1588int32_t key = STACK_INT(-1);1589int32_t skip = Bytes::get_Java_u4((address) lpc); /* default amount */1590// Remember index.1591int index = -1;1592int newindex = 0;1593int32_t npairs = Bytes::get_Java_u4((address) &lpc[1]);1594while (--npairs >= 0) {1595lpc += 2;1596if (key == (int32_t)Bytes::get_Java_u4((address)lpc)) {1597skip = Bytes::get_Java_u4((address)&lpc[1]);1598index = newindex;1599break;1600}1601newindex += 1;1602}1603// Profile switch.1604BI_PROFILE_UPDATE_SWITCH(/*switch_index=*/index);1605address branch_pc = pc;1606UPDATE_PC_AND_TOS(skip, -1);1607DO_BACKEDGE_CHECKS(skip, branch_pc);1608CONTINUE;1609}16101611CASE(_fcmpl):1612CASE(_fcmpg):1613{1614SET_STACK_INT(VMfloatCompare(STACK_FLOAT(-2),1615STACK_FLOAT(-1),1616(opcode == Bytecodes::_fcmpl ? -1 : 1)),1617-2);1618UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);1619}16201621CASE(_dcmpl):1622CASE(_dcmpg):1623{1624int r = VMdoubleCompare(STACK_DOUBLE(-3),1625STACK_DOUBLE(-1),1626(opcode == Bytecodes::_dcmpl ? 
-1 : 1));1627MORE_STACK(-4); // Pop1628SET_STACK_INT(r, 0);1629UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);1630}16311632CASE(_lcmp):1633{1634int r = VMlongCompare(STACK_LONG(-3), STACK_LONG(-1));1635MORE_STACK(-4);1636SET_STACK_INT(r, 0);1637UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);1638}163916401641/* Return from a method */16421643CASE(_areturn):1644CASE(_ireturn):1645CASE(_freturn):1646{1647// Allow a safepoint before returning to frame manager.1648SAFEPOINT;16491650goto handle_return;1651}16521653CASE(_lreturn):1654CASE(_dreturn):1655{1656// Allow a safepoint before returning to frame manager.1657SAFEPOINT;1658goto handle_return;1659}16601661CASE(_return_register_finalizer): {16621663oop rcvr = LOCALS_OBJECT(0);1664VERIFY_OOP(rcvr);1665if (rcvr->klass()->has_finalizer()) {1666CALL_VM(InterpreterRuntime::register_finalizer(THREAD, rcvr), handle_exception);1667}1668goto handle_return;1669}1670CASE(_return): {16711672// Allow a safepoint before returning to frame manager.1673SAFEPOINT;1674goto handle_return;1675}16761677/* Array access byte-codes */16781679/* Every array access byte-code starts out like this */1680// arrayOopDesc* arrObj = (arrayOopDesc*)STACK_OBJECT(arrayOff);1681#define ARRAY_INTRO(arrayOff) \1682arrayOop arrObj = (arrayOop)STACK_OBJECT(arrayOff); \1683jint index = STACK_INT(arrayOff + 1); \1684char message[jintAsStringSize]; \1685CHECK_NULL(arrObj); \1686if ((uint32_t)index >= (uint32_t)arrObj->length()) { \1687sprintf(message, "%d", index); \1688VM_JAVA_ERROR(vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), \1689message, note_rangeCheck_trap); \1690}16911692/* 32-bit loads. 
These handle conversion from < 32-bit types */1693#define ARRAY_LOADTO32(T, T2, format, stackRes, extra) \1694{ \1695ARRAY_INTRO(-2); \1696(void)extra; \1697SET_ ## stackRes(*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)), \1698-2); \1699UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \1700}17011702/* 64-bit loads */1703#define ARRAY_LOADTO64(T,T2, stackRes, extra) \1704{ \1705ARRAY_INTRO(-2); \1706SET_ ## stackRes(*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)), -1); \1707(void)extra; \1708UPDATE_PC_AND_CONTINUE(1); \1709}17101711CASE(_iaload):1712ARRAY_LOADTO32(T_INT, jint, "%d", STACK_INT, 0);1713CASE(_faload):1714ARRAY_LOADTO32(T_FLOAT, jfloat, "%f", STACK_FLOAT, 0);1715CASE(_aaload): {1716ARRAY_INTRO(-2);1717SET_STACK_OBJECT(((objArrayOop) arrObj)->obj_at(index), -2);1718UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);1719}1720CASE(_baload):1721ARRAY_LOADTO32(T_BYTE, jbyte, "%d", STACK_INT, 0);1722CASE(_caload):1723ARRAY_LOADTO32(T_CHAR, jchar, "%d", STACK_INT, 0);1724CASE(_saload):1725ARRAY_LOADTO32(T_SHORT, jshort, "%d", STACK_INT, 0);1726CASE(_laload):1727ARRAY_LOADTO64(T_LONG, jlong, STACK_LONG, 0);1728CASE(_daload):1729ARRAY_LOADTO64(T_DOUBLE, jdouble, STACK_DOUBLE, 0);17301731/* 32-bit stores. 
These handle conversion to < 32-bit types */1732#define ARRAY_STOREFROM32(T, T2, format, stackSrc, extra) \1733{ \1734ARRAY_INTRO(-3); \1735(void)extra; \1736*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)) = stackSrc( -1); \1737UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3); \1738}17391740/* 64-bit stores */1741#define ARRAY_STOREFROM64(T, T2, stackSrc, extra) \1742{ \1743ARRAY_INTRO(-4); \1744(void)extra; \1745*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)) = stackSrc( -1); \1746UPDATE_PC_AND_TOS_AND_CONTINUE(1, -4); \1747}17481749CASE(_iastore):1750ARRAY_STOREFROM32(T_INT, jint, "%d", STACK_INT, 0);1751CASE(_fastore):1752ARRAY_STOREFROM32(T_FLOAT, jfloat, "%f", STACK_FLOAT, 0);1753/*1754* This one looks different because of the assignability check1755*/1756CASE(_aastore): {1757oop rhsObject = STACK_OBJECT(-1);1758VERIFY_OOP(rhsObject);1759ARRAY_INTRO( -3);1760// arrObj, index are set1761if (rhsObject != NULL) {1762/* Check assignability of rhsObject into arrObj */1763Klass* rhsKlass = rhsObject->klass(); // EBX (subclass)1764Klass* elemKlass = ObjArrayKlass::cast(arrObj->klass())->element_klass(); // superklass EAX1765//1766// Check for compatibilty. 
This check must not GC!!1767// Seems way more expensive now that we must dispatch1768//1769if (rhsKlass != elemKlass && !rhsKlass->is_subtype_of(elemKlass)) { // ebx->is...1770// Decrement counter if subtype check failed.1771BI_PROFILE_SUBTYPECHECK_FAILED(rhsKlass);1772VM_JAVA_ERROR(vmSymbols::java_lang_ArrayStoreException(), "", note_arrayCheck_trap);1773}1774// Profile checkcast with null_seen and receiver.1775BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/false, rhsKlass);1776} else {1777// Profile checkcast with null_seen and receiver.1778BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/true, NULL);1779}1780((objArrayOop) arrObj)->obj_at_put(index, rhsObject);1781UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3);1782}1783CASE(_bastore): {1784ARRAY_INTRO(-3);1785int item = STACK_INT(-1);1786// if it is a T_BOOLEAN array, mask the stored value to 0/11787if (arrObj->klass() == Universe::boolArrayKlassObj()) {1788item &= 1;1789} else {1790assert(arrObj->klass() == Universe::byteArrayKlassObj(),1791"should be byte array otherwise");1792}1793((typeArrayOop)arrObj)->byte_at_put(index, item);1794UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3);1795}1796CASE(_castore):1797ARRAY_STOREFROM32(T_CHAR, jchar, "%d", STACK_INT, 0);1798CASE(_sastore):1799ARRAY_STOREFROM32(T_SHORT, jshort, "%d", STACK_INT, 0);1800CASE(_lastore):1801ARRAY_STOREFROM64(T_LONG, jlong, STACK_LONG, 0);1802CASE(_dastore):1803ARRAY_STOREFROM64(T_DOUBLE, jdouble, STACK_DOUBLE, 0);18041805CASE(_arraylength):1806{1807arrayOop ary = (arrayOop) STACK_OBJECT(-1);1808CHECK_NULL(ary);1809SET_STACK_INT(ary->length(), -1);1810UPDATE_PC_AND_CONTINUE(1);1811}18121813/* monitorenter and monitorexit for locking/unlocking an object */18141815CASE(_monitorenter): {1816oop lockee = STACK_OBJECT(-1);1817// derefing's lockee ought to provoke implicit null check1818CHECK_NULL(lockee);1819// find a free monitor or one already allocated for this object1820// if we find a matching object then we need a new monitor1821// since this is recursive 
enter1822BasicObjectLock* limit = istate->monitor_base();1823BasicObjectLock* most_recent = (BasicObjectLock*) istate->stack_base();1824BasicObjectLock* entry = NULL;1825while (most_recent != limit ) {1826if (most_recent->obj() == NULL) entry = most_recent;1827else if (most_recent->obj() == lockee) break;1828most_recent++;1829}1830if (entry != NULL) {1831entry->set_obj(lockee);1832int success = false;1833uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;18341835markOop mark = lockee->mark();1836intptr_t hash = (intptr_t) markOopDesc::no_hash;1837// implies UseBiasedLocking1838if (mark->has_bias_pattern()) {1839uintptr_t thread_ident;1840uintptr_t anticipated_bias_locking_value;1841thread_ident = (uintptr_t)istate->thread();1842anticipated_bias_locking_value =1843(((uintptr_t)lockee->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &1844~((uintptr_t) markOopDesc::age_mask_in_place);18451846if (anticipated_bias_locking_value == 0) {1847// already biased towards this thread, nothing to do1848if (PrintBiasedLockingStatistics) {1849(* BiasedLocking::biased_lock_entry_count_addr())++;1850}1851success = true;1852}1853else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {1854// try revoke bias1855markOop header = lockee->klass()->prototype_header();1856if (hash != markOopDesc::no_hash) {1857header = header->copy_set_hash(hash);1858}1859if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), mark) == mark) {1860if (PrintBiasedLockingStatistics)1861(*BiasedLocking::revoked_lock_entry_count_addr())++;1862}1863}1864else if ((anticipated_bias_locking_value & epoch_mask_in_place) !=0) {1865// try rebias1866markOop new_header = (markOop) ( (intptr_t) lockee->klass()->prototype_header() | thread_ident);1867if (hash != markOopDesc::no_hash) {1868new_header = new_header->copy_set_hash(hash);1869}1870if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), mark) == mark) {1871if 
(PrintBiasedLockingStatistics)1872(* BiasedLocking::rebiased_lock_entry_count_addr())++;1873}1874else {1875CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);1876}1877success = true;1878}1879else {1880// try to bias towards thread in case object is anonymously biased1881markOop header = (markOop) ((uintptr_t) mark & ((uintptr_t)markOopDesc::biased_lock_mask_in_place |1882(uintptr_t)markOopDesc::age_mask_in_place |1883epoch_mask_in_place));1884if (hash != markOopDesc::no_hash) {1885header = header->copy_set_hash(hash);1886}1887markOop new_header = (markOop) ((uintptr_t) header | thread_ident);1888// debugging hint1889DEBUG_ONLY(entry->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)1890if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), header) == header) {1891if (PrintBiasedLockingStatistics)1892(* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;1893}1894else {1895CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);1896}1897success = true;1898}1899}19001901// traditional lightweight locking1902if (!success) {1903markOop displaced = lockee->mark()->set_unlocked();1904entry->lock()->set_displaced_header(displaced);1905bool call_vm = UseHeavyMonitors;1906if (call_vm || Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {1907// Is it simple recursive case?1908if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {1909entry->lock()->set_displaced_header(NULL);1910} else {1911CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);1912}1913}1914}1915UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);1916} else {1917istate->set_msg(more_monitors);1918UPDATE_PC_AND_RETURN(0); // Re-execute1919}1920}19211922CASE(_monitorexit): {1923oop lockee = STACK_OBJECT(-1);1924CHECK_NULL(lockee);1925// derefing's lockee ought to provoke implicit null check1926// find our monitor slot1927BasicObjectLock* limit = 
istate->monitor_base();1928BasicObjectLock* most_recent = (BasicObjectLock*) istate->stack_base();1929while (most_recent != limit ) {1930if ((most_recent)->obj() == lockee) {1931BasicLock* lock = most_recent->lock();1932markOop header = lock->displaced_header();1933most_recent->set_obj(NULL);1934if (!lockee->mark()->has_bias_pattern()) {1935bool call_vm = UseHeavyMonitors;1936// If it isn't recursive we either must swap old header or call the runtime1937if (header != NULL || call_vm) {1938if (call_vm || Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {1939// restore object for the slow case1940most_recent->set_obj(lockee);1941CALL_VM(InterpreterRuntime::monitorexit(THREAD, most_recent), handle_exception);1942}1943}1944}1945UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);1946}1947most_recent++;1948}1949// Need to throw illegal monitor state exception1950CALL_VM(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD), handle_exception);1951ShouldNotReachHere();1952}19531954/* All of the non-quick opcodes. */19551956/* -Set clobbersCpIndex true if the quickened opcode clobbers the1957* constant pool index in the instruction.1958*/1959CASE(_getfield):1960CASE(_getstatic):1961{1962u2 index;1963ConstantPoolCacheEntry* cache;1964index = Bytes::get_native_u2(pc+1);19651966// QQQ Need to make this as inlined as possible. 
Probably need to1967// split all the bytecode cases out so c++ compiler has a chance1968// for constant prop to fold everything possible away.19691970cache = cp->entry_at(index);1971if (!cache->is_resolved((Bytecodes::Code)opcode)) {1972CALL_VM(InterpreterRuntime::resolve_get_put(THREAD, (Bytecodes::Code)opcode),1973handle_exception);1974cache = cp->entry_at(index);1975}19761977#ifdef VM_JVMTI1978if (_jvmti_interp_events) {1979int *count_addr;1980oop obj;1981// Check to see if a field modification watch has been set1982// before we take the time to call into the VM.1983count_addr = (int *)JvmtiExport::get_field_access_count_addr();1984if ( *count_addr > 0 ) {1985if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {1986obj = (oop)NULL;1987} else {1988obj = (oop) STACK_OBJECT(-1);1989VERIFY_OOP(obj);1990}1991CALL_VM(InterpreterRuntime::post_field_access(THREAD,1992obj,1993cache),1994handle_exception);1995}1996}1997#endif /* VM_JVMTI */19981999oop obj;2000if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {2001Klass* k = cache->f1_as_klass();2002obj = k->java_mirror();2003MORE_STACK(1); // Assume single slot push2004} else {2005obj = (oop) STACK_OBJECT(-1);2006CHECK_NULL(obj);2007}20082009//2010// Now store the result on the stack2011//2012TosState tos_type = cache->flag_state();2013int field_offset = cache->f2_as_index();2014if (cache->is_volatile()) {2015if (support_IRIW_for_not_multiple_copy_atomic_cpu) {2016OrderAccess::fence();2017}2018if (tos_type == atos) {2019VERIFY_OOP(obj->obj_field_acquire(field_offset));2020SET_STACK_OBJECT(obj->obj_field_acquire(field_offset), -1);2021} else if (tos_type == itos) {2022SET_STACK_INT(obj->int_field_acquire(field_offset), -1);2023} else if (tos_type == ltos) {2024SET_STACK_LONG(obj->long_field_acquire(field_offset), 0);2025MORE_STACK(1);2026} else if (tos_type == btos || tos_type == ztos) {2027SET_STACK_INT(obj->byte_field_acquire(field_offset), -1);2028} else if (tos_type == ctos) 
{2029SET_STACK_INT(obj->char_field_acquire(field_offset), -1);2030} else if (tos_type == stos) {2031SET_STACK_INT(obj->short_field_acquire(field_offset), -1);2032} else if (tos_type == ftos) {2033SET_STACK_FLOAT(obj->float_field_acquire(field_offset), -1);2034} else {2035SET_STACK_DOUBLE(obj->double_field_acquire(field_offset), 0);2036MORE_STACK(1);2037}2038} else {2039if (tos_type == atos) {2040VERIFY_OOP(obj->obj_field(field_offset));2041SET_STACK_OBJECT(obj->obj_field(field_offset), -1);2042} else if (tos_type == itos) {2043SET_STACK_INT(obj->int_field(field_offset), -1);2044} else if (tos_type == ltos) {2045SET_STACK_LONG(obj->long_field(field_offset), 0);2046MORE_STACK(1);2047} else if (tos_type == btos || tos_type == ztos) {2048SET_STACK_INT(obj->byte_field(field_offset), -1);2049} else if (tos_type == ctos) {2050SET_STACK_INT(obj->char_field(field_offset), -1);2051} else if (tos_type == stos) {2052SET_STACK_INT(obj->short_field(field_offset), -1);2053} else if (tos_type == ftos) {2054SET_STACK_FLOAT(obj->float_field(field_offset), -1);2055} else {2056SET_STACK_DOUBLE(obj->double_field(field_offset), 0);2057MORE_STACK(1);2058}2059}20602061UPDATE_PC_AND_CONTINUE(3);2062}20632064CASE(_putfield):2065CASE(_putstatic):2066{2067u2 index = Bytes::get_native_u2(pc+1);2068ConstantPoolCacheEntry* cache = cp->entry_at(index);2069if (!cache->is_resolved((Bytecodes::Code)opcode)) {2070CALL_VM(InterpreterRuntime::resolve_get_put(THREAD, (Bytecodes::Code)opcode),2071handle_exception);2072cache = cp->entry_at(index);2073}20742075#ifdef VM_JVMTI2076if (_jvmti_interp_events) {2077int *count_addr;2078oop obj;2079// Check to see if a field modification watch has been set2080// before we take the time to call into the VM.2081count_addr = (int *)JvmtiExport::get_field_modification_count_addr();2082if ( *count_addr > 0 ) {2083if ((Bytecodes::Code)opcode == Bytecodes::_putstatic) {2084obj = (oop)NULL;2085}2086else {2087if (cache->is_long() || cache->is_double()) {2088obj = (oop) 
STACK_OBJECT(-3);2089} else {2090obj = (oop) STACK_OBJECT(-2);2091}2092VERIFY_OOP(obj);2093}20942095CALL_VM(InterpreterRuntime::post_field_modification(THREAD,2096obj,2097cache,2098(jvalue *)STACK_SLOT(-1)),2099handle_exception);2100}2101}2102#endif /* VM_JVMTI */21032104// QQQ Need to make this as inlined as possible. Probably need to split all the bytecode cases2105// out so c++ compiler has a chance for constant prop to fold everything possible away.21062107oop obj;2108int count;2109TosState tos_type = cache->flag_state();21102111count = -1;2112if (tos_type == ltos || tos_type == dtos) {2113--count;2114}2115if ((Bytecodes::Code)opcode == Bytecodes::_putstatic) {2116Klass* k = cache->f1_as_klass();2117obj = k->java_mirror();2118} else {2119--count;2120obj = (oop) STACK_OBJECT(count);2121CHECK_NULL(obj);2122}21232124//2125// Now store the result2126//2127int field_offset = cache->f2_as_index();2128if (cache->is_volatile()) {2129if (tos_type == itos) {2130obj->release_int_field_put(field_offset, STACK_INT(-1));2131} else if (tos_type == atos) {2132VERIFY_OOP(STACK_OBJECT(-1));2133obj->release_obj_field_put(field_offset, STACK_OBJECT(-1));2134} else if (tos_type == btos) {2135obj->release_byte_field_put(field_offset, STACK_INT(-1));2136} else if (tos_type == ztos) {2137int bool_field = STACK_INT(-1); // only store LSB2138obj->release_byte_field_put(field_offset, (bool_field & 1));2139} else if (tos_type == ltos) {2140obj->release_long_field_put(field_offset, STACK_LONG(-1));2141} else if (tos_type == ctos) {2142obj->release_char_field_put(field_offset, STACK_INT(-1));2143} else if (tos_type == stos) {2144obj->release_short_field_put(field_offset, STACK_INT(-1));2145} else if (tos_type == ftos) {2146obj->release_float_field_put(field_offset, STACK_FLOAT(-1));2147} else {2148obj->release_double_field_put(field_offset, STACK_DOUBLE(-1));2149}2150OrderAccess::storeload();2151} else {2152if (tos_type == itos) {2153obj->int_field_put(field_offset, STACK_INT(-1));2154} 
else if (tos_type == atos) {2155VERIFY_OOP(STACK_OBJECT(-1));2156obj->obj_field_put(field_offset, STACK_OBJECT(-1));2157} else if (tos_type == btos) {2158obj->byte_field_put(field_offset, STACK_INT(-1));2159} else if (tos_type == ztos) {2160int bool_field = STACK_INT(-1); // only store LSB2161obj->byte_field_put(field_offset, (bool_field & 1));2162} else if (tos_type == ltos) {2163obj->long_field_put(field_offset, STACK_LONG(-1));2164} else if (tos_type == ctos) {2165obj->char_field_put(field_offset, STACK_INT(-1));2166} else if (tos_type == stos) {2167obj->short_field_put(field_offset, STACK_INT(-1));2168} else if (tos_type == ftos) {2169obj->float_field_put(field_offset, STACK_FLOAT(-1));2170} else {2171obj->double_field_put(field_offset, STACK_DOUBLE(-1));2172}2173}21742175UPDATE_PC_AND_TOS_AND_CONTINUE(3, count);2176}21772178CASE(_new): {2179u2 index = Bytes::get_Java_u2(pc+1);2180ConstantPool* constants = istate->method()->constants();2181if (!constants->tag_at(index).is_unresolved_klass()) {2182// Make sure klass is initialized and doesn't have a finalizer2183Klass* entry = constants->slot_at(index).get_klass();2184assert(entry->is_klass(), "Should be resolved klass");2185Klass* k_entry = (Klass*) entry;2186assert(k_entry->oop_is_instance(), "Should be InstanceKlass");2187InstanceKlass* ik = (InstanceKlass*) k_entry;2188if ( ik->is_initialized() && ik->can_be_fastpath_allocated() ) {2189size_t obj_size = ik->size_helper();2190oop result = NULL;2191// If the TLAB isn't pre-zeroed then we'll have to do it2192bool need_zero = !ZeroTLAB;2193if (UseTLAB) {2194result = (oop) THREAD->tlab().allocate(obj_size);2195}2196// Disable non-TLAB-based fast-path, because profiling requires that all2197// allocations go through InterpreterRuntime::_new() if THREAD->tlab().allocate2198// returns NULL.2199#ifndef CC_INTERP_PROFILE2200if (result == NULL) {2201need_zero = true;2202// Try allocate in shared eden2203retry:2204HeapWord* compare_to = 
*Universe::heap()->top_addr();2205HeapWord* new_top = compare_to + obj_size;2206if (new_top <= *Universe::heap()->end_addr()) {2207if (Atomic::cmpxchg_ptr(new_top, Universe::heap()->top_addr(), compare_to) != compare_to) {2208goto retry;2209}2210result = (oop) compare_to;2211}2212}2213#endif2214if (result != NULL) {2215// Initialize object (if nonzero size and need) and then the header2216if (need_zero ) {2217HeapWord* to_zero = (HeapWord*) result + sizeof(oopDesc) / oopSize;2218obj_size -= sizeof(oopDesc) / oopSize;2219if (obj_size > 0 ) {2220memset(to_zero, 0, obj_size * HeapWordSize);2221}2222}2223if (UseBiasedLocking) {2224result->set_mark(ik->prototype_header());2225} else {2226result->set_mark(markOopDesc::prototype());2227}2228result->set_klass_gap(0);2229result->set_klass(k_entry);2230// Must prevent reordering of stores for object initialization2231// with stores that publish the new object.2232OrderAccess::storestore();2233SET_STACK_OBJECT(result, 0);2234UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);2235}2236}2237}2238// Slow case allocation2239CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),2240handle_exception);2241// Must prevent reordering of stores for object initialization2242// with stores that publish the new object.2243OrderAccess::storestore();2244SET_STACK_OBJECT(THREAD->vm_result(), 0);2245THREAD->set_vm_result(NULL);2246UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);2247}2248CASE(_anewarray): {2249u2 index = Bytes::get_Java_u2(pc+1);2250jint size = STACK_INT(-1);2251CALL_VM(InterpreterRuntime::anewarray(THREAD, METHOD->constants(), index, size),2252handle_exception);2253// Must prevent reordering of stores for object initialization2254// with stores that publish the new object.2255OrderAccess::storestore();2256SET_STACK_OBJECT(THREAD->vm_result(), -1);2257THREAD->set_vm_result(NULL);2258UPDATE_PC_AND_CONTINUE(3);2259}2260CASE(_multianewarray): {2261jint dims = *(pc+3);2262jint size = STACK_INT(-1);2263// stack grows down, dimensions are 
up!2264jint *dimarray =2265(jint*)&topOfStack[dims * Interpreter::stackElementWords+2266Interpreter::stackElementWords-1];2267//adjust pointer to start of stack element2268CALL_VM(InterpreterRuntime::multianewarray(THREAD, dimarray),2269handle_exception);2270// Must prevent reordering of stores for object initialization2271// with stores that publish the new object.2272OrderAccess::storestore();2273SET_STACK_OBJECT(THREAD->vm_result(), -dims);2274THREAD->set_vm_result(NULL);2275UPDATE_PC_AND_TOS_AND_CONTINUE(4, -(dims-1));2276}2277CASE(_checkcast):2278if (STACK_OBJECT(-1) != NULL) {2279VERIFY_OOP(STACK_OBJECT(-1));2280u2 index = Bytes::get_Java_u2(pc+1);2281// Constant pool may have actual klass or unresolved klass. If it is2282// unresolved we must resolve it.2283if (METHOD->constants()->tag_at(index).is_unresolved_klass()) {2284CALL_VM(InterpreterRuntime::quicken_io_cc(THREAD), handle_exception);2285}2286Klass* klassOf = (Klass*) METHOD->constants()->slot_at(index).get_klass();2287Klass* objKlass = STACK_OBJECT(-1)->klass(); // ebx2288//2289// Check for compatibilty. 
This check must not GC!!2290// Seems way more expensive now that we must dispatch.2291//2292if (objKlass != klassOf && !objKlass->is_subtype_of(klassOf)) {2293// Decrement counter at checkcast.2294BI_PROFILE_SUBTYPECHECK_FAILED(objKlass);2295ResourceMark rm(THREAD);2296const char* objName = objKlass->external_name();2297const char* klassName = klassOf->external_name();2298char* message = SharedRuntime::generate_class_cast_message(2299objName, klassName);2300VM_JAVA_ERROR(vmSymbols::java_lang_ClassCastException(), message, note_classCheck_trap);2301}2302// Profile checkcast with null_seen and receiver.2303BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/false, objKlass);2304} else {2305// Profile checkcast with null_seen and receiver.2306BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/true, NULL);2307}2308UPDATE_PC_AND_CONTINUE(3);23092310CASE(_instanceof):2311if (STACK_OBJECT(-1) == NULL) {2312SET_STACK_INT(0, -1);2313// Profile instanceof with null_seen and receiver.2314BI_PROFILE_UPDATE_INSTANCEOF(/*null_seen=*/true, NULL);2315} else {2316VERIFY_OOP(STACK_OBJECT(-1));2317u2 index = Bytes::get_Java_u2(pc+1);2318// Constant pool may have actual klass or unresolved klass. If it is2319// unresolved we must resolve it.2320if (METHOD->constants()->tag_at(index).is_unresolved_klass()) {2321CALL_VM(InterpreterRuntime::quicken_io_cc(THREAD), handle_exception);2322}2323Klass* klassOf = (Klass*) METHOD->constants()->slot_at(index).get_klass();2324Klass* objKlass = STACK_OBJECT(-1)->klass();2325//2326// Check for compatibilty. 
This check must not GC!!2327// Seems way more expensive now that we must dispatch.2328//2329if ( objKlass == klassOf || objKlass->is_subtype_of(klassOf)) {2330SET_STACK_INT(1, -1);2331} else {2332SET_STACK_INT(0, -1);2333// Decrement counter at checkcast.2334BI_PROFILE_SUBTYPECHECK_FAILED(objKlass);2335}2336// Profile instanceof with null_seen and receiver.2337BI_PROFILE_UPDATE_INSTANCEOF(/*null_seen=*/false, objKlass);2338}2339UPDATE_PC_AND_CONTINUE(3);23402341CASE(_ldc_w):2342CASE(_ldc):2343{2344u2 index;2345bool wide = false;2346int incr = 2; // frequent case2347if (opcode == Bytecodes::_ldc) {2348index = pc[1];2349} else {2350index = Bytes::get_Java_u2(pc+1);2351incr = 3;2352wide = true;2353}23542355ConstantPool* constants = METHOD->constants();2356switch (constants->tag_at(index).value()) {2357case JVM_CONSTANT_Integer:2358SET_STACK_INT(constants->int_at(index), 0);2359break;23602361case JVM_CONSTANT_Float:2362SET_STACK_FLOAT(constants->float_at(index), 0);2363break;23642365case JVM_CONSTANT_String:2366{2367oop result = constants->resolved_references()->obj_at(index);2368if (result == NULL) {2369CALL_VM(InterpreterRuntime::resolve_ldc(THREAD, (Bytecodes::Code) opcode), handle_exception);2370SET_STACK_OBJECT(THREAD->vm_result(), 0);2371THREAD->set_vm_result(NULL);2372} else {2373VERIFY_OOP(result);2374SET_STACK_OBJECT(result, 0);2375}2376break;2377}23782379case JVM_CONSTANT_Class:2380VERIFY_OOP(constants->resolved_klass_at(index)->java_mirror());2381SET_STACK_OBJECT(constants->resolved_klass_at(index)->java_mirror(), 0);2382break;23832384case JVM_CONSTANT_UnresolvedClass:2385case JVM_CONSTANT_UnresolvedClassInError:2386CALL_VM(InterpreterRuntime::ldc(THREAD, wide), handle_exception);2387SET_STACK_OBJECT(THREAD->vm_result(), 0);2388THREAD->set_vm_result(NULL);2389break;23902391default: ShouldNotReachHere();2392}2393UPDATE_PC_AND_TOS_AND_CONTINUE(incr, 1);2394}23952396CASE(_ldc2_w):2397{2398u2 index = Bytes::get_Java_u2(pc+1);23992400ConstantPool* constants = 
METHOD->constants();2401switch (constants->tag_at(index).value()) {24022403case JVM_CONSTANT_Long:2404SET_STACK_LONG(constants->long_at(index), 1);2405break;24062407case JVM_CONSTANT_Double:2408SET_STACK_DOUBLE(constants->double_at(index), 1);2409break;2410default: ShouldNotReachHere();2411}2412UPDATE_PC_AND_TOS_AND_CONTINUE(3, 2);2413}24142415CASE(_fast_aldc_w):2416CASE(_fast_aldc): {2417u2 index;2418int incr;2419if (opcode == Bytecodes::_fast_aldc) {2420index = pc[1];2421incr = 2;2422} else {2423index = Bytes::get_native_u2(pc+1);2424incr = 3;2425}24262427// We are resolved if the f1 field contains a non-null object (CallSite, etc.)2428// This kind of CP cache entry does not need to match the flags byte, because2429// there is a 1-1 relation between bytecode type and CP entry type.2430ConstantPool* constants = METHOD->constants();2431oop result = constants->resolved_references()->obj_at(index);2432if (result == NULL) {2433CALL_VM(InterpreterRuntime::resolve_ldc(THREAD, (Bytecodes::Code) opcode),2434handle_exception);2435result = THREAD->vm_result();2436}24372438VERIFY_OOP(result);2439SET_STACK_OBJECT(result, 0);2440UPDATE_PC_AND_TOS_AND_CONTINUE(incr, 1);2441}24422443CASE(_invokedynamic): {24442445if (!EnableInvokeDynamic) {2446// We should not encounter this bytecode if !EnableInvokeDynamic.2447// The verifier will stop it. However, if we get past the verifier,2448// this will stop the thread in a reasonable way, without crashing the JVM.2449CALL_VM(InterpreterRuntime::throw_IncompatibleClassChangeError(THREAD),2450handle_exception);2451ShouldNotReachHere();2452}24532454u4 index = Bytes::get_native_u4(pc+1);2455ConstantPoolCacheEntry* cache = cp->constant_pool()->invokedynamic_cp_cache_entry_at(index);24562457// We are resolved if the resolved_references field contains a non-null object (CallSite, etc.)2458// This kind of CP cache entry does not need to match the flags byte, because2459// there is a 1-1 relation between bytecode type and CP entry type.2460if (! 
cache->is_resolved((Bytecodes::Code) opcode)) {2461CALL_VM(InterpreterRuntime::resolve_invokedynamic(THREAD),2462handle_exception);2463cache = cp->constant_pool()->invokedynamic_cp_cache_entry_at(index);2464}24652466Method* method = cache->f1_as_method();2467if (VerifyOops) method->verify();24682469if (cache->has_appendix()) {2470ConstantPool* constants = METHOD->constants();2471SET_STACK_OBJECT(cache->appendix_if_resolved(constants), 0);2472MORE_STACK(1);2473}24742475istate->set_msg(call_method);2476istate->set_callee(method);2477istate->set_callee_entry_point(method->from_interpreted_entry());2478istate->set_bcp_advance(5);24792480// Invokedynamic has got a call counter, just like an invokestatic -> increment!2481BI_PROFILE_UPDATE_CALL();24822483UPDATE_PC_AND_RETURN(0); // I'll be back...2484}24852486CASE(_invokehandle): {24872488if (!EnableInvokeDynamic) {2489ShouldNotReachHere();2490}24912492u2 index = Bytes::get_native_u2(pc+1);2493ConstantPoolCacheEntry* cache = cp->entry_at(index);24942495if (! cache->is_resolved((Bytecodes::Code) opcode)) {2496CALL_VM(InterpreterRuntime::resolve_invokehandle(THREAD),2497handle_exception);2498cache = cp->entry_at(index);2499}25002501Method* method = cache->f1_as_method();2502if (VerifyOops) method->verify();25032504if (cache->has_appendix()) {2505ConstantPool* constants = METHOD->constants();2506SET_STACK_OBJECT(cache->appendix_if_resolved(constants), 0);2507MORE_STACK(1);2508}25092510istate->set_msg(call_method);2511istate->set_callee(method);2512istate->set_callee_entry_point(method->from_interpreted_entry());2513istate->set_bcp_advance(3);25142515// Invokehandle has got a call counter, just like a final call -> increment!2516BI_PROFILE_UPDATE_FINALCALL();25172518UPDATE_PC_AND_RETURN(0); // I'll be back...2519}25202521CASE(_invokeinterface): {2522u2 index = Bytes::get_native_u2(pc+1);25232524// QQQ Need to make this as inlined as possible. 
Probably need to split all the bytecode cases2525// out so c++ compiler has a chance for constant prop to fold everything possible away.25262527ConstantPoolCacheEntry* cache = cp->entry_at(index);2528if (!cache->is_resolved((Bytecodes::Code)opcode)) {2529CALL_VM(InterpreterRuntime::resolve_invoke(THREAD, (Bytecodes::Code)opcode),2530handle_exception);2531cache = cp->entry_at(index);2532}25332534istate->set_msg(call_method);25352536// Special case of invokeinterface called for virtual method of2537// java.lang.Object. See cpCacheOop.cpp for details.2538// This code isn't produced by javac, but could be produced by2539// another compliant java compiler.2540if (cache->is_forced_virtual()) {2541Method* callee;2542CHECK_NULL(STACK_OBJECT(-(cache->parameter_size())));2543if (cache->is_vfinal()) {2544callee = cache->f2_as_vfinal_method();2545// Profile 'special case of invokeinterface' final call.2546BI_PROFILE_UPDATE_FINALCALL();2547} else {2548// Get receiver.2549int parms = cache->parameter_size();2550// Same comments as invokevirtual apply here.2551oop rcvr = STACK_OBJECT(-parms);2552VERIFY_OOP(rcvr);2553InstanceKlass* rcvrKlass = (InstanceKlass*)rcvr->klass();2554callee = (Method*) rcvrKlass->start_of_vtable()[ cache->f2_as_index()];2555// Profile 'special case of invokeinterface' virtual call.2556BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());2557}2558istate->set_callee(callee);2559istate->set_callee_entry_point(callee->from_interpreted_entry());2560#ifdef VM_JVMTI2561if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {2562istate->set_callee_entry_point(callee->interpreter_entry());2563}2564#endif /* VM_JVMTI */2565istate->set_bcp_advance(5);2566UPDATE_PC_AND_RETURN(0); // I'll be back...2567}25682569// this could definitely be cleaned up QQQ2570Method* callee;2571Method *interface_method = cache->f2_as_interface_method();2572InstanceKlass* iclass = interface_method->method_holder();25732574// get receiver2575int parms = 
cache->parameter_size();2576oop rcvr = STACK_OBJECT(-parms);2577CHECK_NULL(rcvr);2578InstanceKlass* int2 = (InstanceKlass*) rcvr->klass();25792580// Receiver subtype check against resolved interface klass (REFC).2581{2582Klass* refc = cache->f1_as_klass();2583itableOffsetEntry* scan;2584for (scan = (itableOffsetEntry*) int2->start_of_itable();2585scan->interface_klass() != NULL;2586scan++) {2587if (scan->interface_klass() == refc) {2588break;2589}2590}2591// Check that the entry is non-null. A null entry means2592// that the receiver class doesn't implement the2593// interface, and wasn't the same as when the caller was2594// compiled.2595if (scan->interface_klass() == NULL) {2596VM_JAVA_ERROR(vmSymbols::java_lang_IncompatibleClassChangeError(), "", note_no_trap);2597}2598}25992600itableOffsetEntry* ki = (itableOffsetEntry*) int2->start_of_itable();2601int i;2602for ( i = 0 ; i < int2->itable_length() ; i++, ki++ ) {2603if (ki->interface_klass() == iclass) break;2604}2605// If the interface isn't found, this class doesn't implement this2606// interface. 
The link resolver checks this but only for the first2607// time this interface is called.2608if (i == int2->itable_length()) {2609VM_JAVA_ERROR(vmSymbols::java_lang_IncompatibleClassChangeError(), "", note_no_trap);2610}2611int mindex = interface_method->itable_index();26122613itableMethodEntry* im = ki->first_method_entry(rcvr->klass());2614callee = im[mindex].method();2615if (callee == NULL) {2616VM_JAVA_ERROR(vmSymbols::java_lang_AbstractMethodError(), "", note_no_trap);2617}26182619// Profile virtual call.2620BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());26212622istate->set_callee(callee);2623istate->set_callee_entry_point(callee->from_interpreted_entry());2624#ifdef VM_JVMTI2625if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {2626istate->set_callee_entry_point(callee->interpreter_entry());2627}2628#endif /* VM_JVMTI */2629istate->set_bcp_advance(5);2630UPDATE_PC_AND_RETURN(0); // I'll be back...2631}26322633CASE(_invokevirtual):2634CASE(_invokespecial):2635CASE(_invokestatic): {2636u2 index = Bytes::get_native_u2(pc+1);26372638ConstantPoolCacheEntry* cache = cp->entry_at(index);2639// QQQ Need to make this as inlined as possible. 
Probably need to split all the bytecode cases2640// out so c++ compiler has a chance for constant prop to fold everything possible away.26412642if (!cache->is_resolved((Bytecodes::Code)opcode)) {2643CALL_VM(InterpreterRuntime::resolve_invoke(THREAD, (Bytecodes::Code)opcode),2644handle_exception);2645cache = cp->entry_at(index);2646}26472648istate->set_msg(call_method);2649{2650Method* callee;2651if ((Bytecodes::Code)opcode == Bytecodes::_invokevirtual) {2652CHECK_NULL(STACK_OBJECT(-(cache->parameter_size())));2653if (cache->is_vfinal()) {2654callee = cache->f2_as_vfinal_method();2655// Profile final call.2656BI_PROFILE_UPDATE_FINALCALL();2657} else {2658// get receiver2659int parms = cache->parameter_size();2660// this works but needs a resourcemark and seems to create a vtable on every call:2661// Method* callee = rcvr->klass()->vtable()->method_at(cache->f2_as_index());2662//2663// this fails with an assert2664// InstanceKlass* rcvrKlass = InstanceKlass::cast(STACK_OBJECT(-parms)->klass());2665// but this works2666oop rcvr = STACK_OBJECT(-parms);2667VERIFY_OOP(rcvr);2668InstanceKlass* rcvrKlass = (InstanceKlass*)rcvr->klass();2669/*2670Executing this code in java.lang.String:2671public String(char value[]) {2672this.count = value.length;2673this.value = (char[])value.clone();2674}26752676a find on rcvr->klass() reports:2677{type array char}{type array class}2678- klass: {other class}26792680but using InstanceKlass::cast(STACK_OBJECT(-parms)->klass()) causes in assertion failure2681because rcvr->klass()->oop_is_instance() == 02682However it seems to have a vtable in the right location. 
Huh?26832684*/2685callee = (Method*) rcvrKlass->start_of_vtable()[ cache->f2_as_index()];2686// Profile virtual call.2687BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());2688}2689} else {2690if ((Bytecodes::Code)opcode == Bytecodes::_invokespecial) {2691CHECK_NULL(STACK_OBJECT(-(cache->parameter_size())));2692}2693callee = cache->f1_as_method();26942695// Profile call.2696BI_PROFILE_UPDATE_CALL();2697}26982699istate->set_callee(callee);2700istate->set_callee_entry_point(callee->from_interpreted_entry());2701#ifdef VM_JVMTI2702if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {2703istate->set_callee_entry_point(callee->interpreter_entry());2704}2705#endif /* VM_JVMTI */2706istate->set_bcp_advance(3);2707UPDATE_PC_AND_RETURN(0); // I'll be back...2708}2709}27102711/* Allocate memory for a new java object. */27122713CASE(_newarray): {2714BasicType atype = (BasicType) *(pc+1);2715jint size = STACK_INT(-1);2716CALL_VM(InterpreterRuntime::newarray(THREAD, atype, size),2717handle_exception);2718// Must prevent reordering of stores for object initialization2719// with stores that publish the new object.2720OrderAccess::storestore();2721SET_STACK_OBJECT(THREAD->vm_result(), -1);2722THREAD->set_vm_result(NULL);27232724UPDATE_PC_AND_CONTINUE(2);2725}27262727/* Throw an exception. */27282729CASE(_athrow): {2730oop except_oop = STACK_OBJECT(-1);2731CHECK_NULL(except_oop);2732// set pending_exception so we use common code2733THREAD->set_pending_exception(except_oop, NULL, 0);2734goto handle_exception;2735}27362737/* goto and jsr. 
They are exactly the same except jsr pushes2738* the address of the next instruction first.2739*/27402741CASE(_jsr): {2742/* push bytecode index on stack */2743SET_STACK_ADDR(((address)pc - (intptr_t)(istate->method()->code_base()) + 3), 0);2744MORE_STACK(1);2745/* FALL THROUGH */2746}27472748CASE(_goto):2749{2750int16_t offset = (int16_t)Bytes::get_Java_u2(pc + 1);2751// Profile jump.2752BI_PROFILE_UPDATE_JUMP();2753address branch_pc = pc;2754UPDATE_PC(offset);2755DO_BACKEDGE_CHECKS(offset, branch_pc);2756CONTINUE;2757}27582759CASE(_jsr_w): {2760/* push return address on the stack */2761SET_STACK_ADDR(((address)pc - (intptr_t)(istate->method()->code_base()) + 5), 0);2762MORE_STACK(1);2763/* FALL THROUGH */2764}27652766CASE(_goto_w):2767{2768int32_t offset = Bytes::get_Java_u4(pc + 1);2769// Profile jump.2770BI_PROFILE_UPDATE_JUMP();2771address branch_pc = pc;2772UPDATE_PC(offset);2773DO_BACKEDGE_CHECKS(offset, branch_pc);2774CONTINUE;2775}27762777/* return from a jsr or jsr_w */27782779CASE(_ret): {2780// Profile ret.2781BI_PROFILE_UPDATE_RET(/*bci=*/((int)(intptr_t)(LOCALS_ADDR(pc[1]))));2782// Now, update the pc.2783pc = istate->method()->code_base() + (intptr_t)(LOCALS_ADDR(pc[1]));2784UPDATE_PC_AND_CONTINUE(0);2785}27862787/* debugger breakpoint */27882789CASE(_breakpoint): {2790Bytecodes::Code original_bytecode;2791DECACHE_STATE();2792SET_LAST_JAVA_FRAME();2793original_bytecode = InterpreterRuntime::get_original_bytecode_at(THREAD,2794METHOD, pc);2795RESET_LAST_JAVA_FRAME();2796CACHE_STATE();2797if (THREAD->has_pending_exception()) goto handle_exception;2798CALL_VM(InterpreterRuntime::_breakpoint(THREAD, METHOD, pc),2799handle_exception);28002801opcode = (jubyte)original_bytecode;2802goto opcode_switch;2803}28042805DEFAULT:2806fatal(err_msg("Unimplemented opcode %d = %s", opcode,2807Bytecodes::name((Bytecodes::Code)opcode)));2808goto finish;28092810} /* switch(opc) */281128122813#ifdef USELABELS2814check_for_exception:2815#endif2816{2817if 
(!THREAD->has_pending_exception()) {2818CONTINUE;2819}2820/* We will be gcsafe soon, so flush our state. */2821DECACHE_PC();2822goto handle_exception;2823}2824do_continue: ;28252826} /* while (1) interpreter loop */282728282829// An exception exists in the thread state see whether this activation can handle it2830handle_exception: {28312832HandleMarkCleaner __hmc(THREAD);2833Handle except_oop(THREAD, THREAD->pending_exception());2834// Prevent any subsequent HandleMarkCleaner in the VM2835// from freeing the except_oop handle.2836HandleMark __hm(THREAD);28372838THREAD->clear_pending_exception();2839assert(except_oop(), "No exception to process");2840intptr_t continuation_bci;2841// expression stack is emptied2842topOfStack = istate->stack_base() - Interpreter::stackElementWords;2843CALL_VM(continuation_bci = (intptr_t)InterpreterRuntime::exception_handler_for_exception(THREAD, except_oop()),2844handle_exception);28452846except_oop = THREAD->vm_result();2847THREAD->set_vm_result(NULL);2848if (continuation_bci >= 0) {2849// Place exception on top of stack2850SET_STACK_OBJECT(except_oop(), 0);2851MORE_STACK(1);2852pc = METHOD->code_base() + continuation_bci;2853if (TraceExceptions) {2854ttyLocker ttyl;2855ResourceMark rm;2856tty->print_cr("Exception <%s> (" INTPTR_FORMAT ")", except_oop->print_value_string(), p2i(except_oop()));2857tty->print_cr(" thrown in interpreter method <%s>", METHOD->print_value_string());2858tty->print_cr(" at bci %d, continuing at %d for thread " INTPTR_FORMAT,2859(int)(istate->bcp() - METHOD->code_base()),2860(int)continuation_bci, p2i(THREAD));2861}2862// for AbortVMOnException flag2863NOT_PRODUCT(Exceptions::debug_check_abort(except_oop));28642865// Update profiling data.2866BI_PROFILE_ALIGN_TO_CURRENT_BCI();2867goto run;2868}2869if (TraceExceptions) {2870ttyLocker ttyl;2871ResourceMark rm;2872tty->print_cr("Exception <%s> (" INTPTR_FORMAT ")", except_oop->print_value_string(), p2i(except_oop()));2873tty->print_cr(" thrown in interpreter 
method <%s>", METHOD->print_value_string());2874tty->print_cr(" at bci %d, unwinding for thread " INTPTR_FORMAT,2875(int)(istate->bcp() - METHOD->code_base()),2876p2i(THREAD));2877}2878// for AbortVMOnException flag2879NOT_PRODUCT(Exceptions::debug_check_abort(except_oop));2880// No handler in this activation, unwind and try again2881THREAD->set_pending_exception(except_oop(), NULL, 0);2882goto handle_return;2883} // handle_exception:28842885// Return from an interpreter invocation with the result of the interpretation2886// on the top of the Java Stack (or a pending exception)28872888handle_Pop_Frame: {28892890// We don't really do anything special here except we must be aware2891// that we can get here without ever locking the method (if sync).2892// Also we skip the notification of the exit.28932894istate->set_msg(popping_frame);2895// Clear pending so while the pop is in process2896// we don't start another one if a call_vm is done.2897THREAD->clr_pop_frame_pending();2898// Let interpreter (only) see the we're in the process of popping a frame2899THREAD->set_pop_frame_in_process();29002901goto handle_return;29022903} // handle_Pop_Frame29042905// ForceEarlyReturn ends a method, and returns to the caller with a return value2906// given by the invoker of the early return.2907handle_Early_Return: {29082909istate->set_msg(early_return);29102911// Clear expression stack.2912topOfStack = istate->stack_base() - Interpreter::stackElementWords;29132914JvmtiThreadState *ts = THREAD->jvmti_thread_state();29152916// Push the value to be returned.2917switch (istate->method()->result_type()) {2918case T_BOOLEAN:2919case T_SHORT:2920case T_BYTE:2921case T_CHAR:2922case T_INT:2923SET_STACK_INT(ts->earlyret_value().i, 0);2924MORE_STACK(1);2925break;2926case T_LONG:2927SET_STACK_LONG(ts->earlyret_value().j, 1);2928MORE_STACK(2);2929break;2930case T_FLOAT:2931SET_STACK_FLOAT(ts->earlyret_value().f, 0);2932MORE_STACK(1);2933break;2934case 
T_DOUBLE:
        // A double occupies two expression-stack slots.
        SET_STACK_DOUBLE(ts->earlyret_value().d, 1);
        MORE_STACK(2);
        break;
      case T_ARRAY:
      case T_OBJECT:
        SET_STACK_OBJECT(ts->earlyret_oop(), 0);
        MORE_STACK(1);
        break;
    }

    // The early-return value has been pushed; clear the JVMTI thread-state
    // request so it cannot be delivered a second time.
    ts->clr_earlyret_value();
    ts->set_earlyret_oop(NULL);
    ts->clr_earlyret_pending();

    // Fall through to handle_return.

  } // handle_Early_Return

  handle_return: {
    // A storestore barrier is required to order initialization of
    // final fields with publishing the reference to the object that
    // holds the field. Without the barrier the value of final fields
    // can be observed to change.
    OrderAccess::storestore();

    DECACHE_STATE();

    // Errors are suppressed while unwinding for a JVMTI frame pop or an
    // early return; a method-exit event is suppressed when an exception is
    // already pending or we are popping a frame.
    bool suppress_error = istate->msg() == popping_frame || istate->msg() == early_return;
    bool suppress_exit_event = THREAD->has_pending_exception() || istate->msg() == popping_frame;
    Handle original_exception(THREAD, THREAD->pending_exception());
    Handle illegal_state_oop(THREAD, NULL);

    // We'd like a HandleMark here to prevent any subsequent HandleMarkCleaner
    // in any following VM entries from freeing our live handles, but illegal_state_oop
    // isn't really allocated yet and so doesn't become live until later and
    // in unpredictable places. Instead we must protect the places where we enter the
    // VM. It would be much simpler (and safer) if we could allocate a real handle with
    // a NULL oop in it and then overwrite the oop later as needed. This
    // unfortunately isn't possible.

    THREAD->clear_pending_exception();

    //
    // As far as we are concerned we have returned. If we have a pending exception
    // that will be returned as this invocation's result. However if we get any
    // exception(s) while checking monitor state one of those IllegalMonitorStateExceptions
    // will be our final result (i.e. monitor exception trumps a pending exception).
    //

    // If we never locked the method (or really passed the point where we would have),
    // there is no need to unlock it (or look for other monitors), since that
    // could not have happened.

    if (THREAD->do_not_unlock()) {

      // Never locked, reset the flag now because obviously any caller must
      // have passed their point of locking for us to have gotten here.

      THREAD->clr_do_not_unlock();
    } else {
      // At this point we consider that we have returned. We now check that the
      // locks were properly block structured. If we find that they were not
      // used properly we will return with an illegal monitor exception.
      // The exception is checked by the caller not the callee since this
      // checking is considered to be part of the invocation and therefore
      // in the callers scope (JVM spec 8.13).
      //
      // Another weird thing to watch for is if the method was locked
      // recursively and then not exited properly. This means we must
      // examine all the entries in reverse time(and stack) order and
      // unlock as we find them. If we find the method monitor before
      // we are at the initial entry then we should throw an exception.
      // It is not clear the template based interpreter does this
      // correctly

      BasicObjectLock* base = istate->monitor_base();
      BasicObjectLock* end = (BasicObjectLock*) istate->stack_base();
      bool method_unlock_needed = METHOD->is_synchronized();
      // We know the initial monitor was used for the method don't check that
      // slot in the loop
      if (method_unlock_needed) base--;

      // Check all the monitors to see they are unlocked. Install exception if found to be locked.
      while (end < base) {
        oop lockee = end->obj();
        if (lockee != NULL) {
          BasicLock* lock = end->lock();
          markOop header = lock->displaced_header();
          end->set_obj(NULL);

          // Biased monitors need no header restore here; for the others either
          // swap the displaced header back (fast path) or call the runtime.
          if (!lockee->mark()->has_bias_pattern()) {
            // If it isn't recursive we either must swap old header or call the runtime
            if (header != NULL) {
              if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
                // restore object for the slow case
                end->set_obj(lockee);
                {
                  // Prevent any HandleMarkCleaner from freeing our live handles
                  HandleMark __hm(THREAD);
                  CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, end));
                }
              }
            }
          }
          // One error is plenty
          if (illegal_state_oop() == NULL && !suppress_error) {
            {
              // Prevent any HandleMarkCleaner from freeing our live handles
              HandleMark __hm(THREAD);
              CALL_VM_NOCHECK(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD));
            }
            assert(THREAD->has_pending_exception(), "Lost our exception!");
            illegal_state_oop = THREAD->pending_exception();
            THREAD->clear_pending_exception();
          }
        }
        end++;
      }
      // Unlock the method if needed
      if (method_unlock_needed) {
        if (base->obj() == NULL) {
          // The method is already unlocked this is not good.
          if (illegal_state_oop() == NULL && !suppress_error) {
            {
              // Prevent any HandleMarkCleaner from freeing our live handles
              HandleMark __hm(THREAD);
              CALL_VM_NOCHECK(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD));
            }
            assert(THREAD->has_pending_exception(), "Lost our exception!");
            illegal_state_oop = THREAD->pending_exception();
            THREAD->clear_pending_exception();
          }
        } else {
          //
          // The initial monitor is always used for the method
          // However if that slot is no longer the oop for the method it was unlocked
          // and reused by something that wasn't unlocked!
          //
          // deopt can come in with rcvr dead because c2 knows
          // its value is preserved in the monitor. So we can't use locals[0] at all
          // and must use first monitor slot.
          //
          oop rcvr = base->obj();
          if (rcvr == NULL) {
            if (!suppress_error) {
              VM_JAVA_ERROR_NO_JUMP(vmSymbols::java_lang_NullPointerException(), "", note_nullCheck_trap);
              illegal_state_oop = THREAD->pending_exception();
              THREAD->clear_pending_exception();
            }
          } else if (UseHeavyMonitors) {
            {
              // Prevent any HandleMarkCleaner from freeing our live handles.
              HandleMark __hm(THREAD);
              CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
            }
            if (THREAD->has_pending_exception()) {
              if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
              THREAD->clear_pending_exception();
            }
          } else {
            BasicLock* lock = base->lock();
            markOop header = lock->displaced_header();
            base->set_obj(NULL);

            if (!rcvr->mark()->has_bias_pattern()) {
              // NOTE(review): this second set_obj(NULL) is redundant -- the
              // slot was already cleared just above; kept for fidelity.
              base->set_obj(NULL);
              // If it isn't recursive we either must swap old header or call the runtime
              if (header != NULL) {
                if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), lock) != lock) {
                  // restore object for the slow case
                  base->set_obj(rcvr);
                  {
                    // Prevent any HandleMarkCleaner from freeing our live handles
                    HandleMark __hm(THREAD);
                    CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
                  }
                  if (THREAD->has_pending_exception()) {
                    if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
                    THREAD->clear_pending_exception();
                  }
                }
              }
            }
          }
        }
      }
    }
    // Clear the do_not_unlock flag now.
    THREAD->clr_do_not_unlock();

    //
    // Notify jvmti/jvmdi
    //
    // NOTE: we do not notify a method_exit if we have a pending exception,
    // including an exception we generate for unlocking checks. In the former
    // case, JVMDI has already been notified by our call for the exception handler
    // and in both cases as far as JVMDI is concerned we have already returned.
    // If we notify it again JVMDI will be all confused about how many frames
    // are still on the stack (4340444).
    //
    // NOTE Further! It turns out the JVMTI spec in fact expects to see
    // method_exit events whenever we leave an activation unless it was done
    // for popframe. This is nothing like jvmdi. However we are passing the
    // tests at the moment (apparently because they are jvmdi based) so rather
    // than change this code and possibly fail tests we will leave it alone
    // (with this note) in anticipation of changing the vm and the tests
    // simultaneously.
    //
    suppress_exit_event = suppress_exit_event || illegal_state_oop() != NULL;

#ifdef VM_JVMTI
    if (_jvmti_interp_events) {
      // Whenever JVMTI puts a thread in interp_only_mode, method
      // entry/exit events are sent for that thread to track stack depth.
      if ( !suppress_exit_event && THREAD->is_interp_only_mode() ) {
        {
          // Prevent any HandleMarkCleaner from freeing our live handles
          HandleMark __hm(THREAD);
          CALL_VM_NOCHECK(InterpreterRuntime::post_method_exit(THREAD));
        }
      }
    }
#endif /* VM_JVMTI */

    //
    // See if we are returning any exception
    // A pending exception that was pending prior to a possible popping frame
    // overrides the popping frame.
    //
    assert(!suppress_error || (suppress_error && illegal_state_oop() == NULL), "Error was not suppressed");
    if (illegal_state_oop() != NULL || original_exception() != NULL) {
      // Inform the frame manager we have no result.
      istate->set_msg(throwing_exception);
      if (illegal_state_oop() != NULL)
        THREAD->set_pending_exception(illegal_state_oop(), NULL, 0);
      else
        THREAD->set_pending_exception(original_exception(), NULL, 0);
      UPDATE_PC_AND_RETURN(0);
    }

    if (istate->msg() == popping_frame) {
      // Make it simpler on the assembly code and set the message for the frame pop.
      // returns
      if (istate->prev() == NULL) {
        // We must be returning to a deoptimized frame (because popframe only happens between
        // two interpreted frames). We need to save the current arguments in C heap so that
        // the deoptimized frame when it restarts can copy the arguments to its expression
        // stack and re-execute the call. We also have to notify deoptimization that this
        // has occurred and to pick the preserved args copy them to the deoptimized frame's
        // java expression stack. Yuck.
        //
        THREAD->popframe_preserve_args(in_ByteSize(METHOD->size_of_parameters() * wordSize),
                                       LOCALS_SLOT(METHOD->size_of_parameters() - 1));
        THREAD->set_popframe_condition_bit(JavaThread::popframe_force_deopt_reexecution_bit);
      }
    } else {
      istate->set_msg(return_from_method);
    }

    // Normal return
    // Advance the pc and return to frame manager
    UPDATE_PC_AND_RETURN(1);
  } /* handle_return: */

  // This is really a fatal error return

  finish:
  DECACHE_TOS();
  DECACHE_PC();

  return;
}

/*
 * All the code following this point is only produced once and is not present
 * in the JVMTI version of the interpreter
 */

#ifndef VM_JVMTI

// This constructor should only be used to construct the object to signal
// interpreter initialization. All other instances should be created by
// the frame manager.
BytecodeInterpreter::BytecodeInterpreter(messages msg) {
  if (msg != initialize) ShouldNotReachHere();
  _msg = msg;
  // A self-link identifies a valid (non-garbage) interpreter state record.
  _self_link = this;
  _prev_link = NULL;
}

// Inline static functions for Java Stack and Local manipulation

// The implementations are platform dependent.
// We have to worry about alignment
// issues on some machines which can change on the same platform depending on
// whether it is an LP64 machine also.

// All accessors below address expression-stack and local-variable slots
// through Interpreter::expr_index_at() / local_index_at(), which encapsulate
// the platform's slot ordering. 64-bit values (long/double) occupy two
// adjacent slots and are accessed through the VMJavaVal64 overlay.

// Raw slot at 'offset' from the top of stack, viewed as an address
// (also used for returnAddress values).
address BytecodeInterpreter::stack_slot(intptr_t *tos, int offset) {
  return (address) tos[Interpreter::expr_index_at(-offset)];
}

// jint stored in a stack slot (bit reinterpretation of the slot's start).
jint BytecodeInterpreter::stack_int(intptr_t *tos, int offset) {
  return *((jint*) &tos[Interpreter::expr_index_at(-offset)]);
}

jfloat BytecodeInterpreter::stack_float(intptr_t *tos, int offset) {
  return *((jfloat *) &tos[Interpreter::expr_index_at(-offset)]);
}

oop BytecodeInterpreter::stack_object(intptr_t *tos, int offset) {
  return cast_to_oop(tos [Interpreter::expr_index_at(-offset)]);
}

jdouble BytecodeInterpreter::stack_double(intptr_t *tos, int offset) {
  return ((VMJavaVal64*) &tos[Interpreter::expr_index_at(-offset)])->d;
}

jlong BytecodeInterpreter::stack_long(intptr_t *tos, int offset) {
  return ((VMJavaVal64 *) &tos[Interpreter::expr_index_at(-offset)])->l;
}

// only used for value types
void BytecodeInterpreter::set_stack_slot(intptr_t *tos, address value,
                                         int offset) {
  *((address *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}

void BytecodeInterpreter::set_stack_int(intptr_t *tos, int value,
                                        int offset) {
  *((jint *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}

void BytecodeInterpreter::set_stack_float(intptr_t *tos, jfloat value,
                                          int offset) {
  *((jfloat *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}

void BytecodeInterpreter::set_stack_object(intptr_t *tos, oop value,
                                           int offset) {
  *((oop *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}

// needs to be platform dep for the 32 bit platforms.
void BytecodeInterpreter::set_stack_double(intptr_t *tos, jdouble value,
                                           int offset) {
  ((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->d = value;
}

void BytecodeInterpreter::set_stack_double_from_addr(intptr_t *tos,
                                                     address addr, int offset) {
  (((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->d =
                        ((VMJavaVal64*)addr)->d);
}

void BytecodeInterpreter::set_stack_long(intptr_t *tos, jlong value,
                                         int offset) {
  // 0xdeedbeeb is apparently a poison value stamped into the unused half of
  // the two-slot long so that stray reads of it are recognizable in a dump.
  ((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset+1)])->l = 0xdeedbeeb;
  ((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->l = value;
}

void BytecodeInterpreter::set_stack_long_from_addr(intptr_t *tos,
                                                   address addr, int offset) {
  ((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset+1)])->l = 0xdeedbeeb;
  ((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->l =
                        ((VMJavaVal64*)addr)->l;
}

// Locals

address BytecodeInterpreter::locals_slot(intptr_t* locals, int offset) {
  return (address)locals[Interpreter::local_index_at(-offset)];
}
jint BytecodeInterpreter::locals_int(intptr_t* locals, int offset) {
  return (jint)locals[Interpreter::local_index_at(-offset)];
}
jfloat BytecodeInterpreter::locals_float(intptr_t* locals, int offset) {
  // NOTE(review): this is a numeric conversion of the raw slot value, unlike
  // stack_float which reinterprets the slot's bits -- confirm whether any
  // caller actually reads a float-typed local through this accessor.
  return (jfloat)locals[Interpreter::local_index_at(-offset)];
}
oop BytecodeInterpreter::locals_object(intptr_t* locals, int offset) {
  return cast_to_oop(locals[Interpreter::local_index_at(-offset)]);
}
// 64-bit locals occupy slots 'offset' and 'offset+1'; the value is addressed
// via the higher-numbered local index.
jdouble BytecodeInterpreter::locals_double(intptr_t* locals, int offset) {
  return ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->d;
}
jlong BytecodeInterpreter::locals_long(intptr_t* locals, int offset) {
  return ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->l;
}

// Returns the address of locals value.
address BytecodeInterpreter::locals_long_at(intptr_t* locals, int offset) {
  return ((address)&locals[Interpreter::local_index_at(-(offset+1))]);
}
address BytecodeInterpreter::locals_double_at(intptr_t* locals, int offset) {
  return ((address)&locals[Interpreter::local_index_at(-(offset+1))]);
}

// Used for local value or returnAddress
void BytecodeInterpreter::set_locals_slot(intptr_t *locals,
                                   address value, int offset) {
  *((address*)&locals[Interpreter::local_index_at(-offset)]) = value;
}
void BytecodeInterpreter::set_locals_int(intptr_t *locals,
                                   jint value, int offset) {
  *((jint *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
void BytecodeInterpreter::set_locals_float(intptr_t *locals,
                                   jfloat value, int offset) {
  *((jfloat *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
void BytecodeInterpreter::set_locals_object(intptr_t *locals,
                                   oop value, int offset) {
  *((oop *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
void BytecodeInterpreter::set_locals_double(intptr_t *locals,
                                   jdouble value, int offset) {
  ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->d = value;
}
void BytecodeInterpreter::set_locals_long(intptr_t *locals,
                                   jlong value, int offset) {
  ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->l = value;
}
void BytecodeInterpreter::set_locals_double_from_addr(intptr_t *locals,
                                   address addr, int offset) {
  ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->d = ((VMJavaVal64*)addr)->d;
}
void BytecodeInterpreter::set_locals_long_from_addr(intptr_t *locals,
                                   address addr, int offset) {
  ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->l = ((VMJavaVal64*)addr)->l;
}

// Copies one raw stack slot into a local slot (astore and friends).
void BytecodeInterpreter::astore(intptr_t* tos,    int stack_offset,
                          intptr_t* locals, int locals_offset) {
  intptr_t value = tos[Interpreter::expr_index_at(-stack_offset)];
  locals[Interpreter::local_index_at(-locals_offset)] = value;
}


// Raw slot copy within the expression stack; building block for the dup/swap
// bytecodes below. Offsets are relative to the top of stack.
void BytecodeInterpreter::copy_stack_slot(intptr_t *tos, int from_offset,
                                   int to_offset) {
  tos[Interpreter::expr_index_at(-to_offset)] =
                      (intptr_t)tos[Interpreter::expr_index_at(-from_offset)];
}

void BytecodeInterpreter::dup(intptr_t *tos) {
  copy_stack_slot(tos, -1, 0);
}

void BytecodeInterpreter::dup2(intptr_t *tos) {
  copy_stack_slot(tos, -2, 0);
  copy_stack_slot(tos, -1, 1);
}

void BytecodeInterpreter::dup_x1(intptr_t *tos) {
  /* insert top word two down */
  copy_stack_slot(tos, -1, 0);
  copy_stack_slot(tos, -2, -1);
  copy_stack_slot(tos, 0, -2);
}

void BytecodeInterpreter::dup_x2(intptr_t *tos) {
  /* insert top word three down */
  copy_stack_slot(tos, -1, 0);
  copy_stack_slot(tos, -2, -1);
  copy_stack_slot(tos, -3, -2);
  copy_stack_slot(tos, 0, -3);
}
void BytecodeInterpreter::dup2_x1(intptr_t *tos) {
  /* insert top 2 slots three down */
  copy_stack_slot(tos, -1, 1);
  copy_stack_slot(tos, -2, 0);
  copy_stack_slot(tos, -3, -1);
  copy_stack_slot(tos, 1, -2);
  copy_stack_slot(tos, 0, -3);
}
void BytecodeInterpreter::dup2_x2(intptr_t *tos) {
  /* insert top 2 slots four down */
  copy_stack_slot(tos, -1, 1);
  copy_stack_slot(tos, -2, 0);
  copy_stack_slot(tos, -3, -1);
  copy_stack_slot(tos, -4, -2);
  copy_stack_slot(tos, 1, -3);
  copy_stack_slot(tos, 0, -4);
}


void BytecodeInterpreter::swap(intptr_t *tos) {
  // swap top two elements
  intptr_t val = tos[Interpreter::expr_index_at(1)];
  // Copy -2 entry to -1
  copy_stack_slot(tos, -2, -1);
  // Store saved -1 entry into -2
  tos[Interpreter::expr_index_at(2)] = val;
}
// --------------------------------------------------------------------------------
// Non-product code
#ifndef PRODUCT

// Translates an interpreter/frame-manager message enum to a printable name
// (debugging aid only).
const char* BytecodeInterpreter::C_msg(BytecodeInterpreter::messages msg) {
  switch (msg) {
     case BytecodeInterpreter::no_request:  return("no_request");
     case BytecodeInterpreter::initialize:  return("initialize");
     // status message to C++ interpreter
     case BytecodeInterpreter::method_entry:  return("method_entry");
     case BytecodeInterpreter::method_resume:  return("method_resume");
     case BytecodeInterpreter::got_monitors:  return("got_monitors");
     case BytecodeInterpreter::rethrow_exception:  return("rethrow_exception");
     // requests to frame manager from C++ interpreter
     case BytecodeInterpreter::call_method:  return("call_method");
     case BytecodeInterpreter::return_from_method:  return("return_from_method");
     case BytecodeInterpreter::more_monitors:  return("more_monitors");
     case BytecodeInterpreter::throwing_exception:  return("throwing_exception");
     case BytecodeInterpreter::popping_frame:  return("popping_frame");
     case BytecodeInterpreter::do_osr:  return("do_osr");
     // deopt
     case BytecodeInterpreter::deopt_resume:  return("deopt_resume");
     case BytecodeInterpreter::deopt_resume2:  return("deopt_resume2");
     default: return("BAD MSG");
  }
}

// Dumps the entire interpreter state record to tty (debugging aid only).
void
BytecodeInterpreter::print() {
  tty->print_cr("thread: " INTPTR_FORMAT, (uintptr_t) this->_thread);
  tty->print_cr("bcp: " INTPTR_FORMAT, (uintptr_t) this->_bcp);
  tty->print_cr("locals: " INTPTR_FORMAT, (uintptr_t) this->_locals);
  tty->print_cr("constants: " INTPTR_FORMAT, (uintptr_t) this->_constants);
  {
    // name_and_sig_as_C_string() allocates in a ResourceArea.
    ResourceMark rm;
    char *method_name = _method->name_and_sig_as_C_string();
    tty->print_cr("method: " INTPTR_FORMAT "[ %s ]", (uintptr_t) this->_method, method_name);
  }
  tty->print_cr("mdx: " INTPTR_FORMAT, (uintptr_t) this->_mdx);
  tty->print_cr("stack: " INTPTR_FORMAT, (uintptr_t) this->_stack);
  tty->print_cr("msg: %s", C_msg(this->_msg));
  tty->print_cr("result_to_call._callee: " INTPTR_FORMAT, (uintptr_t) this->_result._to_call._callee);
  tty->print_cr("result_to_call._callee_entry_point: " INTPTR_FORMAT, (uintptr_t) this->_result._to_call._callee_entry_point);
  tty->print_cr("result_to_call._bcp_advance: %d ", this->_result._to_call._bcp_advance);
  tty->print_cr("osr._osr_buf: " INTPTR_FORMAT, (uintptr_t) this->_result._osr._osr_buf);
  tty->print_cr("osr._osr_entry: " INTPTR_FORMAT, (uintptr_t) this->_result._osr._osr_entry);
  tty->print_cr("prev_link: " INTPTR_FORMAT, (uintptr_t) this->_prev_link);
  tty->print_cr("native_mirror: " INTPTR_FORMAT, (uintptr_t) p2i(this->_oop_temp));
  tty->print_cr("stack_base: " INTPTR_FORMAT, (uintptr_t) this->_stack_base);
  tty->print_cr("stack_limit: " INTPTR_FORMAT, (uintptr_t) this->_stack_limit);
  tty->print_cr("monitor_base: " INTPTR_FORMAT, (uintptr_t) this->_monitor_base);
#ifdef SPARC
  tty->print_cr("last_Java_pc: " INTPTR_FORMAT, (uintptr_t) this->_last_Java_pc);
  tty->print_cr("frame_bottom: " INTPTR_FORMAT, (uintptr_t) this->_frame_bottom);
  tty->print_cr("&native_fresult: " INTPTR_FORMAT, (uintptr_t) &this->_native_fresult);
  tty->print_cr("native_lresult: " INTPTR_FORMAT, (uintptr_t) this->_native_lresult);
#endif
#if !defined(ZERO)
  tty->print_cr("last_Java_fp: " INTPTR_FORMAT, (uintptr_t) this->_last_Java_fp);
#endif // !ZERO
  tty->print_cr("self_link: " INTPTR_FORMAT, (uintptr_t) this->_self_link);
}

// Debugger convenience: call 'PI(<address>)' from gdb to print the
// BytecodeInterpreter state record at that address.
extern "C" {
  void PI(uintptr_t arg) {
    ((BytecodeInterpreter*)arg)->print();
  }
}
#endif // PRODUCT

#endif // JVMTI
#endif // CC_INTERP