Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/cpu/ppc/vm/interp_masm_ppc_64.cpp
32285 views
/*1* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.2* Copyright 2012, 2014 SAP AG. All rights reserved.3* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.4*5* This code is free software; you can redistribute it and/or modify it6* under the terms of the GNU General Public License version 2 only, as7* published by the Free Software Foundation.8*9* This code is distributed in the hope that it will be useful, but WITHOUT10* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or11* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License12* version 2 for more details (a copy is included in the LICENSE file that13* accompanied this code).14*15* You should have received a copy of the GNU General Public License version16* 2 along with this work; if not, write to the Free Software Foundation,17* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.18*19* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA20* or visit www.oracle.com if you need additional information or have any21* questions.22*23*/242526#include "precompiled.hpp"27#include "asm/macroAssembler.inline.hpp"28#include "interp_masm_ppc_64.hpp"29#include "interpreter/interpreterRuntime.hpp"30#include "prims/jvmtiThreadState.hpp"3132#ifdef PRODUCT33#define BLOCK_COMMENT(str) // nothing34#else35#define BLOCK_COMMENT(str) block_comment(str)36#endif3738void InterpreterMacroAssembler::null_check_throw(Register a, int offset, Register temp_reg) {39#ifdef CC_INTERP40address exception_entry = StubRoutines::throw_NullPointerException_at_call_entry();41#else42address exception_entry = Interpreter::throw_NullPointerException_entry();43#endif44MacroAssembler::null_check_throw(a, offset, temp_reg, exception_entry);45}4647void InterpreterMacroAssembler::branch_to_entry(address entry, Register Rscratch) {48assert(entry, "Entry must have been generated by now");49if (is_within_range_of_b(entry, pc())) {50b(entry);51} else {52load_const_optimized(Rscratch, entry, R0);53mtctr(Rscratch);54bctr();55}56}5758#ifndef CC_INTERP5960void InterpreterMacroAssembler::dispatch_next(TosState state, int bcp_incr) {61Register bytecode = R12_scratch2;62if (bcp_incr != 0) {63lbzu(bytecode, bcp_incr, R14_bcp);64} else {65lbz(bytecode, 0, R14_bcp);66}6768dispatch_Lbyte_code(state, bytecode, Interpreter::dispatch_table(state));69}7071void InterpreterMacroAssembler::dispatch_via(TosState state, address* table) {72// Load current bytecode.73Register bytecode = R12_scratch2;74lbz(bytecode, 0, R14_bcp);75dispatch_Lbyte_code(state, bytecode, table);76}7778// Dispatch code executed in the prolog of a bytecode which does not do it's79// own dispatch. The dispatch address is computed and placed in R24_dispatch_addr.80void InterpreterMacroAssembler::dispatch_prolog(TosState state, int bcp_incr) {81Register bytecode = R12_scratch2;82lbz(bytecode, bcp_incr, R14_bcp);8384load_dispatch_table(R24_dispatch_addr, Interpreter::dispatch_table(state));8586sldi(bytecode, bytecode, LogBytesPerWord);87ldx(R24_dispatch_addr, R24_dispatch_addr, bytecode);88}8990// Dispatch code executed in the epilog of a bytecode which does not do it's91// own dispatch. The dispatch address in R24_dispatch_addr is used for the92// dispatch.93void InterpreterMacroAssembler::dispatch_epilog(TosState state, int bcp_incr) {94mtctr(R24_dispatch_addr);95addi(R14_bcp, R14_bcp, bcp_incr);96bctr();97}9899void InterpreterMacroAssembler::check_and_handle_popframe(Register scratch_reg) {100assert(scratch_reg != R0, "can't use R0 as scratch_reg here");101if (JvmtiExport::can_pop_frame()) {102Label L;103104// Check the "pending popframe condition" flag in the current thread.105lwz(scratch_reg, in_bytes(JavaThread::popframe_condition_offset()), R16_thread);106107// Initiate popframe handling only if it is not already being108// processed. If the flag has the popframe_processing bit set, it109// means that this code is called *during* popframe handling - we110// don't want to reenter.111andi_(R0, scratch_reg, JavaThread::popframe_pending_bit);112beq(CCR0, L);113114andi_(R0, scratch_reg, JavaThread::popframe_processing_bit);115bne(CCR0, L);116117// Call the Interpreter::remove_activation_preserving_args_entry()118// func to get the address of the same-named entrypoint in the119// generated interpreter code.120#if defined(ABI_ELFv2)121call_c(CAST_FROM_FN_PTR(address,122Interpreter::remove_activation_preserving_args_entry),123relocInfo::none);124#else125call_c(CAST_FROM_FN_PTR(FunctionDescriptor*,126Interpreter::remove_activation_preserving_args_entry),127relocInfo::none);128#endif129130// Jump to Interpreter::_remove_activation_preserving_args_entry.131mtctr(R3_RET);132bctr();133134align(32, 12);135bind(L);136}137}138139void InterpreterMacroAssembler::check_and_handle_earlyret(Register scratch_reg) {140const Register Rthr_state_addr = scratch_reg;141if (JvmtiExport::can_force_early_return()) {142Label Lno_early_ret;143ld(Rthr_state_addr, in_bytes(JavaThread::jvmti_thread_state_offset()), R16_thread);144cmpdi(CCR0, Rthr_state_addr, 0);145beq(CCR0, Lno_early_ret);146147lwz(R0, in_bytes(JvmtiThreadState::earlyret_state_offset()), Rthr_state_addr);148cmpwi(CCR0, R0, JvmtiThreadState::earlyret_pending);149bne(CCR0, Lno_early_ret);150151// Jump to Interpreter::_earlyret_entry.152lwz(R3_ARG1, in_bytes(JvmtiThreadState::earlyret_tos_offset()), Rthr_state_addr);153call_VM_leaf(CAST_FROM_FN_PTR(address, Interpreter::remove_activation_early_entry));154mtlr(R3_RET);155blr();156157align(32, 12);158bind(Lno_early_ret);159}160}161162void InterpreterMacroAssembler::load_earlyret_value(TosState state, Register Rscratch1) {163const Register RjvmtiState = Rscratch1;164const Register Rscratch2 = R0;165166ld(RjvmtiState, in_bytes(JavaThread::jvmti_thread_state_offset()), R16_thread);167li(Rscratch2, 0);168169switch (state) {170case atos: ld(R17_tos, in_bytes(JvmtiThreadState::earlyret_oop_offset()), RjvmtiState);171std(Rscratch2, in_bytes(JvmtiThreadState::earlyret_oop_offset()), RjvmtiState);172break;173case ltos: ld(R17_tos, in_bytes(JvmtiThreadState::earlyret_value_offset()), RjvmtiState);174break;175case btos: // fall through176case ztos: // fall through177case ctos: // fall through178case stos: // fall through179case itos: lwz(R17_tos, in_bytes(JvmtiThreadState::earlyret_value_offset()), RjvmtiState);180break;181case ftos: lfs(F15_ftos, in_bytes(JvmtiThreadState::earlyret_value_offset()), RjvmtiState);182break;183case dtos: lfd(F15_ftos, in_bytes(JvmtiThreadState::earlyret_value_offset()), RjvmtiState);184break;185case vtos: break;186default : ShouldNotReachHere();187}188189// Clean up tos value in the jvmti thread state.190std(Rscratch2, in_bytes(JvmtiThreadState::earlyret_value_offset()), RjvmtiState);191// Set tos state field to illegal value.192li(Rscratch2, ilgl);193stw(Rscratch2, in_bytes(JvmtiThreadState::earlyret_tos_offset()), RjvmtiState);194}195196// Common code to dispatch and dispatch_only.197// Dispatch value in Lbyte_code and increment Lbcp.198199void InterpreterMacroAssembler::load_dispatch_table(Register dst, address* table) {200address table_base = (address)Interpreter::dispatch_table((TosState)0);201intptr_t table_offs = (intptr_t)table - (intptr_t)table_base;202if (is_simm16(table_offs)) {203addi(dst, R25_templateTableBase, (int)table_offs);204} else {205load_const_optimized(dst, table, R0);206}207}208209void InterpreterMacroAssembler::dispatch_Lbyte_code(TosState state, Register bytecode, address* table, bool verify) {210if (verify) {211unimplemented("dispatch_Lbyte_code: verify"); // See Sparc Implementation to implement this212}213214#ifdef FAST_DISPATCH215unimplemented("dispatch_Lbyte_code FAST_DISPATCH");216#else217assert_different_registers(bytecode, R11_scratch1);218219// Calc dispatch table address.220load_dispatch_table(R11_scratch1, table);221222sldi(R12_scratch2, bytecode, LogBytesPerWord);223ldx(R11_scratch1, R11_scratch1, R12_scratch2);224225// Jump off!226mtctr(R11_scratch1);227bctr();228#endif229}230231void InterpreterMacroAssembler::load_receiver(Register Rparam_count, Register Rrecv_dst) {232sldi(Rrecv_dst, Rparam_count, Interpreter::logStackElementSize);233ldx(Rrecv_dst, Rrecv_dst, R15_esp);234}235236// helpers for expression stack237238void InterpreterMacroAssembler::pop_i(Register r) {239lwzu(r, Interpreter::stackElementSize, R15_esp);240}241242void InterpreterMacroAssembler::pop_ptr(Register r) {243ldu(r, Interpreter::stackElementSize, R15_esp);244}245246void InterpreterMacroAssembler::pop_l(Register r) {247ld(r, Interpreter::stackElementSize, R15_esp);248addi(R15_esp, R15_esp, 2 * Interpreter::stackElementSize);249}250251void InterpreterMacroAssembler::pop_f(FloatRegister f) {252lfsu(f, Interpreter::stackElementSize, R15_esp);253}254255void InterpreterMacroAssembler::pop_d(FloatRegister f) {256lfd(f, Interpreter::stackElementSize, R15_esp);257addi(R15_esp, R15_esp, 2 * Interpreter::stackElementSize);258}259260void InterpreterMacroAssembler::push_i(Register r) {261stw(r, 0, R15_esp);262addi(R15_esp, R15_esp, - Interpreter::stackElementSize );263}264265void InterpreterMacroAssembler::push_ptr(Register r) {266std(r, 0, R15_esp);267addi(R15_esp, R15_esp, - Interpreter::stackElementSize );268}269270void InterpreterMacroAssembler::push_l(Register r) {271std(r, - Interpreter::stackElementSize, R15_esp);272addi(R15_esp, R15_esp, - 2 * Interpreter::stackElementSize );273}274275void InterpreterMacroAssembler::push_f(FloatRegister f) {276stfs(f, 0, R15_esp);277addi(R15_esp, R15_esp, - Interpreter::stackElementSize );278}279280void InterpreterMacroAssembler::push_d(FloatRegister f) {281stfd(f, - Interpreter::stackElementSize, R15_esp);282addi(R15_esp, R15_esp, - 2 * Interpreter::stackElementSize );283}284285void InterpreterMacroAssembler::push_2ptrs(Register first, Register second) {286std(first, 0, R15_esp);287std(second, -Interpreter::stackElementSize, R15_esp);288addi(R15_esp, R15_esp, - 2 * Interpreter::stackElementSize );289}290291void InterpreterMacroAssembler::push_l_pop_d(Register l, FloatRegister d) {292std(l, 0, R15_esp);293lfd(d, 0, R15_esp);294}295296void InterpreterMacroAssembler::push_d_pop_l(FloatRegister d, Register l) {297stfd(d, 0, R15_esp);298ld(l, 0, R15_esp);299}300301void InterpreterMacroAssembler::push(TosState state) {302switch (state) {303case atos: push_ptr(); break;304case btos:305case ztos:306case ctos:307case stos:308case itos: push_i(); break;309case ltos: push_l(); break;310case ftos: push_f(); break;311case dtos: push_d(); break;312case vtos: /* nothing to do */ break;313default : ShouldNotReachHere();314}315}316317void InterpreterMacroAssembler::pop(TosState state) {318switch (state) {319case atos: pop_ptr(); break;320case btos:321case ztos:322case ctos:323case stos:324case itos: pop_i(); break;325case ltos: pop_l(); break;326case ftos: pop_f(); break;327case dtos: pop_d(); break;328case vtos: /* nothing to do */ break;329default : ShouldNotReachHere();330}331verify_oop(R17_tos, state);332}333334void InterpreterMacroAssembler::empty_expression_stack() {335addi(R15_esp, R26_monitor, - Interpreter::stackElementSize);336}337338void InterpreterMacroAssembler::get_2_byte_integer_at_bcp(int bcp_offset,339Register Rdst,340signedOrNot is_signed) {341#if defined(VM_LITTLE_ENDIAN)342if (bcp_offset) {343load_const_optimized(Rdst, bcp_offset);344lhbrx(Rdst, R14_bcp, Rdst);345} else {346lhbrx(Rdst, R14_bcp);347}348if (is_signed == Signed) {349extsh(Rdst, Rdst);350}351#else352// Read Java big endian format.353if (is_signed == Signed) {354lha(Rdst, bcp_offset, R14_bcp);355} else {356lhz(Rdst, bcp_offset, R14_bcp);357}358#endif359}360361void InterpreterMacroAssembler::get_4_byte_integer_at_bcp(int bcp_offset,362Register Rdst,363signedOrNot is_signed) {364#if defined(VM_LITTLE_ENDIAN)365if (bcp_offset) {366load_const_optimized(Rdst, bcp_offset);367lwbrx(Rdst, R14_bcp, Rdst);368} else {369lwbrx(Rdst, R14_bcp);370}371if (is_signed == Signed) {372extsw(Rdst, Rdst);373}374#else375// Read Java big endian format.376if (bcp_offset & 3) { // Offset unaligned?377load_const_optimized(Rdst, bcp_offset);378if (is_signed == Signed) {379lwax(Rdst, R14_bcp, Rdst);380} else {381lwzx(Rdst, R14_bcp, Rdst);382}383} else {384if (is_signed == Signed) {385lwa(Rdst, bcp_offset, R14_bcp);386} else {387lwz(Rdst, bcp_offset, R14_bcp);388}389}390#endif391}392393394// Load the constant pool cache index from the bytecode stream.395//396// Kills / writes:397// - Rdst, Rscratch398void InterpreterMacroAssembler::get_cache_index_at_bcp(Register Rdst, int bcp_offset, size_t index_size) {399assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");400// Cache index is always in the native format, courtesy of Rewriter.401if (index_size == sizeof(u2)) {402lhz(Rdst, bcp_offset, R14_bcp);403} else if (index_size == sizeof(u4)) {404assert(EnableInvokeDynamic, "giant index used only for JSR 292");405if (bcp_offset & 3) {406load_const_optimized(Rdst, bcp_offset);407lwax(Rdst, R14_bcp, Rdst);408} else {409lwa(Rdst, bcp_offset, R14_bcp);410}411assert(ConstantPool::decode_invokedynamic_index(~123) == 123, "else change next line");412nand(Rdst, Rdst, Rdst); // convert to plain index413} else if (index_size == sizeof(u1)) {414lbz(Rdst, bcp_offset, R14_bcp);415} else {416ShouldNotReachHere();417}418// Rdst now contains cp cache index.419}420421void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache, int bcp_offset, size_t index_size) {422get_cache_index_at_bcp(cache, bcp_offset, index_size);423sldi(cache, cache, exact_log2(in_words(ConstantPoolCacheEntry::size()) * BytesPerWord));424add(cache, R27_constPoolCache, cache);425}426427// Load 4-byte signed or unsigned integer in Java format (that is, big-endian format)428// from (Rsrc)+offset.429void InterpreterMacroAssembler::get_u4(Register Rdst, Register Rsrc, int offset,430signedOrNot is_signed) {431#if defined(VM_LITTLE_ENDIAN)432if (offset) {433load_const_optimized(Rdst, offset);434lwbrx(Rdst, Rdst, Rsrc);435} else {436lwbrx(Rdst, Rsrc);437}438if (is_signed == Signed) {439extsw(Rdst, Rdst);440}441#else442if (is_signed == Signed) {443lwa(Rdst, offset, Rsrc);444} else {445lwz(Rdst, offset, Rsrc);446}447#endif448}449450// Load object from cpool->resolved_references(index).451void InterpreterMacroAssembler::load_resolved_reference_at_index(Register result, Register index) {452assert_different_registers(result, index);453get_constant_pool(result);454455// Convert from field index to resolved_references() index and from456// word index to byte offset. Since this is a java object, it can be compressed.457Register tmp = index; // reuse458sldi(tmp, index, LogBytesPerHeapOop);459// Load pointer for resolved_references[] objArray.460ld(result, ConstantPool::resolved_references_offset_in_bytes(), result);461// JNIHandles::resolve(result)462ld(result, 0, result);463#ifdef ASSERT464Label index_ok;465lwa(R0, arrayOopDesc::length_offset_in_bytes(), result);466sldi(R0, R0, LogBytesPerHeapOop);467cmpd(CCR0, tmp, R0);468blt(CCR0, index_ok);469stop("resolved reference index out of bounds", 0x09256);470bind(index_ok);471#endif472// Add in the index.473add(result, tmp, result);474load_heap_oop(result, arrayOopDesc::base_offset_in_bytes(T_OBJECT), result);475}476477// Generate a subtype check: branch to ok_is_subtype if sub_klass is478// a subtype of super_klass. Blows registers Rsub_klass, tmp1, tmp2.479void InterpreterMacroAssembler::gen_subtype_check(Register Rsub_klass, Register Rsuper_klass, Register Rtmp1,480Register Rtmp2, Register Rtmp3, Label &ok_is_subtype) {481// Profile the not-null value's klass.482profile_typecheck(Rsub_klass, Rtmp1, Rtmp2);483check_klass_subtype(Rsub_klass, Rsuper_klass, Rtmp1, Rtmp2, ok_is_subtype);484profile_typecheck_failed(Rtmp1, Rtmp2);485}486487void InterpreterMacroAssembler::generate_stack_overflow_check_with_compare_and_throw(Register Rmem_frame_size, Register Rscratch1) {488Label done;489sub(Rmem_frame_size, R1_SP, Rmem_frame_size);490ld(Rscratch1, thread_(stack_overflow_limit));491cmpld(CCR0/*is_stack_overflow*/, Rmem_frame_size, Rscratch1);492bgt(CCR0/*is_stack_overflow*/, done);493494// Load target address of the runtime stub.495assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "generated in wrong order");496load_const_optimized(Rscratch1, (StubRoutines::throw_StackOverflowError_entry()), R0);497mtctr(Rscratch1);498// Restore caller_sp.499#ifdef ASSERT500ld(Rscratch1, 0, R1_SP);501ld(R0, 0, R21_sender_SP);502cmpd(CCR0, R0, Rscratch1);503asm_assert_eq("backlink", 0x547);504#endif // ASSERT505mr(R1_SP, R21_sender_SP);506bctr();507508align(32, 12);509bind(done);510}511512// Separate these two to allow for delay slot in middle.513// These are used to do a test and full jump to exception-throwing code.514515// Check that index is in range for array, then shift index by index_shift,516// and put arrayOop + shifted_index into res.517// Note: res is still shy of address by array offset into object.518519void InterpreterMacroAssembler::index_check_without_pop(Register Rarray, Register Rindex, int index_shift, Register Rtmp, Register Rres) {520// Check that index is in range for array, then shift index by index_shift,521// and put arrayOop + shifted_index into res.522// Note: res is still shy of address by array offset into object.523// Kills:524// - Rindex525// Writes:526// - Rres: Address that corresponds to the array index if check was successful.527verify_oop(Rarray);528const Register Rlength = R0;529const Register RsxtIndex = Rtmp;530Label LisNull, LnotOOR;531532// Array nullcheck533if (!ImplicitNullChecks) {534cmpdi(CCR0, Rarray, 0);535beq(CCR0, LisNull);536} else {537null_check_throw(Rarray, arrayOopDesc::length_offset_in_bytes(), /*temp*/RsxtIndex);538}539540// Rindex might contain garbage in upper bits (remember that we don't sign extend541// during integer arithmetic operations). So kill them and put value into same register542// where ArrayIndexOutOfBounds would expect the index in.543rldicl(RsxtIndex, Rindex, 0, 32); // zero extend 32 bit -> 64 bit544545// Index check546lwz(Rlength, arrayOopDesc::length_offset_in_bytes(), Rarray);547cmplw(CCR0, Rindex, Rlength);548sldi(RsxtIndex, RsxtIndex, index_shift);549blt(CCR0, LnotOOR);550// Index should be in R17_tos, array should be in R4_ARG2.551mr(R17_tos, Rindex);552mr(R4_ARG2, Rarray);553load_dispatch_table(Rtmp, (address*)Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);554mtctr(Rtmp);555bctr();556557if (!ImplicitNullChecks) {558bind(LisNull);559load_dispatch_table(Rtmp, (address*)Interpreter::_throw_NullPointerException_entry);560mtctr(Rtmp);561bctr();562}563564align(32, 16);565bind(LnotOOR);566567// Calc address568add(Rres, RsxtIndex, Rarray);569}570571void InterpreterMacroAssembler::index_check(Register array, Register index, int index_shift, Register tmp, Register res) {572// pop array573pop_ptr(array);574575// check array576index_check_without_pop(array, index, index_shift, tmp, res);577}578579void InterpreterMacroAssembler::get_const(Register Rdst) {580ld(Rdst, in_bytes(Method::const_offset()), R19_method);581}582583void InterpreterMacroAssembler::get_constant_pool(Register Rdst) {584get_const(Rdst);585ld(Rdst, in_bytes(ConstMethod::constants_offset()), Rdst);586}587588void InterpreterMacroAssembler::get_constant_pool_cache(Register Rdst) {589get_constant_pool(Rdst);590ld(Rdst, ConstantPool::cache_offset_in_bytes(), Rdst);591}592593void InterpreterMacroAssembler::get_cpool_and_tags(Register Rcpool, Register Rtags) {594get_constant_pool(Rcpool);595ld(Rtags, ConstantPool::tags_offset_in_bytes(), Rcpool);596}597598// Unlock if synchronized method.599//600// Unlock the receiver if this is a synchronized method.601// Unlock any Java monitors from synchronized blocks.602//603// If there are locked Java monitors604// If throw_monitor_exception605// throws IllegalMonitorStateException606// Else if install_monitor_exception607// installs IllegalMonitorStateException608// Else609// no error processing610void InterpreterMacroAssembler::unlock_if_synchronized_method(TosState state,611bool throw_monitor_exception,612bool install_monitor_exception) {613Label Lunlocked, Lno_unlock;614{615Register Rdo_not_unlock_flag = R11_scratch1;616Register Raccess_flags = R12_scratch2;617618// Check if synchronized method or unlocking prevented by619// JavaThread::do_not_unlock_if_synchronized flag.620lbz(Rdo_not_unlock_flag, in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()), R16_thread);621lwz(Raccess_flags, in_bytes(Method::access_flags_offset()), R19_method);622li(R0, 0);623stb(R0, in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()), R16_thread); // reset flag624625push(state);626627// Skip if we don't have to unlock.628rldicl_(R0, Raccess_flags, 64-JVM_ACC_SYNCHRONIZED_BIT, 63); // Extract bit and compare to 0.629beq(CCR0, Lunlocked);630631cmpwi(CCR0, Rdo_not_unlock_flag, 0);632bne(CCR0, Lno_unlock);633}634635// Unlock636{637Register Rmonitor_base = R11_scratch1;638639Label Lunlock;640// If it's still locked, everything is ok, unlock it.641ld(Rmonitor_base, 0, R1_SP);642addi(Rmonitor_base, Rmonitor_base, - (frame::ijava_state_size + frame::interpreter_frame_monitor_size_in_bytes())); // Monitor base643644ld(R0, BasicObjectLock::obj_offset_in_bytes(), Rmonitor_base);645cmpdi(CCR0, R0, 0);646bne(CCR0, Lunlock);647648// If it's already unlocked, throw exception.649if (throw_monitor_exception) {650call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));651should_not_reach_here();652} else {653if (install_monitor_exception) {654call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::new_illegal_monitor_state_exception));655b(Lunlocked);656}657}658659bind(Lunlock);660unlock_object(Rmonitor_base);661}662663// Check that all other monitors are unlocked. Throw IllegelMonitorState exception if not.664bind(Lunlocked);665{666Label Lexception, Lrestart;667Register Rcurrent_obj_addr = R11_scratch1;668const int delta = frame::interpreter_frame_monitor_size_in_bytes();669assert((delta & LongAlignmentMask) == 0, "sizeof BasicObjectLock must be even number of doublewords");670671bind(Lrestart);672// Set up search loop: Calc num of iterations.673{674Register Riterations = R12_scratch2;675Register Rmonitor_base = Rcurrent_obj_addr;676ld(Rmonitor_base, 0, R1_SP);677addi(Rmonitor_base, Rmonitor_base, - frame::ijava_state_size); // Monitor base678679subf_(Riterations, R26_monitor, Rmonitor_base);680ble(CCR0, Lno_unlock);681682addi(Rcurrent_obj_addr, Rmonitor_base, BasicObjectLock::obj_offset_in_bytes() - frame::interpreter_frame_monitor_size_in_bytes());683// Check if any monitor is on stack, bail out if not684srdi(Riterations, Riterations, exact_log2(delta));685mtctr(Riterations);686}687688// The search loop: Look for locked monitors.689{690const Register Rcurrent_obj = R0;691Label Lloop;692693ld(Rcurrent_obj, 0, Rcurrent_obj_addr);694addi(Rcurrent_obj_addr, Rcurrent_obj_addr, -delta);695bind(Lloop);696697// Check if current entry is used.698cmpdi(CCR0, Rcurrent_obj, 0);699bne(CCR0, Lexception);700// Preload next iteration's compare value.701ld(Rcurrent_obj, 0, Rcurrent_obj_addr);702addi(Rcurrent_obj_addr, Rcurrent_obj_addr, -delta);703bdnz(Lloop);704}705// Fell through: Everything's unlocked => finish.706b(Lno_unlock);707708// An object is still locked => need to throw exception.709bind(Lexception);710if (throw_monitor_exception) {711call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));712should_not_reach_here();713} else {714// Stack unrolling. Unlock object and if requested, install illegal_monitor_exception.715// Unlock does not block, so don't have to worry about the frame.716Register Rmonitor_addr = R11_scratch1;717addi(Rmonitor_addr, Rcurrent_obj_addr, -BasicObjectLock::obj_offset_in_bytes() + delta);718unlock_object(Rmonitor_addr);719if (install_monitor_exception) {720call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::new_illegal_monitor_state_exception));721}722b(Lrestart);723}724}725726align(32, 12);727bind(Lno_unlock);728pop(state);729}730731// Support function for remove_activation & Co.732void InterpreterMacroAssembler::merge_frames(Register Rsender_sp, Register return_pc, Register Rscratch1, Register Rscratch2) {733// Pop interpreter frame.734ld(Rscratch1, 0, R1_SP); // *SP735ld(Rsender_sp, _ijava_state_neg(sender_sp), Rscratch1); // top_frame_sp736ld(Rscratch2, 0, Rscratch1); // **SP737#ifdef ASSERT738{739Label Lok;740ld(R0, _ijava_state_neg(ijava_reserved), Rscratch1);741cmpdi(CCR0, R0, 0x5afe);742beq(CCR0, Lok);743stop("frame corrupted (remove activation)", 0x5afe);744bind(Lok);745}746#endif747if (return_pc!=noreg) {748ld(return_pc, _abi(lr), Rscratch1); // LR749}750751// Merge top frames.752subf(Rscratch1, R1_SP, Rsender_sp); // top_frame_sp - SP753stdux(Rscratch2, R1_SP, Rscratch1); // atomically set *(SP = top_frame_sp) = **SP754}755756void InterpreterMacroAssembler::narrow(Register result) {757Register ret_type = R11_scratch1;758ld(R11_scratch1, in_bytes(Method::const_offset()), R19_method);759lbz(ret_type, in_bytes(ConstMethod::result_type_offset()), R11_scratch1);760761Label notBool, notByte, notChar, done;762763// common case first764cmpwi(CCR0, ret_type, T_INT);765beq(CCR0, done);766767cmpwi(CCR0, ret_type, T_BOOLEAN);768bne(CCR0, notBool);769andi(result, result, 0x1);770b(done);771772bind(notBool);773cmpwi(CCR0, ret_type, T_BYTE);774bne(CCR0, notByte);775extsb(result, result);776b(done);777778bind(notByte);779cmpwi(CCR0, ret_type, T_CHAR);780bne(CCR0, notChar);781andi(result, result, 0xffff);782b(done);783784bind(notChar);785// cmpwi(CCR0, ret_type, T_SHORT); // all that's left786// bne(CCR0, done);787extsh(result, result);788789// Nothing to do for T_INT790bind(done);791}792793// Remove activation.794//795// Unlock the receiver if this is a synchronized method.796// Unlock any Java monitors from synchronized blocks.797// Remove the activation from the stack.798//799// If there are locked Java monitors800// If throw_monitor_exception801// throws IllegalMonitorStateException802// Else if install_monitor_exception803// installs IllegalMonitorStateException804// Else805// no error processing806void InterpreterMacroAssembler::remove_activation(TosState state,807bool throw_monitor_exception,808bool install_monitor_exception) {809unlock_if_synchronized_method(state, throw_monitor_exception, install_monitor_exception);810811// Save result (push state before jvmti call and pop it afterwards) and notify jvmti.812notify_method_exit(false, state, NotifyJVMTI, true);813814verify_oop(R17_tos, state);815verify_thread();816817merge_frames(/*top_frame_sp*/ R21_sender_SP, /*return_pc*/ R0, R11_scratch1, R12_scratch2);818mtlr(R0);819}820821#endif // !CC_INTERP822823// Lock object824//825// Registers alive826// monitor - Address of the BasicObjectLock to be used for locking,827// which must be initialized with the object to lock.828// object - Address of the object to be locked.829//830void InterpreterMacroAssembler::lock_object(Register monitor, Register object) {831if (UseHeavyMonitors) {832call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),833monitor, /*check_for_exceptions=*/true CC_INTERP_ONLY(&& false));834} else {835// template code:836//837// markOop displaced_header = obj->mark().set_unlocked();838// monitor->lock()->set_displaced_header(displaced_header);839// if (Atomic::cmpxchg_ptr(/*ex=*/monitor, /*addr*/obj->mark_addr(), /*cmp*/displaced_header) == displaced_header) {840// // We stored the monitor address into the object's mark word.841// } else if (THREAD->is_lock_owned((address)displaced_header))842// // Simple recursive case.843// monitor->lock()->set_displaced_header(NULL);844// } else {845// // Slow path.846// InterpreterRuntime::monitorenter(THREAD, monitor);847// }848849const Register displaced_header = R7_ARG5;850const Register object_mark_addr = R8_ARG6;851const Register current_header = R9_ARG7;852const Register tmp = R10_ARG8;853854Label done;855Label cas_failed, slow_case;856857assert_different_registers(displaced_header, object_mark_addr, current_header, tmp);858859// markOop displaced_header = obj->mark().set_unlocked();860861// Load markOop from object into displaced_header.862ld(displaced_header, oopDesc::mark_offset_in_bytes(), object);863864if (UseBiasedLocking) {865biased_locking_enter(CCR0, object, displaced_header, tmp, current_header, done, &slow_case);866}867868// Set displaced_header to be (markOop of object | UNLOCK_VALUE).869ori(displaced_header, displaced_header, markOopDesc::unlocked_value);870871// monitor->lock()->set_displaced_header(displaced_header);872873// Initialize the box (Must happen before we update the object mark!).874std(displaced_header, BasicObjectLock::lock_offset_in_bytes() +875BasicLock::displaced_header_offset_in_bytes(), monitor);876877// if (Atomic::cmpxchg_ptr(/*ex=*/monitor, /*addr*/obj->mark_addr(), /*cmp*/displaced_header) == displaced_header) {878879// Store stack address of the BasicObjectLock (this is monitor) into object.880addi(object_mark_addr, object, oopDesc::mark_offset_in_bytes());881882// Must fence, otherwise, preceding store(s) may float below cmpxchg.883// CmpxchgX sets CCR0 to cmpX(current, displaced).884fence(); // TODO: replace by MacroAssembler::MemBarRel | MacroAssembler::MemBarAcq ?885cmpxchgd(/*flag=*/CCR0,886/*current_value=*/current_header,887/*compare_value=*/displaced_header, /*exchange_value=*/monitor,888/*where=*/object_mark_addr,889MacroAssembler::MemBarRel | MacroAssembler::MemBarAcq,890MacroAssembler::cmpxchgx_hint_acquire_lock(),891noreg,892&cas_failed);893894// If the compare-and-exchange succeeded, then we found an unlocked895// object and we have now locked it.896b(done);897bind(cas_failed);898899// } else if (THREAD->is_lock_owned((address)displaced_header))900// // Simple recursive case.901// monitor->lock()->set_displaced_header(NULL);902903// We did not see an unlocked object so try the fast recursive case.904905// Check if owner is self by comparing the value in the markOop of object906// (current_header) with the stack pointer.907sub(current_header, current_header, R1_SP);908909assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");910load_const_optimized(tmp, ~(os::vm_page_size()-1) | markOopDesc::lock_mask_in_place);911912and_(R0/*==0?*/, current_header, tmp);913// If condition is true we are done and hence we can store 0 in the displaced914// header indicating it is a recursive lock.915bne(CCR0, slow_case);916release();917std(R0/*==0!*/, BasicObjectLock::lock_offset_in_bytes() +918BasicLock::displaced_header_offset_in_bytes(), monitor);919b(done);920921// } else {922// // Slow path.923// InterpreterRuntime::monitorenter(THREAD, monitor);924925// None of the above fast optimizations worked so we have to get into the926// slow case of monitor enter.927bind(slow_case);928call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),929monitor, /*check_for_exceptions=*/true CC_INTERP_ONLY(&& false));930// }931align(32, 12);932bind(done);933}934}935936// Unlocks an object. Used in monitorexit bytecode and remove_activation.937//938// Registers alive939// monitor - Address of the BasicObjectLock to be used for locking,940// which must be initialized with the object to lock.941//942// Throw IllegalMonitorException if object is not locked by current thread.943void InterpreterMacroAssembler::unlock_object(Register monitor, bool check_for_exceptions) {944if (UseHeavyMonitors) {945call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit),946monitor, check_for_exceptions CC_INTERP_ONLY(&& false));947} else {948949// template code:950//951// if ((displaced_header = monitor->displaced_header()) == NULL) {952// // Recursive unlock. Mark the monitor unlocked by setting the object field to NULL.953// monitor->set_obj(NULL);954// } else if (Atomic::cmpxchg_ptr(displaced_header, obj->mark_addr(), monitor) == monitor) {955// // We swapped the unlocked mark in displaced_header into the object's mark word.956// monitor->set_obj(NULL);957// } else {958// // Slow path.959// InterpreterRuntime::monitorexit(THREAD, monitor);960// }961962const Register object = R7_ARG5;963const Register displaced_header = R8_ARG6;964const Register object_mark_addr = R9_ARG7;965const Register current_header = R10_ARG8;966967Label free_slot;968Label slow_case;969970assert_different_registers(object, displaced_header, object_mark_addr, current_header);971972if (UseBiasedLocking) {973// The object address from the monitor is in object.974ld(object, BasicObjectLock::obj_offset_in_bytes(), monitor);975assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");976biased_locking_exit(CCR0, object, displaced_header, free_slot);977}978979// Test first if we are in the fast recursive case.980ld(displaced_header, BasicObjectLock::lock_offset_in_bytes() +981BasicLock::displaced_header_offset_in_bytes(), monitor);982983// If the displaced header is zero, we have a recursive unlock.984cmpdi(CCR0, displaced_header, 0);985beq(CCR0, free_slot); // recursive unlock986987// } else if (Atomic::cmpxchg_ptr(displaced_header, obj->mark_addr(), monitor) == monitor) {988// // We swapped the unlocked mark in displaced_header into the object's mark word.989// monitor->set_obj(NULL);990991// If we still have a lightweight lock, unlock the object and be done.992993// The object address from the monitor is in object.994if (!UseBiasedLocking) { ld(object, BasicObjectLock::obj_offset_in_bytes(), monitor); }995addi(object_mark_addr, object, oopDesc::mark_offset_in_bytes());996997// We have the displaced header in displaced_header. If the lock is still998// lightweight, it will contain the monitor address and we'll store the999// displaced header back into the object's mark word.1000// CmpxchgX sets CCR0 to cmpX(current, monitor).1001cmpxchgd(/*flag=*/CCR0,1002/*current_value=*/current_header,1003/*compare_value=*/monitor, /*exchange_value=*/displaced_header,1004/*where=*/object_mark_addr,1005MacroAssembler::MemBarRel,1006MacroAssembler::cmpxchgx_hint_release_lock(),1007noreg,1008&slow_case);1009b(free_slot);10101011// } else {1012// // Slow path.1013// InterpreterRuntime::monitorexit(THREAD, monitor);10141015// The lock has been converted into a heavy lock and hence1016// we need to get into the slow case.1017bind(slow_case);1018call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit),1019monitor, check_for_exceptions CC_INTERP_ONLY(&& false));1020// }10211022Label done;1023b(done); // Monitor register may be overwritten! Runtime has already freed the slot.10241025// Exchange worked, do monitor->set_obj(NULL);1026align(32, 12);1027bind(free_slot);1028li(R0, 0);1029std(R0, BasicObjectLock::obj_offset_in_bytes(), monitor);1030bind(done);1031}1032}10331034#ifndef CC_INTERP10351036// Load compiled (i2c) or interpreter entry when calling from interpreted and1037// do the call. Centralized so that all interpreter calls will do the same actions.1038// If jvmti single stepping is on for a thread we must not call compiled code.1039//1040// Input:1041// - Rtarget_method: method to call1042// - Rret_addr: return address1043// - 2 scratch regs1044//1045void InterpreterMacroAssembler::call_from_interpreter(Register Rtarget_method, Register Rret_addr, Register Rscratch1, Register Rscratch2) {1046assert_different_registers(Rscratch1, Rscratch2, Rtarget_method, Rret_addr);1047// Assume we want to go compiled if available.1048const Register Rtarget_addr = Rscratch1;1049const Register Rinterp_only = Rscratch2;10501051ld(Rtarget_addr, in_bytes(Method::from_interpreted_offset()), Rtarget_method);10521053if (JvmtiExport::can_post_interpreter_events()) {1054lwz(Rinterp_only, in_bytes(JavaThread::interp_only_mode_offset()), R16_thread);10551056// JVMTI events, such as single-stepping, are implemented partly by avoiding running1057// compiled code in threads for which the event is enabled. Check here for1058// interp_only_mode if these events CAN be enabled.1059Label done;1060verify_thread();1061cmpwi(CCR0, Rinterp_only, 0);1062beq(CCR0, done);1063ld(Rtarget_addr, in_bytes(Method::interpreter_entry_offset()), Rtarget_method);1064align(32, 12);1065bind(done);1066}10671068#ifdef ASSERT1069{1070Label Lok;1071cmpdi(CCR0, Rtarget_addr, 0);1072bne(CCR0, Lok);1073stop("null entry point");1074bind(Lok);1075}1076#endif // ASSERT10771078mr(R21_sender_SP, R1_SP);10791080// Calc a precise SP for the call. The SP value we calculated in1081// generate_fixed_frame() is based on the max_stack() value, so we would waste stack space1082// if esp is not max. Also, the i2c adapter extends the stack space without restoring1083// our pre-calced value, so repeating calls via i2c would result in stack overflow.1084// Since esp already points to an empty slot, we just have to sub 1 additional slot1085// to meet the abi scratch requirements.1086// The max_stack pointer will get restored by means of the GR_Lmax_stack local in1087// the return entry of the interpreter.1088addi(Rscratch2, R15_esp, Interpreter::stackElementSize - frame::abi_reg_args_size);1089clrrdi(Rscratch2, Rscratch2, exact_log2(frame::alignment_in_bytes)); // round towards smaller address1090resize_frame_absolute(Rscratch2, Rscratch2, R0);10911092mr_if_needed(R19_method, Rtarget_method);1093mtctr(Rtarget_addr);1094mtlr(Rret_addr);10951096save_interpreter_state(Rscratch2);1097#ifdef ASSERT1098ld(Rscratch1, _ijava_state_neg(top_frame_sp), Rscratch2); // Rscratch2 contains fp1099cmpd(CCR0, R21_sender_SP, Rscratch1);1100asm_assert_eq("top_frame_sp incorrect", 0x951);1101#endif11021103bctr();1104}11051106// Set the method data pointer for the current bcp.1107void InterpreterMacroAssembler::set_method_data_pointer_for_bcp() {1108assert(ProfileInterpreter, "must be profiling interpreter");1109Label get_continue;1110ld(R28_mdx, in_bytes(Method::method_data_offset()), R19_method);1111test_method_data_pointer(get_continue);1112call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::bcp_to_di), R19_method, R14_bcp);11131114addi(R28_mdx, R28_mdx, in_bytes(MethodData::data_offset()));1115add(R28_mdx, R28_mdx, R3_RET);1116bind(get_continue);1117}11181119// Test ImethodDataPtr. If it is null, continue at the specified label.1120void InterpreterMacroAssembler::test_method_data_pointer(Label& zero_continue) {1121assert(ProfileInterpreter, "must be profiling interpreter");1122cmpdi(CCR0, R28_mdx, 0);1123beq(CCR0, zero_continue);1124}11251126void InterpreterMacroAssembler::verify_method_data_pointer() {1127assert(ProfileInterpreter, "must be profiling interpreter");1128#ifdef ASSERT1129Label verify_continue;1130test_method_data_pointer(verify_continue);11311132// If the mdp is valid, it will point to a DataLayout header which is1133// consistent with the bcp. The converse is highly probable also.1134lhz(R11_scratch1, in_bytes(DataLayout::bci_offset()), R28_mdx);1135ld(R12_scratch2, in_bytes(Method::const_offset()), R19_method);1136addi(R11_scratch1, R11_scratch1, in_bytes(ConstMethod::codes_offset()));1137add(R11_scratch1, R12_scratch2, R12_scratch2);1138cmpd(CCR0, R11_scratch1, R14_bcp);1139beq(CCR0, verify_continue);11401141call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::verify_mdp ), R19_method, R14_bcp, R28_mdx);11421143bind(verify_continue);1144#endif1145}11461147void InterpreterMacroAssembler::test_invocation_counter_for_mdp(Register invocation_count,1148Register Rscratch,1149Label &profile_continue) {1150assert(ProfileInterpreter, "must be profiling interpreter");1151// Control will flow to "profile_continue" if the counter is less than the1152// limit or if we call profile_method().1153Label done;11541155// If no method data exists, and the counter is high enough, make one.1156int ipl_offs = load_const_optimized(Rscratch, &InvocationCounter::InterpreterProfileLimit, R0, true);1157lwz(Rscratch, ipl_offs, Rscratch);11581159cmpdi(CCR0, R28_mdx, 0);1160// Test to see if we should create a method data oop.1161cmpd(CCR1, Rscratch /* InterpreterProfileLimit */, invocation_count);1162bne(CCR0, done);1163bge(CCR1, profile_continue);11641165// Build it now.1166call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method));1167set_method_data_pointer_for_bcp();1168b(profile_continue);11691170align(32, 12);1171bind(done);1172}11731174void InterpreterMacroAssembler::test_backedge_count_for_osr(Register backedge_count, Register branch_bcp, Register Rtmp) {1175assert_different_registers(backedge_count, Rtmp, branch_bcp);1176assert(UseOnStackReplacement,"Must UseOnStackReplacement to test_backedge_count_for_osr");11771178Label did_not_overflow;1179Label overflow_with_error;11801181int ibbl_offs = load_const_optimized(Rtmp, &InvocationCounter::InterpreterBackwardBranchLimit, R0, true);1182lwz(Rtmp, ibbl_offs, Rtmp);1183cmpw(CCR0, backedge_count, Rtmp);11841185blt(CCR0, did_not_overflow);11861187// When ProfileInterpreter is on, the backedge_count comes from the1188// methodDataOop, which value does not get reset on the call to1189// frequency_counter_overflow(). To avoid excessive calls to the overflow1190// routine while the method is being compiled, add a second test to make sure1191// the overflow function is called only once every overflow_frequency.1192if (ProfileInterpreter) {1193const int overflow_frequency = 1024;1194li(Rtmp, overflow_frequency-1);1195andr(Rtmp, Rtmp, backedge_count);1196cmpwi(CCR0, Rtmp, 0);1197bne(CCR0, did_not_overflow);1198}11991200// Overflow in loop, pass branch bytecode.1201call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), branch_bcp, true);12021203// Was an OSR adapter generated?1204// O0 = osr nmethod1205cmpdi(CCR0, R3_RET, 0);1206beq(CCR0, overflow_with_error);12071208// Has the nmethod been invalidated already?1209lwz(Rtmp, nmethod::entry_bci_offset(), R3_RET);1210cmpwi(CCR0, Rtmp, InvalidOSREntryBci);1211beq(CCR0, overflow_with_error);12121213// Migrate the interpreter frame off of the stack.1214// We can use all registers because we will not return to interpreter from this point.12151216// Save nmethod.1217const Register osr_nmethod = R31;1218mr(osr_nmethod, R3_RET);1219set_top_ijava_frame_at_SP_as_last_Java_frame(R1_SP, R11_scratch1);1220call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin), R16_thread);1221reset_last_Java_frame();1222// OSR buffer is in ARG112231224// Remove the interpreter frame.1225merge_frames(/*top_frame_sp*/ R21_sender_SP, /*return_pc*/ R0, R11_scratch1, R12_scratch2);12261227// Jump to the osr code.1228ld(R11_scratch1, nmethod::osr_entry_point_offset(), osr_nmethod);1229mtlr(R0);1230mtctr(R11_scratch1);1231bctr();12321233align(32, 12);1234bind(overflow_with_error);1235bind(did_not_overflow);1236}12371238// Store a value at some constant offset from the method data pointer.1239void InterpreterMacroAssembler::set_mdp_data_at(int constant, Register value) {1240assert(ProfileInterpreter, "must be profiling interpreter");12411242std(value, constant, R28_mdx);1243}12441245// Increment the value at some constant offset from the method data pointer.1246void InterpreterMacroAssembler::increment_mdp_data_at(int constant,1247Register counter_addr,1248Register Rbumped_count,1249bool decrement) {1250// Locate the counter at a fixed offset from the mdp:1251addi(counter_addr, R28_mdx, constant);1252increment_mdp_data_at(counter_addr, Rbumped_count, decrement);1253}12541255// Increment the value at some non-fixed (reg + constant) offset from1256// the method data pointer.1257void InterpreterMacroAssembler::increment_mdp_data_at(Register reg,1258int constant,1259Register scratch,1260Register Rbumped_count,1261bool decrement) {1262// Add the constant to reg to get the offset.1263add(scratch, R28_mdx, reg);1264// Then calculate the counter address.1265addi(scratch, scratch, constant);1266increment_mdp_data_at(scratch, Rbumped_count, decrement);1267}12681269void InterpreterMacroAssembler::increment_mdp_data_at(Register counter_addr,1270Register Rbumped_count,1271bool decrement) {1272assert(ProfileInterpreter, "must be profiling interpreter");12731274// Load the counter.1275ld(Rbumped_count, 0, counter_addr);12761277if (decrement) {1278// Decrement the register. Set condition codes.1279addi(Rbumped_count, Rbumped_count, - DataLayout::counter_increment);1280// Store the decremented counter, if it is still negative.1281std(Rbumped_count, 0, counter_addr);1282// Note: add/sub overflow check are not ported, since 64 bit1283// calculation should never overflow.1284} else {1285// Increment the register. Set carry flag.1286addi(Rbumped_count, Rbumped_count, DataLayout::counter_increment);1287// Store the incremented counter.1288std(Rbumped_count, 0, counter_addr);1289}1290}12911292// Set a flag value at the current method data pointer position.1293void InterpreterMacroAssembler::set_mdp_flag_at(int flag_constant,1294Register scratch) {1295assert(ProfileInterpreter, "must be profiling interpreter");1296// Load the data header.1297lbz(scratch, in_bytes(DataLayout::flags_offset()), R28_mdx);1298// Set the flag.1299ori(scratch, scratch, flag_constant);1300// Store the modified header.1301stb(scratch, in_bytes(DataLayout::flags_offset()), R28_mdx);1302}13031304// Test the location at some offset from the method data pointer.1305// If it is not equal to value, branch to the not_equal_continue Label.1306void InterpreterMacroAssembler::test_mdp_data_at(int offset,1307Register value,1308Label& not_equal_continue,1309Register test_out) {1310assert(ProfileInterpreter, "must be profiling interpreter");13111312ld(test_out, offset, R28_mdx);1313cmpd(CCR0, value, test_out);1314bne(CCR0, not_equal_continue);1315}13161317// Update the method data pointer by the displacement located at some fixed1318// offset from the method data pointer.1319void InterpreterMacroAssembler::update_mdp_by_offset(int offset_of_disp,1320Register scratch) {1321assert(ProfileInterpreter, "must be profiling interpreter");13221323ld(scratch, offset_of_disp, R28_mdx);1324add(R28_mdx, scratch, R28_mdx);1325}13261327// Update the method data pointer by the displacement located at the1328// offset (reg + offset_of_disp).1329void InterpreterMacroAssembler::update_mdp_by_offset(Register reg,1330int offset_of_disp,1331Register scratch) {1332assert(ProfileInterpreter, "must be profiling interpreter");13331334add(scratch, reg, R28_mdx);1335ld(scratch, offset_of_disp, scratch);1336add(R28_mdx, scratch, R28_mdx);1337}13381339// Update the method data pointer by a simple constant displacement.1340void InterpreterMacroAssembler::update_mdp_by_constant(int constant) {1341assert(ProfileInterpreter, "must be profiling interpreter");1342addi(R28_mdx, R28_mdx, constant);1343}13441345// Update the method data pointer for a _ret bytecode whose target1346// was not among our cached targets.1347void InterpreterMacroAssembler::update_mdp_for_ret(TosState state,1348Register return_bci) {1349assert(ProfileInterpreter, "must be profiling interpreter");13501351push(state);1352assert(return_bci->is_nonvolatile(), "need to protect return_bci");1353call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::update_mdp_for_ret), return_bci);1354pop(state);1355}13561357// Increments the backedge counter.1358// Returns backedge counter + invocation counter in Rdst.1359void InterpreterMacroAssembler::increment_backedge_counter(const Register Rcounters, const Register Rdst,1360const Register Rtmp1, Register Rscratch) {1361assert(UseCompiler, "incrementing must be useful");1362assert_different_registers(Rdst, Rtmp1);1363const Register invocation_counter = Rtmp1;1364const Register counter = Rdst;1365// TODO ppc port assert(4 == InvocationCounter::sz_counter(), "unexpected field size.");13661367// Load backedge counter.1368lwz(counter, in_bytes(MethodCounters::backedge_counter_offset()) +1369in_bytes(InvocationCounter::counter_offset()), Rcounters);1370// Load invocation counter.1371lwz(invocation_counter, in_bytes(MethodCounters::invocation_counter_offset()) +1372in_bytes(InvocationCounter::counter_offset()), Rcounters);13731374// Add the delta to the backedge counter.1375addi(counter, counter, InvocationCounter::count_increment);13761377// Mask the invocation counter.1378li(Rscratch, InvocationCounter::count_mask_value);1379andr(invocation_counter, invocation_counter, Rscratch);13801381// Store new counter value.1382stw(counter, in_bytes(MethodCounters::backedge_counter_offset()) +1383in_bytes(InvocationCounter::counter_offset()), Rcounters);1384// Return invocation counter + backedge counter.1385add(counter, counter, invocation_counter);1386}13871388// Count a taken branch in the bytecodes.1389void InterpreterMacroAssembler::profile_taken_branch(Register scratch, Register bumped_count) {1390if (ProfileInterpreter) {1391Label profile_continue;13921393// If no method data exists, go to profile_continue.1394test_method_data_pointer(profile_continue);13951396// We are taking a branch. Increment the taken count.1397increment_mdp_data_at(in_bytes(JumpData::taken_offset()), scratch, bumped_count);13981399// The method data pointer needs to be updated to reflect the new target.1400update_mdp_by_offset(in_bytes(JumpData::displacement_offset()), scratch);1401bind (profile_continue);1402}1403}14041405// Count a not-taken branch in the bytecodes.1406void InterpreterMacroAssembler::profile_not_taken_branch(Register scratch1, Register scratch2) {1407if (ProfileInterpreter) {1408Label profile_continue;14091410// If no method data exists, go to profile_continue.1411test_method_data_pointer(profile_continue);14121413// We are taking a branch. Increment the not taken count.1414increment_mdp_data_at(in_bytes(BranchData::not_taken_offset()), scratch1, scratch2);14151416// The method data pointer needs to be updated to correspond to the1417// next bytecode.1418update_mdp_by_constant(in_bytes(BranchData::branch_data_size()));1419bind (profile_continue);1420}1421}14221423// Count a non-virtual call in the bytecodes.1424void InterpreterMacroAssembler::profile_call(Register scratch1, Register scratch2) {1425if (ProfileInterpreter) {1426Label profile_continue;14271428// If no method data exists, go to profile_continue.1429test_method_data_pointer(profile_continue);14301431// We are making a call. Increment the count.1432increment_mdp_data_at(in_bytes(CounterData::count_offset()), scratch1, scratch2);14331434// The method data pointer needs to be updated to reflect the new target.1435update_mdp_by_constant(in_bytes(CounterData::counter_data_size()));1436bind (profile_continue);1437}1438}14391440// Count a final call in the bytecodes.1441void InterpreterMacroAssembler::profile_final_call(Register scratch1, Register scratch2) {1442if (ProfileInterpreter) {1443Label profile_continue;14441445// If no method data exists, go to profile_continue.1446test_method_data_pointer(profile_continue);14471448// We are making a call. Increment the count.1449increment_mdp_data_at(in_bytes(CounterData::count_offset()), scratch1, scratch2);14501451// The method data pointer needs to be updated to reflect the new target.1452update_mdp_by_constant(in_bytes(VirtualCallData::virtual_call_data_size()));1453bind (profile_continue);1454}1455}14561457// Count a virtual call in the bytecodes.1458void InterpreterMacroAssembler::profile_virtual_call(Register Rreceiver,1459Register Rscratch1,1460Register Rscratch2,1461bool receiver_can_be_null) {1462if (!ProfileInterpreter) { return; }1463Label profile_continue;14641465// If no method data exists, go to profile_continue.1466test_method_data_pointer(profile_continue);14671468Label skip_receiver_profile;1469if (receiver_can_be_null) {1470Label not_null;1471cmpdi(CCR0, Rreceiver, 0);1472bne(CCR0, not_null);1473// We are making a call. Increment the count for null receiver.1474increment_mdp_data_at(in_bytes(CounterData::count_offset()), Rscratch1, Rscratch2);1475b(skip_receiver_profile);1476bind(not_null);1477}14781479// Record the receiver type.1480record_klass_in_profile(Rreceiver, Rscratch1, Rscratch2, true);1481bind(skip_receiver_profile);14821483// The method data pointer needs to be updated to reflect the new target.1484update_mdp_by_constant(in_bytes(VirtualCallData::virtual_call_data_size()));1485bind (profile_continue);1486}14871488void InterpreterMacroAssembler::profile_typecheck(Register Rklass, Register Rscratch1, Register Rscratch2) {1489if (ProfileInterpreter) {1490Label profile_continue;14911492// If no method data exists, go to profile_continue.1493test_method_data_pointer(profile_continue);14941495int mdp_delta = in_bytes(BitData::bit_data_size());1496if (TypeProfileCasts) {1497mdp_delta = in_bytes(VirtualCallData::virtual_call_data_size());14981499// Record the object type.1500record_klass_in_profile(Rklass, Rscratch1, Rscratch2, false);1501}15021503// The method data pointer needs to be updated.1504update_mdp_by_constant(mdp_delta);15051506bind (profile_continue);1507}1508}15091510void InterpreterMacroAssembler::profile_typecheck_failed(Register Rscratch1, Register Rscratch2) {1511if (ProfileInterpreter && TypeProfileCasts) {1512Label profile_continue;15131514// If no method data exists, go to profile_continue.1515test_method_data_pointer(profile_continue);15161517int count_offset = in_bytes(CounterData::count_offset());1518// Back up the address, since we have already bumped the mdp.1519count_offset -= in_bytes(VirtualCallData::virtual_call_data_size());15201521// *Decrement* the counter. We expect to see zero or small negatives.1522increment_mdp_data_at(count_offset, Rscratch1, Rscratch2, true);15231524bind (profile_continue);1525}1526}15271528// Count a ret in the bytecodes.1529void InterpreterMacroAssembler::profile_ret(TosState state, Register return_bci, Register scratch1, Register scratch2) {1530if (ProfileInterpreter) {1531Label profile_continue;1532uint row;15331534// If no method data exists, go to profile_continue.1535test_method_data_pointer(profile_continue);15361537// Update the total ret count.1538increment_mdp_data_at(in_bytes(CounterData::count_offset()), scratch1, scratch2 );15391540for (row = 0; row < RetData::row_limit(); row++) {1541Label next_test;15421543// See if return_bci is equal to bci[n]:1544test_mdp_data_at(in_bytes(RetData::bci_offset(row)), return_bci, next_test, scratch1);15451546// return_bci is equal to bci[n]. Increment the count.1547increment_mdp_data_at(in_bytes(RetData::bci_count_offset(row)), scratch1, scratch2);15481549// The method data pointer needs to be updated to reflect the new target.1550update_mdp_by_offset(in_bytes(RetData::bci_displacement_offset(row)), scratch1);1551b(profile_continue);1552bind(next_test);1553}15541555update_mdp_for_ret(state, return_bci);15561557bind (profile_continue);1558}1559}15601561// Count the default case of a switch construct.1562void InterpreterMacroAssembler::profile_switch_default(Register scratch1, Register scratch2) {1563if (ProfileInterpreter) {1564Label profile_continue;15651566// If no method data exists, go to profile_continue.1567test_method_data_pointer(profile_continue);15681569// Update the default case count1570increment_mdp_data_at(in_bytes(MultiBranchData::default_count_offset()),1571scratch1, scratch2);15721573// The method data pointer needs to be updated.1574update_mdp_by_offset(in_bytes(MultiBranchData::default_displacement_offset()),1575scratch1);15761577bind (profile_continue);1578}1579}15801581// Count the index'th case of a switch construct.1582void InterpreterMacroAssembler::profile_switch_case(Register index,1583Register scratch1,1584Register scratch2,1585Register scratch3) {1586if (ProfileInterpreter) {1587assert_different_registers(index, scratch1, scratch2, scratch3);1588Label profile_continue;15891590// If no method data exists, go to profile_continue.1591test_method_data_pointer(profile_continue);15921593// Build the base (index * per_case_size_in_bytes()) + case_array_offset_in_bytes().1594li(scratch3, in_bytes(MultiBranchData::case_array_offset()));15951596assert (in_bytes(MultiBranchData::per_case_size()) == 16, "so that shladd works");1597sldi(scratch1, index, exact_log2(in_bytes(MultiBranchData::per_case_size())));1598add(scratch1, scratch1, scratch3);15991600// Update the case count.1601increment_mdp_data_at(scratch1, in_bytes(MultiBranchData::relative_count_offset()), scratch2, scratch3);16021603// The method data pointer needs to be updated.1604update_mdp_by_offset(scratch1, in_bytes(MultiBranchData::relative_displacement_offset()), scratch2);16051606bind (profile_continue);1607}1608}16091610void InterpreterMacroAssembler::profile_null_seen(Register Rscratch1, Register Rscratch2) {1611if (ProfileInterpreter) {1612assert_different_registers(Rscratch1, Rscratch2);1613Label profile_continue;16141615// If no method data exists, go to profile_continue.1616test_method_data_pointer(profile_continue);16171618set_mdp_flag_at(BitData::null_seen_byte_constant(), Rscratch1);16191620// The method data pointer needs to be updated.1621int mdp_delta = in_bytes(BitData::bit_data_size());1622if (TypeProfileCasts) {1623mdp_delta = in_bytes(VirtualCallData::virtual_call_data_size());1624}1625update_mdp_by_constant(mdp_delta);16261627bind (profile_continue);1628}1629}16301631void InterpreterMacroAssembler::record_klass_in_profile(Register Rreceiver,1632Register Rscratch1, Register Rscratch2,1633bool is_virtual_call) {1634assert(ProfileInterpreter, "must be profiling");1635assert_different_registers(Rreceiver, Rscratch1, Rscratch2);16361637Label done;1638record_klass_in_profile_helper(Rreceiver, Rscratch1, Rscratch2, 0, done, is_virtual_call);1639bind (done);1640}16411642void InterpreterMacroAssembler::record_klass_in_profile_helper(1643Register receiver, Register scratch1, Register scratch2,1644int start_row, Label& done, bool is_virtual_call) {1645if (TypeProfileWidth == 0) {1646if (is_virtual_call) {1647increment_mdp_data_at(in_bytes(CounterData::count_offset()), scratch1, scratch2);1648}1649return;1650}16511652int last_row = VirtualCallData::row_limit() - 1;1653assert(start_row <= last_row, "must be work left to do");1654// Test this row for both the receiver and for null.1655// Take any of three different outcomes:1656// 1. found receiver => increment count and goto done1657// 2. found null => keep looking for case 1, maybe allocate this cell1658// 3. found something else => keep looking for cases 1 and 21659// Case 3 is handled by a recursive call.1660for (int row = start_row; row <= last_row; row++) {1661Label next_test;1662bool test_for_null_also = (row == start_row);16631664// See if the receiver is receiver[n].1665int recvr_offset = in_bytes(VirtualCallData::receiver_offset(row));1666test_mdp_data_at(recvr_offset, receiver, next_test, scratch1);1667// delayed()->tst(scratch);16681669// The receiver is receiver[n]. Increment count[n].1670int count_offset = in_bytes(VirtualCallData::receiver_count_offset(row));1671increment_mdp_data_at(count_offset, scratch1, scratch2);1672b(done);1673bind(next_test);16741675if (test_for_null_also) {1676Label found_null;1677// Failed the equality check on receiver[n]... Test for null.1678if (start_row == last_row) {1679// The only thing left to do is handle the null case.1680if (is_virtual_call) {1681// Scratch1 contains test_out from test_mdp_data_at.1682cmpdi(CCR0, scratch1, 0);1683beq(CCR0, found_null);1684// Receiver did not match any saved receiver and there is no empty row for it.1685// Increment total counter to indicate polymorphic case.1686increment_mdp_data_at(in_bytes(CounterData::count_offset()), scratch1, scratch2);1687b(done);1688bind(found_null);1689} else {1690cmpdi(CCR0, scratch1, 0);1691bne(CCR0, done);1692}1693break;1694}1695// Since null is rare, make it be the branch-taken case.1696cmpdi(CCR0, scratch1, 0);1697beq(CCR0, found_null);16981699// Put all the "Case 3" tests here.1700record_klass_in_profile_helper(receiver, scratch1, scratch2, start_row + 1, done, is_virtual_call);17011702// Found a null. Keep searching for a matching receiver,1703// but remember that this is an empty (unused) slot.1704bind(found_null);1705}1706}17071708// In the fall-through case, we found no matching receiver, but we1709// observed the receiver[start_row] is NULL.17101711// Fill in the receiver field and increment the count.1712int recvr_offset = in_bytes(VirtualCallData::receiver_offset(start_row));1713set_mdp_data_at(recvr_offset, receiver);1714int count_offset = in_bytes(VirtualCallData::receiver_count_offset(start_row));1715li(scratch1, DataLayout::counter_increment);1716set_mdp_data_at(count_offset, scratch1);1717if (start_row > 0) {1718b(done);1719}1720}17211722// Argument and return type profilig.1723// kills: tmp, tmp2, R0, CR0, CR11724void InterpreterMacroAssembler::profile_obj_type(Register obj, Register mdo_addr_base,1725RegisterOrConstant mdo_addr_offs, Register tmp, Register tmp2) {1726Label do_nothing, do_update;17271728// tmp2 = obj is allowed1729assert_different_registers(obj, mdo_addr_base, tmp, R0);1730assert_different_registers(tmp2, mdo_addr_base, tmp, R0);1731const Register klass = tmp2;17321733verify_oop(obj);17341735ld(tmp, mdo_addr_offs, mdo_addr_base);17361737// Set null_seen if obj is 0.1738cmpdi(CCR0, obj, 0);1739ori(R0, tmp, TypeEntries::null_seen);1740beq(CCR0, do_update);17411742load_klass(klass, obj);17431744clrrdi(R0, tmp, exact_log2(-TypeEntries::type_klass_mask));1745// Basically same as andi(R0, tmp, TypeEntries::type_klass_mask);1746cmpd(CCR1, R0, klass);1747// Klass seen before, nothing to do (regardless of unknown bit).1748//beq(CCR1, do_nothing);17491750andi_(R0, klass, TypeEntries::type_unknown);1751// Already unknown. Nothing to do anymore.1752//bne(CCR0, do_nothing);1753crorc(/*CCR0 eq*/2, /*CCR1 eq*/4+2, /*CCR0 eq*/2); // cr0 eq = cr1 eq or cr0 ne1754beq(CCR0, do_nothing);17551756clrrdi_(R0, tmp, exact_log2(-TypeEntries::type_mask));1757orr(R0, klass, tmp); // Combine klass and null_seen bit (only used if (tmp & type_mask)==0).1758beq(CCR0, do_update); // First time here. Set profile type.17591760// Different than before. Cannot keep accurate profile.1761ori(R0, tmp, TypeEntries::type_unknown);17621763bind(do_update);1764// update profile1765std(R0, mdo_addr_offs, mdo_addr_base);17661767align(32, 12);1768bind(do_nothing);1769}17701771void InterpreterMacroAssembler::profile_arguments_type(Register callee, Register tmp1, Register tmp2, bool is_virtual) {1772if (!ProfileInterpreter) {1773return;1774}17751776assert_different_registers(callee, tmp1, tmp2, R28_mdx);17771778if (MethodData::profile_arguments() || MethodData::profile_return()) {1779Label profile_continue;17801781test_method_data_pointer(profile_continue);17821783int off_to_start = is_virtual ? in_bytes(VirtualCallData::virtual_call_data_size()) : in_bytes(CounterData::counter_data_size());17841785lbz(tmp1, in_bytes(DataLayout::tag_offset()) - off_to_start, R28_mdx);1786cmpwi(CCR0, tmp1, is_virtual ? DataLayout::virtual_call_type_data_tag : DataLayout::call_type_data_tag);1787bne(CCR0, profile_continue);17881789if (MethodData::profile_arguments()) {1790Label done;1791int off_to_args = in_bytes(TypeEntriesAtCall::args_data_offset());1792add(R28_mdx, off_to_args, R28_mdx);17931794for (int i = 0; i < TypeProfileArgsLimit; i++) {1795if (i > 0 || MethodData::profile_return()) {1796// If return value type is profiled we may have no argument to profile.1797ld(tmp1, in_bytes(TypeEntriesAtCall::cell_count_offset())-off_to_args, R28_mdx);1798cmpdi(CCR0, tmp1, (i+1)*TypeStackSlotEntries::per_arg_count());1799addi(tmp1, tmp1, -i*TypeStackSlotEntries::per_arg_count());1800blt(CCR0, done);1801}1802ld(tmp1, in_bytes(Method::const_offset()), callee);1803lhz(tmp1, in_bytes(ConstMethod::size_of_parameters_offset()), tmp1);1804// Stack offset o (zero based) from the start of the argument1805// list, for n arguments translates into offset n - o - 1 from1806// the end of the argument list. But there's an extra slot at1807// the top of the stack. So the offset is n - o from Lesp.1808ld(tmp2, in_bytes(TypeEntriesAtCall::stack_slot_offset(i))-off_to_args, R28_mdx);1809subf(tmp1, tmp2, tmp1);18101811sldi(tmp1, tmp1, Interpreter::logStackElementSize);1812ldx(tmp1, tmp1, R15_esp);18131814profile_obj_type(tmp1, R28_mdx, in_bytes(TypeEntriesAtCall::argument_type_offset(i))-off_to_args, tmp2, tmp1);18151816int to_add = in_bytes(TypeStackSlotEntries::per_arg_size());1817addi(R28_mdx, R28_mdx, to_add);1818off_to_args += to_add;1819}18201821if (MethodData::profile_return()) {1822ld(tmp1, in_bytes(TypeEntriesAtCall::cell_count_offset())-off_to_args, R28_mdx);1823addi(tmp1, tmp1, -TypeProfileArgsLimit*TypeStackSlotEntries::per_arg_count());1824}18251826bind(done);18271828if (MethodData::profile_return()) {1829// We're right after the type profile for the last1830// argument. tmp1 is the number of cells left in the1831// CallTypeData/VirtualCallTypeData to reach its end. Non null1832// if there's a return to profile.1833assert(ReturnTypeEntry::static_cell_count() < TypeStackSlotEntries::per_arg_count(), "can't move past ret type");1834sldi(tmp1, tmp1, exact_log2(DataLayout::cell_size));1835add(R28_mdx, tmp1, R28_mdx);1836}1837} else {1838assert(MethodData::profile_return(), "either profile call args or call ret");1839update_mdp_by_constant(in_bytes(TypeEntriesAtCall::return_only_size()));1840}18411842// Mdp points right after the end of the1843// CallTypeData/VirtualCallTypeData, right after the cells for the1844// return value type if there's one.1845align(32, 12);1846bind(profile_continue);1847}1848}18491850void InterpreterMacroAssembler::profile_return_type(Register ret, Register tmp1, Register tmp2) {1851assert_different_registers(ret, tmp1, tmp2);1852if (ProfileInterpreter && MethodData::profile_return()) {1853Label profile_continue;18541855test_method_data_pointer(profile_continue);18561857if (MethodData::profile_return_jsr292_only()) {1858// If we don't profile all invoke bytecodes we must make sure1859// it's a bytecode we indeed profile. We can't go back to the1860// begining of the ProfileData we intend to update to check its1861// type because we're right after it and we don't known its1862// length.1863lbz(tmp1, 0, R14_bcp);1864lbz(tmp2, Method::intrinsic_id_offset_in_bytes(), R19_method);1865cmpwi(CCR0, tmp1, Bytecodes::_invokedynamic);1866cmpwi(CCR1, tmp1, Bytecodes::_invokehandle);1867cror(/*CR0 eq*/2, /*CR1 eq*/4+2, /*CR0 eq*/2);1868cmpwi(CCR1, tmp2, vmIntrinsics::_compiledLambdaForm);1869cror(/*CR0 eq*/2, /*CR1 eq*/4+2, /*CR0 eq*/2);1870bne(CCR0, profile_continue);1871}18721873profile_obj_type(ret, R28_mdx, -in_bytes(ReturnTypeEntry::size()), tmp1, tmp2);18741875align(32, 12);1876bind(profile_continue);1877}1878}18791880void InterpreterMacroAssembler::profile_parameters_type(Register tmp1, Register tmp2, Register tmp3, Register tmp4) {1881if (ProfileInterpreter && MethodData::profile_parameters()) {1882Label profile_continue, done;18831884test_method_data_pointer(profile_continue);18851886// Load the offset of the area within the MDO used for1887// parameters. If it's negative we're not profiling any parameters.1888lwz(tmp1, in_bytes(MethodData::parameters_type_data_di_offset()) - in_bytes(MethodData::data_offset()), R28_mdx);1889cmpwi(CCR0, tmp1, 0);1890blt(CCR0, profile_continue);18911892// Compute a pointer to the area for parameters from the offset1893// and move the pointer to the slot for the last1894// parameters. Collect profiling from last parameter down.1895// mdo start + parameters offset + array length - 118961897// Pointer to the parameter area in the MDO.1898const Register mdp = tmp1;1899add(mdp, tmp1, R28_mdx);19001901// Pffset of the current profile entry to update.1902const Register entry_offset = tmp2;1903// entry_offset = array len in number of cells1904ld(entry_offset, in_bytes(ArrayData::array_len_offset()), mdp);19051906int off_base = in_bytes(ParametersTypeData::stack_slot_offset(0));1907assert(off_base % DataLayout::cell_size == 0, "should be a number of cells");19081909// entry_offset (number of cells) = array len - size of 1 entry + offset of the stack slot field1910addi(entry_offset, entry_offset, -TypeStackSlotEntries::per_arg_count() + (off_base / DataLayout::cell_size));1911// entry_offset in bytes1912sldi(entry_offset, entry_offset, exact_log2(DataLayout::cell_size));19131914Label loop;1915align(32, 12);1916bind(loop);19171918// Load offset on the stack from the slot for this parameter.1919ld(tmp3, entry_offset, mdp);1920sldi(tmp3, tmp3, Interpreter::logStackElementSize);1921neg(tmp3, tmp3);1922// Read the parameter from the local area.1923ldx(tmp3, tmp3, R18_locals);19241925// Make entry_offset now point to the type field for this parameter.1926int type_base = in_bytes(ParametersTypeData::type_offset(0));1927assert(type_base > off_base, "unexpected");1928addi(entry_offset, entry_offset, type_base - off_base);19291930// Profile the parameter.1931profile_obj_type(tmp3, mdp, entry_offset, tmp4, tmp3);19321933// Go to next parameter.1934int delta = TypeStackSlotEntries::per_arg_count() * DataLayout::cell_size + (type_base - off_base);1935cmpdi(CCR0, entry_offset, off_base + delta);1936addi(entry_offset, entry_offset, -delta);1937bge(CCR0, loop);19381939align(32, 12);1940bind(profile_continue);1941}1942}19431944// Add a InterpMonitorElem to stack (see frame_sparc.hpp).1945void InterpreterMacroAssembler::add_monitor_to_stack(bool stack_is_empty, Register Rtemp1, Register Rtemp2) {19461947// Very-local scratch registers.1948const Register esp = Rtemp1;1949const Register slot = Rtemp2;19501951// Extracted monitor_size.1952int monitor_size = frame::interpreter_frame_monitor_size_in_bytes();1953assert(Assembler::is_aligned((unsigned int)monitor_size,1954(unsigned int)frame::alignment_in_bytes),1955"size of a monitor must respect alignment of SP");19561957resize_frame(-monitor_size, /*temp*/esp); // Allocate space for new monitor1958std(R1_SP, _ijava_state_neg(top_frame_sp), esp); // esp contains fp19591960// Shuffle expression stack down. Recall that stack_base points1961// just above the new expression stack bottom. Old_tos and new_tos1962// are used to scan thru the old and new expression stacks.1963if (!stack_is_empty) {1964Label copy_slot, copy_slot_finished;1965const Register n_slots = slot;19661967addi(esp, R15_esp, Interpreter::stackElementSize); // Point to first element (pre-pushed stack).1968subf(n_slots, esp, R26_monitor);1969srdi_(n_slots, n_slots, LogBytesPerWord); // Compute number of slots to copy.1970assert(LogBytesPerWord == 3, "conflicts assembler instructions");1971beq(CCR0, copy_slot_finished); // Nothing to copy.19721973mtctr(n_slots);19741975// loop1976bind(copy_slot);1977ld(slot, 0, esp); // Move expression stack down.1978std(slot, -monitor_size, esp); // distance = monitor_size1979addi(esp, esp, BytesPerWord);1980bdnz(copy_slot);19811982bind(copy_slot_finished);1983}19841985addi(R15_esp, R15_esp, -monitor_size);1986addi(R26_monitor, R26_monitor, -monitor_size);19871988// Restart interpreter1989}19901991// ============================================================================1992// Java locals access19931994// Load a local variable at index in Rindex into register Rdst_value.1995// Also puts address of local into Rdst_address as a service.1996// Kills:1997// - Rdst_value1998// - Rdst_address1999void InterpreterMacroAssembler::load_local_int(Register Rdst_value, Register Rdst_address, Register Rindex) {2000sldi(Rdst_address, Rindex, Interpreter::logStackElementSize);2001subf(Rdst_address, Rdst_address, R18_locals);2002lwz(Rdst_value, 0, Rdst_address);2003}20042005// Load a local variable at index in Rindex into register Rdst_value.2006// Also puts address of local into Rdst_address as a service.2007// Kills:2008// - Rdst_value2009// - Rdst_address2010void InterpreterMacroAssembler::load_local_long(Register Rdst_value, Register Rdst_address, Register Rindex) {2011sldi(Rdst_address, Rindex, Interpreter::logStackElementSize);2012subf(Rdst_address, Rdst_address, R18_locals);2013ld(Rdst_value, -8, Rdst_address);2014}20152016// Load a local variable at index in Rindex into register Rdst_value.2017// Also puts address of local into Rdst_address as a service.2018// Input:2019// - Rindex: slot nr of local variable2020// Kills:2021// - Rdst_value2022// - Rdst_address2023void InterpreterMacroAssembler::load_local_ptr(Register Rdst_value, Register Rdst_address, Register Rindex) {2024sldi(Rdst_address, Rindex, Interpreter::logStackElementSize);2025subf(Rdst_address, Rdst_address, R18_locals);2026ld(Rdst_value, 0, Rdst_address);2027}20282029// Load a local variable at index in Rindex into register Rdst_value.2030// Also puts address of local into Rdst_address as a service.2031// Kills:2032// - Rdst_value2033// - Rdst_address2034void InterpreterMacroAssembler::load_local_float(FloatRegister Rdst_value, Register Rdst_address, Register Rindex) {2035sldi(Rdst_address, Rindex, Interpreter::logStackElementSize);2036subf(Rdst_address, Rdst_address, R18_locals);2037lfs(Rdst_value, 0, Rdst_address);2038}20392040// Load a local variable at index in Rindex into register Rdst_value.2041// Also puts address of local into Rdst_address as a service.2042// Kills:2043// - Rdst_value2044// - Rdst_address2045void InterpreterMacroAssembler::load_local_double(FloatRegister Rdst_value, Register Rdst_address, Register Rindex) {2046sldi(Rdst_address, Rindex, Interpreter::logStackElementSize);2047subf(Rdst_address, Rdst_address, R18_locals);2048lfd(Rdst_value, -8, Rdst_address);2049}20502051// Store an int value at local variable slot Rindex.2052// Kills:2053// - Rindex2054void InterpreterMacroAssembler::store_local_int(Register Rvalue, Register Rindex) {2055sldi(Rindex, Rindex, Interpreter::logStackElementSize);2056subf(Rindex, Rindex, R18_locals);2057stw(Rvalue, 0, Rindex);2058}20592060// Store a long value at local variable slot Rindex.2061// Kills:2062// - Rindex2063void InterpreterMacroAssembler::store_local_long(Register Rvalue, Register Rindex) {2064sldi(Rindex, Rindex, Interpreter::logStackElementSize);2065subf(Rindex, Rindex, R18_locals);2066std(Rvalue, -8, Rindex);2067}20682069// Store an oop value at local variable slot Rindex.2070// Kills:2071// - Rindex2072void InterpreterMacroAssembler::store_local_ptr(Register Rvalue, Register Rindex) {2073sldi(Rindex, Rindex, Interpreter::logStackElementSize);2074subf(Rindex, Rindex, R18_locals);2075std(Rvalue, 0, Rindex);2076}20772078// Store an int value at local variable slot Rindex.2079// Kills:2080// - Rindex2081void InterpreterMacroAssembler::store_local_float(FloatRegister Rvalue, Register Rindex) {2082sldi(Rindex, Rindex, Interpreter::logStackElementSize);2083subf(Rindex, Rindex, R18_locals);2084stfs(Rvalue, 0, Rindex);2085}20862087// Store an int value at local variable slot Rindex.2088// Kills:2089// - Rindex2090void InterpreterMacroAssembler::store_local_double(FloatRegister Rvalue, Register Rindex) {2091sldi(Rindex, Rindex, Interpreter::logStackElementSize);2092subf(Rindex, Rindex, R18_locals);2093stfd(Rvalue, -8, Rindex);2094}20952096// Read pending exception from thread and jump to interpreter.2097// Throw exception entry if one if pending. Fall through otherwise.2098void InterpreterMacroAssembler::check_and_forward_exception(Register Rscratch1, Register Rscratch2) {2099assert_different_registers(Rscratch1, Rscratch2, R3);2100Register Rexception = Rscratch1;2101Register Rtmp = Rscratch2;2102Label Ldone;2103// Get pending exception oop.2104ld(Rexception, thread_(pending_exception));2105cmpdi(CCR0, Rexception, 0);2106beq(CCR0, Ldone);2107li(Rtmp, 0);2108mr_if_needed(R3, Rexception);2109std(Rtmp, thread_(pending_exception)); // Clear exception in thread2110if (Interpreter::rethrow_exception_entry() != NULL) {2111// Already got entry address.2112load_dispatch_table(Rtmp, (address*)Interpreter::rethrow_exception_entry());2113} else {2114// Dynamically load entry address.2115int simm16_rest = load_const_optimized(Rtmp, &Interpreter::_rethrow_exception_entry, R0, true);2116ld(Rtmp, simm16_rest, Rtmp);2117}2118mtctr(Rtmp);2119save_interpreter_state(Rtmp);2120bctr();21212122align(32, 12);2123bind(Ldone);2124}21252126void InterpreterMacroAssembler::call_VM(Register oop_result, address entry_point, bool check_exceptions) {2127save_interpreter_state(R11_scratch1);21282129MacroAssembler::call_VM(oop_result, entry_point, false);21302131restore_interpreter_state(R11_scratch1, /*bcp_and_mdx_only*/ true);21322133check_and_handle_popframe(R11_scratch1);2134check_and_handle_earlyret(R11_scratch1);2135// Now check exceptions manually.2136if (check_exceptions) {2137check_and_forward_exception(R11_scratch1, R12_scratch2);2138}2139}21402141void InterpreterMacroAssembler::call_VM(Register oop_result, address entry_point, Register arg_1, bool check_exceptions) {2142// ARG1 is reserved for the thread.2143mr_if_needed(R4_ARG2, arg_1);2144call_VM(oop_result, entry_point, check_exceptions);2145}21462147void InterpreterMacroAssembler::call_VM(Register oop_result, address entry_point, Register arg_1, Register arg_2, bool check_exceptions) {2148// ARG1 is reserved for the thread.2149mr_if_needed(R4_ARG2, arg_1);2150assert(arg_2 != R4_ARG2, "smashed argument");2151mr_if_needed(R5_ARG3, arg_2);2152call_VM(oop_result, entry_point, check_exceptions);2153}21542155void InterpreterMacroAssembler::call_VM(Register oop_result, address entry_point, Register arg_1, Register arg_2, Register arg_3, bool check_exceptions) {2156// ARG1 is reserved for the thread.2157mr_if_needed(R4_ARG2, arg_1);2158assert(arg_2 != R4_ARG2, "smashed argument");2159mr_if_needed(R5_ARG3, arg_2);2160assert(arg_3 != R4_ARG2 && arg_3 != R5_ARG3, "smashed argument");2161mr_if_needed(R6_ARG4, arg_3);2162call_VM(oop_result, entry_point, check_exceptions);2163}21642165void InterpreterMacroAssembler::save_interpreter_state(Register scratch) {2166ld(scratch, 0, R1_SP);2167std(R15_esp, _ijava_state_neg(esp), scratch);2168std(R14_bcp, _ijava_state_neg(bcp), scratch);2169std(R26_monitor, _ijava_state_neg(monitors), scratch);2170if (ProfileInterpreter) { std(R28_mdx, _ijava_state_neg(mdx), scratch); }2171// Other entries should be unchanged.2172}21732174void InterpreterMacroAssembler::restore_interpreter_state(Register scratch, bool bcp_and_mdx_only) {2175ld(scratch, 0, R1_SP);2176ld(R14_bcp, _ijava_state_neg(bcp), scratch); // Changed by VM code (exception).2177if (ProfileInterpreter) { ld(R28_mdx, _ijava_state_neg(mdx), scratch); } // Changed by VM code.2178if (!bcp_and_mdx_only) {2179// Following ones are Metadata.2180ld(R19_method, _ijava_state_neg(method), scratch);2181ld(R27_constPoolCache, _ijava_state_neg(cpoolCache), scratch);2182// Following ones are stack addresses and don't require reload.2183ld(R15_esp, _ijava_state_neg(esp), scratch);2184ld(R18_locals, _ijava_state_neg(locals), scratch);2185ld(R26_monitor, _ijava_state_neg(monitors), scratch);2186}2187#ifdef ASSERT2188{2189Label Lok;2190subf(R0, R1_SP, scratch);2191cmpdi(CCR0, R0, frame::abi_reg_args_size + frame::ijava_state_size);2192bge(CCR0, Lok);2193stop("frame too small (restore istate)", 0x5432);2194bind(Lok);2195}2196{2197Label Lok;2198ld(R0, _ijava_state_neg(ijava_reserved), scratch);2199cmpdi(CCR0, R0, 0x5afe);2200beq(CCR0, Lok);2201stop("frame corrupted (restore istate)", 0x5afe);2202bind(Lok);2203}2204#endif2205}22062207#endif // !CC_INTERP22082209void InterpreterMacroAssembler::get_method_counters(Register method,2210Register Rcounters,2211Label& skip) {2212BLOCK_COMMENT("Load and ev. allocate counter object {");2213Label has_counters;2214ld(Rcounters, in_bytes(Method::method_counters_offset()), method);2215cmpdi(CCR0, Rcounters, 0);2216bne(CCR0, has_counters);2217call_VM(noreg, CAST_FROM_FN_PTR(address,2218InterpreterRuntime::build_method_counters), method, false);2219ld(Rcounters, in_bytes(Method::method_counters_offset()), method);2220cmpdi(CCR0, Rcounters, 0);2221beq(CCR0, skip); // No MethodCounters, OutOfMemory.2222BLOCK_COMMENT("} Load and ev. allocate counter object");22232224bind(has_counters);2225}22262227void InterpreterMacroAssembler::increment_invocation_counter(Register Rcounters, Register iv_be_count, Register Rtmp_r0) {2228assert(UseCompiler, "incrementing must be useful");2229Register invocation_count = iv_be_count;2230Register backedge_count = Rtmp_r0;2231int delta = InvocationCounter::count_increment;22322233// Load each counter in a register.2234// ld(inv_counter, Rtmp);2235// ld(be_counter, Rtmp2);2236int inv_counter_offset = in_bytes(MethodCounters::invocation_counter_offset() +2237InvocationCounter::counter_offset());2238int be_counter_offset = in_bytes(MethodCounters::backedge_counter_offset() +2239InvocationCounter::counter_offset());22402241BLOCK_COMMENT("Increment profiling counters {");22422243// Load the backedge counter.2244lwz(backedge_count, be_counter_offset, Rcounters); // is unsigned int2245// Mask the backedge counter.2246Register tmp = invocation_count;2247li(tmp, InvocationCounter::count_mask_value);2248andr(backedge_count, tmp, backedge_count); // Cannot use andi, need sign extension of count_mask_value.22492250// Load the invocation counter.2251lwz(invocation_count, inv_counter_offset, Rcounters); // is unsigned int2252// Add the delta to the invocation counter and store the result.2253addi(invocation_count, invocation_count, delta);2254// Store value.2255stw(invocation_count, inv_counter_offset, Rcounters);22562257// Add invocation counter + backedge counter.2258add(iv_be_count, backedge_count, invocation_count);22592260// Note that this macro must leave the backedge_count + invocation_count in2261// register iv_be_count!2262BLOCK_COMMENT("} Increment profiling counters");2263}22642265void InterpreterMacroAssembler::verify_oop(Register reg, TosState state) {2266if (state == atos) { MacroAssembler::verify_oop(reg); }2267}22682269#ifndef CC_INTERP2270// Local helper function for the verify_oop_or_return_address macro.2271static bool verify_return_address(Method* m, int bci) {2272#ifndef PRODUCT2273address pc = (address)(m->constMethod()) + in_bytes(ConstMethod::codes_offset()) + bci;2274// Assume it is a valid return address if it is inside m and is preceded by a jsr.2275if (!m->contains(pc)) return false;2276address jsr_pc;2277jsr_pc = pc - Bytecodes::length_for(Bytecodes::_jsr);2278if (*jsr_pc == Bytecodes::_jsr && jsr_pc >= m->code_base()) return true;2279jsr_pc = pc - Bytecodes::length_for(Bytecodes::_jsr_w);2280if (*jsr_pc == Bytecodes::_jsr_w && jsr_pc >= m->code_base()) return true;2281#endif // PRODUCT2282return false;2283}22842285void InterpreterMacroAssembler::verify_FPU(int stack_depth, TosState state) {2286if (VerifyFPU) {2287unimplemented("verfiyFPU");2288}2289}22902291void InterpreterMacroAssembler::verify_oop_or_return_address(Register reg, Register Rtmp) {2292if (!VerifyOops) return;22932294// The VM documentation for the astore[_wide] bytecode allows2295// the TOS to be not only an oop but also a return address.2296Label test;2297Label skip;2298// See if it is an address (in the current method):22992300const int log2_bytecode_size_limit = 16;2301srdi_(Rtmp, reg, log2_bytecode_size_limit);2302bne(CCR0, test);23032304address fd = CAST_FROM_FN_PTR(address, verify_return_address);2305const int nbytes_save = 11*8; // volatile gprs except R02306save_volatile_gprs(R1_SP, -nbytes_save); // except R02307save_LR_CR(Rtmp); // Save in old frame.2308push_frame_reg_args(nbytes_save, Rtmp);23092310load_const_optimized(Rtmp, fd, R0);2311mr_if_needed(R4_ARG2, reg);2312mr(R3_ARG1, R19_method);2313call_c(Rtmp); // call C23142315pop_frame();2316restore_LR_CR(Rtmp);2317restore_volatile_gprs(R1_SP, -nbytes_save); // except R02318b(skip);23192320// Perform a more elaborate out-of-line call.2321// Not an address; verify it:2322bind(test);2323verify_oop(reg);2324bind(skip);2325}2326#endif // !CC_INTERP23272328// Inline assembly for:2329//2330// if (thread is in interp_only_mode) {2331// InterpreterRuntime::post_method_entry();2332// }2333// if (*jvmpi::event_flags_array_at_addr(JVMPI_EVENT_METHOD_ENTRY ) ||2334// *jvmpi::event_flags_array_at_addr(JVMPI_EVENT_METHOD_ENTRY2) ) {2335// SharedRuntime::jvmpi_method_entry(method, receiver);2336// }2337void InterpreterMacroAssembler::notify_method_entry() {2338// JVMTI2339// Whenever JVMTI puts a thread in interp_only_mode, method2340// entry/exit events are sent for that thread to track stack2341// depth. If it is possible to enter interp_only_mode we add2342// the code to check if the event should be sent.2343if (JvmtiExport::can_post_interpreter_events()) {2344Label jvmti_post_done;23452346lwz(R0, in_bytes(JavaThread::interp_only_mode_offset()), R16_thread);2347cmpwi(CCR0, R0, 0);2348beq(CCR0, jvmti_post_done);2349call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_entry),2350/*check_exceptions=*/true CC_INTERP_ONLY(&& false));23512352bind(jvmti_post_done);2353}2354}23552356// Inline assembly for:2357//2358// if (thread is in interp_only_mode) {2359// // save result2360// InterpreterRuntime::post_method_exit();2361// // restore result2362// }2363// if (*jvmpi::event_flags_array_at_addr(JVMPI_EVENT_METHOD_EXIT)) {2364// // save result2365// SharedRuntime::jvmpi_method_exit();2366// // restore result2367// }2368//2369// Native methods have their result stored in d_tmp and l_tmp.2370// Java methods have their result stored in the expression stack.2371void InterpreterMacroAssembler::notify_method_exit(bool is_native_method, TosState state,2372NotifyMethodExitMode mode, bool check_exceptions) {2373// JVMTI2374// Whenever JVMTI puts a thread in interp_only_mode, method2375// entry/exit events are sent for that thread to track stack2376// depth. If it is possible to enter interp_only_mode we add2377// the code to check if the event should be sent.2378if (mode == NotifyJVMTI && JvmtiExport::can_post_interpreter_events()) {2379Label jvmti_post_done;23802381lwz(R0, in_bytes(JavaThread::interp_only_mode_offset()), R16_thread);2382cmpwi(CCR0, R0, 0);2383beq(CCR0, jvmti_post_done);2384CC_INTERP_ONLY(assert(is_native_method && !check_exceptions, "must not push state"));2385if (!is_native_method) push(state); // Expose tos to GC.2386call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_exit),2387/*check_exceptions=*/check_exceptions);2388if (!is_native_method) pop(state);23892390align(32, 12);2391bind(jvmti_post_done);2392}23932394// Dtrace support not implemented.2395}23962397#ifdef CC_INTERP2398// Convert the current TOP_IJAVA_FRAME into a PARENT_IJAVA_FRAME2399// (using parent_frame_resize) and push a new interpreter2400// TOP_IJAVA_FRAME (using frame_size).2401void InterpreterMacroAssembler::push_interpreter_frame(Register top_frame_size, Register parent_frame_resize,2402Register tmp1, Register tmp2, Register tmp3,2403Register tmp4, Register pc) {2404assert_different_registers(top_frame_size, parent_frame_resize, tmp1, tmp2, tmp3, tmp4);2405ld(tmp1, _top_ijava_frame_abi(frame_manager_lr), R1_SP);2406mr(tmp2/*top_frame_sp*/, R1_SP);2407// Move initial_caller_sp.2408ld(tmp4, _top_ijava_frame_abi(initial_caller_sp), R1_SP);2409neg(parent_frame_resize, parent_frame_resize);2410resize_frame(parent_frame_resize/*-parent_frame_resize*/, tmp3);24112412// Set LR in new parent frame.2413std(tmp1, _abi(lr), R1_SP);2414// Set top_frame_sp info for new parent frame.2415std(tmp2, _parent_ijava_frame_abi(top_frame_sp), R1_SP);2416std(tmp4, _parent_ijava_frame_abi(initial_caller_sp), R1_SP);24172418// Push new TOP_IJAVA_FRAME.2419push_frame(top_frame_size, tmp2);24202421get_PC_trash_LR(tmp3);2422std(tmp3, _top_ijava_frame_abi(frame_manager_lr), R1_SP);2423// Used for non-initial callers by unextended_sp().2424std(R1_SP, _top_ijava_frame_abi(initial_caller_sp), R1_SP);2425}24262427// Pop the topmost TOP_IJAVA_FRAME and convert the previous2428// PARENT_IJAVA_FRAME back into a TOP_IJAVA_FRAME.2429void InterpreterMacroAssembler::pop_interpreter_frame(Register tmp1, Register tmp2, Register tmp3, Register tmp4) {2430assert_different_registers(tmp1, tmp2, tmp3, tmp4);24312432ld(tmp1/*caller's sp*/, _abi(callers_sp), R1_SP);2433ld(tmp3, _abi(lr), tmp1);24342435ld(tmp4, _parent_ijava_frame_abi(initial_caller_sp), tmp1);24362437ld(tmp2/*caller's caller's sp*/, _abi(callers_sp), tmp1);2438// Merge top frame.2439std(tmp2, _abi(callers_sp), R1_SP);24402441ld(tmp2, _parent_ijava_frame_abi(top_frame_sp), tmp1);24422443// Update C stack pointer to caller's top_abi.2444resize_frame_absolute(tmp2/*addr*/, tmp1/*tmp*/, tmp2/*tmp*/);24452446// Update LR in top_frame.2447std(tmp3, _top_ijava_frame_abi(frame_manager_lr), R1_SP);24482449std(tmp4, _top_ijava_frame_abi(initial_caller_sp), R1_SP);24502451// Store the top-frame stack-pointer for c2i adapters.2452std(R1_SP, _top_ijava_frame_abi(top_frame_sp), R1_SP);2453}24542455// Turn state's interpreter frame into the current TOP_IJAVA_FRAME.2456void InterpreterMacroAssembler::pop_interpreter_frame_to_state(Register state, Register tmp1, Register tmp2, Register tmp3) {2457assert_different_registers(R14_state, R15_prev_state, tmp1, tmp2, tmp3);24582459if (state == R14_state) {2460ld(tmp1/*state's fp*/, state_(_last_Java_fp));2461ld(tmp2/*state's sp*/, state_(_last_Java_sp));2462} else if (state == R15_prev_state) {2463ld(tmp1/*state's fp*/, prev_state_(_last_Java_fp));2464ld(tmp2/*state's sp*/, prev_state_(_last_Java_sp));2465} else {2466ShouldNotReachHere();2467}24682469// Merge top frames.2470std(tmp1, _abi(callers_sp), R1_SP);24712472// Tmp2 is new SP.2473// Tmp1 is parent's SP.2474resize_frame_absolute(tmp2/*addr*/, tmp1/*tmp*/, tmp2/*tmp*/);24752476// Update LR in top_frame.2477// Must be interpreter frame.2478get_PC_trash_LR(tmp3);2479std(tmp3, _top_ijava_frame_abi(frame_manager_lr), R1_SP);2480// Used for non-initial callers by unextended_sp().2481std(R1_SP, _top_ijava_frame_abi(initial_caller_sp), R1_SP);2482}24832484// Set SP to initial caller's sp, but before fix the back chain.2485void InterpreterMacroAssembler::resize_frame_to_initial_caller(Register tmp1, Register tmp2) {2486ld(tmp1, _parent_ijava_frame_abi(initial_caller_sp), R1_SP);2487ld(tmp2, _parent_ijava_frame_abi(callers_sp), R1_SP);2488std(tmp2, _parent_ijava_frame_abi(callers_sp), tmp1); // Fix back chain ...2489mr(R1_SP, tmp1); // ... and resize to initial caller.2490}24912492// Pop the current interpreter state (without popping the correspoding2493// frame) and restore R14_state and R15_prev_state accordingly.2494// Use prev_state_may_be_0 to indicate whether prev_state may be 02495// in order to generate an extra check before retrieving prev_state_(_prev_link).2496void InterpreterMacroAssembler::pop_interpreter_state(bool prev_state_may_be_0)2497{2498// Move prev_state to state and restore prev_state from state_(_prev_link).2499Label prev_state_is_0;2500mr(R14_state, R15_prev_state);25012502// Don't retrieve /*state==*/prev_state_(_prev_link)2503// if /*state==*/prev_state is 0.2504if (prev_state_may_be_0) {2505cmpdi(CCR0, R15_prev_state, 0);2506beq(CCR0, prev_state_is_0);2507}25082509ld(R15_prev_state, /*state==*/prev_state_(_prev_link));2510bind(prev_state_is_0);2511}25122513void InterpreterMacroAssembler::restore_prev_state() {2514// _prev_link is private, but cInterpreter is a friend.2515ld(R15_prev_state, state_(_prev_link));2516}2517#endif // CC_INTERP251825192520