// src/hotspot/cpu/arm/interp_masm_arm.cpp
/*1* Copyright (c) 2008, 2021, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#include "precompiled.hpp"25#include "jvm.h"26#include "asm/macroAssembler.inline.hpp"27#include "gc/shared/barrierSet.hpp"28#include "gc/shared/cardTable.hpp"29#include "gc/shared/cardTableBarrierSet.inline.hpp"30#include "gc/shared/collectedHeap.hpp"31#include "interp_masm_arm.hpp"32#include "interpreter/interpreter.hpp"33#include "interpreter/interpreterRuntime.hpp"34#include "logging/log.hpp"35#include "oops/arrayOop.hpp"36#include "oops/markWord.hpp"37#include "oops/method.hpp"38#include "oops/methodData.hpp"39#include "prims/jvmtiExport.hpp"40#include "prims/jvmtiThreadState.hpp"41#include "runtime/basicLock.hpp"42#include "runtime/biasedLocking.hpp"43#include "runtime/frame.inline.hpp"44#include "runtime/safepointMechanism.hpp"45#include "runtime/sharedRuntime.hpp"46#include "utilities/powerOfTwo.hpp"4748//--------------------------------------------------------------------49// Implementation of 
// InterpreterMacroAssembler

InterpreterMacroAssembler::InterpreterMacroAssembler(CodeBuffer* code) : MacroAssembler(code) {
}

// Calls into the VM while preserving the interpreter-specific state (Rbcp).
// Restores Rbcp and Rmethod afterwards, since a GC triggered by the VM call
// may have moved the method's bytecodes.
void InterpreterMacroAssembler::call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions) {
#ifdef ASSERT
  // Ensure that last_sp is not filled.
  { Label L;
    ldr(Rtemp, Address(FP, frame::interpreter_frame_last_sp_offset * wordSize));
    cbz(Rtemp, L);
    stop("InterpreterMacroAssembler::call_VM_helper: last_sp != NULL");
    bind(L);
  }
#endif // ASSERT

  // Rbcp must be saved/restored since it may change due to GC.
  save_bcp();

  // super call
  MacroAssembler::call_VM_helper(oop_result, entry_point, number_of_arguments, check_exceptions);

  // Restore interpreter specific registers.
  restore_bcp();
  restore_method();
}

// Unconditional branch to a pre-generated interpreter entry point.
void InterpreterMacroAssembler::jump_to_entry(address entry) {
  assert(entry, "Entry must have been generated by now");
  b(entry);
}

// If JVMTI PopFrame is possible, checks the thread-local popframe condition
// and, when a popframe is pending (and not already being processed), jumps to
// the remove_activation_preserving_args entry of the generated interpreter.
void InterpreterMacroAssembler::check_and_handle_popframe() {
  if (can_pop_frame()) {
    Label L;
    const Register popframe_cond = R2_tmp;

    // Initiate popframe handling only if it is not already being processed. If the flag
    // has the popframe_processing bit set, it means that this code is called *during* popframe
    // handling - we don't want to reenter.

    ldr_s32(popframe_cond, Address(Rthread, JavaThread::popframe_condition_offset()));
    tbz(popframe_cond, exact_log2(JavaThread::popframe_pending_bit), L);
    tbnz(popframe_cond, exact_log2(JavaThread::popframe_processing_bit), L);

    // Call Interpreter::remove_activation_preserving_args_entry() to get the
    // address of the same-named entrypoint in the generated interpreter code.
    call_VM_leaf(CAST_FROM_FN_PTR(address, Interpreter::remove_activation_preserving_args_entry));

    // Call indirectly to avoid generation ordering problem.
    jump(R0);

    bind(L);
  }
}
// Loads the JVMTI ForceEarlyReturn value from the JvmtiThreadState into the
// TOS cached registers for the given TosState, then clears the earlyret value
// and tos fields in the thread state.
// Blows R2, Rtemp. Sets TOS cached value.
void InterpreterMacroAssembler::load_earlyret_value(TosState state) {
  const Register thread_state = R2_tmp;

  ldr(thread_state, Address(Rthread, JavaThread::jvmti_thread_state_offset()));

  const Address tos_addr(thread_state, JvmtiThreadState::earlyret_tos_offset());
  const Address oop_addr(thread_state, JvmtiThreadState::earlyret_oop_offset());
  const Address val_addr(thread_state, JvmtiThreadState::earlyret_value_offset());
  const Address val_addr_hi(thread_state, JvmtiThreadState::earlyret_value_offset()
                            + in_ByteSize(wordSize));

  Register zero = zero_register(Rtemp);

  switch (state) {
    case atos: ldr(R0_tos, oop_addr);
               str(zero, oop_addr);
               interp_verify_oop(R0_tos, state, __FILE__, __LINE__);
               break;

    // Long values occupy two words; load the high word first, then fall
    // through to the common low-word load shared with the int-like states.
    case ltos: ldr(R1_tos_hi, val_addr_hi); // fall through
    case btos: // fall through
    case ztos: // fall through
    case ctos: // fall through
    case stos: // fall through
    case itos: ldr_s32(R0_tos, val_addr); break;
#ifdef __SOFTFP__
    case dtos: ldr(R1_tos_hi, val_addr_hi); // fall through
    case ftos: ldr(R0_tos, val_addr); break;
#else
    case ftos: ldr_float (S0_tos, val_addr); break;
    case dtos: ldr_double(D0_tos, val_addr); break;
#endif // __SOFTFP__
    case vtos: /* nothing to do */ break;
    default  : ShouldNotReachHere();
  }
  // Clean up tos value in the thread object
  str(zero, val_addr);
  str(zero, val_addr_hi);

  // Mark the earlyret tos as invalid (ilgl) now that the value is consumed.
  mov(Rtemp, (int) ilgl);
  str_32(Rtemp, tos_addr);
}


// If JVMTI ForceEarlyReturn is possible, checks the thread's JVMTI state and,
// when an early return is pending (and not already being processed), jumps to
// the remove_activation_early entry of the generated interpreter.
// Blows R2, Rtemp.
void InterpreterMacroAssembler::check_and_handle_earlyret() {
  if (can_force_early_return()) {
    Label L;
    const Register thread_state = R2_tmp;

    ldr(thread_state, Address(Rthread, JavaThread::jvmti_thread_state_offset()));
    cbz(thread_state, L); // if (thread->jvmti_thread_state() == NULL) exit;

    // Initiate earlyret handling only if it is not already being processed.
    // If the flag has the earlyret_processing bit set, it means that this code
    // is called *during* earlyret handling - we don't want to reenter.

    ldr_s32(Rtemp, Address(thread_state, JvmtiThreadState::earlyret_state_offset()));
    cmp(Rtemp, JvmtiThreadState::earlyret_pending);
    b(L, ne);

    // Call Interpreter::remove_activation_early_entry() to get the address of the
    // same-named entrypoint in the generated interpreter code.

    ldr_s32(R0, Address(thread_state, JvmtiThreadState::earlyret_tos_offset()));
    call_VM_leaf(CAST_FROM_FN_PTR(address, Interpreter::remove_activation_early_entry), R0);

    jump(R0);

    bind(L);
  }
}


// Loads a big-endian unsigned 16-bit bytecode operand at bcp_offset into reg.
// Sets reg. Blows Rtemp.
void InterpreterMacroAssembler::get_unsigned_2_byte_index_at_bcp(Register reg, int bcp_offset) {
  assert(bcp_offset >= 0, "bcp is still pointing to start of bytecode");
  assert(reg != Rtemp, "should be different registers");

  // Byte-wise load to avoid unaligned access; high byte comes first (big-endian operand).
  ldrb(Rtemp, Address(Rbcp, bcp_offset));
  ldrb(reg, Address(Rbcp, bcp_offset+1));
  orr(reg, reg, AsmOperand(Rtemp, lsl, BitsPerByte));
}

// Loads a constant-pool(-cache) index operand of the given size (u1, u2 or u4)
// from the bytecode stream at bcp_offset. u4 indices (invokedynamic) are stored
// as ~index and are converted back to a plain index here.
void InterpreterMacroAssembler::get_index_at_bcp(Register index, int bcp_offset, Register tmp_reg, size_t index_size) {
  assert_different_registers(index, tmp_reg);
  if (index_size == sizeof(u2)) {
    // load bytes of index separately to avoid unaligned access
    ldrb(index, Address(Rbcp, bcp_offset+1));
    ldrb(tmp_reg, Address(Rbcp, bcp_offset));
    orr(index, tmp_reg, AsmOperand(index, lsl, BitsPerByte));
  } else if (index_size == sizeof(u4)) {
    // Assemble the 32-bit index byte by byte (native little-endian operand).
    ldrb(index, Address(Rbcp, bcp_offset+3));
    ldrb(tmp_reg, Address(Rbcp, bcp_offset+2));
    orr(index, tmp_reg, AsmOperand(index, lsl, BitsPerByte));
    ldrb(tmp_reg, Address(Rbcp, bcp_offset+1));
    orr(index, tmp_reg, AsmOperand(index, lsl, BitsPerByte));
    ldrb(tmp_reg, Address(Rbcp, bcp_offset));
    orr(index, tmp_reg, AsmOperand(index, lsl, BitsPerByte));
    // Check if the secondary index definition is still ~x, otherwise
    // we have to change the following assembler code to calculate the
    // plain index.
    assert(ConstantPool::decode_invokedynamic_index(~123) == 123, "else change next line");
    mvn_32(index, index); // convert to plain index
  } else if (index_size == sizeof(u1)) {
    ldrb(index, Address(Rbcp, bcp_offset));
  } else {
    ShouldNotReachHere();
  }
}

// Loads the ConstantPoolCache pointer and the (scaled) cache-entry index for
// the bytecode operand at bcp_offset.
// Sets cache, index.
void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache, Register index, int bcp_offset, size_t index_size) {
  assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
  assert_different_registers(cache, index);

  get_index_at_bcp(index, bcp_offset, cache, index_size);

  // load constant pool cache pointer
  ldr(cache, Address(FP, frame::interpreter_frame_cache_offset * wordSize));

  // convert from field index to ConstantPoolCacheEntry index
  assert(sizeof(ConstantPoolCacheEntry) == 4*wordSize, "adjust code below");
  logical_shift_left(index, index, 2);
}

// Like get_cache_and_index_at_bcp, additionally loading the resolved bytecode
// (byte_no selects f1/f2) from the cache entry's indices field.
// Sets cache, index, bytecode.
void InterpreterMacroAssembler::get_cache_and_index_and_bytecode_at_bcp(Register cache, Register index, Register bytecode, int byte_no, int bcp_offset, size_t index_size) {
  get_cache_and_index_at_bcp(cache, index, bcp_offset, index_size);
  // caution index and bytecode can be the same
  add(bytecode, cache, AsmOperand(index, lsl, LogBytesPerWord));
  ldrb(bytecode, Address(bytecode, (1 + byte_no) + in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset())));
  // Ensure subsequent loads of the resolved entry are ordered after the
  // bytecode load (pairs with the resolution store on the writer side).
  TemplateTable::volatile_barrier(MacroAssembler::LoadLoad, noreg, true);
}
// Computes a direct pointer to the ConstantPoolCacheEntry for the bytecode
// operand at bcp_offset (cache base + header + scaled entry index).
// Sets cache. Blows reg_tmp.
void InterpreterMacroAssembler::get_cache_entry_pointer_at_bcp(Register cache, Register reg_tmp, int bcp_offset, size_t index_size) {
  assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
  assert_different_registers(cache, reg_tmp);

  get_index_at_bcp(reg_tmp, bcp_offset, cache, index_size);

  // load constant pool cache pointer
  ldr(cache, Address(FP, frame::interpreter_frame_cache_offset * wordSize));

  // skip past the header
  add(cache, cache, in_bytes(ConstantPoolCache::base_offset()));
  // convert from field index to ConstantPoolCacheEntry index
  // and from word offset to byte offset
  assert(sizeof(ConstantPoolCacheEntry) == 4*wordSize, "adjust code below");
  add(cache, cache, AsmOperand(reg_tmp, lsl, 2 + LogBytesPerWord));
}

// Load object from cpool->resolved_references(index)
void InterpreterMacroAssembler::load_resolved_reference_at_index(
                                           Register result, Register index) {
  assert_different_registers(result, index);
  get_constant_pool(result);

  Register cache = result;
  // load pointer for resolved_references[] objArray
  ldr(cache, Address(result, ConstantPool::cache_offset_in_bytes()));
  ldr(cache, Address(result, ConstantPoolCache::resolved_references_offset_in_bytes()));
  resolve_oop_handle(cache);
  // Add in the index
  // convert from field index to resolved_references() index and from
  // word index to byte offset. Since this is a java object, it can be compressed
  logical_shift_left(index, index, LogBytesPerHeapOop);
  add(index, index, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
  load_heap_oop(result, Address(cache, index));
}

// Loads into Rklass the Klass* already resolved for constant-pool slot Rindex:
// reads the resolved_klass_index from the cpool entry, then indexes the
// cpool's _resolved_klasses array. Blows Rtemp.
void InterpreterMacroAssembler::load_resolved_klass_at_offset(
                                           Register Rcpool, Register Rindex, Register Rklass) {
  add(Rtemp, Rcpool, AsmOperand(Rindex, lsl, LogBytesPerWord));
  ldrh(Rtemp, Address(Rtemp, sizeof(ConstantPool))); // Rtemp = resolved_klass_index
  ldr(Rklass, Address(Rcpool,  ConstantPool::resolved_klasses_offset_in_bytes())); // Rklass = cpool->_resolved_klasses
  add(Rklass, Rklass, AsmOperand(Rtemp, lsl, LogBytesPerWord));
  ldr(Rklass, Address(Rklass, Array<Klass*>::base_offset_in_bytes()));
}

// Generate a subtype check: branch to not_subtype if sub_klass is
// not a subtype of super_klass.
// Profiling code for the subtype check failure (profile_typecheck_failed)
// should be explicitly generated by the caller in the not_subtype case.
// Blows Rtemp, tmp1, tmp2.
void InterpreterMacroAssembler::gen_subtype_check(Register Rsub_klass,
                                                  Register Rsuper_klass,
                                                  Label &not_subtype,
                                                  Register tmp1,
                                                  Register tmp2) {

  assert_different_registers(Rsub_klass, Rsuper_klass, tmp1, tmp2, Rtemp);
  Label ok_is_subtype, loop, update_cache;

  const Register super_check_offset = tmp1;
  const Register cached_super = tmp2;

  // Profile the not-null value's klass.
  profile_typecheck(tmp1, Rsub_klass);

  // Load the super-klass's check offset into
  ldr_u32(super_check_offset, Address(Rsuper_klass, Klass::super_check_offset_offset()));

  // Check for self
  cmp(Rsub_klass, Rsuper_klass);

  // Load from the sub-klass's super-class display list, or a 1-word cache of
  // the secondary superclass list, or a failing value with a sentinel offset
  // if the super-klass is an interface or exceptionally deep in the Java
  // hierarchy and we have to scan the secondary superclass list the hard way.
  // See if we get an immediate positive hit
  ldr(cached_super, Address(Rsub_klass, super_check_offset));

  // Only compare the cached value if the self-check above did not already
  // succeed (conditional execution keeps the flags from the cmp).
  cond_cmp(Rsuper_klass, cached_super, ne);
  b(ok_is_subtype, eq);

  // Check for immediate negative hit
  cmp(super_check_offset, in_bytes(Klass::secondary_super_cache_offset()));
  b(not_subtype, ne);

  // Now do a linear scan of the secondary super-klass chain.
  const Register supers_arr = tmp1;
  const Register supers_cnt = tmp2;
  const Register cur_super  = Rtemp;

  // Load objArrayOop of secondary supers.
  ldr(supers_arr, Address(Rsub_klass, Klass::secondary_supers_offset()));

  ldr_u32(supers_cnt, Address(supers_arr, Array<Klass*>::length_offset_in_bytes())); // Load the array length
  cmp(supers_cnt, 0);

  // Skip to the start of array elements and prefetch the first super-klass.
  ldr(cur_super, Address(supers_arr, Array<Klass*>::base_offset_in_bytes(), pre_indexed), ne);
  b(not_subtype, eq);

  bind(loop);

  cmp(cur_super, Rsuper_klass);
  b(update_cache, eq);

  subs(supers_cnt, supers_cnt, 1);

  ldr(cur_super, Address(supers_arr, wordSize, pre_indexed), ne);

  b(loop, ne);

  b(not_subtype);

  bind(update_cache);
  // Must be equal but missed in cache. Update cache.
  str(Rsuper_klass, Address(Rsub_klass, Klass::secondary_super_cache_offset()));

  bind(ok_is_subtype);
}


//////////////////////////////////////////////////////////////////////////////////


// Java Expression Stack

// Pops one word from the expression stack into r (post-increments Rstack_top).
void InterpreterMacroAssembler::pop_ptr(Register r) {
  assert(r != Rstack_top, "unpredictable instruction");
  ldr(r, Address(Rstack_top, wordSize, post_indexed));
}

// Pops a 32-bit int from the expression stack into r.
void InterpreterMacroAssembler::pop_i(Register r) {
  assert(r != Rstack_top, "unpredictable instruction");
  ldr_s32(r, Address(Rstack_top, wordSize, post_indexed));
  zap_high_non_significant_bits(r);
}

// Pops a two-word long from the expression stack into lo/hi.
void InterpreterMacroAssembler::pop_l(Register lo, Register hi) {
  assert_different_registers(lo, hi);
  assert(lo < hi, "lo must be < hi");
  pop(RegisterSet(lo) | RegisterSet(hi));
}

// Pops a float from the expression stack into fd.
void InterpreterMacroAssembler::pop_f(FloatRegister fd) {
  fpops(fd);
}

// Pops a double from the expression stack into fd.
void InterpreterMacroAssembler::pop_d(FloatRegister fd) {
  fpopd(fd);
}
// Transition vtos -> state. Blows R0, R1. Sets TOS cached value.
void InterpreterMacroAssembler::pop(TosState state) {
  switch (state) {
    case atos: pop_ptr(R0_tos);                             break;
    case btos: // fall through
    case ztos: // fall through
    case ctos: // fall through
    case stos: // fall through
    case itos: pop_i(R0_tos);                               break;
    case ltos: pop_l(R0_tos_lo, R1_tos_hi);                 break;
#ifdef __SOFTFP__
    case ftos: pop_i(R0_tos);                               break;
    case dtos: pop_l(R0_tos_lo, R1_tos_hi);                 break;
#else
    case ftos: pop_f(S0_tos);                               break;
    case dtos: pop_d(D0_tos);                               break;
#endif // __SOFTFP__
    case vtos: /* nothing to do */                          break;
    default  : ShouldNotReachHere();
  }
  interp_verify_oop(R0_tos, state, __FILE__, __LINE__);
}

// Pushes one word onto the expression stack (pre-decrements Rstack_top).
void InterpreterMacroAssembler::push_ptr(Register r) {
  assert(r != Rstack_top, "unpredictable instruction");
  str(r, Address(Rstack_top, -wordSize, pre_indexed));
  check_stack_top_on_expansion();
}

// Pushes a 32-bit int onto the expression stack.
void InterpreterMacroAssembler::push_i(Register r) {
  assert(r != Rstack_top, "unpredictable instruction");
  str_32(r, Address(Rstack_top, -wordSize, pre_indexed));
  check_stack_top_on_expansion();
}

// Pushes a two-word long (lo/hi) onto the expression stack.
void InterpreterMacroAssembler::push_l(Register lo, Register hi) {
  assert_different_registers(lo, hi);
  assert(lo < hi, "lo must be < hi");
  push(RegisterSet(lo) | RegisterSet(hi));
}

// Pushes the cached float TOS (S0_tos) onto the expression stack.
void InterpreterMacroAssembler::push_f() {
  fpushs(S0_tos);
}

// Pushes the cached double TOS (D0_tos) onto the expression stack.
void InterpreterMacroAssembler::push_d() {
  fpushd(D0_tos);
}
// Transition state -> vtos. Blows Rtemp.
void InterpreterMacroAssembler::push(TosState state) {
  interp_verify_oop(R0_tos, state, __FILE__, __LINE__);
  switch (state) {
    case atos: push_ptr(R0_tos);                              break;
    case btos: // fall through
    case ztos: // fall through
    case ctos: // fall through
    case stos: // fall through
    case itos: push_i(R0_tos);                                break;
    case ltos: push_l(R0_tos_lo, R1_tos_hi);                  break;
#ifdef __SOFTFP__
    case ftos: push_i(R0_tos);                                break;
    case dtos: push_l(R0_tos_lo, R1_tos_hi);                  break;
#else
    case ftos: push_f();                                      break;
    case dtos: push_d();                                      break;
#endif // __SOFTFP__
    case vtos: /* nothing to do */                            break;
    default  : ShouldNotReachHere();
  }
}



// Converts return value in R0/R1 (interpreter calling conventions) to TOS cached value.
void InterpreterMacroAssembler::convert_retval_to_tos(TosState state) {
#if (!defined __SOFTFP__ && !defined __ABI_HARD__)
  // According to interpreter calling conventions, result is returned in R0/R1,
  // but templates expect ftos in S0, and dtos in D0.
  if (state == ftos) {
    fmsr(S0_tos, R0);
  } else if (state == dtos) {
    fmdrr(D0_tos, R0, R1);
  }
#endif // !__SOFTFP__ && !__ABI_HARD__
}

// Converts TOS cached value to return value in R0/R1 (according to interpreter calling conventions).
void InterpreterMacroAssembler::convert_tos_to_retval(TosState state) {
#if (!defined __SOFTFP__ && !defined __ABI_HARD__)
  // According to interpreter calling conventions, result is returned in R0/R1,
  // so ftos (S0) and dtos (D0) are moved to R0/R1.
  if (state == ftos) {
    fmrs(R0, S0_tos);
  } else if (state == dtos) {
    fmrrd(R0, R1, D0_tos);
  }
#endif // !__SOFTFP__ && !__ABI_HARD__
}



// Helpers for swap and dup

// Loads expression-stack slot n (0 = top) into val without popping.
void InterpreterMacroAssembler::load_ptr(int n, Register val) {
  ldr(val, Address(Rstack_top, Interpreter::expr_offset_in_bytes(n)));
}

// Stores val into expression-stack slot n (0 = top) in place.
void InterpreterMacroAssembler::store_ptr(int n, Register val) {
  str(val, Address(Rstack_top, Interpreter::expr_offset_in_bytes(n)));
}


// Records the caller's SP (Rsender_sp) and the current last_sp in the frame
// before jumping out of the interpreter into a callee.
void InterpreterMacroAssembler::prepare_to_jump_from_interpreted() {

  // set sender sp
  mov(Rsender_sp, SP);

  // record last_sp
  str(Rsender_sp, Address(FP, frame::interpreter_frame_last_sp_offset * wordSize));
}

// Jump to from_interpreted entry of a call unless single stepping is possible
// in this thread in which case we must call the i2i entry
void InterpreterMacroAssembler::jump_from_interpreted(Register method) {
  assert_different_registers(method, Rtemp);

  prepare_to_jump_from_interpreted();

  if (can_post_interpreter_events()) {
    // JVMTI events, such as single-stepping, are implemented partly by avoiding running
    // compiled code in threads for which the event is enabled. Check here for
    // interp_only_mode if these events CAN be enabled.

    ldr_s32(Rtemp, Address(Rthread, JavaThread::interp_only_mode_offset()));
    cmp(Rtemp, 0);
    // In interp_only_mode, jump straight to the interpreter entry (conditional load into PC).
    ldr(PC, Address(method, Method::interpreter_entry_offset()), ne);
  }

  indirect_jump(Address(method, Method::from_interpreted_offset()), Rtemp);
}


// Re-materializes RdispatchTable with the address of the vtos dispatch table.
void InterpreterMacroAssembler::restore_dispatch() {
  mov_slow(RdispatchTable, (address)Interpreter::dispatch_table(vtos));
}


// The following two routines provide a hook so that an implementation
// can schedule the dispatch in two parts.
void InterpreterMacroAssembler::dispatch_prolog(TosState state, int step) {
  // Nothing ARM-specific to be done here.
}

void InterpreterMacroAssembler::dispatch_epilog(TosState state, int step) {
  dispatch_next(state, step);
}

// Common bytecode-dispatch tail: optional frame-size/oop verification,
// thread-local safepoint poll, then an indirect jump through the dispatch
// table indexed by R3_bytecode.
void InterpreterMacroAssembler::dispatch_base(TosState state,
                                              DispatchTableMode table_mode,
                                              bool verifyoop, bool generate_poll) {
  if (VerifyActivationFrameSize) {
    Label L;
    sub(Rtemp, FP, SP);
    int min_frame_size = (frame::link_offset - frame::interpreter_frame_initial_sp_offset) * wordSize;
    cmp(Rtemp, min_frame_size);
    b(L, ge);
    stop("broken stack frame");
    bind(L);
  }

  if (verifyoop) {
    interp_verify_oop(R0_tos, state, __FILE__, __LINE__);
  }

  Label safepoint;
  address* const safepoint_table = Interpreter::safept_table(state);
  address* const table = Interpreter::dispatch_table(state);
  bool needs_thread_local_poll = generate_poll && table != safepoint_table;

  if (needs_thread_local_poll) {
    NOT_PRODUCT(block_comment("Thread-local Safepoint poll"));
    ldr(Rtemp, Address(Rthread, JavaThread::polling_word_offset()));
    tbnz(Rtemp, exact_log2(SafepointMechanism::poll_bit()), safepoint);
  }

  // Sub-int states carry garbage in the high bits; clear them before dispatch.
  if((state == itos) || (state == btos) || (state == ztos) || (state == ctos) || (state == stos)) {
    zap_high_non_significant_bits(R0_tos);
  }

#ifdef ASSERT
  Label L;
  mov_slow(Rtemp, (address)Interpreter::dispatch_table(vtos));
  cmp(Rtemp, RdispatchTable);
  b(L, eq);
  stop("invalid RdispatchTable");
  bind(L);
#endif

  if (table_mode == DispatchDefault) {
    if (state == vtos) {
      indirect_jump(Address::indexed_ptr(RdispatchTable, R3_bytecode), Rtemp);
    } else {
      // on 32-bit ARM this method is faster than the one above.
      sub(Rtemp, RdispatchTable, (Interpreter::distance_from_dispatch_table(vtos) -
                           Interpreter::distance_from_dispatch_table(state)) * wordSize);
      indirect_jump(Address::indexed_ptr(Rtemp, R3_bytecode), Rtemp);
    }
  } else {
    assert(table_mode == DispatchNormal, "invalid dispatch table mode");
    address table = (address) Interpreter::normal_table(state);
    mov_slow(Rtemp, table);
    indirect_jump(Address::indexed_ptr(Rtemp, R3_bytecode), Rtemp);
  }

  if (needs_thread_local_poll) {
    bind(safepoint);
    lea(Rtemp, ExternalAddress((address)safepoint_table));
    indirect_jump(Address::indexed_ptr(Rtemp, R3_bytecode), Rtemp);
  }

  nop(); // to avoid filling CPU pipeline with invalid instructions
  nop();
}

void InterpreterMacroAssembler::dispatch_only(TosState state, bool generate_poll) {
  dispatch_base(state, DispatchDefault, true, generate_poll);
}


void InterpreterMacroAssembler::dispatch_only_normal(TosState state) {
  dispatch_base(state, DispatchNormal);
}

void InterpreterMacroAssembler::dispatch_only_noverify(TosState state) {
  dispatch_base(state, DispatchNormal, false);
}

void InterpreterMacroAssembler::dispatch_next(TosState state, int step, bool generate_poll) {
  // load next bytecode and advance Rbcp
  ldrb(R3_bytecode, Address(Rbcp, step, pre_indexed));
  dispatch_base(state, DispatchDefault, true, generate_poll);
}

// Narrows the int result in 'result' according to the method's declared
// return type (boolean/byte/char/short); int is left unchanged.
void InterpreterMacroAssembler::narrow(Register result) {
  // mask integer result to narrower return type.
  const Register Rtmp = R2;

  // get method type
  ldr(Rtmp, Address(Rmethod, Method::const_offset()));
  ldrb(Rtmp, Address(Rtmp, ConstMethod::result_type_offset()));

  Label notBool, notByte, notChar, done;
  cmp(Rtmp, T_INT);
  b(done, eq);

  cmp(Rtmp, T_BOOLEAN);
  b(notBool, ne);
  and_32(result, result, 1);
  b(done);

  bind(notBool);
  cmp(Rtmp, T_BYTE);
  b(notByte, ne);
  sign_extend(result, result, 8);
  b(done);

  bind(notByte);
  cmp(Rtmp, T_CHAR);
  b(notChar, ne);
  zero_extend(result, result, 16);
  b(done);

  bind(notChar);
  // cmp(Rtmp, T_SHORT);
  // b(done, ne);
  sign_extend(result, result, 16);

  // Nothing to do
  bind(done);
}

// remove activation
//
// Unlock the receiver if this is a synchronized method.
// Unlock any Java monitors from syncronized blocks.
// Remove the activation from the stack.
//
// If there are locked Java monitors
//    If throw_monitor_exception
//       throws IllegalMonitorStateException
//    Else if install_monitor_exception
//       installs IllegalMonitorStateException
//    Else
//       no error processing
void InterpreterMacroAssembler::remove_activation(TosState state, Register ret_addr,
                                                  bool throw_monitor_exception,
                                                  bool install_monitor_exception,
                                                  bool notify_jvmdi) {
  Label unlock, unlocked, no_unlock;

  // Note: Registers R0, R1, S0 and D0 (TOS cached value) may be in use for the result.

  const Address do_not_unlock_if_synchronized(Rthread,
                         JavaThread::do_not_unlock_if_synchronized_offset());

  const Register Rflag = R2;
  const Register Raccess_flags = R3;

  restore_method();

  ldrb(Rflag, do_not_unlock_if_synchronized);

  // get method access flags
  ldr_u32(Raccess_flags, Address(Rmethod, Method::access_flags_offset()));

  strb(zero_register(Rtemp), do_not_unlock_if_synchronized); // reset the flag

  // check if method is synchronized

  tbz(Raccess_flags, JVM_ACC_SYNCHRONIZED_BIT, unlocked);

  // Don't unlock anything if the _do_not_unlock_if_synchronized flag is set.
  cbnz(Rflag, no_unlock);

  // unlock monitor
  push(state);                                   // save result

  // BasicObjectLock will be first in list, since this is a synchronized method. However, need
  // to check that the object has not been unlocked by an explicit monitorexit bytecode.

  const Register Rmonitor = R0;                  // fixed in unlock_object()
  const Register Robj = R2;

  // address of first monitor
  sub(Rmonitor, FP, - frame::interpreter_frame_monitor_block_bottom_offset * wordSize + (int)sizeof(BasicObjectLock));

  ldr(Robj, Address(Rmonitor, BasicObjectLock::obj_offset_in_bytes()));
  cbnz(Robj, unlock);

  pop(state);

  if (throw_monitor_exception) {
    // Entry already unlocked, need to throw exception
    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));
    should_not_reach_here();
  } else {
    // Monitor already unlocked during a stack unroll.
    // If requested, install an illegal_monitor_state_exception.
    // Continue with stack unrolling.
    if (install_monitor_exception) {
      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::new_illegal_monitor_state_exception));
    }
    b(unlocked);
  }


  // Exception case for the check that all monitors are unlocked.
  const Register Rcur = R2;
  Label restart_check_monitors_unlocked, exception_monitor_is_still_locked;

  bind(exception_monitor_is_still_locked);
  // Monitor entry is still locked, need to throw exception.
  // Rcur: monitor entry.

  if (throw_monitor_exception) {
    // Throw exception
    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));
    should_not_reach_here();
  } else {
    // Stack unrolling. Unlock object and install illegal_monitor_exception
    // Unlock does not block, so don't have to worry about the frame

    push(state);
    mov(Rmonitor, Rcur);
    unlock_object(Rmonitor);

    if (install_monitor_exception) {
      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::new_illegal_monitor_state_exception));
    }

    pop(state);
    b(restart_check_monitors_unlocked);
  }

  bind(unlock);
  unlock_object(Rmonitor);
  pop(state);

  // Check that for block-structured locking (i.e., that all locked objects has been unlocked)
  bind(unlocked);

  // Check that all monitors are unlocked
  {
    Label loop;

    const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
    const Register Rbottom = R3;
    const Register Rcur_obj = Rtemp;

    bind(restart_check_monitors_unlocked);

    ldr(Rcur, Address(FP, frame::interpreter_frame_monitor_block_top_offset * wordSize));
                                 // points to current entry, starting with top-most entry
    sub(Rbottom, FP, -frame::interpreter_frame_monitor_block_bottom_offset * wordSize);
                                 // points to word before bottom of monitor block

    cmp(Rcur, Rbottom);          // check if there are no monitors
    ldr(Rcur_obj, Address(Rcur, BasicObjectLock::obj_offset_in_bytes()), ne);
                                 // prefetch monitor's object
    b(no_unlock, eq);

    bind(loop);
    // check if current entry is used
    cbnz(Rcur_obj, exception_monitor_is_still_locked);

    add(Rcur, Rcur, entry_size); // otherwise advance to next entry
    cmp(Rcur, Rbottom);          // check if bottom reached
    ldr(Rcur_obj, Address(Rcur, BasicObjectLock::obj_offset_in_bytes()), ne);
                                 // prefetch monitor's object
    b(loop, ne);                 // if not at bottom then check this entry
  }

  bind(no_unlock);

  // jvmti support
  if (notify_jvmdi) {
    notify_method_exit(state, NotifyJVMTI);     // preserve TOSCA
  } else {
    notify_method_exit(state, SkipNotifyJVMTI); // preserve TOSCA
  }

  // remove activation
  mov(Rtemp, FP);
  ldmia(FP, RegisterSet(FP) | RegisterSet(LR));
  ldr(SP, Address(Rtemp, frame::interpreter_frame_sender_sp_offset * wordSize));

  if (ret_addr != LR) {
    mov(ret_addr, LR);
  }
}


// At certain points in the method invocation the monitor of
// synchronized methods hasn't been entered yet.
// To correctly handle exceptions at these points, we set the thread local
// variable _do_not_unlock_if_synchronized to true. The remove_activation will
// check this flag.
void InterpreterMacroAssembler::set_do_not_unlock_if_synchronized(bool flag, Register tmp) {
  const Address do_not_unlock_if_synchronized(Rthread,
                         JavaThread::do_not_unlock_if_synchronized_offset());
  if (flag) {
    mov(tmp, 1);
    strb(tmp, do_not_unlock_if_synchronized);
  } else {
    strb(zero_register(tmp), do_not_unlock_if_synchronized);
  }
}
// Calls VM.
void InterpreterMacroAssembler::lock_object(Register Rlock) {
  // Rlock must be R1: it is passed straight through as the argument of the
  // InterpreterRuntime::monitorenter call on the slow paths below.
  assert(Rlock == R1, "the second argument");

  if (UseHeavyMonitors) {
    // Fast paths disabled: always enter through the runtime.
    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter), Rlock);
  } else {
    Label done;

    const Register Robj = R2;
    const Register Rmark = R3;
    assert_different_registers(Robj, Rmark, Rlock, R0, Rtemp);

    const int obj_offset = BasicObjectLock::obj_offset_in_bytes();
    const int lock_offset = BasicObjectLock::lock_offset_in_bytes ();
    const int mark_offset = lock_offset + BasicLock::displaced_header_offset_in_bytes();

    Label already_locked, slow_case;

    // Load object pointer
    ldr(Robj, Address(Rlock, obj_offset));

    if (DiagnoseSyncOnValueBasedClasses != 0) {
      // Synchronizing on a value-based class is diagnosed in the runtime:
      // check the klass access flags and take the slow path if the bit is set.
      load_klass(R0, Robj);
      ldr_u32(R0, Address(R0, Klass::access_flags_offset()));
      tst(R0, JVM_ACC_IS_VALUE_BASED_CLASS);
      b(slow_case, ne);
    }

    if (UseBiasedLocking) {
      biased_locking_enter(Robj, Rmark/*scratched*/, R0, false, Rtemp, done, slow_case);
    }


    // On MP platforms the next load could return a 'stale' value if the memory location has been modified by another thread.
    // That would be acceptable as either CAS or slow case path is taken in that case.
    // Exception to that is if the object is locked by the calling thread, then the recursive test will pass (guaranteed as
    // loads are satisfied from a store queue if performed on the same processor).

    assert(oopDesc::mark_offset_in_bytes() == 0, "must be");
    ldr(Rmark, Address(Robj, oopDesc::mark_offset_in_bytes()));

    // Test if object is already locked
    tst(Rmark, markWord::unlocked_value);
    b(already_locked, eq);

    // Save old object->mark() into BasicLock's displaced header
    str(Rmark, Address(Rlock, mark_offset));

    cas_for_lock_acquire(Rmark, Rlock, Robj, Rtemp, slow_case);

#ifndef PRODUCT
    if (PrintBiasedLockingStatistics) {
      cond_atomic_inc32(al, BiasedLocking::fast_path_entry_count_addr());
    }
#endif //!PRODUCT

    b(done);

    // If we got here that means the object is locked by either calling thread or another thread.
    bind(already_locked);
    // Handling of locked objects: recursive locks and slow case.

    // Fast check for recursive lock.
    //
    // Can apply the optimization only if this is a stack lock
    // allocated in this thread. For efficiency, we can focus on
    // recently allocated stack locks (instead of reading the stack
    // base and checking whether 'mark' points inside the current
    // thread stack):
    //  1) (mark & 3) == 0
    //  2) SP <= mark < SP + os::pagesize()
    //
    // Warning: SP + os::pagesize can overflow the stack base. We must
    // neither apply the optimization for an inflated lock allocated
    // just above the thread stack (this is why condition 1 matters)
    // nor apply the optimization if the stack lock is inside the stack
    // of another thread. The latter is avoided even in case of overflow
    // because we have guard pages at the end of all stacks. Hence, if
    // we go over the stack base and hit the stack of another thread,
    // this should not be in a writeable area that could contain a
    // stack lock allocated by that thread. As a consequence, a stack
    // lock less than page size away from SP is guaranteed to be
    // owned by the current thread.
    //
    // Note: assuming SP is aligned, we can check the low bits of
    // (mark-SP) instead of the low bits of mark. In that case,
    // assuming page size is a power of 2, we can merge the two
    // conditions into a single test:
    // => ((mark - SP) & (3 - os::pagesize())) == 0

    // (3 - os::pagesize()) cannot be encoded as an ARM immediate operand.
    // Check independently the low bits and the distance to SP.
    // -1- test low 2 bits
    movs(R0, AsmOperand(Rmark, lsl, 30));
    // -2- test (mark - SP) if the low two bits are 0
    sub(R0, Rmark, SP, eq);
    movs(R0, AsmOperand(R0, lsr, exact_log2(os::vm_page_size())), eq);
    // If still 'eq' then recursive locking OK: store 0 into lock record
    str(R0, Address(Rlock, mark_offset), eq);


#ifndef PRODUCT
    if (PrintBiasedLockingStatistics) {
      cond_atomic_inc32(eq, BiasedLocking::fast_path_entry_count_addr());
    }
#endif // !PRODUCT

    b(done, eq);

    bind(slow_case);

    // Call the runtime routine for slow case
    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter), Rlock);

    bind(done);
  }
}


// Unlocks an object. Used in monitorexit bytecode and remove_activation.
//
// Argument: R0: Points to BasicObjectLock structure for lock
// Throw an IllegalMonitorException if object is not locked by current thread
// Blows volatile registers R0-R3, Rtemp, LR.
// Calls VM.
void InterpreterMacroAssembler::unlock_object(Register Rlock) {
  // Rlock must be R0: it is passed straight through as the argument of the
  // InterpreterRuntime::monitorexit call on the slow paths below.
  assert(Rlock == R0, "the first argument");

  if (UseHeavyMonitors) {
    // Fast paths disabled: always exit through the runtime.
    call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), Rlock);
  } else {
    Label done, slow_case;

    const Register Robj = R2;
    const Register Rmark = R3;
    assert_different_registers(Robj, Rmark, Rlock, Rtemp);

    const int obj_offset = BasicObjectLock::obj_offset_in_bytes();
    const int lock_offset = BasicObjectLock::lock_offset_in_bytes ();
    const int mark_offset = lock_offset + BasicLock::displaced_header_offset_in_bytes();

    const Register Rzero = zero_register(Rtemp);

    // Load oop into Robj
    ldr(Robj, Address(Rlock, obj_offset));

    // Free entry by clearing the obj slot of the BasicObjectLock.
    str(Rzero, Address(Rlock, obj_offset));

    if (UseBiasedLocking) {
      biased_locking_exit(Robj, Rmark, done);
    }

    // Load the old header from BasicLock structure
    ldr(Rmark, Address(Rlock, mark_offset));

    // Test for recursion (zero mark in BasicLock)
    cbz(Rmark, done);

    bool allow_fallthrough_on_failure = true;

    cas_for_lock_release(Rlock, Rmark, Robj, Rtemp, slow_case, allow_fallthrough_on_failure);

    b(done, eq);

    bind(slow_case);

    // Call the runtime routine for slow case.
    str(Robj, Address(Rlock, obj_offset)); // restore obj
    call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), Rlock);

    bind(done);
  }
}


// Test ImethodDataPtr.
// If it is null, continue at the specified label
void InterpreterMacroAssembler::test_method_data_pointer(Register mdp, Label& zero_continue) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  // Load the saved mdp from the interpreter frame; if there is none, bail out.
  ldr(mdp, Address(FP, frame::interpreter_frame_mdp_offset * wordSize));
  cbz(mdp, zero_continue);
}


// Set the method data pointer for the current bcp.
// Blows volatile registers R0-R3, Rtemp, LR.
void InterpreterMacroAssembler::set_method_data_pointer_for_bcp() {
  assert(ProfileInterpreter, "must be profiling interpreter");
  Label set_mdp;

  // Test MDO to avoid the call if it is NULL.
  ldr(Rtemp, Address(Rmethod, Method::method_data_offset()));
  cbz(Rtemp, set_mdp);

  mov(R0, Rmethod);
  mov(R1, Rbcp);
  call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::bcp_to_di), R0, R1);
  // R0/W0: mdi

  // mdo is guaranteed to be non-zero here, we checked for it before the call.
  // Compute mdp = mdo + data_offset + mdi.
  ldr(Rtemp, Address(Rmethod, Method::method_data_offset()));
  add(Rtemp, Rtemp, in_bytes(MethodData::data_offset()));
  add_ptr_scaled_int32(Rtemp, Rtemp, R0, 0);

  bind(set_mdp);
  str(Rtemp, Address(FP, frame::interpreter_frame_mdp_offset * wordSize));
}


// Debug-only sanity check that the saved mdp is consistent with the current bcp.
void InterpreterMacroAssembler::verify_method_data_pointer() {
  assert(ProfileInterpreter, "must be profiling interpreter");
#ifdef ASSERT
  Label verify_continue;
  save_caller_save_registers();

  const Register Rmdp = R2;
  test_method_data_pointer(Rmdp, verify_continue); // If mdp is zero, continue

  // If the mdp is valid, it will point to a DataLayout header which is
  // consistent with the bcp. The converse is highly probable also.

  // Reconstruct the bcp from the mdp's bci and compare against Rbcp.
  ldrh(R3, Address(Rmdp, DataLayout::bci_offset()));
  ldr(Rtemp, Address(Rmethod, Method::const_offset()));
  add(R3, R3, Rtemp);
  add(R3, R3, in_bytes(ConstMethod::codes_offset()));
  cmp(R3, Rbcp);
  b(verify_continue, eq);

  // Mismatch: let the runtime report the inconsistency.
  mov(R0, Rmethod);
  mov(R1, Rbcp);
  call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::verify_mdp), R0, R1, Rmdp);

  bind(verify_continue);
  restore_caller_save_registers();
#endif // ASSERT
}


// Store value into the profile data cell at mdp_in + offset.
void InterpreterMacroAssembler::set_mdp_data_at(Register mdp_in, int offset, Register value) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  assert_different_registers(mdp_in, value);
  str(value, Address(mdp_in, offset));
}


// Increments mdp data. Sets bumped_count register to adjusted counter.
void InterpreterMacroAssembler::increment_mdp_data_at(Register mdp_in,
                                                      int offset,
                                                      Register bumped_count,
                                                      bool decrement) {
  assert(ProfileInterpreter, "must be profiling interpreter");

  // Counter address
  Address data(mdp_in, offset);
  assert_different_registers(mdp_in, bumped_count);

  increment_mdp_data_at(data, bumped_count, decrement);
}

// OR flag_byte_constant into the DataLayout flags byte at the current mdp.
void InterpreterMacroAssembler::set_mdp_flag_at(Register mdp_in, int flag_byte_constant) {
  assert_different_registers(mdp_in, Rtemp);
  assert(ProfileInterpreter, "must be profiling interpreter");
  assert((0 < flag_byte_constant) && (flag_byte_constant < (1 << BitsPerByte)), "flag mask is out of range");

  // Set the flag (read-modify-write of the flags byte)
  ldrb(Rtemp, Address(mdp_in, in_bytes(DataLayout::flags_offset())));
  orr(Rtemp, Rtemp, (unsigned)flag_byte_constant);
  strb(Rtemp, Address(mdp_in, in_bytes(DataLayout::flags_offset())));
}


// Increments mdp data.
// Sets bumped_count register to adjusted counter.
void InterpreterMacroAssembler::increment_mdp_data_at(Address data,
                                                      Register bumped_count,
                                                      bool decrement) {
  assert(ProfileInterpreter, "must be profiling interpreter");

  ldr(bumped_count, data);
  if (decrement) {
    // Decrement the register. Set condition codes.
    subs(bumped_count, bumped_count, DataLayout::counter_increment);
    // Avoid overflow: if the subtraction went negative ('pl' fails), undo it.
    add(bumped_count, bumped_count, DataLayout::counter_increment, pl);
  } else {
    // Increment the register. Set condition codes.
    adds(bumped_count, bumped_count, DataLayout::counter_increment);
    // Avoid overflow: if the addition wrapped negative ('mi'), undo it.
    sub(bumped_count, bumped_count, DataLayout::counter_increment, mi);
  }
  str(bumped_count, data);
}


// Load the cell at mdp_in + offset into test_value_out, compare with value,
// and branch to not_equal_continue on mismatch.
void InterpreterMacroAssembler::test_mdp_data_at(Register mdp_in,
                                                 int offset,
                                                 Register value,
                                                 Register test_value_out,
                                                 Label& not_equal_continue) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  assert_different_registers(mdp_in, test_value_out, value);

  ldr(test_value_out, Address(mdp_in, offset));
  cmp(test_value_out, value);

  b(not_equal_continue, ne);
}


// Advance mdp by the displacement stored at mdp_in + offset_of_disp and
// write the new mdp back into the interpreter frame.
void InterpreterMacroAssembler::update_mdp_by_offset(Register mdp_in, int offset_of_disp, Register reg_temp) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  assert_different_registers(mdp_in, reg_temp);

  ldr(reg_temp, Address(mdp_in, offset_of_disp));
  add(mdp_in, mdp_in, reg_temp);
  str(mdp_in, Address(FP, frame::interpreter_frame_mdp_offset * wordSize));
}


// Same as above, but the displacement offset is in a register.
void InterpreterMacroAssembler::update_mdp_by_offset(Register mdp_in, Register reg_offset, Register reg_tmp) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  assert_different_registers(mdp_in, reg_offset, reg_tmp);

  ldr(reg_tmp, Address(mdp_in, reg_offset));
  add(mdp_in, mdp_in, reg_tmp);
  str(mdp_in, Address(FP, frame::interpreter_frame_mdp_offset * wordSize));
}


// Advance mdp by a compile-time constant and save it back into the frame.
void InterpreterMacroAssembler::update_mdp_by_constant(Register mdp_in, int constant) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  add(mdp_in, mdp_in, constant);
  str(mdp_in, Address(FP, frame::interpreter_frame_mdp_offset * wordSize));
}


// Blows volatile registers R0-R3, Rtemp, LR.
void InterpreterMacroAssembler::update_mdp_for_ret(Register return_bci) {
  assert(ProfileInterpreter, "must be profiling interpreter");
  assert_different_registers(return_bci, R0, R1, R2, R3, Rtemp);

  mov(R1, return_bci);
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::update_mdp_for_ret), R1);
}


// Sets mdp, bumped_count registers, blows Rtemp.
void InterpreterMacroAssembler::profile_taken_branch(Register mdp, Register bumped_count) {
  assert_different_registers(mdp, bumped_count);

  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    // Otherwise, assign to mdp
    test_method_data_pointer(mdp, profile_continue);

    // We are taking a branch. Increment the taken count.
    increment_mdp_data_at(mdp, in_bytes(JumpData::taken_offset()), bumped_count);

    // The method data pointer needs to be updated to reflect the new target.
    update_mdp_by_offset(mdp, in_bytes(JumpData::displacement_offset()), Rtemp);

    bind (profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_not_taken_branch(Register mdp) {
  assert_different_registers(mdp, Rtemp);

  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // We are not taking the branch. Increment the not taken count.
    increment_mdp_data_at(mdp, in_bytes(BranchData::not_taken_offset()), Rtemp);

    // The method data pointer needs to be updated to correspond to the next bytecode
    update_mdp_by_constant(mdp, in_bytes(BranchData::branch_data_size()));

    bind (profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_call(Register mdp) {
  assert_different_registers(mdp, Rtemp);

  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // We are making a call. Increment the count.
    increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset()), Rtemp);

    // The method data pointer needs to be updated to reflect the new target.
    update_mdp_by_constant(mdp, in_bytes(CounterData::counter_data_size()));

    bind (profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_final_call(Register mdp) {
  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // We are making a call. Increment the count.
    increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset()), Rtemp);

    // The method data pointer needs to be updated to reflect the new target.
    // Note: final calls skip the receiver-row cells, hence virtual_call_data_size.
    update_mdp_by_constant(mdp, in_bytes(VirtualCallData::virtual_call_data_size()));

    bind (profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_virtual_call(Register mdp, Register receiver, bool receiver_can_be_null) {
  assert_different_registers(mdp, receiver, Rtemp);

  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    Label skip_receiver_profile;
    if (receiver_can_be_null) {
      Label not_null;
      cbnz(receiver, not_null);
      // We are making a call. Increment the count for null receiver.
      increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset()), Rtemp);
      b(skip_receiver_profile);
      bind(not_null);
    }

    // Record the receiver type.
    record_klass_in_profile(receiver, mdp, Rtemp, true);
    bind(skip_receiver_profile);

    // The method data pointer needs to be updated to reflect the new target.
    update_mdp_by_constant(mdp, in_bytes(VirtualCallData::virtual_call_data_size()));
    bind(profile_continue);
  }
}


// Emits the per-row receiver-type search for the type-profile rows,
// recursing once per remaining row (bounded by VirtualCallData::row_limit()).
void InterpreterMacroAssembler::record_klass_in_profile_helper(
                                        Register receiver, Register mdp,
                                        Register reg_tmp,
                                        int start_row, Label& done, bool is_virtual_call) {
  if (TypeProfileWidth == 0)
    return;

  assert_different_registers(receiver, mdp, reg_tmp);

  int last_row = VirtualCallData::row_limit() - 1;
  assert(start_row <= last_row, "must be work left to do");
  // Test this row for both the receiver and for null.
  // Take any of three different outcomes:
  //   1. found receiver => increment count and goto done
  //   2. found null => keep looking for case 1, maybe allocate this cell
  //   3. found something else => keep looking for cases 1 and 2
  // Case 3 is handled by a recursive call.
  for (int row = start_row; row <= last_row; row++) {
    Label next_test;

    // See if the receiver is receiver[n].
    int recvr_offset = in_bytes(VirtualCallData::receiver_offset(row));

    test_mdp_data_at(mdp, recvr_offset, receiver, reg_tmp, next_test);

    // The receiver is receiver[n]. Increment count[n].
    int count_offset = in_bytes(VirtualCallData::receiver_count_offset(row));
    increment_mdp_data_at(mdp, count_offset, reg_tmp);
    b(done);

    bind(next_test);
    // reg_tmp now contains the receiver from the CallData.

    if (row == start_row) {
      Label found_null;
      // Failed the equality check on receiver[n]... Test for null.
      if (start_row == last_row) {
        // The only thing left to do is handle the null case.
        if (is_virtual_call) {
          cbz(reg_tmp, found_null);
          // Receiver did not match any saved receiver and there is no empty row for it.
          // Increment total counter to indicate polymorphic case.
          increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset()), reg_tmp);
          b(done);
          bind(found_null);
        } else {
          cbnz(reg_tmp, done);
        }
        break;
      }
      // Since null is rare, make it be the branch-taken case.
      cbz(reg_tmp, found_null);

      // Put all the "Case 3" tests here.
      record_klass_in_profile_helper(receiver, mdp, reg_tmp, start_row + 1, done, is_virtual_call);

      // Found a null. Keep searching for a matching receiver,
      // but remember that this is an empty (unused) slot.
      bind(found_null);
    }
  }

  // In the fall-through case, we found no matching receiver, but we
  // observed the receiver[start_row] is NULL.

  // Fill in the receiver field and increment the count.
  int recvr_offset = in_bytes(VirtualCallData::receiver_offset(start_row));
  set_mdp_data_at(mdp, recvr_offset, receiver);
  int count_offset = in_bytes(VirtualCallData::receiver_count_offset(start_row));
  mov(reg_tmp, DataLayout::counter_increment);
  set_mdp_data_at(mdp, count_offset, reg_tmp);
  if (start_row > 0) {
    b(done);
  }
}

// Record receiver's klass in the type-profile rows starting at row 0.
void InterpreterMacroAssembler::record_klass_in_profile(Register receiver,
                                                        Register mdp,
                                                        Register reg_tmp,
                                                        bool is_virtual_call) {
  assert(ProfileInterpreter, "must be profiling");
  assert_different_registers(receiver, mdp, reg_tmp);

  Label done;

  record_klass_in_profile_helper(receiver, mdp, reg_tmp, 0, done, is_virtual_call);

  bind (done);
}

// Sets mdp, blows volatile registers R0-R3, Rtemp, LR.
void InterpreterMacroAssembler::profile_ret(Register mdp, Register return_bci) {
  assert_different_registers(mdp, return_bci, Rtemp, R0, R1, R2, R3);

  if (ProfileInterpreter) {
    Label profile_continue;
    uint row;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // Update the total ret count.
    increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset()), Rtemp);

    for (row = 0; row < RetData::row_limit(); row++) {
      Label next_test;

      // See if return_bci is equal to bci[n]:
      test_mdp_data_at(mdp, in_bytes(RetData::bci_offset(row)), return_bci,
                       Rtemp, next_test);

      // return_bci is equal to bci[n]. Increment the count.
      increment_mdp_data_at(mdp, in_bytes(RetData::bci_count_offset(row)), Rtemp);

      // The method data pointer needs to be updated to reflect the new target.
      update_mdp_by_offset(mdp, in_bytes(RetData::bci_displacement_offset(row)), Rtemp);
      b(profile_continue);
      bind(next_test);
    }

    // No matching row: let the runtime update the mdp.
    update_mdp_for_ret(return_bci);

    bind(profile_continue);
  }
}


// Sets mdp.
void InterpreterMacroAssembler::profile_null_seen(Register mdp) {
  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    set_mdp_flag_at(mdp, BitData::null_seen_byte_constant());

    // The method data pointer needs to be updated.
    int mdp_delta = in_bytes(BitData::bit_data_size());
    if (TypeProfileCasts) {
      // With cast profiling the cell is the larger VirtualCallData layout.
      mdp_delta = in_bytes(VirtualCallData::virtual_call_data_size());
    }
    update_mdp_by_constant(mdp, mdp_delta);

    bind (profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_typecheck_failed(Register mdp) {
  assert_different_registers(mdp, Rtemp);

  if (ProfileInterpreter && TypeProfileCasts) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    int count_offset = in_bytes(CounterData::count_offset());
    // Back up the address, since we have already bumped the mdp.
    count_offset -= in_bytes(VirtualCallData::virtual_call_data_size());

    // *Decrement* the counter. We expect to see zero or small negatives.
    increment_mdp_data_at(mdp, count_offset, Rtemp, true);

    bind (profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_typecheck(Register mdp, Register klass)
{
  assert_different_registers(mdp, klass, Rtemp);

  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // The method data pointer needs to be updated.
    int mdp_delta = in_bytes(BitData::bit_data_size());
    if (TypeProfileCasts) {
      mdp_delta = in_bytes(VirtualCallData::virtual_call_data_size());

      // Record the object type.
      record_klass_in_profile(klass, mdp, Rtemp, false);
    }
    update_mdp_by_constant(mdp, mdp_delta);

    bind(profile_continue);
  }
}


// Sets mdp, blows Rtemp.
void InterpreterMacroAssembler::profile_switch_default(Register mdp) {
  assert_different_registers(mdp, Rtemp);

  if (ProfileInterpreter) {
    Label profile_continue;

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // Update the default case count
    increment_mdp_data_at(mdp, in_bytes(MultiBranchData::default_count_offset()), Rtemp);

    // The method data pointer needs to be updated.
    update_mdp_by_offset(mdp, in_bytes(MultiBranchData::default_displacement_offset()), Rtemp);

    bind(profile_continue);
  }
}


// Sets mdp. Blows reg_tmp1, reg_tmp2.
// Index could be the same as reg_tmp2.
void InterpreterMacroAssembler::profile_switch_case(Register mdp, Register index, Register reg_tmp1, Register reg_tmp2) {
  assert_different_registers(mdp, reg_tmp1, reg_tmp2);
  assert_different_registers(mdp, reg_tmp1, index);

  if (ProfileInterpreter) {
    Label profile_continue;

    // Offsets of the count/displacement cells relative to the case array base.
    const int count_offset = in_bytes(MultiBranchData::case_array_offset()) +
                             in_bytes(MultiBranchData::relative_count_offset());

    const int displacement_offset = in_bytes(MultiBranchData::case_array_offset()) +
                                    in_bytes(MultiBranchData::relative_displacement_offset());

    // If no method data exists, go to profile_continue.
    test_method_data_pointer(mdp, profile_continue);

    // Build the base (index * per_case_size_in_bytes())
    logical_shift_left(reg_tmp1, index, exact_log2(in_bytes(MultiBranchData::per_case_size())));

    // Update the case count
    add(reg_tmp1, reg_tmp1, count_offset);
    increment_mdp_data_at(Address(mdp, reg_tmp1), reg_tmp2);

    // The method data pointer needs to be updated.
    add(reg_tmp1, reg_tmp1, displacement_offset - count_offset);
    update_mdp_by_offset(mdp, reg_tmp1, reg_tmp2);

    bind (profile_continue);
  }
}


// Byte-swap the 32-bit value in r. Uses REV when available; otherwise the
// classic 3-instruction EOR/ROR sequence with two scratch registers.
void InterpreterMacroAssembler::byteswap_u32(Register r, Register rtmp1, Register rtmp2) {
  if (VM_Version::supports_rev()) {
    rev(r, r);
  } else {
    eor(rtmp1, r, AsmOperand(r, ror, 16));
    mvn(rtmp2, 0x0000ff00);
    andr(rtmp1, rtmp2, AsmOperand(rtmp1, lsr, 8));
    eor(r, rtmp1, AsmOperand(r, ror, 8));
  }
}


// Increment a 32-bit global counter at address_of_counter + offset.
// With avoid_overflow, the store is skipped when the increment wraps negative.
void InterpreterMacroAssembler::inc_global_counter(address address_of_counter, int offset, Register tmp1, Register tmp2, bool avoid_overflow) {
  const intx addr = (intx) (address_of_counter + offset);

  assert ((addr & 0x3) == 0, "address of counter should be aligned");
  // Split the address into a page-aligned base and a 12-bit immediate offset
  // so it fits an ARM load/store addressing mode.
  const intx offset_mask = right_n_bits(12);

  const address base = (address) (addr & ~offset_mask);
  const int offs = (int) (addr & offset_mask);

  const Register addr_base = tmp1;
  const Register val = tmp2;

  mov_slow(addr_base, base);
  ldr_s32(val, Address(addr_base, offs));

  if (avoid_overflow) {
    adds_32(val, val, 1);
    str(val, Address(addr_base, offs), pl);
  } else {
    add_32(val, val, 1);
    str_32(val, Address(addr_base, offs));
  }
}

// Verify the oop in reg, but only when the tos state is atos.
void InterpreterMacroAssembler::interp_verify_oop(Register reg, TosState state, const char *file, int line) {
  if (state == atos) { MacroAssembler::_verify_oop(reg, "broken oop", file, line); }
}

// Inline assembly for:
//
// if (thread is in interp_only_mode) {
//   InterpreterRuntime::post_method_entry();
// }
// if (DTraceMethodProbes) {
//   SharedRuntime::dtrace_method_entry(method, receiver);
// }
// if (RC_TRACE_IN_RANGE(0x00001000, 0x00002000)) {
//   SharedRuntime::rc_trace_method_entry(method, receiver);
// }

void InterpreterMacroAssembler::notify_method_entry() {
  // Whenever JVMTI is interp_only_mode, method entry/exit events are sent to
  // track stack depth. If it is possible to enter interp_only_mode we add
  // the code to check if the event should be sent.
  if (can_post_interpreter_events()) {
    Label L;

    ldr_s32(Rtemp, Address(Rthread, JavaThread::interp_only_mode_offset()));
    cbz(Rtemp, L);

    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_entry));

    bind(L);
  }

  // Note: Disable DTrace runtime check for now to eliminate overhead on each method entry
  if (DTraceMethodProbes) {
    Label Lcontinue;

    ldrb_global(Rtemp, (address)&DTraceMethodProbes);
    cbz(Rtemp, Lcontinue);

    mov(R0, Rthread);
    mov(R1, Rmethod);
    call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), R0, R1);

    bind(Lcontinue);
  }
  // RedefineClasses() tracing support for obsolete method entry
  if (log_is_enabled(Trace, redefine, class, obsolete)) {
    mov(R0, Rthread);
    mov(R1, Rmethod);
    call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
                 R0, R1);
  }
}


void InterpreterMacroAssembler::notify_method_exit(
                 TosState state, NotifyMethodExitMode mode,
                 bool native, Register result_lo, Register result_hi, FloatRegister result_fp) {
  // Whenever JVMTI is interp_only_mode, method entry/exit events are sent to
  // track stack depth. If it is possible to enter interp_only_mode we add
  // the code to check if the event should be sent.
  if (mode == NotifyJVMTI && can_post_interpreter_events()) {
    Label L;
    // Note: frame::interpreter_frame_result has a dependency on how the
    // method result is saved across the call to post_method_exit. If this
    // is changed then the interpreter_frame_result implementation will
    // need to be updated too.

    ldr_s32(Rtemp, Address(Rthread, JavaThread::interp_only_mode_offset()));
    cbz(Rtemp, L);

    if (native) {
      // For c++ and template interpreter push both result registers on the
      // stack in native, we don't know the state.
      // See frame::interpreter_frame_result for code that gets the result values from here.
      assert(result_lo != noreg, "result registers should be defined");

      assert(result_hi != noreg, "result registers should be defined");

#ifdef __ABI_HARD__
      assert(result_fp != fnoreg, "FP result register must be defined");
      sub(SP, SP, 2 * wordSize);
      fstd(result_fp, Address(SP));
#endif // __ABI_HARD__

      push(RegisterSet(result_lo) | RegisterSet(result_hi));

      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_exit));

      pop(RegisterSet(result_lo) | RegisterSet(result_hi));
#ifdef __ABI_HARD__
      fldd(result_fp, Address(SP));
      add(SP, SP, 2 * wordSize);
#endif // __ABI_HARD__

    } else {
      // For the template interpreter, the value on tos is the size of the
      // state. (c++ interpreter calls jvmti somewhere else).
      push(state);
      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_exit));
      pop(state);
    }

    bind(L);
  }

  // Note: Disable DTrace runtime check for now to eliminate overhead on each method exit
  if (DTraceMethodProbes) {
    Label Lcontinue;

    ldrb_global(Rtemp, (address)&DTraceMethodProbes);
    cbz(Rtemp, Lcontinue);

    push(state);

    mov(R0, Rthread);
    mov(R1, Rmethod);

    call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), R0, R1);

    pop(state);

    bind(Lcontinue);
  }
}


#ifndef PRODUCT

// Debugging aid: emits a printf of msg together with the current FP and SP.
void InterpreterMacroAssembler::trace_state(const char* msg) {
  int push_size = save_caller_save_registers();

  Label Lcontinue;
  InlinedString Lmsg0("%s: FP=" INTPTR_FORMAT ", SP=" INTPTR_FORMAT "\n");
  InlinedString Lmsg(msg);
  InlinedAddress Lprintf((address)printf);

  ldr_literal(R0, Lmsg0);
  ldr_literal(R1, Lmsg);
  mov(R2, FP);
  add(R3, SP, push_size); // original SP (without saved registers)
  ldr_literal(Rtemp, Lprintf);
  call(Rtemp);

  b(Lcontinue);

  bind_literal(Lmsg0);
  bind_literal(Lmsg);
  bind_literal(Lprintf);


  bind(Lcontinue);

  restore_caller_save_registers();
}

#endif

// Jump if ((*counter_addr += increment) & mask) satisfies the condition.
void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
                                                        int increment, Address mask_addr,
                                                        Register scratch, Register scratch2,
                                                        AsmCondition cond, Label* where) {
  // caution: scratch2 and base address of counter_addr can be the same
  assert_different_registers(scratch, scratch2);
  ldr_u32(scratch, counter_addr);
  add(scratch, scratch, increment);
  str_32(scratch, counter_addr);

  ldr(scratch2, mask_addr);
  andrs(scratch, scratch, scratch2);
  b(*where, cond);
}

// Load the MethodCounters for method into Rcounters, creating them via the
// runtime if absent. Branches to skip if allocation fails (OutOfMemory).
void InterpreterMacroAssembler::get_method_counters(Register method,
                                                    Register Rcounters,
                                                    Label& skip,
                                                    bool saveRegs,
                                                    Register reg1,
                                                    Register reg2,
                                                    Register reg3) {
  const Address method_counters(method, Method::method_counters_offset());
  Label has_counters;

  ldr(Rcounters, method_counters);
  cbnz(Rcounters, has_counters);

  if (saveRegs) {
    // Save and restore in use caller-saved registers since they will be trashed by call_VM
    assert(reg1 != noreg, "must specify reg1");
    assert(reg2 != noreg, "must specify reg2");
    assert(reg3 == noreg, "must not specify reg3");
    push(RegisterSet(reg1) | RegisterSet(reg2));
  }

  mov(R1, method);
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::build_method_counters), R1);

  if (saveRegs) {
    pop(RegisterSet(reg1) | RegisterSet(reg2));
  }

  ldr(Rcounters, method_counters);
  cbz(Rcounters, skip); // No MethodCounters created, OutOfMemory

  bind(has_counters);
}