Path: blob/master/src/hotspot/cpu/ppc/c1_CodeStubs_ppc.cpp
/*
 * Copyright (c) 1999, 2021, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2021 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "c1/c1_CodeStubs.hpp"
#include "c1/c1_FrameMap.hpp"
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "c1/c1_Runtime1.hpp"
#include "classfile/javaClasses.hpp"
#include "nativeInst_ppc.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp"
#include "vmreg_ppc.inline.hpp"

#define __ ce->masm()->

void C1SafepointPollStub::emit_code(LIR_Assembler* ce) {
  if (UseSIGTRAP) {
    DEBUG_ONLY( __ should_not_reach_here("C1SafepointPollStub::emit_code"); )
  } else {
    assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
           "polling page return stub not created yet");
    address stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();

    __ bind(_entry);
    // Using pc relative address computation.
    {
      Label next_pc;
      __ bl(next_pc);
      __ bind(next_pc);
    }
    int current_offset = __ offset();
    __ mflr(R12);
    __ add_const_optimized(R12, R12, safepoint_offset() - current_offset);
    __ std(R12, in_bytes(JavaThread::saved_exception_pc_offset()), R16_thread);

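    // Branch to the return handler: compute its address relative to the global
    // TOC kept in R29_TOC (usually shorter than materializing the full 64-bit
    // constant) and jump through the count register.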
    __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
    __ mtctr(R0);
    __ bctr();
  }
}

RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index, LIR_Opr array)
  : _index(index), _array(array), _throw_index_out_of_bounds_exception(false) {
  assert(info != NULL, "must have info");
  _info = new CodeEmitInfo(info);
}

RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index)
  : _index(index), _array(NULL), _throw_index_out_of_bounds_exception(true) {
  assert(info != NULL, "must have info");
  _info = new CodeEmitInfo(info);
}

void RangeCheckStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);

  if (_info->deoptimize_on_exception()) {
    address a = Runtime1::entry_for(Runtime1::predicate_failed_trap_id);
    // May be used by optimizations like LoopInvariantCodeMotion or RangeCheckEliminator.
    DEBUG_ONLY( __ untested("RangeCheckStub: predicate_failed_trap_id"); )
    //__ load_const_optimized(R0, a);
    __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(a));
    __ mtctr(R0);
    __ bctrl();
    ce->add_call_info_here(_info);
    ce->verify_oop_map(_info);
    debug_only(__ illtrap());
    return;
  }

  address stub = _throw_index_out_of_bounds_exception ? Runtime1::entry_for(Runtime1::throw_index_exception_id)
                                                      : Runtime1::entry_for(Runtime1::throw_range_check_failed_id);
  //__ load_const_optimized(R0, stub);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
  __ mtctr(R0);

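  // Pass the stub arguments by storing them just below SP: the index at
  // SP - 16 and, if present, the array oop at SP - 8.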
  Register index = R0;
  if (_index->is_register()) {
    __ extsw(index, _index->as_register());
  } else {
    __ load_const_optimized(index, _index->as_jint());
  }
  if (_array) {
    __ std(_array->as_pointer_register(), -8, R1_SP);
  }
  __ std(index, -16, R1_SP);

  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  debug_only(__ illtrap());
}


PredicateFailedStub::PredicateFailedStub(CodeEmitInfo* info) {
  _info = new CodeEmitInfo(info);
}

void PredicateFailedStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  address a = Runtime1::entry_for(Runtime1::predicate_failed_trap_id);
  //__ load_const_optimized(R0, a);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(a));
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  debug_only(__ illtrap());
}


void CounterOverflowStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);

  // Parameter 1: bci
  __ load_const_optimized(R0, _bci);
  __ std(R0, -16, R1_SP);

  // Parameter 2: Method*
  Metadata *m = _method->as_constant_ptr()->as_metadata();
  AddressLiteral md = __ constant_metadata_address(m); // Notify OOP recorder (don't need the relocation).
  __ load_const_optimized(R0, md.value());
  __ std(R0, -8, R1_SP);

  address a = Runtime1::entry_for(Runtime1::counter_overflow_id);
  //__ load_const_optimized(R0, a);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(a));
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);

  __ b(_continuation);
}


void DivByZeroStub::emit_code(LIR_Assembler* ce) {
  if (_offset != -1) {
    ce->compilation()->implicit_exception_table()->append(_offset, __ offset());
  }
  __ bind(_entry);
  address stub = Runtime1::entry_for(Runtime1::throw_div0_exception_id);
  //__ load_const_optimized(R0, stub);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  debug_only(__ illtrap());
}


void ImplicitNullCheckStub::emit_code(LIR_Assembler* ce) {
  address a;
  if (_info->deoptimize_on_exception()) {
    // Deoptimize, do not throw the exception, because it is probably wrong to do it here.
    a = Runtime1::entry_for(Runtime1::predicate_failed_trap_id);
  } else {
    a = Runtime1::entry_for(Runtime1::throw_null_pointer_exception_id);
  }

  if (ImplicitNullChecks || TrapBasedNullChecks) {
    ce->compilation()->implicit_exception_table()->append(_offset, __ offset());
  }
  __ bind(_entry);
  //__ load_const_optimized(R0, a);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(a));
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  debug_only(__ illtrap());
}


// Implementation of SimpleExceptionStub
void SimpleExceptionStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  address stub = Runtime1::entry_for(_stub);
  //__ load_const_optimized(R0, stub);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
  if (_obj->is_valid()) { __ mr_if_needed(/*tmp1 in do_CheckCast*/ R4_ARG2, _obj->as_register()); }
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  debug_only( __ illtrap(); )
}


// Implementation of NewInstanceStub
NewInstanceStub::NewInstanceStub(LIR_Opr klass_reg, LIR_Opr result, ciInstanceKlass* klass, CodeEmitInfo* info, Runtime1::StubID stub_id) {
  _result = result;
  _klass = klass;
  _klass_reg = klass_reg;
  _info = new CodeEmitInfo(info);
  assert(stub_id == Runtime1::new_instance_id ||
         stub_id == Runtime1::fast_new_instance_id ||
         stub_id == Runtime1::fast_new_instance_init_check_id,
         "need new_instance id");
  _stub_id = stub_id;
}

void NewInstanceStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);

  address entry = Runtime1::entry_for(_stub_id);
  //__ load_const_optimized(R0, entry);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(entry));
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  __ b(_continuation);
}


// Implementation of NewTypeArrayStub
NewTypeArrayStub::NewTypeArrayStub(LIR_Opr klass_reg, LIR_Opr length, LIR_Opr result, CodeEmitInfo* info) {
  _klass_reg = klass_reg;
  _length = length;
  _result = result;
  _info = new CodeEmitInfo(info);
}

void NewTypeArrayStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);

  address entry = Runtime1::entry_for(Runtime1::new_type_array_id);
  //__ load_const_optimized(R0, entry);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(entry));
  __ mr_if_needed(/*op->tmp1()->as_register()*/ R5_ARG3, _length->as_register()); // already sign-extended
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  __ b(_continuation);
}


// Implementation of NewObjectArrayStub
NewObjectArrayStub::NewObjectArrayStub(LIR_Opr klass_reg, LIR_Opr length, LIR_Opr result, CodeEmitInfo* info) {
  _klass_reg = klass_reg;
  _length = length;
  _result = result;
  _info = new CodeEmitInfo(info);
}

void NewObjectArrayStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);

  address entry = Runtime1::entry_for(Runtime1::new_object_array_id);
  //__ load_const_optimized(R0, entry);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(entry));
  __ mr_if_needed(/*op->tmp1()->as_register()*/ R5_ARG3, _length->as_register()); // already sign-extended
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  __ b(_continuation);
}


// Implementation of MonitorAccessStubs
MonitorEnterStub::MonitorEnterStub(LIR_Opr obj_reg, LIR_Opr lock_reg, CodeEmitInfo* info)
  : MonitorAccessStub(obj_reg, lock_reg) {
  _info = new CodeEmitInfo(info);
}

void MonitorEnterStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  address stub = Runtime1::entry_for(ce->compilation()->has_fpu_code() ? Runtime1::monitorenter_id : Runtime1::monitorenter_nofpu_id);
  //__ load_const_optimized(R0, stub);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
  __ mr_if_needed(/*scratch_opr()->as_register()*/ R4_ARG2, _obj_reg->as_register());
  assert(_lock_reg->as_register() == R5_ARG3, "");
  __ mtctr(R0);
  __ bctrl();
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);
  __ b(_continuation);
}

void MonitorExitStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
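  // lock_reg may have been destroyed on the way to this stub (e.g. by the
  // inline fast-unlock attempt); recompute the BasicLock address from the
  // monitor index if needed.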
  if (_compute_lock) {
    ce->monitor_address(_monitor_ix, _lock_reg);
  }
  address stub = Runtime1::entry_for(ce->compilation()->has_fpu_code() ? Runtime1::monitorexit_id : Runtime1::monitorexit_nofpu_id);
  //__ load_const_optimized(R0, stub);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
  assert(_lock_reg->as_register() == R4_ARG2, "");
  __ mtctr(R0);
  __ bctrl();
  __ b(_continuation);
}


// Implementation of patching:
// - Copy the code at given offset to an inlined buffer (first the bytes, then the number of bytes).
// - Replace original code with a call to the stub.
// At Runtime:
// - call to stub, jump to runtime
// - in runtime: preserve all registers (especially objects, i.e., source and destination object)
// - in runtime: after initializing class, restore original code, reexecute instruction

int PatchingStub::_patch_info_offset = -(5 * BytesPerInstWord);

void PatchingStub::align_patch_site(MacroAssembler* ) {
  // Patch sites on ppc are always properly aligned.
}

#ifdef ASSERT
inline void compare_with_patch_site(address template_start, address pc_start, int bytes_to_copy) {
  address start = template_start;
  for (int i = 0; i < bytes_to_copy; i++) {
    address ptr = (address)(pc_start + i);
    int a_byte = (*ptr) & 0xFF;
    assert(a_byte == *start++, "should be the same code");
  }
}
#endif

void PatchingStub::emit_code(LIR_Assembler* ce) {
  // copy original code here
  assert(NativeGeneralJump::instruction_size <= _bytes_to_copy && _bytes_to_copy <= 0xFF,
         "not enough room for call, need %d", _bytes_to_copy);
  assert((_bytes_to_copy & 0x3) == 0, "must copy a multiple of four bytes");

  Label call_patch;

  int being_initialized_entry = __ offset();

  if (_id == load_klass_id) {
    // Produce a copy of the load klass instruction for use by the being initialized case.
    AddressLiteral addrlit((address)NULL, metadata_Relocation::spec(_index));
    __ load_const(_obj, addrlit, R0);
    DEBUG_ONLY( compare_with_patch_site(__ code_section()->start() + being_initialized_entry, _pc_start, _bytes_to_copy); )
  } else if (_id == load_mirror_id || _id == load_appendix_id) {
    // Produce a copy of the load mirror instruction for use by the being initialized case.
    AddressLiteral addrlit((address)NULL, oop_Relocation::spec(_index));
    __ load_const(_obj, addrlit, R0);
    DEBUG_ONLY( compare_with_patch_site(__ code_section()->start() + being_initialized_entry, _pc_start, _bytes_to_copy); )
  } else {
    // Make a copy of the code which is going to be patched.
    for (int i = 0; i < _bytes_to_copy; i++) {
      address ptr = (address)(_pc_start + i);
      int a_byte = (*ptr) & 0xFF;
      __ emit_int8 (a_byte);
    }
  }

  address end_of_patch = __ pc();
  int bytes_to_skip = 0;
  if (_id == load_mirror_id) {
    int offset = __ offset();
    __ block_comment(" being_initialized check");

    // Static field accesses have special semantics while the class
    // initializer is being run so we emit a test which can be used to
    // check that this code is being executed by the initializing
    // thread.
    assert(_obj != noreg, "must be a valid register");
    assert(_index >= 0, "must have oop index");
    __ mr(R0, _obj); // spill
    __ ld(_obj, java_lang_Class::klass_offset(), _obj);
    __ ld(_obj, in_bytes(InstanceKlass::init_thread_offset()), _obj);
    __ cmpd(CCR0, _obj, R16_thread);
    __ mr(_obj, R0); // restore
    __ bne(CCR0, call_patch);

    // Load_klass patches may execute the patched code before it's
    // copied back into place so we need to jump back into the main
    // code of the nmethod to continue execution.
    __ b(_patch_site_continuation);

    // Make sure this extra code gets skipped.
    bytes_to_skip += __ offset() - offset;
  }

  // Now emit the patch record telling the runtime how to find the
  // pieces of the patch. We only need 3 bytes but it has to be
  // aligned as an instruction so emit 4 bytes.
  int sizeof_patch_record = 4;
  bytes_to_skip += sizeof_patch_record;

  // Emit the offsets needed to find the code to patch.
  int being_initialized_entry_offset = __ offset() - being_initialized_entry + sizeof_patch_record;

  // Emit the patch record. We need to emit a full word, so emit an extra empty byte.
  __ emit_int8(0);
  __ emit_int8(being_initialized_entry_offset);
  __ emit_int8(bytes_to_skip);
  __ emit_int8(_bytes_to_copy);
  address patch_info_pc = __ pc();
  assert(patch_info_pc - end_of_patch == bytes_to_skip, "incorrect patch info");

  address entry = __ pc();
  NativeGeneralJump::insert_unconditional((address)_pc_start, entry);
  address target = NULL;
  relocInfo::relocType reloc_type = relocInfo::none;
  switch (_id) {
    case access_field_id:  target = Runtime1::entry_for(Runtime1::access_field_patching_id); break;
    case load_klass_id:    target = Runtime1::entry_for(Runtime1::load_klass_patching_id);
                           reloc_type = relocInfo::metadata_type; break;
    case load_mirror_id:   target = Runtime1::entry_for(Runtime1::load_mirror_patching_id);
                           reloc_type = relocInfo::oop_type; break;
    case load_appendix_id: target = Runtime1::entry_for(Runtime1::load_appendix_patching_id);
                           reloc_type = relocInfo::oop_type; break;
    default: ShouldNotReachHere();
  }
  __ bind(call_patch);

  __ block_comment("patch entry point");
  //__ load_const(R0, target); + mtctr + bctrl must have size -_patch_info_offset
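  // The sequence below must stay exactly -_patch_info_offset bytes long:
  // load_const32 (2 instructions) + add + mtctr + bctrl = 5 instructions,
  // which the assert after the call checks.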
  __ load_const32(R0, MacroAssembler::offset_to_global_toc(target));
  __ add(R0, R29_TOC, R0);
  __ mtctr(R0);
  __ bctrl();
  assert(_patch_info_offset == (patch_info_pc - __ pc()), "must not change");
  ce->add_call_info_here(_info);
  __ b(_patch_site_entry);
  if (_id == load_klass_id || _id == load_mirror_id || _id == load_appendix_id) {
    CodeSection* cs = __ code_section();
    address pc = (address)_pc_start;
    RelocIterator iter(cs, pc, pc + 1);
    relocInfo::change_reloc_info_for_address(&iter, (address) pc, reloc_type, relocInfo::none);
  }
}


void DeoptimizeStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  address stub = Runtime1::entry_for(Runtime1::deoptimize_id);
  //__ load_const_optimized(R0, stub);
  __ add_const_optimized(R0, R29_TOC, MacroAssembler::offset_to_global_toc(stub));
  __ mtctr(R0);

  __ load_const_optimized(R0, _trap_request); // Pass trap request in R0.
  __ bctrl();
  ce->add_call_info_here(_info);
  debug_only(__ illtrap());
}


void ArrayCopyStub::emit_code(LIR_Assembler* ce) {
  //---------------slow case: call to native-----------------
  __ bind(_entry);
  __ mr(R3_ARG1, src()->as_register());
  __ extsw(R4_ARG2, src_pos()->as_register());
  __ mr(R5_ARG3, dst()->as_register());
  __ extsw(R6_ARG4, dst_pos()->as_register());
  __ extsw(R7_ARG5, length()->as_register());

  ce->emit_static_call_stub();

  bool success = ce->emit_trampoline_stub_for_call(SharedRuntime::get_resolve_static_call_stub());
  if (!success) { return; }

  __ relocate(relocInfo::static_call_type);
  // Note: At this point we do not have the address of the trampoline
  // stub, and the entry point might be too far away for bl, so __ pc()
  // serves as dummy and the bl will be patched later.
  __ code()->set_insts_mark();
  __ bl(__ pc());
  ce->add_call_info_here(info());
  ce->verify_oop_map(info());

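  // In non-product builds, count this slow-case entry in Runtime1::_arraycopy_slowcase_cnt.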
#ifndef PRODUCT
  const address counter = (address)&Runtime1::_arraycopy_slowcase_cnt;
  const Register tmp = R3, tmp2 = R4;
  int simm16_offs = __ load_const_optimized(tmp, counter, tmp2, true);
  __ lwz(tmp2, simm16_offs, tmp);
  __ addi(tmp2, tmp2, 1);
  __ stw(tmp2, simm16_offs, tmp);
#endif

  __ b(_continuation);
}

#undef __