Path: blob/master/src/hotspot/cpu/ppc/gc/z/zBarrierSetAssembler_ppc.cpp
/*
 * Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2021, 2022 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "asm/register.hpp"
#include "code/codeBlob.hpp"
#include "code/vmreg.inline.hpp"
#include "gc/z/zBarrier.inline.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zBarrierSetRuntime.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "memory/resourceArea.hpp"
#include "register_ppc.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#endif // COMPILER1
#ifdef COMPILER2
#include "gc/z/c2/zBarrierSetC2.hpp"
#endif // COMPILER2

#undef __
#define __ masm->

void ZBarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Register base, RegisterOrConstant ind_or_offs, Register dst,
                                   Register tmp1, Register tmp2,
                                   MacroAssembler::PreservationLevel preservation_level, Label *L_handle_null) {
  __ block_comment("load_at (zgc) {");

  // Check whether a special gc barrier is required for this particular load
  // (e.g. whether it's a reference load or not)
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    BarrierSetAssembler::load_at(masm, decorators, type, base, ind_or_offs, dst,
                                 tmp1, tmp2, preservation_level, L_handle_null);
    return;
  }

  if (ind_or_offs.is_register()) {
    assert_different_registers(base, ind_or_offs.as_register(), tmp1, tmp2, R0, noreg);
    assert_different_registers(dst, ind_or_offs.as_register(), tmp1, tmp2, R0, noreg);
  } else {
    assert_different_registers(base, tmp1, tmp2, R0, noreg);
    assert_different_registers(dst, tmp1, tmp2, R0, noreg);
  }

  /* ==== Load the pointer using the standard implementation for the actual heap access
          and the decompression of compressed pointers ==== */
  // Result of 'load_at' (standard implementation) will be written back to 'dst'.
  // As 'base' is required for the C-call, it must be reserved in case of a register clash.
  Register saved_base = base;
  if (base == dst) {
    __ mr(tmp2, base);
    saved_base = tmp2;
  }

  BarrierSetAssembler::load_at(masm, decorators, type, base, ind_or_offs, dst,
                               tmp1, noreg, preservation_level, L_handle_null);

  /* ==== Check whether pointer is dirty ==== */
  Label skip_barrier;

  // Load bad mask into scratch register.
  __ ld(tmp1, (intptr_t) ZThreadLocalData::address_bad_mask_offset(), R16_thread);

  // The color bits of the to-be-tested pointer do not have to be equivalent to the 'bad_mask' testing bits.
  // A pointer is classified as dirty if any color bit that is also set in the bad mask is set.
  // Conversely, it follows that the logical AND of the bad mask and the pointer must be zero
  // if the pointer is not dirty.
  // Only dirty pointers must be processed by this barrier, so we can skip it in case the latter condition holds true.
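  // Illustrative sketch (the concrete mask value is phase-dependent and merely assumed here):
  // 'bad_mask' has exactly the color bits set that are "bad" in the current GC phase, so
  //   (ptr & bad_mask) == 0  -> color is good, the barrier can be skipped
  //   (ptr & bad_mask) != 0  -> color is bad (dirty), the slow path must process the reference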
  __ and_(tmp1, tmp1, dst);
  __ beq(CCR0, skip_barrier);

  /* ==== Invoke barrier ==== */
  int nbytes_save = 0;

  const bool needs_frame = preservation_level >= MacroAssembler::PRESERVATION_FRAME_LR;
  const bool preserve_gp_registers = preservation_level >= MacroAssembler::PRESERVATION_FRAME_LR_GP_REGS;
  const bool preserve_fp_registers = preservation_level >= MacroAssembler::PRESERVATION_FRAME_LR_GP_FP_REGS;

  const bool preserve_R3 = dst != R3_ARG1;

  if (needs_frame) {
    if (preserve_gp_registers) {
      nbytes_save = (preserve_fp_registers
                     ? MacroAssembler::num_volatile_gp_regs + MacroAssembler::num_volatile_fp_regs
                     : MacroAssembler::num_volatile_gp_regs) * BytesPerWord;
      nbytes_save -= preserve_R3 ? 0 : BytesPerWord;
      __ save_volatile_gprs(R1_SP, -nbytes_save, preserve_fp_registers, preserve_R3);
    }

    __ save_LR_CR(tmp1);
    __ push_frame_reg_args(nbytes_save, tmp1);
  }

  // Setup arguments
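  // The argument registers may alias 'dst' and 'saved_base', so the three cases below order
  // the moves such that no value is clobbered before it has been consumed; the full-collision
  // case ('saved_base' == R3_ARG1 and 'dst' == R4_ARG2) stages the address in R0.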
  if (saved_base != R3_ARG1) {
    __ mr_if_needed(R3_ARG1, dst);
    __ add(R4_ARG2, ind_or_offs, saved_base);
  } else if (dst != R4_ARG2) {
    __ add(R4_ARG2, ind_or_offs, saved_base);
    __ mr(R3_ARG1, dst);
  } else {
    __ add(R0, ind_or_offs, saved_base);
    __ mr(R3_ARG1, dst);
    __ mr(R4_ARG2, R0);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators));

  Register result = R3_RET;
  if (needs_frame) {
    __ pop_frame();
    __ restore_LR_CR(tmp1);

    if (preserve_R3) {
      __ mr(R0, R3_RET);
      result = R0;
    }

    if (preserve_gp_registers) {
      __ restore_volatile_gprs(R1_SP, -nbytes_save, preserve_fp_registers, preserve_R3);
    }
  }
  __ mr_if_needed(dst, result);

  __ bind(skip_barrier);
  __ block_comment("} load_at (zgc)");
}

#ifdef ASSERT
// The Z store barrier only verifies the pointers it is operating on and is thus purely a debugging measure.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    Register base, RegisterOrConstant ind_or_offs, Register val,
                                    Register tmp1, Register tmp2, Register tmp3,
                                    MacroAssembler::PreservationLevel preservation_level) {
  __ block_comment("store_at (zgc) {");

  // If the 'val' register is 'noreg', the to-be-stored value is a null pointer.
  if (is_reference_type(type) && val != noreg) {
    __ ld(tmp1, in_bytes(ZThreadLocalData::address_bad_mask_offset()), R16_thread);
    __ and_(tmp1, tmp1, val);
    __ asm_assert_eq("Detected dirty pointer on the heap in Z store barrier");
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, base, ind_or_offs, val, tmp1, tmp2, tmp3, preservation_level);

  __ block_comment("} store_at (zgc)");
}
#endif // ASSERT

void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler *masm, DecoratorSet decorators, BasicType component_type,
                                              Register src, Register dst, Register count,
                                              Register preserve1, Register preserve2) {
  __ block_comment("arraycopy_prologue (zgc) {");

  /* ==== Check whether a special gc barrier is required for this particular load ==== */
  if (!is_reference_type(component_type)) {
    return;
  }

  Label skip_barrier;

  // Fast path: Array is of length zero
  __ cmpdi(CCR0, count, 0);
  __ beq(CCR0, skip_barrier);

  /* ==== Ensure register sanity ==== */
  Register tmp_R11 = R11_scratch1;

  assert_different_registers(src, dst, count, tmp_R11, noreg);
  if (preserve1 != noreg) {
    // Not technically required, but an overlap is unlikely to be intended.
    assert_different_registers(preserve1, preserve2);
  }

  /* ==== Invoke barrier (slowpath) ==== */
  int nbytes_save = 0;

  {
    assert(!noreg->is_volatile(), "sanity");

    if (preserve1->is_volatile()) {
      __ std(preserve1, -BytesPerWord * ++nbytes_save, R1_SP);
    }

    if (preserve2->is_volatile() && preserve1 != preserve2) {
      __ std(preserve2, -BytesPerWord * ++nbytes_save, R1_SP);
    }

    __ std(src, -BytesPerWord * ++nbytes_save, R1_SP);
    __ std(dst, -BytesPerWord * ++nbytes_save, R1_SP);
    __ std(count, -BytesPerWord * ++nbytes_save, R1_SP);

    __ save_LR_CR(tmp_R11);
    __ push_frame_reg_args(nbytes_save, tmp_R11);
  }
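  // Note: the stores above write below R1_SP before a frame has been pushed. This presumably
  // relies on the protected zone below the stack pointer defined by the PPC64 ELF ABI; once
  // push_frame_reg_args has run, the 'nbytes_save' bytes are covered by the new frame.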

  // ZBarrierSetRuntime::load_barrier_on_oop_array_addr(src, count)
  if (count == R3_ARG1) {
    if (src == R4_ARG2) {
      // Arguments are provided in reverse order
      __ mr(tmp_R11, count);
      __ mr(R3_ARG1, src);
      __ mr(R4_ARG2, tmp_R11);
    } else {
      __ mr(R4_ARG2, count);
      __ mr(R3_ARG1, src);
    }
  } else {
    __ mr_if_needed(R3_ARG1, src);
    __ mr_if_needed(R4_ARG2, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr());

  __ pop_frame();
  __ restore_LR_CR(tmp_R11);

  {
    __ ld(count, -BytesPerWord * nbytes_save--, R1_SP);
    __ ld(dst, -BytesPerWord * nbytes_save--, R1_SP);
    __ ld(src, -BytesPerWord * nbytes_save--, R1_SP);

    if (preserve2->is_volatile() && preserve1 != preserve2) {
      __ ld(preserve2, -BytesPerWord * nbytes_save--, R1_SP);
    }

    if (preserve1->is_volatile()) {
      __ ld(preserve1, -BytesPerWord * nbytes_save--, R1_SP);
    }
  }

  __ bind(skip_barrier);

  __ block_comment("} arraycopy_prologue (zgc)");
}

void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register dst, Register jni_env,
                                                         Register obj, Register tmp, Label& slowpath) {
  __ block_comment("try_resolve_jobject_in_native (zgc) {");

  assert_different_registers(jni_env, obj, tmp);

  // Resolve the pointer using the standard implementation for weak tag handling and pointer verification.
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, dst, jni_env, obj, tmp, slowpath);

  // Check whether pointer is dirty.
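  // Note: 'jni_env' points to the JNIEnv embedded in its owning JavaThread. Subtracting
  // JavaThread::jni_environment_offset() thus rebases the access to the start of the
  // JavaThread, from which the thread-local bad mask is addressed directly.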
  __ ld(tmp,
        in_bytes(ZThreadLocalData::address_bad_mask_offset() - JavaThread::jni_environment_offset()),
        jni_env);

  __ and_(tmp, obj, tmp);
  __ bne(CCR0, slowpath);

  __ block_comment("} try_resolve_jobject_in_native (zgc)");
}

#undef __

#ifdef COMPILER1
#define __ ce->masm()->

// Code emitted by LIR node "LIR_OpZLoadBarrierTest" which in turn is emitted by ZBarrierSetC1::load_barrier.
// The actual compare and branch instructions are represented as stand-alone LIR nodes.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  __ block_comment("load_barrier_test (zgc) {");

  __ ld(R0, in_bytes(ZThreadLocalData::address_bad_mask_offset()), R16_thread);
  __ andr(R0, R0, ref->as_pointer_register());
  __ cmpdi(CCR5 /* as mandated by LIR node */, R0, 0);

  __ block_comment("} load_barrier_test (zgc)");
}

// Code emitted by code stub "ZLoadBarrierStubC1" which in turn is emitted by ZBarrierSetC1::load_barrier.
// Invokes the runtime stub which is defined just below.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  __ block_comment("c1_load_barrier_stub (zgc) {");

  __ bind(*stub->entry());

  /* ==== Determine relevant data registers and ensure register sanity ==== */
  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;

  // Determine reference address
  if (stub->tmp()->is_valid()) {
    // 'tmp' register is given, so address might have an index or a displacement.
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = stub->tmp()->as_pointer_register();
  } else {
    // 'tmp' register is not given, so address must have neither an index nor a displacement.
    // The address' base register is thus usable as-is.
    assert(stub->ref_addr()->as_address_ptr()->disp() == 0, "illegal displacement");
    assert(!stub->ref_addr()->as_address_ptr()->index()->is_valid(), "illegal index");

    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, R0, noreg);

  /* ==== Invoke stub ==== */
  // Pass arguments via stack. The stack pointer will be bumped by the stub.
  __ std(ref, (intptr_t) -1 * BytesPerWord, R1_SP);
  __ std(ref_addr, (intptr_t) -2 * BytesPerWord, R1_SP);

  __ load_const_optimized(R0, stub->runtime_stub());
  __ call_stub(R0);

  // The runtime stub passes the result via the R0 register, overriding the previously-loaded stub address.
  __ mr_if_needed(ref, R0);
  __ b(*stub->continuation());

  __ block_comment("} c1_load_barrier_stub (zgc)");
}

#undef __
#define __ sasm->

// Code emitted by runtime code stub which in turn is emitted by ZBarrierSetC1::generate_c1_runtime_stubs.
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ block_comment("c1_load_barrier_runtime_stub (zgc) {");

  const int stack_parameters = 2;
  const int nbytes_save = (MacroAssembler::num_volatile_regs + stack_parameters) * BytesPerWord;

  __ save_volatile_gprs(R1_SP, -nbytes_save);
  __ save_LR_CR(R0);

  // Load arguments back again from the stack.
  __ ld(R3_ARG1, (intptr_t) -1 * BytesPerWord, R1_SP); // ref
  __ ld(R4_ARG2, (intptr_t) -2 * BytesPerWord, R1_SP); // ref_addr

  __ push_frame_reg_args(nbytes_save, R0);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators));

  __ verify_oop(R3_RET, "Bad pointer after barrier invocation");
  __ mr(R0, R3_RET);

  __ pop_frame();
  __ restore_LR_CR(R3_RET);
  __ restore_volatile_gprs(R1_SP, -nbytes_save);

  __ blr();

  __ block_comment("} c1_load_barrier_runtime_stub (zgc)");
}

#undef __
#endif // COMPILER1

#ifdef COMPILER2

OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) const {
  if (!OptoReg::is_reg(opto_reg)) {
    return OptoReg::Bad;
  }

  VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
  // 64-bit GPRs and FPRs span two OptoReg slots; only the first (even) slot maps to the register.
  if ((vm_reg->is_Register() || vm_reg->is_FloatRegister()) && (opto_reg & 1) != 0) {
    return OptoReg::Bad;
  }

  return opto_reg;
}

#define __ _masm->

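// Note: ZSaveLiveRegisters spills exactly the live registers recorded in the stub's RegMask.
// The encoding ranges checked below (R2..R12, F0..F13, VSR32..VSR51) are assumed to match the
// volatile (caller-saved) registers of the PPC64 ELF ABI; live non-volatile registers are
// preserved by the callee anyway and therefore need no explicit treatment here.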
class ZSaveLiveRegisters {
  MacroAssembler* _masm;
  RegMask _reg_mask;
  Register _result_reg;
  int _frame_size;

public:
  ZSaveLiveRegisters(MacroAssembler *masm, ZLoadBarrierStubC2 *stub)
    : _masm(masm), _reg_mask(stub->live()), _result_reg(stub->ref()) {

    const int register_save_size = iterate_over_register_mask(ACTION_COUNT_ONLY) * BytesPerWord;
    _frame_size = align_up(register_save_size, frame::alignment_in_bytes)
                  + frame::abi_reg_args_size;

    __ save_LR_CR(R0);
    __ push_frame(_frame_size, R0);

    iterate_over_register_mask(ACTION_SAVE, _frame_size);
  }

  ~ZSaveLiveRegisters() {
    iterate_over_register_mask(ACTION_RESTORE, _frame_size);

    __ addi(R1_SP, R1_SP, _frame_size);
    __ restore_LR_CR(R0);
  }

private:
  enum IterationAction : int {
    ACTION_SAVE,
    ACTION_RESTORE,
    ACTION_COUNT_ONLY
  };

  int iterate_over_register_mask(IterationAction action, int offset = 0) {
    int reg_save_index = 0;
    RegMaskIterator live_regs_iterator(_reg_mask);

    while (live_regs_iterator.has_next()) {
      const OptoReg::Name opto_reg = live_regs_iterator.next();

      // Filter out stack slots (spilled registers, i.e., stack-allocated registers).
      if (!OptoReg::is_reg(opto_reg)) {
        continue;
      }

      const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
      if (vm_reg->is_Register()) {
        Register std_reg = vm_reg->as_Register();

        // '_result_reg' will hold the end result of the operation. Its content must thus not be preserved.
        if (std_reg == _result_reg) {
          continue;
        }

        if (std_reg->encoding() >= R2->encoding() && std_reg->encoding() <= R12->encoding()) {
          reg_save_index++;

          if (action == ACTION_SAVE) {
            _masm->std(std_reg, offset - reg_save_index * BytesPerWord, R1_SP);
          } else if (action == ACTION_RESTORE) {
            _masm->ld(std_reg, offset - reg_save_index * BytesPerWord, R1_SP);
          } else {
            assert(action == ACTION_COUNT_ONLY, "Sanity");
          }
        }
      } else if (vm_reg->is_FloatRegister()) {
        FloatRegister fp_reg = vm_reg->as_FloatRegister();
        if (fp_reg->encoding() >= F0->encoding() && fp_reg->encoding() <= F13->encoding()) {
          reg_save_index++;

          if (action == ACTION_SAVE) {
            _masm->stfd(fp_reg, offset - reg_save_index * BytesPerWord, R1_SP);
          } else if (action == ACTION_RESTORE) {
            _masm->lfd(fp_reg, offset - reg_save_index * BytesPerWord, R1_SP);
          } else {
            assert(action == ACTION_COUNT_ONLY, "Sanity");
          }
        }
      } else if (vm_reg->is_ConditionRegister()) {
        // NOP. Condition registers are covered by save_LR_CR.
      } else if (vm_reg->is_VectorSRegister()) {
        assert(SuperwordUseVSX, "or should not reach here");
        VectorSRegister vs_reg = vm_reg->as_VectorSRegister();
        if (vs_reg->encoding() >= VSR32->encoding() && vs_reg->encoding() <= VSR51->encoding()) {
          reg_save_index += 2;

          Register spill_addr = R0;
          if (action == ACTION_SAVE) {
            _masm->addi(spill_addr, R1_SP, offset - reg_save_index * BytesPerWord);
            _masm->stxvd2x(vs_reg, spill_addr);
          } else if (action == ACTION_RESTORE) {
            _masm->addi(spill_addr, R1_SP, offset - reg_save_index * BytesPerWord);
            _masm->lxvd2x(vs_reg, spill_addr);
          } else {
            assert(action == ACTION_COUNT_ONLY, "Sanity");
          }
        }
      } else {
        if (vm_reg->is_SpecialRegister()) {
          fatal("Special registers are unsupported. Found register %s", vm_reg->name());
        } else {
          fatal("Register type is not known");
        }
      }
    }

    return reg_save_index;
  }
};

#undef __
#define __ _masm->

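// Note: ZSetupArguments establishes the calling convention expected by the slow path
// (reference in R3_ARG1, reference address in R4_ARG2). As in load_at above, the moves are
// ordered so that neither input is clobbered while still needed; the fully-swapped case is
// resolved via R0.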
class ZSetupArguments {
  MacroAssembler* const _masm;
  const Register _ref;
  const Address _ref_addr;

public:
  ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _ref(stub->ref()),
      _ref_addr(stub->ref_addr()) {

    // Desired register/argument configuration:
    // _ref:      R3_ARG1
    // _ref_addr: R4_ARG2

    // '_ref_addr' can be unspecified. In that case, the barrier will not heal the reference.
    if (_ref_addr.base() == noreg) {
      assert_different_registers(_ref, R0, noreg);

      __ mr_if_needed(R3_ARG1, _ref);
      __ li(R4_ARG2, 0);
    } else {
      assert_different_registers(_ref, _ref_addr.base(), R0, noreg);
      assert(!_ref_addr.index()->is_valid(), "reference addresses must not contain an index component");

      if (_ref != R4_ARG2) {
        // Calculate address first as the address' base register might clash with R4_ARG2
        __ add(R4_ARG2, (intptr_t) _ref_addr.disp(), _ref_addr.base());
        __ mr_if_needed(R3_ARG1, _ref);
      } else if (_ref_addr.base() != R3_ARG1) {
        __ mr(R3_ARG1, _ref);
        __ add(R4_ARG2, (intptr_t) _ref_addr.disp(), _ref_addr.base()); // Clobbering _ref
      } else {
        // Arguments are provided in inverse order (i.e. _ref == R4_ARG2, _ref_addr == R3_ARG1)
        __ mr(R0, _ref);
        __ add(R4_ARG2, (intptr_t) _ref_addr.disp(), _ref_addr.base());
        __ mr(R3_ARG1, R0);
      }
    }
  }
};

#undef __
#define __ masm->

void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
  __ block_comment("generate_c2_load_barrier_stub (zgc) {");

  __ bind(*stub->entry());

  Register ref = stub->ref();
  Address ref_addr = stub->ref_addr();

  assert_different_registers(ref, ref_addr.base());

  {
    ZSaveLiveRegisters save_live_registers(masm, stub);
    ZSetupArguments setup_arguments(masm, stub);

    __ call_VM_leaf(stub->slow_path());
    __ mr_if_needed(ref, R3_RET);
  }

  __ b(*stub->continuation());

  __ block_comment("} generate_c2_load_barrier_stub (zgc)");
}

#undef __
#endif // COMPILER2