// File: src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp
/*1* Copyright (c) 2018, 2021, Oracle and/or its affiliates. All rights reserved.2* Copyright (c) 2018, 2021 SAP SE. All rights reserved.3* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.4*5* This code is free software; you can redistribute it and/or modify it6* under the terms of the GNU General Public License version 2 only, as7* published by the Free Software Foundation.8*9* This code is distributed in the hope that it will be useful, but WITHOUT10* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or11* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License12* version 2 for more details (a copy is included in the LICENSE file that13* accompanied this code).14*15* You should have received a copy of the GNU General Public License version16* 2 along with this work; if not, write to the Free Software Foundation,17* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.18*19* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA20* or visit www.oracle.com if you need additional information or have any21* questions.22*23*/2425#include "nativeInst_ppc.hpp"26#include "precompiled.hpp"27#include "asm/macroAssembler.inline.hpp"28#include "classfile/classLoaderData.hpp"29#include "gc/shared/barrierSetAssembler.hpp"30#include "gc/shared/barrierSetNMethod.hpp"31#include "interpreter/interp_masm.hpp"32#include "oops/compressedOops.hpp"33#include "runtime/jniHandles.hpp"34#include "runtime/sharedRuntime.hpp"35#include "runtime/stubRoutines.hpp"3637#define __ masm->3839void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,40Register base, RegisterOrConstant ind_or_offs, Register val,41Register tmp1, Register tmp2, Register tmp3,42MacroAssembler::PreservationLevel preservation_level) {43bool in_heap = (decorators & IN_HEAP) != 0;44bool in_native = (decorators & IN_NATIVE) != 0;45bool not_null = (decorators & IS_NOT_NULL) != 0;46assert(in_heap || in_native, 
"where?");47assert_different_registers(base, val, tmp1, tmp2, R0);4849switch (type) {50case T_ARRAY:51case T_OBJECT: {52if (UseCompressedOops && in_heap) {53Register co = tmp1;54if (val == noreg) {55__ li(co, 0);56} else {57co = not_null ? __ encode_heap_oop_not_null(tmp1, val) : __ encode_heap_oop(tmp1, val);58}59__ stw(co, ind_or_offs, base, tmp2);60} else {61if (val == noreg) {62val = tmp1;63__ li(val, 0);64}65__ std(val, ind_or_offs, base, tmp2);66}67break;68}69default: Unimplemented();70}71}7273void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,74Register base, RegisterOrConstant ind_or_offs, Register dst,75Register tmp1, Register tmp2,76MacroAssembler::PreservationLevel preservation_level, Label *L_handle_null) {77bool in_heap = (decorators & IN_HEAP) != 0;78bool in_native = (decorators & IN_NATIVE) != 0;79bool not_null = (decorators & IS_NOT_NULL) != 0;80assert(in_heap || in_native, "where?");81assert_different_registers(ind_or_offs.register_or_noreg(), dst, R0);8283switch (type) {84case T_ARRAY:85case T_OBJECT: {86if (UseCompressedOops && in_heap) {87if (L_handle_null != NULL) { // Label provided.88__ lwz(dst, ind_or_offs, base);89__ cmpwi(CCR0, dst, 0);90__ beq(CCR0, *L_handle_null);91__ decode_heap_oop_not_null(dst);92} else if (not_null) { // Guaranteed to be not null.93Register narrowOop = (tmp1 != noreg && CompressedOops::base_disjoint()) ? tmp1 : dst;94__ lwz(narrowOop, ind_or_offs, base);95__ decode_heap_oop_not_null(dst, narrowOop);96} else { // Any oop.97__ lwz(dst, ind_or_offs, base);98__ decode_heap_oop(dst);99}100} else {101__ ld(dst, ind_or_offs, base);102if (L_handle_null != NULL) {103__ cmpdi(CCR0, dst, 0);104__ beq(CCR0, *L_handle_null);105}106}107break;108}109default: Unimplemented();110}111}112113// Generic implementation. 
GCs can provide an optimized one.114void BarrierSetAssembler::resolve_jobject(MacroAssembler* masm, Register value,115Register tmp1, Register tmp2,116MacroAssembler::PreservationLevel preservation_level) {117Label done, not_weak, verify;118__ cmpdi(CCR0, value, 0);119__ beq(CCR0, done); // Use NULL as-is.120121__ andi_(tmp1, value, JNIHandles::weak_tag_mask);122__ beq(CCR0, not_weak); // Test for jweak tag.123124// Resolve (untagged) jobject.125__ clrrdi(value, value, JNIHandles::weak_tag_size);126load_at(masm, IN_NATIVE | ON_PHANTOM_OOP_REF, T_OBJECT,127value, (intptr_t)0, value, tmp1, tmp2, preservation_level);128__ b(verify);129130__ bind(not_weak);131load_at(masm, IN_NATIVE, T_OBJECT,132value, (intptr_t)0, value, tmp1, tmp2, preservation_level);133134__ bind(verify);135__ verify_oop(value, FILE_AND_LINE);136__ bind(done);137}138139void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register dst, Register jni_env,140Register obj, Register tmp, Label& slowpath) {141__ clrrdi(dst, obj, JNIHandles::weak_tag_size);142__ ld(dst, 0, dst); // Resolve (untagged) jobject.143}144145void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Register tmp) {146BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();147if (bs_nm == nullptr) {148return;149}150151assert_different_registers(tmp, R0);152153__ block_comment("nmethod_entry_barrier (nmethod_entry_barrier) {");154155// Load stub address using toc (fixed instruction size, unlike load_const_optimized)156__ calculate_address_from_global_toc(tmp, StubRoutines::ppc::nmethod_entry_barrier(),157true, true, false); // 2 instructions158__ mtctr(tmp);159160// This is a compound instruction. 
Patching support is provided by NativeMovRegMem.161// Actual patching is done in (platform-specific part of) BarrierSetNMethod.162__ load_const32(tmp, 0 /* Value is patched */); // 2 instructions163164__ lwz(R0, in_bytes(bs_nm->thread_disarmed_offset()), R16_thread);165__ cmpw(CCR0, R0, tmp);166167__ bnectrl(CCR0);168169// Oops may have been changed; exploiting isync semantics (used as acquire) to make those updates observable.170__ isync();171172__ block_comment("} nmethod_entry_barrier (nmethod_entry_barrier)");173}174175void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler *masm, Register tmp1, Register tmp2, Register tmp3) {176BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();177if (bs_nm == nullptr) {178return;179}180181assert_different_registers(tmp1, tmp2, tmp3);182183__ block_comment("c2i_entry_barrier (c2i_entry_barrier) {");184185Register tmp1_class_loader_data = tmp1;186187Label bad_call, skip_barrier;188189// Fast path: If no method is given, the call is definitely bad.190__ cmpdi(CCR0, R19_method, 0);191__ beq(CCR0, bad_call);192193// Load class loader data to determine whether the method's holder is concurrently unloading.194__ load_method_holder(tmp1, R19_method);195__ ld(tmp1_class_loader_data, in_bytes(InstanceKlass::class_loader_data_offset()), tmp1);196197// Fast path: If class loader is strong, the holder cannot be unloaded.198__ lwz(tmp2, in_bytes(ClassLoaderData::keep_alive_offset()), tmp1_class_loader_data);199__ cmpdi(CCR0, tmp2, 0);200__ bne(CCR0, skip_barrier);201202// Class loader is weak. 
Determine whether the holder is still alive.203__ ld(tmp2, in_bytes(ClassLoaderData::holder_offset()), tmp1_class_loader_data);204__ resolve_weak_handle(tmp2, tmp1, tmp3, MacroAssembler::PreservationLevel::PRESERVATION_FRAME_LR_GP_FP_REGS);205__ cmpdi(CCR0, tmp2, 0);206__ bne(CCR0, skip_barrier);207208__ bind(bad_call);209210__ calculate_address_from_global_toc(tmp1, SharedRuntime::get_handle_wrong_method_stub(), true, true, false);211__ mtctr(tmp1);212__ bctr();213214__ bind(skip_barrier);215216__ block_comment("} c2i_entry_barrier (c2i_entry_barrier)");217}218219220