// File: src/hotspot/share/gc/g1/c1/g1BarrierSetC1.cpp (OpenJDK HotSpot)
/*1* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#include "precompiled.hpp"25#include "c1/c1_LIRGenerator.hpp"26#include "c1/c1_CodeStubs.hpp"27#include "gc/g1/c1/g1BarrierSetC1.hpp"28#include "gc/g1/g1BarrierSet.hpp"29#include "gc/g1/g1BarrierSetAssembler.hpp"30#include "gc/g1/g1ThreadLocalData.hpp"31#include "gc/g1/heapRegion.hpp"32#include "utilities/macros.hpp"3334#ifdef ASSERT35#define __ gen->lir(__FILE__, __LINE__)->36#else37#define __ gen->lir()->38#endif3940void G1PreBarrierStub::emit_code(LIR_Assembler* ce) {41G1BarrierSetAssembler* bs = (G1BarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();42bs->gen_pre_barrier_stub(ce, this);43}4445void G1PostBarrierStub::emit_code(LIR_Assembler* ce) {46G1BarrierSetAssembler* bs = (G1BarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();47bs->gen_post_barrier_stub(ce, this);48}4950void G1BarrierSetC1::pre_barrier(LIRAccess& access, LIR_Opr addr_opr,51LIR_Opr pre_val, CodeEmitInfo* info) {52LIRGenerator* 
gen = access.gen();53DecoratorSet decorators = access.decorators();5455// First we test whether marking is in progress.56BasicType flag_type;57bool patch = (decorators & C1_NEEDS_PATCHING) != 0;58bool do_load = pre_val == LIR_OprFact::illegalOpr;59if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {60flag_type = T_INT;61} else {62guarantee(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1,63"Assumption");64// Use unsigned type T_BOOLEAN here rather than signed T_BYTE since some platforms, eg. ARM,65// need to use unsigned instructions to use the large offset to load the satb_mark_queue.66flag_type = T_BOOLEAN;67}68LIR_Opr thrd = gen->getThreadPointer();69LIR_Address* mark_active_flag_addr =70new LIR_Address(thrd,71in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset()),72flag_type);73// Read the marking-in-progress flag.74LIR_Opr flag_val = gen->new_register(T_INT);75__ load(mark_active_flag_addr, flag_val);76__ cmp(lir_cond_notEqual, flag_val, LIR_OprFact::intConst(0));7778LIR_PatchCode pre_val_patch_code = lir_patch_none;7980CodeStub* slow;8182if (do_load) {83assert(pre_val == LIR_OprFact::illegalOpr, "sanity");84assert(addr_opr != LIR_OprFact::illegalOpr, "sanity");8586if (patch)87pre_val_patch_code = lir_patch_normal;8889pre_val = gen->new_register(T_OBJECT);9091if (!addr_opr->is_address()) {92assert(addr_opr->is_register(), "must be");93addr_opr = LIR_OprFact::address(new LIR_Address(addr_opr, T_OBJECT));94}95slow = new G1PreBarrierStub(addr_opr, pre_val, pre_val_patch_code, info);96} else {97assert(addr_opr == LIR_OprFact::illegalOpr, "sanity");98assert(pre_val->is_register(), "must be");99assert(pre_val->type() == T_OBJECT, "must be an object");100assert(info == NULL, "sanity");101102slow = new G1PreBarrierStub(pre_val);103}104105__ branch(lir_cond_notEqual, slow);106__ branch_destination(slow->continuation());107}108109void G1BarrierSetC1::post_barrier(LIRAccess& access, LIR_OprDesc* addr, LIR_OprDesc* new_val) {110LIRGenerator* gen = 
access.gen();111DecoratorSet decorators = access.decorators();112bool in_heap = (decorators & IN_HEAP) != 0;113if (!in_heap) {114return;115}116117// If the "new_val" is a constant NULL, no barrier is necessary.118if (new_val->is_constant() &&119new_val->as_constant_ptr()->as_jobject() == NULL) return;120121if (!new_val->is_register()) {122LIR_Opr new_val_reg = gen->new_register(T_OBJECT);123if (new_val->is_constant()) {124__ move(new_val, new_val_reg);125} else {126__ leal(new_val, new_val_reg);127}128new_val = new_val_reg;129}130assert(new_val->is_register(), "must be a register at this point");131132if (addr->is_address()) {133LIR_Address* address = addr->as_address_ptr();134LIR_Opr ptr = gen->new_pointer_register();135if (!address->index()->is_valid() && address->disp() == 0) {136__ move(address->base(), ptr);137} else {138assert(address->disp() != max_jint, "lea doesn't support patched addresses!");139__ leal(addr, ptr);140}141addr = ptr;142}143assert(addr->is_register(), "must be a register at this point");144145LIR_Opr xor_res = gen->new_pointer_register();146LIR_Opr xor_shift_res = gen->new_pointer_register();147if (TwoOperandLIRForm) {148__ move(addr, xor_res);149__ logical_xor(xor_res, new_val, xor_res);150__ move(xor_res, xor_shift_res);151__ unsigned_shift_right(xor_shift_res,152LIR_OprFact::intConst(HeapRegion::LogOfHRGrainBytes),153xor_shift_res,154LIR_OprDesc::illegalOpr());155} else {156__ logical_xor(addr, new_val, xor_res);157__ unsigned_shift_right(xor_res,158LIR_OprFact::intConst(HeapRegion::LogOfHRGrainBytes),159xor_shift_res,160LIR_OprDesc::illegalOpr());161}162163if (!new_val->is_register()) {164LIR_Opr new_val_reg = gen->new_register(T_OBJECT);165__ leal(new_val, new_val_reg);166new_val = new_val_reg;167}168assert(new_val->is_register(), "must be a register at this point");169170__ cmp(lir_cond_notEqual, xor_shift_res, LIR_OprFact::intptrConst(NULL_WORD));171172CodeStub* slow = new G1PostBarrierStub(addr, new_val);173__ 
branch(lir_cond_notEqual, slow);174__ branch_destination(slow->continuation());175}176177void G1BarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {178DecoratorSet decorators = access.decorators();179bool is_weak = (decorators & ON_WEAK_OOP_REF) != 0;180bool is_phantom = (decorators & ON_PHANTOM_OOP_REF) != 0;181bool is_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;182LIRGenerator *gen = access.gen();183184BarrierSetC1::load_at_resolved(access, result);185186if (access.is_oop() && (is_weak || is_phantom || is_anonymous)) {187// Register the value in the referent field with the pre-barrier188LabelObj *Lcont_anonymous;189if (is_anonymous) {190Lcont_anonymous = new LabelObj();191generate_referent_check(access, Lcont_anonymous);192}193pre_barrier(access, LIR_OprFact::illegalOpr /* addr_opr */,194result /* pre_val */, access.patch_emit_info() /* info */);195if (is_anonymous) {196__ branch_destination(Lcont_anonymous->label());197}198}199}200201class C1G1PreBarrierCodeGenClosure : public StubAssemblerCodeGenClosure {202virtual OopMapSet* generate_code(StubAssembler* sasm) {203G1BarrierSetAssembler* bs = (G1BarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();204bs->generate_c1_pre_barrier_runtime_stub(sasm);205return NULL;206}207};208209class C1G1PostBarrierCodeGenClosure : public StubAssemblerCodeGenClosure {210virtual OopMapSet* generate_code(StubAssembler* sasm) {211G1BarrierSetAssembler* bs = (G1BarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();212bs->generate_c1_post_barrier_runtime_stub(sasm);213return NULL;214}215};216217void G1BarrierSetC1::generate_c1_runtime_stubs(BufferBlob* buffer_blob) {218C1G1PreBarrierCodeGenClosure pre_code_gen_cl;219C1G1PostBarrierCodeGenClosure post_code_gen_cl;220_pre_barrier_c1_runtime_code_blob = Runtime1::generate_blob(buffer_blob, -1, "g1_pre_barrier_slow",221false, &pre_code_gen_cl);222_post_barrier_c1_runtime_code_blob = Runtime1::generate_blob(buffer_blob, -1, 
"g1_post_barrier_slow",223false, &post_code_gen_cl);224}225226227