Path: src/hotspot/cpu/ppc/gc/shared/barrierSetNMethod_ppc.cpp
/*
 * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "code/codeBlob.hpp"
#include "code/nmethod.hpp"
#include "code/nativeInst.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "utilities/debug.hpp"

class NativeNMethodBarrier: public NativeInstruction {

  address get_barrier_start_address() const {
    return NativeInstruction::addr_at(0);
  }

  NativeMovRegMem* get_patchable_instruction_handle() const {
    // Endianness is handled by NativeMovRegMem
    return reinterpret_cast<NativeMovRegMem*>(get_barrier_start_address() + 3 * 4);
  }

public:
  int get_guard_value() const {
    // Retrieve the guard value (naming of 'offset' function is misleading).
    return get_patchable_instruction_handle()->offset();
  }

  void release_set_guard_value(int value) {
    // Patching is not atomic.
    // Stale observations of the "armed" state are okay as invoking the barrier stub in that case has no
    // unwanted side effects. Disarming is thus a non-critical operation.
    // The visibility of the "armed" state must be ensured by safepoint/handshake.

    OrderAccess::release(); // Release modified oops

    // Set the guard value (naming of 'offset' function is misleading).
    get_patchable_instruction_handle()->set_offset(value);
  }

  void verify() const {
    // Although it's possible to just validate the to-be-patched instruction,
    // all instructions are validated to ensure that the barrier is hit properly - especially since
    // the pattern used in load_const32 is a quite common one.

    uint* current_instruction = reinterpret_cast<uint*>(get_barrier_start_address());

    // calculate_address_from_global_toc (compound instruction)
    verify_op_code_manually(current_instruction, MacroAssembler::is_addis(*current_instruction));
    verify_op_code_manually(current_instruction, MacroAssembler::is_addi(*current_instruction));

    verify_op_code_manually(current_instruction, MacroAssembler::is_mtctr(*current_instruction));

    get_patchable_instruction_handle()->verify();
    current_instruction += 2;

    verify_op_code(current_instruction, Assembler::LWZ_OPCODE);

    // cmpw (mnemonic)
    verify_op_code(current_instruction, Assembler::CMP_OPCODE);

    // bnectrl (mnemonic) (weak check; not checking the exact type)
    verify_op_code(current_instruction, Assembler::BCCTR_OPCODE);

    verify_op_code(current_instruction, Assembler::ISYNC_OPCODE);
  }

private:
  static void verify_op_code_manually(uint*& current_instruction, bool result) {
    assert(result, "illegal instruction sequence for nmethod entry barrier");
    current_instruction++;
  }
  static void verify_op_code(uint*& current_instruction, uint expected,
                             unsigned int mask = 63u << Assembler::OPCODE_SHIFT) {
    // Masking both the current instruction and the opcode, as some opcodes in Assembler contain additional
    // information to uniquely identify simplified mnemonics.
    // As long as the caller doesn't provide a custom mask, that additional information is discarded.
    verify_op_code_manually(current_instruction, (*current_instruction & mask) == (expected & mask));
  }
};

static NativeNMethodBarrier* get_nmethod_barrier(nmethod* nm) {
  address barrier_address = nm->code_begin() + nm->frame_complete_offset() + (-9 * 4);

  auto barrier = reinterpret_cast<NativeNMethodBarrier*>(barrier_address);
  debug_only(barrier->verify());
  return barrier;
}

void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {
  // As PPC64 always has a valid back chain (unlike other platforms), the stub can simply pop the frame.
  // Thus, there's nothing to do here.
}

void BarrierSetNMethod::disarm(nmethod* nm) {
  if (!supports_entry_barrier(nm)) {
    return;
  }

  NativeNMethodBarrier* barrier = get_nmethod_barrier(nm);
  barrier->release_set_guard_value(disarmed_value());
}

bool BarrierSetNMethod::is_armed(nmethod* nm) {
  if (!supports_entry_barrier(nm)) {
    return false;
  }

  NativeNMethodBarrier* barrier = get_nmethod_barrier(nm);
  return barrier->get_guard_value() != disarmed_value();
}
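
// A reference sketch of the barrier layout implied by the code above. It is inferred solely from
// NativeNMethodBarrier::verify() and get_nmethod_barrier(); the sequence itself is emitted elsewhere
// (by the platform assembler code), so the concrete encoding may differ in detail from this listing.
//
//   +0:  addis    \  calculate_address_from_global_toc (compound instruction)
//   +4:  addi     /
//   +8:  mtctr
//   +12: load_const32, first word   } patchable guard value, accessed through
//   +16: load_const32, second word  } NativeMovRegMem at offset 3 * 4
//   +20: lwz
//   +24: cmpw
//   +28: bnectrl  (weak check; verify() only matches BCCTR_OPCODE)
//   +32: isync
//
// Nine 4-byte instructions end at frame_complete_offset(), which matches the (-9 * 4)
// displacement used in get_nmethod_barrier().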