Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/openjdk-multiarch-jdk8u
Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/cpu/ppc/vm/compiledIC_ppc.cpp
32285 views
1
/*
2
* Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
3
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4
*
5
* This code is free software; you can redistribute it and/or modify it
6
* under the terms of the GNU General Public License version 2 only, as
7
* published by the Free Software Foundation.
8
*
9
* This code is distributed in the hope that it will be useful, but WITHOUT
10
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12
* version 2 for more details (a copy is included in the LICENSE file that
13
* accompanied this code).
14
*
15
* You should have received a copy of the GNU General Public License version
16
* 2 along with this work; if not, write to the Free Software Foundation,
17
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18
*
19
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20
* or visit www.oracle.com if you need additional information or have any
21
* questions.
22
*
23
*/
24
25
#include "precompiled.hpp"
26
#include "asm/macroAssembler.inline.hpp"
27
#include "code/compiledIC.hpp"
28
#include "code/icBuffer.hpp"
29
#include "code/nmethod.hpp"
30
#include "memory/resourceArea.hpp"
31
#include "runtime/mutexLocker.hpp"
32
#include "runtime/safepoint.hpp"
33
#ifdef COMPILER2
34
#include "opto/matcher.hpp"
35
#endif
36
37
// Release the CompiledICHolder* associated with this call site if there is one.
38
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // The call site might have become stale, so inspect it carefully
  // before touching anything.
  NativeCall* site_call = nativeCall_at(call_site->addr());
  if (!is_icholder_entry(site_call->destination())) {
    return;  // Nothing to release at this site.
  }
  // The cached-value slot is a constant load holding the CompiledICHolder*;
  // hand it to the InlineCacheBuffer for deferred release.
  NativeMovConstReg* cached_load = nativeMovConstReg_at(call_site->cached_value());
  InlineCacheBuffer::queue_for_release((CompiledICHolder*)cached_load->data());
}
46
47
bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // The call site might have become stale, so inspect it carefully:
  // decode the call instruction and classify its current destination.
  return is_icholder_entry(nativeCall_at(call_site->addr())->destination());
}
52
53
// ----------------------------------------------------------------------------
54
55
// A PPC CompiledStaticCall looks like this:
56
//
57
// >>>> consts
58
//
59
// [call target1]
60
// [IC cache]
61
// [call target2]
62
//
63
// <<<< consts
64
// >>>> insts
65
//
66
// bl offset16 -+ -+ ??? // How many bits available?
67
// | |
68
// <<<< insts | |
69
// >>>> stubs | |
70
// | |- trampoline_stub_Reloc
71
// trampoline stub: | <-+
72
// r2 = toc |
73
// r2 = [r2 + offset] | // Load call target1 from const section
74
// mtctr r2 |
75
// bctr |- static_stub_Reloc
76
// comp_to_interp_stub: <---+
77
// r1 = toc
78
// ICreg = [r1 + IC_offset] // Load IC from const section
79
// r1 = [r1 + offset] // Load call target2 from const section
80
// mtctr r1
81
// bctr
82
//
83
// <<<< stubs
84
//
85
// The call instruction in the code either
86
// - branches directly to a compiled method if offset encodable in instruction
87
// - branches to the trampoline stub if offset to compiled method not encodable
88
// - branches to the compiled_to_interp stub if target interpreted
89
//
90
// Further there are three relocations from the loads to the constants in
91
// the constant section.
92
//
93
// Usage of r1 and r2 in the stubs allows to distinguish them.
94
95
// Byte offset of the IC-load instruction (a NativeMovConstReg) from the start
// of a java_to_interp stub. Presumably 2 instructions emitted ahead of it (the
// TOC calculation in emit_to_interp_stub) * 4 bytes each -- TODO confirm
// against calculate_address_from_global_toc's emitted length.
const int IC_pos_in_java_to_interp_stub = 8;
96
#define __ _masm.
// Emit the compiled-to-interpreter stub for the static call whose call
// instruction is at the code buffer's current insts_mark. Returns the stub's
// start address, or NULL if the code cache is full. COMPILER2-only.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Get the mark within main instrs section which is set to the address of the call.
  address call_addr = cbuf.insts_mark();

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Start the stub.
  address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
  if (stub == NULL) {
    return NULL; // CodeCache is full
  }

  // For java_to_interp stubs we use R11_scratch1 as scratch register
  // and in call trampoline stubs we use R12_scratch2. This way we
  // can distinguish them (see is_NativeCallTrampolineStub_at()).
  Register reg_scratch = R11_scratch1;

  // Create a static stub relocation which relates this stub
  // with the call instruction at insts_call_instruction_offset in the
  // instructions code-section.
  __ relocate(static_stub_Relocation::spec(call_addr));
  const int stub_start_offset = __ offset();

  // Now, create the stub's code:
  // - load the TOC
  // - load the inline cache oop from the constant pool
  // - load the call target from the constant pool
  // - call
  __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
  // Allocate a constant-pool slot for the IC metadata; NULL now, patched by
  // set_to_interpreted() later.
  AddressLiteral ic = __ allocate_metadata_address((Metadata *)NULL);
  __ load_const_from_method_toc(as_Register(Matcher::inline_cache_reg_encode()), ic, reg_scratch);

  if (ReoptimizeCallSequences) {
    // Patchable direct branch; (address)-1 marks the destination as not yet set.
    __ b64_patchable((address)-1, relocInfo::none);
  } else {
    // Indirect branch through CTR; target loaded from the constant pool, also
    // initialized to the (address)-1 "unset" sentinel.
    AddressLiteral a((address)-1);
    __ load_const_from_method_toc(reg_scratch, a, reg_scratch);
    __ mtctr(reg_scratch);
    __ bctr();
  }

  // FIXME: Assert that the stub can be identified and patched.

  // Java_to_interp_stub_size should be good.
  assert((__ offset() - stub_start_offset) <= CompiledStaticCall::to_interp_stub_size(),
         "should be good size");
  assert(!is_NativeCallTrampolineStub_at(__ addr_at(stub_start_offset)),
         "must not confuse java_to_interp with trampoline stubs");

  // End the stub.
  __ end_a_stub();
  return stub;
#else
  // Not reachable in builds without C2.
  // NOTE(review): no return value on this path -- presumably relies on
  // ShouldNotReachHere() aborting the VM; confirm no compiler warning.
  ShouldNotReachHere();
#endif
}
#undef __
157
158
// Size of java_to_interp stub, this doesn't need to be accurate but it must
159
// be larger or equal to the real size of the stub.
160
// Used for optimization in Compile::Shorten_branches.
161
int CompiledStaticCall::to_interp_stub_size() {
162
return 12 * BytesPerInstWord;
163
}
164
165
// Relocation entries for call stub, compiled java to interpreter.
166
// Used for optimization in Compile::Shorten_branches.
167
int CompiledStaticCall::reloc_to_interp_stub() {
  // Relocation-entry budget for the compiled-java-to-interpreter call stub,
  // consumed by Compile::Shorten_branches.
  const int reloc_entry_count = 5;
  return reloc_entry_count;
}
170
171
// Retarget this static call at an interpreted callee: patch the stub's
// method-holder constant and jump destination, then atomically redirect the
// call instruction to the stub. The store order (stub data first, call
// destination last) is what keeps concurrent executors safe.
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // The stub must either still be clean (0 / (address)-1 sentinels from
  // set_stub_to_clean or initial emission) or already hold the values we are
  // about to write; anything else means someone raced us.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call: only now is the call itself redirected to the stub,
  // after the stub's contents are fully in place.
  set_destination_mt_safe(stub);
}
198
199
// Reset the java_to_interp stub to its clean state: zero the method-holder
// constant and point the jump at the (address)-1 "unset" sentinel. Must only
// run under the CompiledIC_lock or at a safepoint.
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
  // Sentinel values matching the assertions in set_to_interpreted().
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}
210
211
//-----------------------------------------------------------------------------
212
// Non-product mode code
213
#ifndef PRODUCT
214
215
void CompiledStaticCall::verify() {
216
// Verify call.
217
NativeCall::verify();
218
if (os::is_MP()) {
219
verify_alignment();
220
}
221
222
// Verify stub.
223
address stub = find_stub();
224
assert(stub != NULL, "no stub found for static call");
225
// Creation also verifies the object.
226
NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
227
NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
228
229
// Verify state.
230
assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
231
}
232
233
#endif // !PRODUCT
234
235