GitHub Repository: PojavLauncherTeam/mobile
Path: blob/master/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp
/*
 * Copyright (c) 2018, 2021, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2018, 2021 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "nativeInst_ppc.hpp"
27
#include "precompiled.hpp"
28
#include "asm/macroAssembler.inline.hpp"
29
#include "classfile/classLoaderData.hpp"
30
#include "gc/shared/barrierSetAssembler.hpp"
31
#include "gc/shared/barrierSetNMethod.hpp"
32
#include "interpreter/interp_masm.hpp"
33
#include "oops/compressedOops.hpp"
34
#include "runtime/jniHandles.hpp"
35
#include "runtime/sharedRuntime.hpp"
36
#include "runtime/stubRoutines.hpp"
37
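// Convenience shortcut used throughout this file: '__ foo()' expands to 'masm->foo()'.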
#define __ masm->

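// Stores 'val' of the given BasicType at 'base + ind_or_offs', honoring the access
// decorators. For heap stores with compressed oops enabled, the oop is encoded into
// 'tmp1' and written as a 32-bit narrow oop (stw); otherwise a full 64-bit store (std)
// is emitted. A 'val' of noreg stores a NULL/zero reference.
// Note: GC-specific BarrierSetAssembler subclasses typically layer their pre-/post-write
// barriers on top of this base implementation.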
void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Register base, RegisterOrConstant ind_or_offs, Register val,
                                   Register tmp1, Register tmp2, Register tmp3,
                                   MacroAssembler::PreservationLevel preservation_level) {
  bool in_heap = (decorators & IN_HEAP) != 0;
  bool in_native = (decorators & IN_NATIVE) != 0;
  bool not_null = (decorators & IS_NOT_NULL) != 0;
  assert(in_heap || in_native, "where?");
  assert_different_registers(base, val, tmp1, tmp2, R0);

  switch (type) {
  case T_ARRAY:
  case T_OBJECT: {
    if (UseCompressedOops && in_heap) {
      Register co = tmp1;
      if (val == noreg) {
        __ li(co, 0);
      } else {
        co = not_null ? __ encode_heap_oop_not_null(tmp1, val) : __ encode_heap_oop(tmp1, val);
      }
      __ stw(co, ind_or_offs, base, tmp2);
    } else {
      if (val == noreg) {
        val = tmp1;
        __ li(val, 0);
      }
      __ std(val, ind_or_offs, base, tmp2);
    }
    break;
  }
  default: Unimplemented();
  }
}

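// Loads a value of the given BasicType from 'base + ind_or_offs' into 'dst'.
// With compressed oops in the heap, the narrow oop is loaded (lwz) and decoded;
// otherwise a full 64-bit load (ld) is used. If 'L_handle_null' is provided,
// control branches there when the loaded reference is NULL instead of decoding it.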
void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                  Register base, RegisterOrConstant ind_or_offs, Register dst,
                                  Register tmp1, Register tmp2,
                                  MacroAssembler::PreservationLevel preservation_level, Label *L_handle_null) {
  bool in_heap = (decorators & IN_HEAP) != 0;
  bool in_native = (decorators & IN_NATIVE) != 0;
  bool not_null = (decorators & IS_NOT_NULL) != 0;
  assert(in_heap || in_native, "where?");
  assert_different_registers(ind_or_offs.register_or_noreg(), dst, R0);

  switch (type) {
  case T_ARRAY:
  case T_OBJECT: {
    if (UseCompressedOops && in_heap) {
      if (L_handle_null != NULL) { // Label provided.
        __ lwz(dst, ind_or_offs, base);
        __ cmpwi(CCR0, dst, 0);
        __ beq(CCR0, *L_handle_null);
        __ decode_heap_oop_not_null(dst);
      } else if (not_null) { // Guaranteed to be not null.
        Register narrowOop = (tmp1 != noreg && CompressedOops::base_disjoint()) ? tmp1 : dst;
        __ lwz(narrowOop, ind_or_offs, base);
        __ decode_heap_oop_not_null(dst, narrowOop);
      } else { // Any oop.
        __ lwz(dst, ind_or_offs, base);
        __ decode_heap_oop(dst);
      }
    } else {
      __ ld(dst, ind_or_offs, base);
      if (L_handle_null != NULL) {
        __ cmpdi(CCR0, dst, 0);
        __ beq(CCR0, *L_handle_null);
      }
    }
    break;
  }
  default: Unimplemented();
  }
}

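// Resolves the jobject in 'value' to an oop, in place. A NULL jobject passes through
// unchanged; otherwise the low tag bits are cleared (clrrdi) and the handle is
// dereferenced. Note: the base implementation treats weak and strong handles alike;
// GC-specific subclasses may refine the weak-handle case.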
void BarrierSetAssembler::resolve_jobject(MacroAssembler* masm, Register value,
                                          Register tmp1, Register tmp2,
                                          MacroAssembler::PreservationLevel preservation_level) {
  Label done;
  __ cmpdi(CCR0, value, 0);
  __ beq(CCR0, done); // Use NULL as-is.

  __ clrrdi(tmp1, value, JNIHandles::weak_tag_size);
  __ ld(value, 0, tmp1); // Resolve (untagged) jobject.

  __ verify_oop(value, FILE_AND_LINE);
  __ bind(done);
}

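// Variant of resolve_jobject used from native wrappers. The base implementation resolves
// the handle unconditionally and never branches to 'slowpath'; the 'jni_env', 'tmp' and
// 'slowpath' parameters appear to exist for GC-specific overrides.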
void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register dst, Register jni_env,
                                                        Register obj, Register tmp, Label& slowpath) {
  __ clrrdi(dst, obj, JNIHandles::weak_tag_size);
  __ ld(dst, 0, dst); // Resolve (untagged) jobject.
}

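// Emits the nmethod entry barrier: a patchable expected value is materialized with
// load_const32 and compared against the per-thread "disarmed" value; if they differ,
// the nmethod_entry_barrier stub (address loaded via the TOC, reached through CTR) is
// called. The trailing isync provides the acquire semantics needed to observe oop
// updates made while the barrier was armed.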
void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Register tmp) {
  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs_nm == nullptr) {
    return;
  }

  assert_different_registers(tmp, R0);

  // Load stub address using toc (fixed instruction size, unlike load_const_optimized)
  __ calculate_address_from_global_toc(tmp, StubRoutines::ppc::nmethod_entry_barrier(),
                                       true, true, false); // 2 instructions
  __ mtctr(tmp);

  // This is a compound instruction. Patching support is provided by NativeMovRegMem.
  // Actual patching is done in (platform-specific part of) BarrierSetNMethod.
  __ load_const32(tmp, 0 /* Value is patched */); // 2 instructions

  __ lwz(R0, in_bytes(bs_nm->thread_disarmed_offset()), R16_thread);
  __ cmpw(CCR0, R0, tmp);

  __ bnectrl(CCR0);

  // Oops may have been changed; exploiting isync semantics (used as acquire) to make those updates observable.
  __ isync();
}

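// Entry barrier for c2i adapters: guards against calling into a method whose holder's
// class loader data is concurrently unloading. Calls with no method, or whose weak class
// loader holder is no longer alive, are redirected to the handle_wrong_method stub;
// strong class loaders and live holders skip the barrier.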
void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler *masm, Register tmp1, Register tmp2, Register tmp3) {
  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs_nm == nullptr) {
    return;
  }

  assert_different_registers(tmp1, tmp2, tmp3);

  Register tmp1_class_loader_data = tmp1;

  Label bad_call, skip_barrier;

  // Fast path: If no method is given, the call is definitely bad.
  __ cmpdi(CCR0, R19_method, 0);
  __ beq(CCR0, bad_call);

  // Load class loader data to determine whether the method's holder is concurrently unloading.
  __ load_method_holder(tmp1, R19_method);
  __ ld(tmp1_class_loader_data, in_bytes(InstanceKlass::class_loader_data_offset()), tmp1);

  // Fast path: If class loader is strong, the holder cannot be unloaded.
  __ ld(tmp2, in_bytes(ClassLoaderData::keep_alive_offset()), tmp1_class_loader_data);
  __ cmpdi(CCR0, tmp2, 0);
  __ bne(CCR0, skip_barrier);

  // Class loader is weak. Determine whether the holder is still alive.
  __ ld(tmp2, in_bytes(ClassLoaderData::holder_offset()), tmp1_class_loader_data);
  __ resolve_weak_handle(tmp2, tmp1, tmp3, MacroAssembler::PreservationLevel::PRESERVATION_FRAME_LR_GP_FP_REGS);
  __ cmpdi(CCR0, tmp2, 0);
  __ bne(CCR0, skip_barrier);

  __ bind(bad_call);

  __ calculate_address_from_global_toc(tmp1, SharedRuntime::get_handle_wrong_method_stub(), true, true, false);
  __ mtctr(tmp1);
  __ bctr();

  __ bind(skip_barrier);
}