Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mobile
Path: blob/master/src/hotspot/share/gc/shared/c2/barrierSetC2.hpp
40974 views
1
/*
 * Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
24
25
#ifndef SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
26
#define SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
27
28
#include "memory/allocation.hpp"
29
#include "oops/accessDecorators.hpp"
30
#include "opto/loopnode.hpp"
31
#include "opto/matcher.hpp"
32
#include "opto/memnode.hpp"
33
#include "utilities/globalDefinitions.hpp"
34
35
// C2-specific access decorators. These continue the bit numbering of the
// shared DecoratorSet defined in oops/accessDecorators.hpp, which is why
// each one is expressed as DECORATOR_LAST shifted left.

// This means the access is mismatched. This means the value of an access
// is not equivalent to the value pointed to by the address.
const DecoratorSet C2_MISMATCHED = DECORATOR_LAST << 1;
// The access may not be aligned to its natural size.
const DecoratorSet C2_UNALIGNED = DECORATOR_LAST << 2;
// The atomic cmpxchg is weak, meaning that spurious false negatives are allowed,
// but never false positives.
const DecoratorSet C2_WEAK_CMPXCHG = DECORATOR_LAST << 3;
// This denotes that a load has control dependency.
const DecoratorSet C2_CONTROL_DEPENDENT_LOAD = DECORATOR_LAST << 4;
// This denotes a load that must be pinned, but may float above safepoints.
const DecoratorSet C2_UNKNOWN_CONTROL_LOAD = DECORATOR_LAST << 5;
// This denotes that the access is produced from the sun.misc.Unsafe intrinsics.
const DecoratorSet C2_UNSAFE_ACCESS = DECORATOR_LAST << 6;
// This denotes that the access mutates state.
const DecoratorSet C2_WRITE_ACCESS = DECORATOR_LAST << 7;
// This denotes that the access reads state.
const DecoratorSet C2_READ_ACCESS = DECORATOR_LAST << 8;
// A nearby allocation?
// NOTE(review): presumably marks an access tied to a tightly coupled
// allocation — compare the tightly_coupled_alloc parameter of
// BarrierSetC2::array_copy_requires_gc_barriers(); confirm against callers.
const DecoratorSet C2_TIGHTLY_COUPLED_ALLOC = DECORATOR_LAST << 9;
// Loads and stores from an arraycopy being optimized
const DecoratorSet C2_ARRAY_COPY = DECORATOR_LAST << 10;
// Loads from immutable memory
const DecoratorSet C2_IMMUTABLE_MEMORY = DECORATOR_LAST << 11;
59
60
// Forward declarations: this header refers to these C2 compiler types only
// by pointer or reference, so their full definitions are not required here.
class Compile;
class ConnectionGraph;
class GraphKit;
class IdealKit;
class Node;
class PhaseGVN;
class PhaseIdealLoop;
class PhaseMacroExpand;
class Type;
class TypePtr;
class Unique_Node_List;
71
72
// This class wraps a node and a type.
// It is the value operand passed to BarrierSetC2::store_at and friends.
class C2AccessValue: public StackObj {
protected:
  Node* _node;        // the value node
  const Type* _type;  // the (C2) type of the value

public:
  C2AccessValue(Node* node, const Type* type) :
    _node(node),
    _type(type) {}

  Node* node() const { return _node; }
  const Type* type() const { return _type; }

  // Replace the wrapped node, e.g. after a barrier wraps the original value.
  void set_node(Node* node) { _node = node; }
};
88
89
// This class wraps a node and a pointer type.
class C2AccessValuePtr: public C2AccessValue {

public:
  // NOTE: Type and TypePtr are only forward declared in this header, so the
  // compiler cannot see their hierarchy relationship here; reinterpret_cast
  // is used (instead of an implicit conversion or static_cast, which both
  // require complete types) to shuttle the pointer through the Type* slot.
  C2AccessValuePtr(Node* node, const TypePtr* type) :
    C2AccessValue(node, reinterpret_cast<const Type*>(type)) {}

  // Safe by construction: _type was stored from a TypePtr* in the ctor above.
  const TypePtr* type() const { return reinterpret_cast<const TypePtr*>(_type); }
};
98
99
// This class wraps a bunch of context parameters that are passed around in the
// BarrierSetC2 backend hierarchy, for loads and stores, to reduce boiler plate.
class C2Access: public StackObj {
protected:
  DecoratorSet _decorators;  // access decorators (shared set plus the C2_* flags above)
  BasicType _type;           // basic type of the accessed value
  Node* _base;               // base node of the access
  C2AccessValuePtr& _addr;   // address of the access (node + pointer type)
  Node* _raw_access;         // the raw memory access node, once created
  uint8_t _barrier_data;     // GC-specific barrier data attached to the access

  // Adjusts the decorator set after construction (defined in the .cpp);
  // invoked from the concrete subclass constructors, not from here.
  void fixup_decorators();

public:
  C2Access(DecoratorSet decorators,
           BasicType type, Node* base, C2AccessValuePtr& addr) :
    _decorators(decorators),
    _type(type),
    _base(base),
    _addr(addr),
    _raw_access(NULL),
    _barrier_data(0)
  {}

  DecoratorSet decorators() const { return _decorators; }
  Node* base() const { return _base; }
  C2AccessValuePtr& addr() const { return _addr; }
  BasicType type() const { return _type; }
  bool is_oop() const { return is_reference_type(_type); }
  bool is_raw() const { return (_decorators & AS_RAW) != 0; }
  Node* raw_access() const { return _raw_access; }

  uint8_t barrier_data() const { return _barrier_data; }
  void set_barrier_data(uint8_t data) { _barrier_data = data; }

  void set_raw_access(Node* raw_access) { _raw_access = raw_access; }
  virtual void set_memory() {} // no-op for normal accesses, but not for atomic accesses.

  // Memory ordering for the raw memory node, derived from the decorators.
  MemNode::MemOrd mem_node_mo() const;
  bool needs_cpu_membar() const;

  virtual PhaseGVN& gvn() const = 0;
  virtual bool is_parse_access() const { return false; }
  virtual bool is_opt_access() const { return false; }
};
144
145
// C2Access for parse time calls to the BarrierSetC2 backend.
class C2ParseAccess: public C2Access {
protected:
  GraphKit* _kit;  // the GraphKit of the parse in progress

  // Returns the barrier set's per-compilation state (cf. create_barrier_state).
  void* barrier_set_state() const;

public:
  C2ParseAccess(GraphKit* kit, DecoratorSet decorators,
                BasicType type, Node* base, C2AccessValuePtr& addr) :
    C2Access(decorators, type, base, addr),
    _kit(kit) {
    // Concrete access object: complete the decorator set now.
    fixup_decorators();
  }

  GraphKit* kit() const { return _kit; }

  virtual PhaseGVN& gvn() const;
  virtual bool is_parse_access() const { return true; }
};
165
166
// This class wraps a bunch of context parameters that are passed around in the
// BarrierSetC2 backend hierarchy, for atomic accesses, to reduce boiler plate.
class C2AtomicParseAccess: public C2ParseAccess {
  Node* _memory;        // memory state for the atomic op; filled in by set_memory()
  uint _alias_idx;      // alias index of the accessed memory slice
  bool _needs_pinning;  // whether the atomic op must be pinned (cf. pin_atomic_op)

public:
  C2AtomicParseAccess(GraphKit* kit, DecoratorSet decorators, BasicType type,
                      Node* base, C2AccessValuePtr& addr, uint alias_idx) :
    C2ParseAccess(kit, decorators, type, base, addr),
    _memory(NULL),
    _alias_idx(alias_idx),
    _needs_pinning(true) {}

  // Set the memory node based on the current memory slice.
  virtual void set_memory();

  Node* memory() const { return _memory; }
  uint alias_idx() const { return _alias_idx; }
  bool needs_pinning() const { return _needs_pinning; }
};
188
189
// C2Access for optimization time calls to the BarrierSetC2 backend.
class C2OptAccess: public C2Access {
  PhaseGVN& _gvn;      // the GVN phase driving the optimization
  MergeMemNode* _mem;  // merged memory state at the access
  Node* _ctl;          // control input of the access

public:
  C2OptAccess(PhaseGVN& gvn, Node* ctl, MergeMemNode* mem, DecoratorSet decorators,
              BasicType type, Node* base, C2AccessValuePtr& addr) :
    C2Access(decorators, type, base, addr),
    _gvn(gvn), _mem(mem), _ctl(ctl) {
    // Concrete access object: complete the decorator set now.
    fixup_decorators();
  }

  MergeMemNode* mem() const { return _mem; }
  Node* ctl() const { return _ctl; }

  virtual PhaseGVN& gvn() const { return _gvn; }
  virtual bool is_opt_access() const { return true; }
};
209
210
211
// This is the top-level class for the backend of the Access API in C2.
212
// The top-level class is responsible for performing raw accesses. The
213
// various GC barrier sets inherit from the BarrierSetC2 class to sprinkle
214
// barriers into the accesses.
215
class BarrierSetC2: public CHeapObj<mtGC> {
216
protected:
217
virtual void resolve_address(C2Access& access) const;
218
virtual Node* store_at_resolved(C2Access& access, C2AccessValue& val) const;
219
virtual Node* load_at_resolved(C2Access& access, const Type* val_type) const;
220
221
virtual Node* atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
222
Node* new_val, const Type* val_type) const;
223
virtual Node* atomic_cmpxchg_bool_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
224
Node* new_val, const Type* value_type) const;
225
virtual Node* atomic_xchg_at_resolved(C2AtomicParseAccess& access, Node* new_val, const Type* val_type) const;
226
virtual Node* atomic_add_at_resolved(C2AtomicParseAccess& access, Node* new_val, const Type* val_type) const;
227
void pin_atomic_op(C2AtomicParseAccess& access) const;
228
229
public:
230
// This is the entry-point for the backend to perform accesses through the Access API.
231
virtual Node* store_at(C2Access& access, C2AccessValue& val) const;
232
virtual Node* load_at(C2Access& access, const Type* val_type) const;
233
234
virtual Node* atomic_cmpxchg_val_at(C2AtomicParseAccess& access, Node* expected_val,
235
Node* new_val, const Type* val_type) const;
236
virtual Node* atomic_cmpxchg_bool_at(C2AtomicParseAccess& access, Node* expected_val,
237
Node* new_val, const Type* val_type) const;
238
virtual Node* atomic_xchg_at(C2AtomicParseAccess& access, Node* new_val, const Type* value_type) const;
239
virtual Node* atomic_add_at(C2AtomicParseAccess& access, Node* new_val, const Type* value_type) const;
240
241
virtual void clone(GraphKit* kit, Node* src, Node* dst, Node* size, bool is_array) const;
242
243
virtual Node* obj_allocate(PhaseMacroExpand* macro, Node* mem, Node* toobig_false, Node* size_in_bytes,
244
Node*& i_o, Node*& needgc_ctrl,
245
Node*& fast_oop_ctrl, Node*& fast_oop_rawmem,
246
intx prefetch_lines) const;
247
248
virtual Node* ideal_node(PhaseGVN* phase, Node* n, bool can_reshape) const { return NULL; }
249
250
// These are general helper methods used by C2
251
enum ArrayCopyPhase {
252
Parsing,
253
Optimization,
254
Expansion
255
};
256
257
virtual bool array_copy_requires_gc_barriers(bool tightly_coupled_alloc, BasicType type, bool is_clone, bool is_clone_instance, ArrayCopyPhase phase) const { return false; }
258
virtual void clone_at_expansion(PhaseMacroExpand* phase, ArrayCopyNode* ac) const;
259
260
// Support for GC barriers emitted during parsing
261
virtual bool has_load_barrier_nodes() const { return false; }
262
virtual bool is_gc_barrier_node(Node* node) const { return false; }
263
virtual Node* step_over_gc_barrier(Node* c) const { return c; }
264
265
// Support for macro expanded GC barriers
266
virtual void register_potential_barrier_node(Node* node) const { }
267
virtual void unregister_potential_barrier_node(Node* node) const { }
268
virtual void eliminate_gc_barrier(PhaseMacroExpand* macro, Node* node) const { }
269
virtual void enqueue_useful_gc_barrier(PhaseIterGVN* igvn, Node* node) const {}
270
virtual void eliminate_useless_gc_barriers(Unique_Node_List &useful, Compile* C) const {}
271
272
// Allow barrier sets to have shared state that is preserved across a compilation unit.
273
// This could for example comprise macro nodes to be expanded during macro expansion.
274
virtual void* create_barrier_state(Arena* comp_arena) const { return NULL; }
275
// If the BarrierSetC2 state has barrier nodes in its compilation
276
// unit state to be expanded later, then now is the time to do so.
277
virtual bool expand_barriers(Compile* C, PhaseIterGVN& igvn) const { return false; }
278
virtual bool optimize_loops(PhaseIdealLoop* phase, LoopOptsMode mode, VectorSet& visited, Node_Stack& nstack, Node_List& worklist) const { return false; }
279
virtual bool strip_mined_loops_expanded(LoopOptsMode mode) const { return false; }
280
virtual bool is_gc_specific_loop_opts_pass(LoopOptsMode mode) const { return false; }
281
282
enum CompilePhase {
283
BeforeOptimize,
284
BeforeMacroExpand,
285
BeforeCodeGen
286
};
287
288
#ifdef ASSERT
289
virtual void verify_gc_barriers(Compile* compile, CompilePhase phase) const {}
290
#endif
291
292
virtual bool final_graph_reshaping(Compile* compile, Node* n, uint opcode) const { return false; }
293
294
virtual bool escape_add_to_con_graph(ConnectionGraph* conn_graph, PhaseGVN* gvn, Unique_Node_List* delayed_worklist, Node* n, uint opcode) const { return false; }
295
virtual bool escape_add_final_edges(ConnectionGraph* conn_graph, PhaseGVN* gvn, Node* n, uint opcode) const { return false; }
296
virtual bool escape_has_out_with_unsafe_object(Node* n) const { return false; }
297
298
virtual bool matcher_find_shared_post_visit(Matcher* matcher, Node* n, uint opcode) const { return false; };
299
virtual bool matcher_is_store_load_barrier(Node* x, uint xop) const { return false; }
300
301
virtual void late_barrier_analysis() const { }
302
virtual int estimate_stub_size() const { return 0; }
303
virtual void emit_stubs(CodeBuffer& cb) const { }
304
305
static int arraycopy_payload_base_offset(bool is_array);
306
};
307
308
#endif // SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
309
310