GitHub Repository: PojavLauncherTeam/jdk17u
Path: blob/master/src/hotspot/share/code/dependencies.cpp
/*
 * Copyright (c) 2005, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciKlass.hpp"
#include "ci/ciMethod.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/vmClasses.hpp"
#include "code/dependencies.hpp"
#include "compiler/compileLog.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileTask.hpp"
#include "memory/resourceArea.hpp"
#include "oops/klass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "runtime/flags/flagSetting.hpp"
#include "runtime/handles.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/perfData.hpp"
#include "runtime/thread.inline.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"


#ifdef ASSERT
static bool must_be_in_vm() {
  Thread* thread = Thread::current();
  if (thread->is_Java_thread()) {
    return thread->as_Java_thread()->thread_state() == _thread_in_vm;
  } else {
    return true; // Could be VMThread or GC thread
  }
}
#endif //ASSERT

void Dependencies::initialize(ciEnv* env) {
  Arena* arena = env->arena();
  _oop_recorder = env->oop_recorder();
  _log = env->log();
  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
#if INCLUDE_JVMCI
  _using_dep_values = false;
#endif
  DEBUG_ONLY(_deps[end_marker] = NULL);
  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
    _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  }
  _content_bytes = NULL;
  _size_in_bytes = (size_t)-1;

  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
}

void Dependencies::assert_evol_method(ciMethod* m) {
  assert_common_1(evol_method, m);
}

void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  if (ctxk->is_array_klass()) {
    // As a special case, support this assertion on an array type,
    // which reduces to an assertion on its element type.
    // Note that this cannot be done with assertions that
    // relate to concreteness or abstractness.
    ciType* elemt = ctxk->as_array_klass()->base_element_type();
    if (!elemt->is_instance_klass()) return; // Ex: int[][]
    ctxk = elemt->as_instance_klass();
    //if (ctxk->is_final()) return; // Ex: String[][]
  }
  check_ctxk(ctxk);
  assert_common_1(leaf_type, ctxk);
}

void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  check_ctxk_abstract(ctxk);
  assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
}

void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  assert_common_2(unique_concrete_method_2, ctxk, uniqm);
}

void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm, ciKlass* resolved_klass, ciMethod* resolved_method) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  if (UseVtableBasedCHA) {
    assert_common_4(unique_concrete_method_4, ctxk, uniqm, resolved_klass, resolved_method);
  } else {
    assert_common_2(unique_concrete_method_2, ctxk, uniqm);
  }
}

void Dependencies::assert_unique_implementor(ciInstanceKlass* ctxk, ciInstanceKlass* uniqk) {
  check_ctxk(ctxk);
  check_unique_implementor(ctxk, uniqk);
  assert_common_2(unique_implementor, ctxk, uniqk);
}

void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
  check_ctxk(ctxk);
  assert_common_1(no_finalizable_subclasses, ctxk);
}

void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
  assert_common_2(call_site_target_value, call_site, method_handle);
}
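
// Usage sketch (hypothetical caller; receiver_type and cha_unique_target are placeholder
// names, not taken from this file): a JIT compiler that devirtualizes a call on the
// strength of class hierarchy analysis is expected to record the matching assertion on
// the compilation's Dependencies object before the nmethod is installed, e.g.:
//
//   Dependencies* deps = ...;              // owned by the current compilation
//   ciInstanceKlass* ctxk = receiver_type; // context class observed by CHA
//   ciMethod* uniqm = cha_unique_target;   // the single concrete target found
//   deps->assert_unique_concrete_method(ctxk, uniqm);
//
// If a class loaded later introduces another concrete target, the recorded dependency
// is found to be violated and the nmethod is deoptimized.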

#if INCLUDE_JVMCI

Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
  _oop_recorder = oop_recorder;
  _log = log;
  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  _using_dep_values = true;
  DEBUG_ONLY(_dep_values[end_marker] = NULL);
  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
    _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
  }
  _content_bytes = NULL;
  _size_in_bytes = (size_t)-1;

  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
}

void Dependencies::assert_evol_method(Method* m) {
  assert_common_1(evol_method, DepValue(_oop_recorder, m));
}

void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
  check_ctxk(ctxk);
  assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
}

void Dependencies::assert_leaf_type(Klass* ctxk) {
  if (ctxk->is_array_klass()) {
    // As a special case, support this assertion on an array type,
    // which reduces to an assertion on its element type.
    // Note that this cannot be done with assertions that
    // relate to concreteness or abstractness.
    BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
    if (is_java_primitive(elemt)) return; // Ex: int[][]
    ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
    //if (ctxk->is_final()) return; // Ex: String[][]
  }
  check_ctxk(ctxk);
  assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
}

void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
  check_ctxk_abstract(ctxk);
  DepValue ctxk_dv(_oop_recorder, ctxk);
  DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
  assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
}

void Dependencies::assert_unique_implementor(InstanceKlass* ctxk, InstanceKlass* uniqk) {
  check_ctxk(ctxk);
  assert(ctxk->is_interface(), "not an interface");
  assert(ctxk->implementor() == uniqk, "not a unique implementor");
  assert_common_2(unique_implementor, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqk));
}

void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  assert_common_2(unique_concrete_method_2, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
}

void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
  assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
}

#endif // INCLUDE_JVMCI


// Helper function. If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1. If there is
// one, the existing entry can absorb the new assertion (possibly after
// widening its recorded context class to ctxk2) and nothing new is appended.
bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
                                    int ctxk_i, ciKlass* ctxk2) {
  ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
  if (ctxk2->is_subtype_of(ctxk1)) {
    return true; // success, and no need to change
  } else if (ctxk1->is_subtype_of(ctxk2)) {
    // new context class fully subsumes previous one
    deps->at_put(ctxk_i, ctxk2);
    return true;
  } else {
    return false;
  }
}

void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
  assert(dep_args(dept) == 1, "sanity");
  log_dependency(dept, x);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x)) {
    assert(deps->find(x) >= 0, "sanity");
  } else {
    deps->append(x);
  }
}

void Dependencies::assert_common_2(DepType dept,
                                   ciBaseObject* x0, ciBaseObject* x1) {
  assert(dep_args(dept) == 2, "sanity");
  log_dependency(dept, x0, x1);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  bool has_ctxk = has_explicit_context_arg(dept);
  if (has_ctxk) {
    assert(dep_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y1 = deps->at(i+1);
        if (x1 == y1) { // same subject; check the context
          if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
            return;
          }
        }
      }
    }
  } else {
    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y0 = deps->at(i+0);
        ciBaseObject* y1 = deps->at(i+1);
        if (x0 == y0 && x1 == y1) {
          return;
        }
      }
    }
  }

  // append the assertion in the correct bucket:
  deps->append(x0);
  deps->append(x1);
}

void Dependencies::assert_common_4(DepType dept,
                                   ciKlass* ctxk, ciBaseObject* x1, ciBaseObject* x2, ciBaseObject* x3) {
  assert(has_explicit_context_arg(dept), "sanity");
  assert(dep_context_arg(dept) == 0, "sanity");
  assert(dep_args(dept) == 4, "sanity");
  log_dependency(dept, ctxk, x1, x2, x3);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x1) && note_dep_seen(dept, x2) && note_dep_seen(dept, x3)) {
    // look in this bucket for redundant assertions
    const int stride = 4;
    for (int i = deps->length(); (i -= stride) >= 0; ) {
      ciBaseObject* y1 = deps->at(i+1);
      ciBaseObject* y2 = deps->at(i+2);
      ciBaseObject* y3 = deps->at(i+3);
      if (x1 == y1 && x2 == y2 && x3 == y3) { // same subjects; check the context
        if (maybe_merge_ctxk(deps, i+0, ctxk)) {
          return;
        }
      }
    }
  }
  // append the assertion in the correct bucket:
  deps->append(ctxk);
  deps->append(x1);
  deps->append(x2);
  deps->append(x3);
}

#if INCLUDE_JVMCI
bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
                                    int ctxk_i, DepValue ctxk2_dv) {
  Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
  Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
  if (ctxk2->is_subtype_of(ctxk1)) {
    return true; // success, and no need to change
  } else if (ctxk1->is_subtype_of(ctxk2)) {
    // new context class fully subsumes previous one
    deps->at_put(ctxk_i, ctxk2_dv);
    return true;
  } else {
    return false;
  }
}

void Dependencies::assert_common_1(DepType dept, DepValue x) {
  assert(dep_args(dept) == 1, "sanity");
  //log_dependency(dept, x);
  GrowableArray<DepValue>* deps = _dep_values[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x)) {
    assert(deps->find(x) >= 0, "sanity");
  } else {
    deps->append(x);
  }
}

void Dependencies::assert_common_2(DepType dept,
                                   DepValue x0, DepValue x1) {
  assert(dep_args(dept) == 2, "sanity");
  //log_dependency(dept, x0, x1);
  GrowableArray<DepValue>* deps = _dep_values[dept];

  // see if the same (or a similar) dep is already recorded
  bool has_ctxk = has_explicit_context_arg(dept);
  if (has_ctxk) {
    assert(dep_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        DepValue y1 = deps->at(i+1);
        if (x1 == y1) { // same subject; check the context
          if (maybe_merge_ctxk(deps, i+0, x0)) {
            return;
          }
        }
      }
    }
  } else {
    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        DepValue y0 = deps->at(i+0);
        DepValue y1 = deps->at(i+1);
        if (x0 == y0 && x1 == y1) {
          return;
        }
      }
    }
  }

  // append the assertion in the correct bucket:
  deps->append(x0);
  deps->append(x1);
}
#endif // INCLUDE_JVMCI

/// Support for encoding dependencies into an nmethod:

void Dependencies::copy_to(nmethod* nm) {
  address beg = nm->dependencies_begin();
  address end = nm->dependencies_end();
  guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
  Copy::disjoint_words((HeapWord*) content_bytes(),
                       (HeapWord*) beg,
                       size_in_bytes() / sizeof(HeapWord));
  assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
}

static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
  for (int i = 0; i < narg; i++) {
    int diff = p1[i]->ident() - p2[i]->ident();
    if (diff != 0) return diff;
  }
  return 0;
}
static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 1); }
static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 2); }
static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 3); }
static int sort_dep_arg_4(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 4); }

#if INCLUDE_JVMCI
// metadata deps are sorted before object deps
static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
  for (int i = 0; i < narg; i++) {
    int diff = p1[i].sort_key() - p2[i].sort_key();
    if (diff != 0) return diff;
  }
  return 0;
}
static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
{ return sort_dep_value(p1, p2, 1); }
static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
{ return sort_dep_value(p1, p2, 2); }
static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
{ return sort_dep_value(p1, p2, 3); }
#endif // INCLUDE_JVMCI

void Dependencies::sort_all_deps() {
#if INCLUDE_JVMCI
  if (_using_dep_values) {
    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
      DepType dept = (DepType)deptv;
      GrowableArray<DepValue>* deps = _dep_values[dept];
      if (deps->length() <= 1) continue;
      switch (dep_args(dept)) {
      case 1: deps->sort(sort_dep_value_arg_1, 1); break;
      case 2: deps->sort(sort_dep_value_arg_2, 2); break;
      case 3: deps->sort(sort_dep_value_arg_3, 3); break;
      default: ShouldNotReachHere(); break;
      }
    }
    return;
  }
#endif // INCLUDE_JVMCI
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() <= 1) continue;
    switch (dep_args(dept)) {
    case 1: deps->sort(sort_dep_arg_1, 1); break;
    case 2: deps->sort(sort_dep_arg_2, 2); break;
    case 3: deps->sort(sort_dep_arg_3, 3); break;
    case 4: deps->sort(sort_dep_arg_4, 4); break;
    default: ShouldNotReachHere(); break;
    }
  }
}

size_t Dependencies::estimate_size_in_bytes() {
  size_t est_size = 100;
#if INCLUDE_JVMCI
  if (_using_dep_values) {
    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
      DepType dept = (DepType)deptv;
      GrowableArray<DepValue>* deps = _dep_values[dept];
      est_size += deps->length() * 2; // tags and argument(s)
    }
    return est_size;
  }
#endif // INCLUDE_JVMCI
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    est_size += deps->length()*2; // tags and argument(s)
  }
  return est_size;
}

ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
  switch (dept) {
  case unique_concrete_method_2:
  case unique_concrete_method_4:
    return x->as_metadata()->as_method()->holder();
  default:
    return NULL; // let NULL be NULL
  }
}

Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
  assert(must_be_in_vm(), "raw oops here");
  switch (dept) {
  case unique_concrete_method_2:
  case unique_concrete_method_4:
    assert(x->is_method(), "sanity");
    return ((Method*)x)->method_holder();
  default:
    return NULL; // let NULL be NULL
  }
}

void Dependencies::encode_content_bytes() {
  sort_all_deps();

  // cast is safe, no deps can overflow INT_MAX
  CompressedWriteStream bytes((int)estimate_size_in_bytes());

#if INCLUDE_JVMCI
  if (_using_dep_values) {
    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
      DepType dept = (DepType)deptv;
      GrowableArray<DepValue>* deps = _dep_values[dept];
      if (deps->length() == 0) continue;
      int stride = dep_args(dept);
      int ctxkj = dep_context_arg(dept); // -1 if no context arg
      assert(stride > 0, "sanity");
      for (int i = 0; i < deps->length(); i += stride) {
        jbyte code_byte = (jbyte)dept;
        int skipj = -1;
        if (ctxkj >= 0 && ctxkj+1 < stride) {
          Klass* ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
          DepValue x = deps->at(i+ctxkj+1); // following argument
          if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
            skipj = ctxkj; // we win: maybe one less oop to keep track of
            code_byte |= default_context_type_bit;
          }
        }
        bytes.write_byte(code_byte);
        for (int j = 0; j < stride; j++) {
          if (j == skipj) continue;
          DepValue v = deps->at(i+j);
          int idx = v.index();
          bytes.write_int(idx);
        }
      }
    }
  } else {
#endif // INCLUDE_JVMCI
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() == 0) continue;
    int stride = dep_args(dept);
    int ctxkj = dep_context_arg(dept); // -1 if no context arg
    assert(stride > 0, "sanity");
    for (int i = 0; i < deps->length(); i += stride) {
      jbyte code_byte = (jbyte)dept;
      int skipj = -1;
      if (ctxkj >= 0 && ctxkj+1 < stride) {
        ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
        ciBaseObject* x = deps->at(i+ctxkj+1); // following argument
        if (ctxk == ctxk_encoded_as_null(dept, x)) {
          skipj = ctxkj; // we win: maybe one less oop to keep track of
          code_byte |= default_context_type_bit;
        }
      }
      bytes.write_byte(code_byte);
      for (int j = 0; j < stride; j++) {
        if (j == skipj) continue;
        ciBaseObject* v = deps->at(i+j);
        int idx;
        if (v->is_object()) {
          idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
        } else {
          ciMetadata* meta = v->as_metadata();
          idx = _oop_recorder->find_index(meta->constant_encoding());
        }
        bytes.write_int(idx);
      }
    }
  }
#if INCLUDE_JVMCI
  }
#endif

  // write a sentinel byte to mark the end
  bytes.write_byte(end_marker);

  // round it out to a word boundary
  while (bytes.position() % sizeof(HeapWord) != 0) {
    bytes.write_byte(end_marker);
  }

  // check whether the dept byte encoding really works
  assert((jbyte)default_context_type_bit != 0, "byte overflow");

  _content_bytes = bytes.buffer();
  _size_in_bytes = bytes.position();
}
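
// Summary of the layout produced above: each recorded assertion is emitted as one
// tag byte -- its DepType, OR'ed with default_context_type_bit when the context
// klass is implied by the following argument and therefore skipped -- followed by
// one compressed int per remaining argument holding its OopRecorder index.
// Schematically:
//
//   [ tag | arg_index ... ] [ tag | arg_index ... ] ... end_marker (end_marker padding)
//
// The stream ends with an end_marker byte and is padded with further end_marker
// bytes up to a HeapWord boundary, which is what DepStream::next() relies on when
// decoding the section back out of an nmethod.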


const char* Dependencies::_dep_name[TYPE_LIMIT] = {
  "end_marker",
  "evol_method",
  "leaf_type",
  "abstract_with_unique_concrete_subtype",
  "unique_concrete_method_2",
  "unique_concrete_method_4",
  "unique_implementor",
  "no_finalizable_subclasses",
  "call_site_target_value"
};

int Dependencies::_dep_args[TYPE_LIMIT] = {
  -1,// end_marker
  1, // evol_method m
  1, // leaf_type ctxk
  2, // abstract_with_unique_concrete_subtype ctxk, k
  2, // unique_concrete_method_2 ctxk, m
  4, // unique_concrete_method_4 ctxk, m, resolved_klass, resolved_method
  2, // unique_implementor ctxk, implementor
  1, // no_finalizable_subclasses ctxk
  2  // call_site_target_value call_site, method_handle
};

const char* Dependencies::dep_name(Dependencies::DepType dept) {
  if (!dept_in_mask(dept, all_types)) return "?bad-dep?";
  return _dep_name[dept];
}

int Dependencies::dep_args(Dependencies::DepType dept) {
  if (!dept_in_mask(dept, all_types)) return -1;
  return _dep_args[dept];
}

void Dependencies::check_valid_dependency_type(DepType dept) {
  guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
}

Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, char** failure_detail) {
  int klass_violations = 0;
  DepType result = end_marker;
  for (Dependencies::DepStream deps(this); deps.next(); ) {
    Klass* witness = deps.check_dependency();
    if (witness != NULL) {
      if (klass_violations == 0) {
        result = deps.type();
        if (failure_detail != NULL && klass_violations == 0) {
          // Use a fixed size buffer to prevent the string stream from
          // resizing in the context of an inner resource mark.
          char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN);
          stringStream st(buffer, O_BUFLEN);
          deps.print_dependency(witness, true, &st);
          *failure_detail = st.as_string();
        }
      }
      klass_violations++;
      if (xtty == NULL) {
        // If we're not logging then a single violation is sufficient,
        // otherwise we want to log all the dependences which were
        // violated.
        break;
      }
    }
  }

  return result;
}

// for the sake of the compiler log, print out current dependencies:
void Dependencies::log_all_dependencies() {
  if (log() == NULL) return;
  ResourceMark rm;
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    int deplen = deps->length();
    if (deplen == 0) {
      continue;
    }
    int stride = dep_args(dept);
    GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
    for (int i = 0; i < deps->length(); i += stride) {
      for (int j = 0; j < stride; j++) {
        // flush out the identities before printing
        ciargs->push(deps->at(i+j));
      }
      write_dependency_to(log(), dept, ciargs);
      ciargs->clear();
    }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
  }
}

void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       GrowableArray<DepArgument>* args,
                                       Klass* witness) {
  if (log == NULL) {
    return;
  }
  ResourceMark rm;
  ciEnv* env = ciEnv::current();
  GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
  for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
    DepArgument arg = *it;
    if (arg.is_oop()) {
      ciargs->push(env->get_object(arg.oop_value()));
    } else {
      ciargs->push(env->get_metadata(arg.metadata_value()));
    }
  }
  int argslen = ciargs->length();
  Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
}

void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       GrowableArray<ciBaseObject*>* args,
                                       Klass* witness) {
  if (log == NULL) {
    return;
  }
  ResourceMark rm;
  GrowableArray<int>* argids = new GrowableArray<int>(args->length());
  for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
    ciBaseObject* obj = *it;
    if (obj->is_object()) {
      argids->push(log->identify(obj->as_object()));
    } else {
      argids->push(log->identify(obj->as_metadata()));
    }
  }
  if (witness != NULL) {
    log->begin_elem("dependency_failed");
  } else {
    log->begin_elem("dependency");
  }
  log->print(" type='%s'", dep_name(dept));
  const int ctxkj = dep_context_arg(dept); // -1 if no context arg
  if (ctxkj >= 0 && ctxkj < argids->length()) {
    log->print(" ctxk='%d'", argids->at(ctxkj));
  }
  // write remaining arguments, if any.
  for (int j = 0; j < argids->length(); j++) {
    if (j == ctxkj) continue; // already logged
    if (j == 1) {
      log->print( " x='%d'", argids->at(j));
    } else {
      log->print(" x%d='%d'", j, argids->at(j));
    }
  }
  if (witness != NULL) {
    log->object("witness", witness);
    log->stamp();
  }
  log->end_elem();
}

void Dependencies::write_dependency_to(xmlStream* xtty,
                                       DepType dept,
                                       GrowableArray<DepArgument>* args,
                                       Klass* witness) {
  if (xtty == NULL) {
    return;
  }
  Thread* thread = Thread::current();
  HandleMark rm(thread);
  ttyLocker ttyl;
  int ctxkj = dep_context_arg(dept); // -1 if no context arg
  if (witness != NULL) {
    xtty->begin_elem("dependency_failed");
  } else {
    xtty->begin_elem("dependency");
  }
  xtty->print(" type='%s'", dep_name(dept));
  if (ctxkj >= 0) {
    xtty->object("ctxk", args->at(ctxkj).metadata_value());
  }
  // write remaining arguments, if any.
  for (int j = 0; j < args->length(); j++) {
    if (j == ctxkj) continue; // already logged
    DepArgument arg = args->at(j);
    if (j == 1) {
      if (arg.is_oop()) {
        xtty->object("x", Handle(thread, arg.oop_value()));
      } else {
        xtty->object("x", arg.metadata_value());
      }
    } else {
      char xn[12]; sprintf(xn, "x%d", j);
      if (arg.is_oop()) {
        xtty->object(xn, Handle(thread, arg.oop_value()));
      } else {
        xtty->object(xn, arg.metadata_value());
      }
    }
  }
  if (witness != NULL) {
    xtty->object("witness", witness);
    xtty->stamp();
  }
  xtty->end_elem();
}

void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
                                    Klass* witness, outputStream* st) {
  ResourceMark rm;
  ttyLocker ttyl; // keep the following output all in one block
  st->print_cr("%s of type %s",
               (witness == NULL)? "Dependency": "Failed dependency",
               dep_name(dept));
  // print arguments
  int ctxkj = dep_context_arg(dept); // -1 if no context arg
  for (int j = 0; j < args->length(); j++) {
    DepArgument arg = args->at(j);
    bool put_star = false;
    if (arg.is_null()) continue;
    const char* what;
    if (j == ctxkj) {
      assert(arg.is_metadata(), "must be");
      what = "context";
      put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
    } else if (arg.is_method()) {
      what = "method ";
      put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
    } else if (arg.is_klass()) {
      what = "class ";
    } else {
      what = "object ";
    }
    st->print(" %s = %s", what, (put_star? "*": ""));
    if (arg.is_klass()) {
      st->print("%s", ((Klass*)arg.metadata_value())->external_name());
    } else if (arg.is_method()) {
      ((Method*)arg.metadata_value())->print_value_on(st);
    } else if (arg.is_oop()) {
      arg.oop_value()->print_value_on(st);
    } else {
      ShouldNotReachHere(); // Provide impl for this type.
    }

    st->cr();
  }
  if (witness != NULL) {
    bool put_star = !Dependencies::is_concrete_klass(witness);
    st->print_cr(" witness = %s%s",
                 (put_star? "*": ""),
                 witness->external_name());
  }
}

void Dependencies::DepStream::log_dependency(Klass* witness) {
  if (_deps == NULL && xtty == NULL) return; // fast cutout for runtime
  ResourceMark rm;
  const int nargs = argument_count();
  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  for (int j = 0; j < nargs; j++) {
    if (is_oop_argument(j)) {
      args->push(argument_oop(j));
    } else {
      args->push(argument(j));
    }
  }
  int argslen = args->length();
  if (_deps != NULL && _deps->log() != NULL) {
    if (ciEnv::current() != NULL) {
      Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
    } else {
      // Treat the CompileLog as an xmlstream instead
      Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
    }
  } else {
    Dependencies::write_dependency_to(xtty, type(), args, witness);
  }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
}

void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
  ResourceMark rm;
  int nargs = argument_count();
  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  for (int j = 0; j < nargs; j++) {
    if (is_oop_argument(j)) {
      args->push(argument_oop(j));
    } else {
      args->push(argument(j));
    }
  }
  int argslen = args->length();
  Dependencies::print_dependency(type(), args, witness, st);
  if (verbose) {
    if (_code != NULL) {
      st->print(" code: ");
      _code->print_value_on(st);
      st->cr();
    }
  }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
}


/// Dependency stream support (decodes dependencies from an nmethod):

#ifdef ASSERT
void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
  assert(must_be_in_vm(), "raw oops here");
  _byte_limit = byte_limit;
  _type = (DepType)(end_marker-1); // defeat "already at end" assert
  assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
}
#endif //ASSERT

bool Dependencies::DepStream::next() {
  assert(_type != end_marker, "already at end");
  if (_bytes.position() == 0 && _code != NULL
      && _code->dependencies_size() == 0) {
    // Method has no dependencies at all.
    return false;
  }
  int code_byte = (_bytes.read_byte() & 0xFF);
  if (code_byte == end_marker) {
    DEBUG_ONLY(_type = end_marker);
    return false;
  } else {
    int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
    code_byte -= ctxk_bit;
    DepType dept = (DepType)code_byte;
    _type = dept;
    Dependencies::check_valid_dependency_type(dept);
    int stride = _dep_args[dept];
    assert(stride == dep_args(dept), "sanity");
    int skipj = -1;
    if (ctxk_bit != 0) {
      skipj = 0; // currently the only context argument is at zero
      assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
    }
    for (int j = 0; j < stride; j++) {
      _xi[j] = (j == skipj)? 0: _bytes.read_int();
    }
    DEBUG_ONLY(_xi[stride] = -1); // help detect overruns
    return true;
  }
}
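
// Typical decoding loop (sketch; it mirrors validate_dependencies() above, with 'nm'
// standing for the nmethod whose dependency section is being walked):
//
//   for (Dependencies::DepStream deps(nm); deps.next(); ) {
//     Klass* witness = deps.check_dependency();
//     if (witness != NULL) {
//       // the assertion named by deps.type() no longer holds; nm must be invalidated
//     }
//   }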

inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
  Metadata* o = NULL;
  if (_code != NULL) {
    o = _code->metadata_at(i);
  } else {
    o = _deps->oop_recorder()->metadata_at(i);
  }
  return o;
}

inline oop Dependencies::DepStream::recorded_oop_at(int i) {
  return (_code != NULL)
         ? _code->oop_at(i)
         : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
}

Metadata* Dependencies::DepStream::argument(int i) {
  Metadata* result = recorded_metadata_at(argument_index(i));

  if (result == NULL) { // Explicit context argument can be compressed
    int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
    if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
      result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
    }
  }

  assert(result == NULL || result->is_klass() || result->is_method(), "must be");
  return result;
}

/**
 * Returns a unique identifier for each dependency argument.
 */
uintptr_t Dependencies::DepStream::get_identifier(int i) {
  if (is_oop_argument(i)) {
    return (uintptr_t)(oopDesc*)argument_oop(i);
  } else {
    return (uintptr_t)argument(i);
  }
}

oop Dependencies::DepStream::argument_oop(int i) {
  oop result = recorded_oop_at(argument_index(i));
  assert(oopDesc::is_oop_or_null(result), "must be");
  return result;
}

InstanceKlass* Dependencies::DepStream::context_type() {
  assert(must_be_in_vm(), "raw oops here");

  // Most dependencies have an explicit context type argument.
  {
    int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
    if (ctxkj >= 0) {
      Metadata* k = argument(ctxkj);
      assert(k != NULL && k->is_klass(), "type check");
      return InstanceKlass::cast((Klass*)k);
    }
  }

  // Some dependencies are using the klass of the first object
  // argument as implicit context type.
  {
    int ctxkj = dep_implicit_context_arg(type());
    if (ctxkj >= 0) {
      Klass* k = argument_oop(ctxkj)->klass();
      assert(k != NULL, "type check");
      return InstanceKlass::cast(k);
    }
  }

  // And some dependencies don't have a context type at all,
  // e.g. evol_method.
  return NULL;
}

// ----------------- DependencySignature --------------------------------------
bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
  if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
    return false;
  }

  for (int i = 0; i < s1.args_count(); i++) {
    if (s1.arg(i) != s2.arg(i)) {
      return false;
    }
  }
  return true;
}

/// Checking dependencies

// This hierarchy walker inspects subtypes of a given type, trying to find a "bad" class which breaks a dependency.
// Such a class is called a "witness" to the broken dependency.
// While searching around, we ignore "participants", which are already known to the dependency.
class AbstractClassHierarchyWalker {
 public:
  enum { PARTICIPANT_LIMIT = 3 };

 private:
  // if non-zero, tells how many witnesses to convert to participants
  uint _record_witnesses;

  // special classes which are not allowed to be witnesses:
  Klass* _participants[PARTICIPANT_LIMIT+1];
  uint _num_participants;

#ifdef ASSERT
  uint _nof_requests; // one-shot walker
#endif // ASSERT

  static PerfCounter* _perf_find_witness_anywhere_calls_count;
  static PerfCounter* _perf_find_witness_anywhere_steps_count;
  static PerfCounter* _perf_find_witness_in_calls_count;

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes) = 0;
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type) = 0;

  AbstractClassHierarchyWalker(Klass* participant) : _record_witnesses(0), _num_participants(0)
#ifdef ASSERT
  , _nof_requests(0)
#endif // ASSERT
  {
    for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) {
      _participants[i] = NULL;
    }
    if (participant != NULL) {
      add_participant(participant);
    }
  }

  bool is_participant(Klass* k) {
    for (uint i = 0; i < _num_participants; i++) {
      if (_participants[i] == k) {
        return true;
      }
    }
    return false;
  }

  bool record_witness(Klass* witness) {
    if (_record_witnesses > 0) {
      --_record_witnesses;
      add_participant(witness);
      return false; // not a witness
    } else {
      return true; // is a witness
    }
  }

  class CountingClassHierarchyIterator : public ClassHierarchyIterator {
   private:
    jlong _nof_steps;
   public:
    CountingClassHierarchyIterator(InstanceKlass* root) : ClassHierarchyIterator(root), _nof_steps(0) {}

    void next() {
      _nof_steps++;
      ClassHierarchyIterator::next();
    }

    ~CountingClassHierarchyIterator() {
      if (UsePerfData) {
        _perf_find_witness_anywhere_steps_count->inc(_nof_steps);
      }
    }
  };

 public:
  uint num_participants() { return _num_participants; }
  Klass* participant(uint n) {
    assert(n <= _num_participants, "oob");
    if (n < _num_participants) {
      return _participants[n];
    } else {
      return NULL;
    }
  }

  void add_participant(Klass* participant) {
    assert(!is_participant(participant), "sanity");
    assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
    uint np = _num_participants++;
    _participants[np] = participant;
  }

  void record_witnesses(uint add) {
    if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT;
    assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
    _record_witnesses = add;
  }

  Klass* find_witness(InstanceKlass* context_type, KlassDepChange* changes = NULL);

  static void init();
  static void print_statistics();
};
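
// Worked example (illustrative): suppose an assertion claims that abstract class A has
// the unique concrete subtype B. A walker is seeded with participant B and searched
// under context type A; any concrete class it reaches that is not B (and is not hidden
// beneath B) is a witness that breaks the assertion. With record_witnesses(1), the
// first such class is instead converted into a second participant, which is the
// mechanism behind queries that tolerate more than one expected type (up to
// PARTICIPANT_LIMIT).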

PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_calls_count = NULL;
PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_steps_count = NULL;
PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_in_calls_count = NULL;

void AbstractClassHierarchyWalker::init() {
  if (UsePerfData) {
    EXCEPTION_MARK;
    _perf_find_witness_anywhere_calls_count =
        PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhere", PerfData::U_Events, CHECK);
    _perf_find_witness_anywhere_steps_count =
        PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhereSteps", PerfData::U_Events, CHECK);
    _perf_find_witness_in_calls_count =
        PerfDataManager::create_counter(SUN_CI, "findWitnessIn", PerfData::U_Events, CHECK);
  }
}

Klass* AbstractClassHierarchyWalker::find_witness(InstanceKlass* context_type, KlassDepChange* changes) {
  // Current thread must be in VM (not native mode, as in CI):
  assert(must_be_in_vm(), "raw oops here");
  // Must not move the class hierarchy during this check:
  assert_locked_or_safepoint(Compile_lock);
  assert(_nof_requests++ == 0, "repeated requests are not supported");

  assert(changes == NULL || changes->involves_context(context_type), "irrelevant dependency");

  // (Note: Interfaces do not have subclasses.)
  // If it is an interface, search its direct implementors.
  // (Their subclasses are additional indirect implementors. See InstanceKlass::add_implementor().)
  if (context_type->is_interface()) {
    int nof_impls = context_type->nof_implementors();
    if (nof_impls == 0) {
      return NULL; // no implementors
    } else if (nof_impls == 1) { // unique implementor
      assert(context_type != context_type->implementor(), "not unique");
      context_type = InstanceKlass::cast(context_type->implementor());
    } else { // nof_impls >= 2
      // Avoid this case: *I.m > { A.m, C }; B.m > C
      // Here, I.m has 2 concrete implementations, but m appears unique
      // as A.m, because the search misses B.m when checking C.
      // The inherited method B.m was getting missed by the walker
      // when interface 'I' was the starting point.
      // %%% Until this is fixed more systematically, bail out.
      return context_type;
    }
  }
  assert(!context_type->is_interface(), "no interfaces allowed");

  if (changes != NULL) {
    if (UsePerfData) {
      _perf_find_witness_in_calls_count->inc();
    }
    return find_witness_in(*changes);
  } else {
    if (UsePerfData) {
      _perf_find_witness_anywhere_calls_count->inc();
    }
    return find_witness_anywhere(context_type);
  }
}

class ConcreteSubtypeFinder : public AbstractClassHierarchyWalker {
 private:
  bool is_witness(Klass* k);

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes);
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type);

 public:
  ConcreteSubtypeFinder(Klass* participant = NULL) : AbstractClassHierarchyWalker(participant) {}
};

bool ConcreteSubtypeFinder::is_witness(Klass* k) {
  if (Dependencies::is_concrete_klass(k)) {
    return record_witness(k); // concrete subtype
  } else {
    return false; // not a concrete class
  }
}

Klass* ConcreteSubtypeFinder::find_witness_in(KlassDepChange& changes) {
  // When looking for unexpected concrete types, do not look beneath expected ones:
  // * CX > CC > C' is OK, even if C' is new.
  // * CX > { CC, C' } is not OK if C' is new, and C' is the witness.
  Klass* new_type = changes.as_new_klass_change()->new_type();
  assert(!is_participant(new_type), "only old classes are participants");
  // If the new type is a subtype of a participant, we are done.
  for (uint i = 0; i < num_participants(); i++) {
    if (changes.involves_context(participant(i))) {
      // new guy is protected from this check by previous participant
      return NULL;
    }
  }
  if (is_witness(new_type)) {
    return new_type;
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

Klass* ConcreteSubtypeFinder::find_witness_anywhere(InstanceKlass* context_type) {
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    // Do not report participant types.
    if (is_participant(sub)) {
      // Don't walk beneath a participant since it hides witnesses.
      iter.skip_subclasses();
    } else if (is_witness(sub)) {
      return sub; // found a witness
    }
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

class ConcreteMethodFinder : public AbstractClassHierarchyWalker {
 private:
  Symbol* _name;
  Symbol* _signature;

  // cache of method lookups
  Method* _found_methods[PARTICIPANT_LIMIT+1];

  bool is_witness(Klass* k);

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes);
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type);

 public:
  bool witnessed_reabstraction_in_supers(Klass* k);

  ConcreteMethodFinder(Method* m, Klass* participant = NULL) : AbstractClassHierarchyWalker(participant) {
    assert(m != NULL && m->is_method(), "sanity");
    _name = m->name();
    _signature = m->signature();

    for (int i = 0; i < PARTICIPANT_LIMIT+1; i++) {
      _found_methods[i] = NULL;
    }
  }

  // Note: If n==num_participants, returns NULL.
  Method* found_method(uint n) {
    assert(n <= num_participants(), "oob");
    Method* fm = _found_methods[n];
    assert(n == num_participants() || fm != NULL, "proper usage");
    if (fm != NULL && fm->method_holder() != participant(n)) {
      // Default methods from interfaces can be added to classes. In
      // that case the holder of the method is not the class but the
      // interface where it's defined.
      assert(fm->is_default_method(), "sanity");
      return NULL;
    }
    return fm;
  }

  void add_participant(Klass* participant) {
    AbstractClassHierarchyWalker::add_participant(participant);
    _found_methods[num_participants()] = NULL;
  }

  bool record_witness(Klass* witness, Method* m) {
    _found_methods[num_participants()] = m;
    return AbstractClassHierarchyWalker::record_witness(witness);
  }

 private:
  static PerfCounter* _perf_find_witness_anywhere_calls_count;
  static PerfCounter* _perf_find_witness_anywhere_steps_count;
  static PerfCounter* _perf_find_witness_in_calls_count;

 public:
  static void init();
  static void print_statistics();
};

bool ConcreteMethodFinder::is_witness(Klass* k) {
  if (is_participant(k)) {
    return false; // do not report participant types
  }
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    // Search class hierarchy first, skipping private implementations
    // as they never override any inherited methods
    Method* m = ik->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
    if (Dependencies::is_concrete_method(m, ik)) {
      return record_witness(k, m); // concrete method found
    } else {
      // Check for re-abstraction of method
      if (!ik->is_interface() && m != NULL && m->is_abstract()) {
        // Found a matching abstract method 'm' in the class hierarchy.
        // This is fine iff 'k' is an abstract class and all concrete subtypes
        // of 'k' override 'm' and are participants of the current search.
        ConcreteSubtypeFinder wf;
        for (uint i = 0; i < num_participants(); i++) {
          Klass* p = participant(i);
          wf.add_participant(p);
        }
        Klass* w = wf.find_witness(ik);
        if (w != NULL) {
          Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
          if (!Dependencies::is_concrete_method(wm, w)) {
            // Found a concrete subtype 'w' which does not override abstract method 'm'.
            // Bail out because 'm' could be called with 'w' as receiver (leading to an
            // AbstractMethodError) and thus the method we are looking for is not unique.
            return record_witness(k, m);
          }
        }
      }
      // Check interface defaults also, if any exist.
      Array<Method*>* default_methods = ik->default_methods();
      if (default_methods != NULL) {
        Method* dm = ik->find_method(default_methods, _name, _signature);
        if (Dependencies::is_concrete_method(dm, NULL)) {
          return record_witness(k, dm); // default method found
        }
      }
      return false; // no concrete method found
    }
  } else {
    return false; // no methods to find in an array type
  }
}

Klass* ConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
  // When looking for unexpected concrete methods, look beneath expected ones, to see if there are overrides.
  // * CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
  Klass* new_type = changes.as_new_klass_change()->new_type();
  assert(!is_participant(new_type), "only old classes are participants");
  if (is_witness(new_type)) {
    return new_type;
  } else {
    // No witness found, but is_witness() doesn't detect method re-abstraction in case of spot-checking.
    if (witnessed_reabstraction_in_supers(new_type)) {
      return new_type;
    }
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

bool ConcreteMethodFinder::witnessed_reabstraction_in_supers(Klass* k) {
  if (!k->is_instance_klass()) {
    return false; // no methods to find in an array type
  } else {
    // Looking for a case when an abstract method is inherited into a concrete class.
    if (Dependencies::is_concrete_klass(k) && !k->is_interface()) {
      Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
      if (m != NULL) {
        return false; // no reabstraction possible: local method found
      }
      for (InstanceKlass* super = k->java_super(); super != NULL; super = super->java_super()) {
        m = super->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
        if (m != NULL) { // inherited method found
          if (m->is_abstract() || m->is_overpass()) {
            return record_witness(super, m); // abstract method found
          }
          return false;
        }
      }
      // Miranda.
      return true;
    }
    return false;
  }
}


Klass* ConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
  // Walk hierarchy under a context type, looking for unexpected types.
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (is_witness(sub)) {
      return sub; // found a witness
    }
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

// For some method m and some class ctxk (subclass of method holder),
// enumerate all distinct overrides of m in concrete subclasses of ctxk.
// It relies on vtable/itable information to perform method selection on each linked subclass
// and ignores all not yet linked ones (speculatively treating them as "effectively abstract").
class LinkedConcreteMethodFinder : public AbstractClassHierarchyWalker {
 private:
  InstanceKlass* _resolved_klass; // resolved class (JVMS-5.4.3.1)
  InstanceKlass* _declaring_klass; // the holder of resolved method (JVMS-5.4.3.3)
  int _vtable_index; // vtable/itable index of the resolved method
  bool _do_itable_lookup; // choose between itable and vtable lookup logic

  // cache of method lookups
  Method* _found_methods[PARTICIPANT_LIMIT+1];

  bool is_witness(Klass* k);
  Method* select_method(InstanceKlass* recv_klass);
  static int compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method, bool& is_itable_index);
  static bool is_concrete_klass(InstanceKlass* ik);

  void add_participant(Method* m, Klass* participant) {
    uint np = num_participants();
    AbstractClassHierarchyWalker::add_participant(participant);
    assert(np + 1 == num_participants(), "sanity");
    _found_methods[np] = m; // record the method for the participant
  }

  bool record_witness(Klass* witness, Method* m) {
    for (uint i = 0; i < num_participants(); i++) {
      if (found_method(i) == m) {
        return false; // already recorded
      }
    }
    // Record not yet seen method.
    _found_methods[num_participants()] = m;
    return AbstractClassHierarchyWalker::record_witness(witness);
  }

  void initialize(Method* participant) {
    for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) {
      _found_methods[i] = NULL;
    }
    if (participant != NULL) {
      add_participant(participant, participant->method_holder());
    }
  }

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes);
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type);

 public:
  // In order to perform method selection, the following info is needed:
  // (1) interface or virtual call;
  // (2) vtable/itable index;
  // (3) declaring class (in case of interface call).
  //
  // It is prepared based on the results of method resolution: resolved class and resolved method (as specified in JVMS-5.4.3.3).
  // Optionally, a method which was previously determined as a unique target (uniqm) is added as a participant
  // to enable dependency spot-checking and speed up the search.
  LinkedConcreteMethodFinder(InstanceKlass* resolved_klass, Method* resolved_method, Method* uniqm = NULL) : AbstractClassHierarchyWalker(NULL) {
    assert(UseVtableBasedCHA, "required");
    assert(resolved_klass->is_linked(), "required");
    assert(resolved_method->method_holder()->is_linked(), "required");
    assert(!resolved_method->can_be_statically_bound(), "no vtable index available");

    _resolved_klass = resolved_klass;
    _declaring_klass = resolved_method->method_holder();
    _vtable_index = compute_vtable_index(resolved_klass, resolved_method,
                                         _do_itable_lookup); // out parameter
    assert(_vtable_index >= 0, "invalid vtable index");

    initialize(uniqm);
  }

  // Note: If n==num_participants, returns NULL.
  Method* found_method(uint n) {
    assert(n <= num_participants(), "oob");
    assert(participant(n) != NULL || n == num_participants(), "proper usage");
    return _found_methods[n];
  }
};

Klass* LinkedConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
  Klass* type = changes.type();

  assert(!is_participant(type), "only old classes are participants");

  if (is_witness(type)) {
    return type;
  }
  return NULL; // No witness found. The dependency remains unbroken.
}

Klass* LinkedConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (is_witness(sub)) {
      return sub;
    }
    if (sub->is_instance_klass() && !InstanceKlass::cast(sub)->is_linked()) {
      iter.skip_subclasses(); // ignore not yet linked classes
    }
  }
  return NULL; // No witness found. The dependency remains unbroken.
}

bool LinkedConcreteMethodFinder::is_witness(Klass* k) {
  if (is_participant(k)) {
    return false; // do not report participant types
  } else if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    if (is_concrete_klass(ik)) {
      Method* m = select_method(ik);
      return record_witness(ik, m);
    } else {
      return false; // ignore non-concrete holder class
    }
  } else {
    return false; // no methods to find in an array type
  }
}

Method* LinkedConcreteMethodFinder::select_method(InstanceKlass* recv_klass) {
  Method* selected_method = NULL;
  if (_do_itable_lookup) {
    assert(_declaring_klass->is_interface(), "sanity");
    bool implements_interface; // initialized by method_at_itable_or_null()
    selected_method = recv_klass->method_at_itable_or_null(_declaring_klass, _vtable_index,
                                                           implements_interface); // out parameter
    assert(implements_interface, "not implemented");
  } else {
    selected_method = recv_klass->method_at_vtable(_vtable_index);
  }
  return selected_method; // NULL when corresponding slot is empty (AbstractMethodError case)
}

int LinkedConcreteMethodFinder::compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method,
                                                     // out parameter
                                                     bool& is_itable_index) {
  if (resolved_klass->is_interface() && resolved_method->has_itable_index()) {
    is_itable_index = true;
    return resolved_method->itable_index();
  }
  // Check for default or miranda method first.
  InstanceKlass* declaring_klass = resolved_method->method_holder();
  if (!resolved_klass->is_interface() && declaring_klass->is_interface()) {
    is_itable_index = false;
    return resolved_klass->vtable_index_of_interface_method(resolved_method);
  }
  // At this point we are sure that resolved_method is virtual and not
  // a default or miranda method; therefore, it must have a valid vtable index.
  assert(resolved_method->has_vtable_index(), "");
  is_itable_index = false;
  return resolved_method->vtable_index();
}

bool LinkedConcreteMethodFinder::is_concrete_klass(InstanceKlass* ik) {
  if (!Dependencies::is_concrete_klass(ik)) {
    return false; // not concrete
  }
  if (ik->is_interface()) {
    return false; // interfaces aren't concrete
  }
  if (!ik->is_linked()) {
    return false; // not yet linked classes don't have instances
  }
  return true;
}
1575
1576
#ifdef ASSERT
// Assert that m is inherited into ctxk, without intervening overrides.
// (May return true even if this is not true, in corner cases where we punt.)
bool Dependencies::verify_method_context(InstanceKlass* ctxk, Method* m) {
  if (m->is_private()) {
    return false; // Quick lose. Should not happen.
  }
  if (m->method_holder() == ctxk) {
    return true; // Quick win.
  }
  if (!(m->is_public() || m->is_protected())) {
    // The override story is complex when packages get involved.
    return true; // Must punt the assertion to true.
  }
  Method* lm = ctxk->lookup_method(m->name(), m->signature());
  if (lm == NULL && ctxk->is_instance_klass()) {
    // It might be an interface method
    lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
                                                                        m->signature());
  }
  if (lm == m) {
    // Method m is inherited into ctxk.
    return true;
  }
  if (lm != NULL) {
    if (!(lm->is_public() || lm->is_protected())) {
      // Method is [package-]private, so the override story is complex.
      return true; // Must punt the assertion to true.
    }
    if (lm->is_static()) {
      // Static methods don't override non-static so punt
      return true;
    }
    if (!Dependencies::is_concrete_method(lm, ctxk) &&
        !Dependencies::is_concrete_method(m, ctxk)) {
      // They are both non-concrete
      if (lm->method_holder()->is_subtype_of(m->method_holder())) {
        // Method m is overridden by lm, but both are non-concrete.
        return true;
      }
      if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
          ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
        // Interface method defined in multiple super interfaces
        return true;
      }
    }
  }
  ResourceMark rm;
  tty->print_cr("Dependency method not found in the associated context:");
  tty->print_cr(" context = %s", ctxk->external_name());
  tty->print( " method = "); m->print_short_name(tty); tty->cr();
  if (lm != NULL) {
    tty->print( " found = "); lm->print_short_name(tty); tty->cr();
  }
  return false;
}
#endif // ASSERT

bool Dependencies::is_concrete_klass(Klass* k) {
  if (k->is_abstract()) return false;
  // %%% We could treat classes which are concrete but
  // have not yet been instantiated as virtually abstract.
  // This would require a deoptimization barrier on first instantiation.
  //if (k->is_not_instantiated()) return false;
  return true;
}

bool Dependencies::is_concrete_method(Method* m, Klass* k) {
  // NULL is not a concrete method.
  if (m == NULL) {
    return false;
  }
  // Statics are irrelevant to virtual call sites.
  if (m->is_static()) {
    return false;
  }
  // Abstract methods are not concrete.
  if (m->is_abstract()) {
    return false;
  }
  // Overpass (error) methods are not concrete if k is abstract.
  if (m->is_overpass() && k != NULL) {
    return !k->is_abstract();
  }
  // Note: "true" is the conservative answer. The overpass clause above is
  // skipped when k == NULL, so we return true whenever the answer would
  // depend on that clause.
  return true;
}

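// For example: an abstract method is never concrete, and an overpass (error
// throwing) method synthesized by the VM counts as concrete only when its
// holder k is a non-abstract class (see the overpass clause above).
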
Klass* Dependencies::find_finalizable_subclass(InstanceKlass* ik) {
  for (ClassHierarchyIterator iter(ik); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (sub->has_finalizer() && !sub->is_interface()) {
      return sub;
    }
  }
  return NULL; // not found
}

bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
  if (k->is_abstract()) return false;
  // We could also return false if k does not yet appear to be
  // instantiated, if the VM version supports this distinction also.
  //if (k->is_not_instantiated()) return false;
  return true;
}

bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
  return k->has_finalizable_subclass();
}

// Any use of the contents (bytecodes) of a method must be
// marked by an "evol_method" dependency, if those contents
// can change. (Note: A method is always dependent on itself.)
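// For example: if a compiled caller has used the bytecodes of C.foo() and
// recorded an evol_method dependency on it, a later JVMTI RedefineClasses of C
// (which marks the old Method* as "old") or a newly set breakpoint in C.foo()
// makes check_evol_method() return C, i.e. the dependency is now broken.
// (The class name C is illustrative.)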
Klass* Dependencies::check_evol_method(Method* m) {
  assert(must_be_in_vm(), "raw oops here");
  // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
  // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
  if (m->is_old()
      || m->number_of_breakpoints() > 0) {
    return m->method_holder();
  } else {
    return NULL;
  }
}

// This is a strong assertion: It is that the given type
// has no subtypes whatever. It is most useful for
// optimizing checks on reflected types or on array types.
// (Checks on types which are derived from real instances
// can be optimized more strongly than this, because we
// know that the checked type comes from a concrete type,
// and therefore we can disregard abstract types.)
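// For example (hypothetical types): a leaf_type assertion on class C is
// broken as soon as any subclass of C is loaded (check_leaf_type() returns
// that subclass), and a leaf_type assertion on interface I is broken as soon
// as I gains its first implementor.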
Klass* Dependencies::check_leaf_type(InstanceKlass* ctxk) {
  assert(must_be_in_vm(), "raw oops here");
  assert_locked_or_safepoint(Compile_lock);
  Klass* sub = ctxk->subklass();
  if (sub != NULL) {
    return sub;
  } else if (ctxk->nof_implementors() != 0) {
    // if it is an interface, it must be unimplemented
    // (if it is not an interface, nof_implementors is always zero)
    InstanceKlass* impl = ctxk->implementor();
    assert(impl != NULL, "must be set");
    return impl;
  } else {
    return NULL;
  }
}

// Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
// This allows the compiler to narrow occurrences of ctxk by conck,
// when dealing with the types of actual instances.
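// For example (hypothetical hierarchy): with abstract class AX whose only
// concrete subtype is C, the compiler may treat a value statically typed AX
// as a C. The assertion is broken once another concrete subtype D of AX,
// outside C's subtree, is loaded; the finder then reports D as the witness.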
Klass* Dependencies::check_abstract_with_unique_concrete_subtype(InstanceKlass* ctxk,
                                                                 Klass* conck,
                                                                 NewKlassDepChange* changes) {
  ConcreteSubtypeFinder wf(conck);
  Klass* k = wf.find_witness(ctxk, changes);
  return k;
}


// Find the unique concrete proper subtype of ctxk, or NULL if there
// is more than one concrete proper subtype. If there are no concrete
// proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
// That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
Klass* Dependencies::find_unique_concrete_subtype(InstanceKlass* ctxk) {
  ConcreteSubtypeFinder wf(ctxk); // Ignore ctxk when walking.
  wf.record_witnesses(1); // Record one other witness when walking.
  Klass* wit = wf.find_witness(ctxk);
  if (wit != NULL) return NULL; // Too many witnesses.
  Klass* conck = wf.participant(0);
  if (conck == NULL) {
    return ctxk; // Return ctxk as a flag for "no subtypes".
  } else {
#ifndef PRODUCT
    // Make sure the dependency mechanism will pass this discovery:
    if (VerifyDependencies) {
      // Turn off dependency tracing while actually testing deps.
      FlagSetting fs(TraceDependencies, false);
      if (!Dependencies::is_concrete_klass(ctxk)) {
        guarantee(NULL == (void *)
                  check_abstract_with_unique_concrete_subtype(ctxk, conck),
                  "verify dep.");
      }
    }
#endif //PRODUCT
    return conck;
  }
}

// Try to determine whether the root method in some context is concrete or not, based on the information about the unique method
// in that context. It exploits the fact that a concrete root method is always inherited into the context when there's a unique method.
// Hence, the unique method's holder is always a supertype of the context class when the root method is concrete.
// Examples for concrete_root_method
//   C (C.m uniqm)
//   |
//   CX (ctxk)               uniqm is inherited into context.
//
//   CX (ctxk) (CX.m uniqm)  here uniqm is defined in ctxk.
// Examples for !concrete_root_method
//   CX (ctxk)
//   |
//   C (C.m uniqm)           uniqm is in a subtype of ctxk.
bool Dependencies::is_concrete_root_method(Method* uniqm, InstanceKlass* ctxk) {
  if (uniqm == NULL) {
    return false; // match Dependencies::is_concrete_method() behavior
  }
  // Theoretically, the "direction" of the subtype check matters here.
  // On the one hand, in the case of an interface context with a single implementor, uniqm can be in a superclass of the implementor
  // which is not related to the context class.
  // On the other hand, uniqm could come from an interface unrelated to the context class, but right now that is not possible:
  // it is required that uniqm->method_holder() is the participant (uniqm->method_holder() <: ctxk), hence a default method
  // can't be used as unique.
  if (ctxk->is_interface()) {
    InstanceKlass* implementor = ctxk->implementor();
    assert(implementor != ctxk, "single implementor only"); // should have been invalidated earlier
    ctxk = implementor;
  }
  InstanceKlass* holder = uniqm->method_holder();
  assert(!holder->is_interface(), "no default methods allowed");
  assert(ctxk->is_subclass_of(holder) || holder->is_subclass_of(ctxk), "not related");
  return ctxk->is_subclass_of(holder);
}

// If a class (or interface) has a unique concrete method uniqm, return NULL.
// Otherwise, return a class that contains an interfering method.
Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
                                                  Method* uniqm,
                                                  NewKlassDepChange* changes) {
  ConcreteMethodFinder wf(uniqm, uniqm->method_holder());
  Klass* k = wf.find_witness(ctxk, changes);
  if (k != NULL) {
    return k;
  }
  if (!Dependencies::is_concrete_root_method(uniqm, ctxk) || changes != NULL) {
    Klass* conck = find_witness_AME(ctxk, uniqm, changes);
    if (conck != NULL) {
      // Found a concrete subtype 'conck' which does not override abstract root method.
      return conck;
    }
  }
  return NULL;
}

Klass* Dependencies::check_unique_implementor(InstanceKlass* ctxk, Klass* uniqk, NewKlassDepChange* changes) {
  assert(ctxk->is_interface(), "sanity");
  assert(ctxk->nof_implementors() > 0, "no implementors");
  if (ctxk->nof_implementors() == 1) {
    assert(ctxk->implementor() == uniqk, "sanity");
    return NULL;
  }
  return ctxk; // no unique implementor
}

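// For example: an interface I that currently has exactly one implementor C
// lets the compiler devirtualize calls through I to C. The check above stays
// valid while nof_implementors() == 1; once a second implementor is loaded,
// ctxk itself is returned as the witness. (I and C are illustrative names.)
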
// Search for AME.
// There are two versions of this check.
// 1) Spot checking version (class load time). The newly added class is checked for AME.
//    Checks whether an abstract/overpass method is inherited into/declared in the newly added concrete class.
// 2) Compile-time analysis for an abstract/overpass (abstract klass) root_m. The non-uniqm subtrees are checked for concrete classes.
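// Illustrative case for (1): abstract class A declares m(); a newly loaded
// concrete class C extends A without overriding m(). Invoking m() on a C
// instance would throw AbstractMethodError, so C is reported as a witness.
// (A and C are hypothetical names.)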
Klass* Dependencies::find_witness_AME(InstanceKlass* ctxk, Method* m, KlassDepChange* changes) {
  if (m != NULL) {
    if (changes != NULL) {
      // Spot checking version.
      ConcreteMethodFinder wf(m);
      Klass* new_type = changes->as_new_klass_change()->new_type();
      if (wf.witnessed_reabstraction_in_supers(new_type)) {
        return new_type;
      }
    } else {
      // Note: It is required that uniqm->method_holder() is the participant (see ClassHierarchyWalker::found_method()).
      ConcreteSubtypeFinder wf(m->method_holder());
      Klass* conck = wf.find_witness(ctxk);
      if (conck != NULL) {
        Method* cm = InstanceKlass::cast(conck)->find_instance_method(m->name(), m->signature(), Klass::PrivateLookupMode::skip);
        if (!Dependencies::is_concrete_method(cm, conck)) {
          return conck;
        }
      }
    }
  }
  return NULL;
}

// This function is used by find_unique_concrete_method (the non-vtable-based version)
// to check whether a subtype method overrides the base method.
static bool overrides(Method* sub_m, Method* base_m) {
  assert(base_m != NULL, "base method should be non null");
  if (sub_m == NULL) {
    return false;
  }
  /**
   * If base_m is public or protected then sub_m always overrides.
   * If base_m is !public, !protected and !private (i.e. base_m is package private)
   * then sub_m should be in the same package as that of base_m.
   * For package private base_m this is a conservative approach, as it allows only a subset of the cases allowed by
   * the JVM specification.
   **/
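  // For example: a package-private base_m declared in package p is treated as
  // overridden only when sub_m's holder is also in package p; cross-package
  // "overrides" of package-private methods are conservatively not counted.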
  if (base_m->is_public() || base_m->is_protected() ||
      base_m->method_holder()->is_same_class_package(sub_m->method_holder())) {
    return true;
  }
  return false;
}

// Find the set of all non-abstract methods under ctxk that match m.
// (The method m must be defined or inherited in ctxk.)
// Include m itself in the set, unless it is abstract.
// If this set has exactly one element, return that element.
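// For example (hypothetical hierarchy): if abstract class A declares m() and
// exactly one concrete subclass C overrides it, C::m is the unique concrete
// method and is returned; if two unrelated concrete subclasses each override
// m(), more than one witness is recorded and NULL is returned.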
Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass** participant) {
  // Return NULL if m is marked old; must have been a redefined method.
  if (m->is_old()) {
    return NULL;
  }
  if (m->is_default_method()) {
    return NULL; // not supported
  }
  assert(verify_method_context(ctxk, m), "proper context");
  ConcreteMethodFinder wf(m);
  wf.record_witnesses(1);
  Klass* wit = wf.find_witness(ctxk);
  if (wit != NULL) return NULL; // Too many witnesses.
  Method* fm = wf.found_method(0); // Will be NULL if num_parts == 0.
  if (participant != NULL) {
    (*participant) = wf.participant(0);
  }
  if (!Dependencies::is_concrete_method(fm, NULL)) {
    fm = NULL; // ignore abstract methods
  }
  if (Dependencies::is_concrete_method(m, ctxk)) {
    if (fm == NULL) {
      // It turns out that m was always the only implementation.
      fm = m;
    } else if (fm != m) {
      // Two conflicting implementations after all.
      // (This can happen if m is inherited into ctxk and fm overrides it.)
      return NULL;
    }
  } else if (Dependencies::find_witness_AME(ctxk, fm) != NULL) {
    // Found a concrete subtype which does not override abstract root method.
    return NULL;
  } else if (!overrides(fm, m)) {
    // Found method doesn't override abstract root method.
    return NULL;
  }
  assert(Dependencies::is_concrete_root_method(fm, ctxk) == Dependencies::is_concrete_method(m, ctxk), "mismatch");
#ifndef PRODUCT
  // Make sure the dependency mechanism will pass this discovery:
  if (VerifyDependencies && fm != NULL) {
    guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
              "verify dep.");
  }
#endif //PRODUCT
  return fm;
}

// If a class (or interface) has a unique concrete method uniqm, return NULL.
// Otherwise, return a class that contains an interfering method.
Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
                                                  Method* uniqm,
                                                  Klass* resolved_klass,
                                                  Method* resolved_method,
                                                  KlassDepChange* changes) {
  assert(UseVtableBasedCHA, "required");
  assert(!ctxk->is_interface() || ctxk == resolved_klass, "sanity");
  assert(!resolved_method->can_be_statically_bound() || resolved_method == uniqm, "sanity");
  assert(resolved_klass->is_subtype_of(resolved_method->method_holder()), "sanity");

  if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
      !resolved_method->method_holder()->is_linked() ||
      resolved_method->can_be_statically_bound()) {
    // Dependency is redundant, but benign. Just keep it to avoid unnecessary recompilation.
    return NULL; // no vtable index available
  }

  LinkedConcreteMethodFinder mf(InstanceKlass::cast(resolved_klass), resolved_method, uniqm);
  return mf.find_witness(ctxk, changes);
}

// Find the set of all non-abstract methods under ctxk that match m.
// (The method m must be defined or inherited in ctxk.)
// Include m itself in the set, unless it is abstract.
// If this set has exactly one element, return that element.
// Not yet linked subclasses of ctxk are ignored since they don't have any instances yet.
// Additionally, resolved_klass and resolved_method complete the description of the call site being analyzed.
Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass* resolved_klass, Method* resolved_method) {
  // Return NULL if m is marked old; must have been a redefined method.
  if (m->is_old()) {
    return NULL;
  }
  if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
      !resolved_method->method_holder()->is_linked() ||
      resolved_method->can_be_statically_bound()) {
    return m; // nothing to do: no witness under ctxk
  }
  LinkedConcreteMethodFinder wf(InstanceKlass::cast(resolved_klass), resolved_method);
  assert(Dependencies::verify_method_context(ctxk, m), "proper context");
  wf.record_witnesses(1);
  Klass* wit = wf.find_witness(ctxk);
  if (wit != NULL) {
    return NULL; // Too many witnesses.
  }
  // p == NULL when no participants are found (wf.num_participants() == 0).
  // fm == NULL case has 2 meanings:
  // * when p == NULL: no method found;
  // * when p != NULL: AbstractMethodError-throwing method found.
  // Also, found method should always be accompanied by a participant class.
  Klass* p = wf.participant(0);
  Method* fm = wf.found_method(0);
  assert(fm == NULL || p != NULL, "no participant");
  // Normalize all error-throwing cases to NULL.
  if (fm == Universe::throw_illegal_access_error() ||
      fm == Universe::throw_no_such_method_error() ||
      !Dependencies::is_concrete_method(fm, p)) {
    fm = NULL; // error-throwing method
  }
  if (Dependencies::is_concrete_method(m, ctxk)) {
    if (p == NULL) {
      // It turns out that m was always the only implementation.
      assert(fm == NULL, "sanity");
      fm = m;
    }
  }
#ifndef PRODUCT
  // Make sure the dependency mechanism will pass this discovery:
  if (VerifyDependencies && fm != NULL) {
    guarantee(NULL == check_unique_concrete_method(ctxk, fm, resolved_klass, resolved_method),
              "verify dep.");
  }
#endif // PRODUCT
  assert(fm == NULL || !fm->is_abstract(), "sanity");
  // Old CHA conservatively reports concrete methods in abstract classes
  // irrespective of whether they have concrete subclasses or not.
  // Also, abstract root method case is not fully supported.
#ifdef ASSERT
  Klass* uniqp = NULL;
  Method* uniqm = Dependencies::find_unique_concrete_method(ctxk, m, &uniqp);
  assert(uniqm == NULL || uniqm == fm ||
         m->is_abstract() ||
         uniqm->method_holder()->is_abstract() ||
         (fm == NULL && uniqm != NULL && uniqp != NULL && !InstanceKlass::cast(uniqp)->is_linked()),
         "sanity");
#endif // ASSERT
  return fm;
}

Klass* Dependencies::check_has_no_finalizable_subclasses(InstanceKlass* ctxk, NewKlassDepChange* changes) {
  InstanceKlass* search_at = ctxk;
  if (changes != NULL) {
    search_at = changes->new_type(); // just look at the new bit
  }
  return find_finalizable_subclass(search_at);
}

Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
  assert(call_site != NULL, "sanity");
  assert(method_handle != NULL, "sanity");
  assert(call_site->is_a(vmClasses::CallSite_klass()), "sanity");

  if (changes == NULL) {
    // Validate all CallSites
    if (java_lang_invoke_CallSite::target(call_site) != method_handle)
      return call_site->klass(); // assertion failed
  } else {
    // Validate the given CallSite
    if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
      assert(method_handle != changes->method_handle(), "must be");
      return call_site->klass(); // assertion failed
    }
  }
  return NULL; // assertion still valid
}

void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
  if (witness != NULL) {
    if (TraceDependencies) {
      print_dependency(witness, /*verbose=*/ true);
    }
    // The following is a no-op unless logging is enabled:
    log_dependency(witness);
  }
}

Klass* Dependencies::DepStream::check_new_klass_dependency(NewKlassDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  Klass* witness = NULL;
  switch (type()) {
  case evol_method:
    witness = check_evol_method(method_argument(0));
    break;
  case leaf_type:
    witness = check_leaf_type(context_type());
    break;
  case abstract_with_unique_concrete_subtype:
    witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
    break;
  case unique_concrete_method_2:
    witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
    break;
  case unique_concrete_method_4:
    witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
    break;
  case unique_implementor:
    witness = check_unique_implementor(context_type(), type_argument(1), changes);
    break;
  case no_finalizable_subclasses:
    witness = check_has_no_finalizable_subclasses(context_type(), changes);
    break;
  default:
    witness = NULL;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}

Klass* Dependencies::DepStream::check_klass_init_dependency(KlassInitDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  // No new types added. Only unique_concrete_method_4 is sensitive to class initialization changes.
  Klass* witness = NULL;
  switch (type()) {
  case unique_concrete_method_4:
    witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
    break;
  default:
    witness = NULL;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}

Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  if (changes != NULL) {
    if (UseVtableBasedCHA && changes->is_klass_init_change()) {
      return check_klass_init_dependency(changes->as_klass_init_change());
    } else {
      return check_new_klass_dependency(changes->as_new_klass_change());
    }
  } else {
    Klass* witness = check_new_klass_dependency(NULL);
    // check_klass_init_dependency duplicates check_new_klass_dependency checks when class hierarchy change info is absent.
    assert(witness != NULL || check_klass_init_dependency(NULL) == NULL, "missed dependency");
    return witness;
  }
}

Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  Klass* witness = NULL;
  switch (type()) {
  case call_site_target_value:
    witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
    break;
  default:
    witness = NULL;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}


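// Called for each recorded dependency when a change (a newly loaded class, a
// newly initialized class, or an updated CallSite target) is being installed.
// A non-NULL result names a witness class; callers treat that as a broken
// dependency for the enclosing nmethod.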
Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
  // Handle klass dependency
  if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
    return check_klass_dependency(changes.as_klass_change());

  // Handle CallSite dependency
  if (changes.is_call_site_change())
    return check_call_site_dependency(changes.as_call_site_change());

  // irrelevant dependency; skip it
  return NULL;
}


void DepChange::print() {
  int nsup = 0, nint = 0;
  for (ContextStream str(*this); str.next(); ) {
    Klass* k = str.klass();
    switch (str.change_type()) {
    case Change_new_type:
      tty->print_cr(" dependee = %s", k->external_name());
      break;
    case Change_new_sub:
      if (!WizardMode) {
        ++nsup;
      } else {
        tty->print_cr(" context super = %s", k->external_name());
      }
      break;
    case Change_new_impl:
      if (!WizardMode) {
        ++nint;
      } else {
        tty->print_cr(" context interface = %s", k->external_name());
      }
      break;
    default:
      break;
    }
  }
  if (nsup + nint != 0) {
    tty->print_cr(" context supers = %d, interfaces = %d", nsup, nint);
  }
}

void DepChange::ContextStream::start() {
  Klass* type = (_changes.is_klass_change() ? _changes.as_klass_change()->type() : (Klass*) NULL);
  _change_type = (type == NULL ? NO_CHANGE : Start_Klass);
  _klass = type;
  _ti_base = NULL;
  _ti_index = 0;
  _ti_limit = 0;
}

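// The iterator below yields the new type itself first (Change_new_type), then
// each of its superclasses in turn (Change_new_sub), and finally its
// transitive interfaces (Change_new_impl), in that order.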
bool DepChange::ContextStream::next() {
  switch (_change_type) {
  case Start_Klass: // initial state; _klass is the new type
    _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
    _ti_index = 0;
    _change_type = Change_new_type;
    return true;
  case Change_new_type:
    // fall through:
    _change_type = Change_new_sub;
  case Change_new_sub:
    // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
    {
      _klass = _klass->super();
      if (_klass != NULL) {
        return true;
      }
    }
    // else set up _ti_limit and fall through:
    _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
    _change_type = Change_new_impl;
  case Change_new_impl:
    if (_ti_index < _ti_limit) {
      _klass = _ti_base->at(_ti_index++);
      return true;
    }
    // fall through:
    _change_type = NO_CHANGE; // iterator is exhausted
  case NO_CHANGE:
    break;
  default:
    ShouldNotReachHere();
  }
  return false;
}

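// The mark/unmark pair below lets involves_context() answer "is this klass
// part of the changed type's context?" with a single flag test instead of a
// subtype walk; the assert in involves_context() cross-checks the two.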
void KlassDepChange::initialize() {
  // entire transaction must be under this lock:
  assert_lock_strong(Compile_lock);

  // Mark all dependee and all its superclasses
  // Mark transitive interfaces
  for (ContextStream str(*this); str.next(); ) {
    Klass* d = str.klass();
    assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
    InstanceKlass::cast(d)->set_is_marked_dependent(true);
  }
}

KlassDepChange::~KlassDepChange() {
  // Unmark all dependee and all its superclasses
  // Unmark transitive interfaces
  for (ContextStream str(*this); str.next(); ) {
    Klass* d = str.klass();
    InstanceKlass::cast(d)->set_is_marked_dependent(false);
  }
}

bool KlassDepChange::involves_context(Klass* k) {
  if (k == NULL || !k->is_instance_klass()) {
    return false;
  }
  InstanceKlass* ik = InstanceKlass::cast(k);
  bool is_contained = ik->is_marked_dependent();
  assert(is_contained == type()->is_subtype_of(k),
         "correct marking of potential context types");
  return is_contained;
}

void Dependencies::print_statistics() {
  AbstractClassHierarchyWalker::print_statistics();
}

void AbstractClassHierarchyWalker::print_statistics() {
  if (UsePerfData) {
    jlong deps_find_witness_calls = _perf_find_witness_anywhere_calls_count->get_value();
    jlong deps_find_witness_steps = _perf_find_witness_anywhere_steps_count->get_value();
    jlong deps_find_witness_singles = _perf_find_witness_in_calls_count->get_value();

    ttyLocker ttyl;
    tty->print_cr("Dependency check (find_witness) "
                  "calls=" JLONG_FORMAT ", steps=" JLONG_FORMAT " (avg=%.1f), singles=" JLONG_FORMAT,
                  deps_find_witness_calls,
                  deps_find_witness_steps,
                  (double)deps_find_witness_steps / deps_find_witness_calls,
                  deps_find_witness_singles);
    if (xtty != NULL) {
      xtty->elem("deps_find_witness calls='" JLONG_FORMAT "' steps='" JLONG_FORMAT "' singles='" JLONG_FORMAT "'",
                 deps_find_witness_calls,
                 deps_find_witness_steps,
                 deps_find_witness_singles);
    }
  }
}

CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
  _call_site(call_site),
  _method_handle(method_handle) {
  assert(_call_site()->is_a(vmClasses::CallSite_klass()), "must be");
  assert(_method_handle.is_null() || _method_handle()->is_a(vmClasses::MethodHandle_klass()), "must be");
}

void dependencies_init() {
  AbstractClassHierarchyWalker::init();
}