/*
 * Copyright (c) 2005, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciKlass.hpp"
#include "ci/ciMethod.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/vmClasses.hpp"
#include "code/dependencies.hpp"
#include "compiler/compileLog.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileTask.hpp"
#include "memory/resourceArea.hpp"
#include "oops/klass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "runtime/flags/flagSetting.hpp"
#include "runtime/handles.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/perfData.hpp"
#include "runtime/thread.inline.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"


#ifdef ASSERT
static bool must_be_in_vm() {
  Thread* thread = Thread::current();
  if (thread->is_Java_thread()) {
    return thread->as_Java_thread()->thread_state() == _thread_in_vm;
  } else {
    return true; // Could be VMThread or GC thread
  }
}
#endif //ASSERT

void Dependencies::initialize(ciEnv* env) {
  Arena* arena = env->arena();
  _oop_recorder = env->oop_recorder();
  _log = env->log();
  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
#if INCLUDE_JVMCI
  _using_dep_values = false;
#endif
  DEBUG_ONLY(_deps[end_marker] = NULL);
  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
    _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  }
  _content_bytes = NULL;
  _size_in_bytes = (size_t)-1;

  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
}
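
// Note: each bucket _deps[dept] stores its assertions as a flat sequence of
// arguments; every assertion of type 'dept' occupies dep_args(dept)
// consecutive slots (see assert_common_* and encode_content_bytes()).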

void Dependencies::assert_evol_method(ciMethod* m) {
  assert_common_1(evol_method, m);
}

void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  if (ctxk->is_array_klass()) {
    // As a special case, support this assertion on an array type,
    // which reduces to an assertion on its element type.
    // Note that this cannot be done with assertions that
    // relate to concreteness or abstractness.
    ciType* elemt = ctxk->as_array_klass()->base_element_type();
    if (!elemt->is_instance_klass()) return; // Ex: int[][]
    ctxk = elemt->as_instance_klass();
    //if (ctxk->is_final()) return; // Ex: String[][]
  }
  check_ctxk(ctxk);
  assert_common_1(leaf_type, ctxk);
}

void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  check_ctxk_abstract(ctxk);
  assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
}

void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  assert_common_2(unique_concrete_method_2, ctxk, uniqm);
}

void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm, ciKlass* resolved_klass, ciMethod* resolved_method) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  if (UseVtableBasedCHA) {
    assert_common_4(unique_concrete_method_4, ctxk, uniqm, resolved_klass, resolved_method);
  } else {
    assert_common_2(unique_concrete_method_2, ctxk, uniqm);
  }
}

void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
  check_ctxk(ctxk);
  assert_common_1(no_finalizable_subclasses, ctxk);
}

void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
  assert_common_2(call_site_target_value, call_site, method_handle);
}

#if INCLUDE_JVMCI

Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
  _oop_recorder = oop_recorder;
  _log = log;
  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  _using_dep_values = true;
  DEBUG_ONLY(_dep_values[end_marker] = NULL);
  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
    _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
  }
  _content_bytes = NULL;
  _size_in_bytes = (size_t)-1;

  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
}

void Dependencies::assert_evol_method(Method* m) {
  assert_common_1(evol_method, DepValue(_oop_recorder, m));
}

void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
  check_ctxk(ctxk);
  assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
}

void Dependencies::assert_leaf_type(Klass* ctxk) {
  if (ctxk->is_array_klass()) {
    // As a special case, support this assertion on an array type,
    // which reduces to an assertion on its element type.
    // Note that this cannot be done with assertions that
    // relate to concreteness or abstractness.
    BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
    if (is_java_primitive(elemt)) return; // Ex: int[][]
    ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
    //if (ctxk->is_final()) return; // Ex: String[][]
  }
  check_ctxk(ctxk);
  assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
}

void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
  check_ctxk_abstract(ctxk);
  DepValue ctxk_dv(_oop_recorder, ctxk);
  DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
  assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
}

void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  assert_common_2(unique_concrete_method_2, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
}

void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
  assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
}

#endif // INCLUDE_JVMCI


// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1.  If there is one,
// merge the new assertion into it, widening the recorded context if needed.
bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
                                    int ctxk_i, ciKlass* ctxk2) {
  ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
  if (ctxk2->is_subtype_of(ctxk1)) {
    return true; // success, and no need to change
  } else if (ctxk1->is_subtype_of(ctxk2)) {
    // new context class fully subsumes previous one
    deps->at_put(ctxk_i, ctxk2);
    return true;
  } else {
    return false;
  }
}
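
// Illustrative example: if a dependency has been recorded under some context
// class C and the same subject is asserted again under a subclass of C, the
// existing entry already covers it; if it is asserted under a superclass of
// C, the recorded context slot is widened to that broader class instead of
// appending a second record.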

void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
  assert(dep_args(dept) == 1, "sanity");
  log_dependency(dept, x);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x)) {
    assert(deps->find(x) >= 0, "sanity");
  } else {
    deps->append(x);
  }
}

void Dependencies::assert_common_2(DepType dept,
                                   ciBaseObject* x0, ciBaseObject* x1) {
  assert(dep_args(dept) == 2, "sanity");
  log_dependency(dept, x0, x1);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  bool has_ctxk = has_explicit_context_arg(dept);
  if (has_ctxk) {
    assert(dep_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y1 = deps->at(i+1);
        if (x1 == y1) { // same subject; check the context
          if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
            return;
          }
        }
      }
    }
  } else {
    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y0 = deps->at(i+0);
        ciBaseObject* y1 = deps->at(i+1);
        if (x0 == y0 && x1 == y1) {
          return;
        }
      }
    }
  }

  // append the assertion in the correct bucket:
  deps->append(x0);
  deps->append(x1);
}

void Dependencies::assert_common_4(DepType dept,
                                   ciKlass* ctxk, ciBaseObject* x1, ciBaseObject* x2, ciBaseObject* x3) {
  assert(has_explicit_context_arg(dept), "sanity");
  assert(dep_context_arg(dept) == 0, "sanity");
  assert(dep_args(dept) == 4, "sanity");
  log_dependency(dept, ctxk, x1, x2, x3);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x1) && note_dep_seen(dept, x2) && note_dep_seen(dept, x3)) {
    // look in this bucket for redundant assertions
    const int stride = 4;
    for (int i = deps->length(); (i -= stride) >= 0; ) {
      ciBaseObject* y1 = deps->at(i+1);
      ciBaseObject* y2 = deps->at(i+2);
      ciBaseObject* y3 = deps->at(i+3);
      if (x1 == y1 && x2 == y2 && x3 == y3) { // same subjects; check the context
        if (maybe_merge_ctxk(deps, i+0, ctxk)) {
          return;
        }
      }
    }
  }
  // append the assertion in the correct bucket:
  deps->append(ctxk);
  deps->append(x1);
  deps->append(x2);
  deps->append(x3);
}

#if INCLUDE_JVMCI
bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
                                    int ctxk_i, DepValue ctxk2_dv) {
  Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
  Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
  if (ctxk2->is_subtype_of(ctxk1)) {
    return true; // success, and no need to change
  } else if (ctxk1->is_subtype_of(ctxk2)) {
    // new context class fully subsumes previous one
    deps->at_put(ctxk_i, ctxk2_dv);
    return true;
  } else {
    return false;
  }
}

void Dependencies::assert_common_1(DepType dept, DepValue x) {
  assert(dep_args(dept) == 1, "sanity");
  //log_dependency(dept, x);
  GrowableArray<DepValue>* deps = _dep_values[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x)) {
    assert(deps->find(x) >= 0, "sanity");
  } else {
    deps->append(x);
  }
}

void Dependencies::assert_common_2(DepType dept,
                                   DepValue x0, DepValue x1) {
  assert(dep_args(dept) == 2, "sanity");
  //log_dependency(dept, x0, x1);
  GrowableArray<DepValue>* deps = _dep_values[dept];

  // see if the same (or a similar) dep is already recorded
  bool has_ctxk = has_explicit_context_arg(dept);
  if (has_ctxk) {
    assert(dep_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        DepValue y1 = deps->at(i+1);
        if (x1 == y1) { // same subject; check the context
          if (maybe_merge_ctxk(deps, i+0, x0)) {
            return;
          }
        }
      }
    }
  } else {
    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        DepValue y0 = deps->at(i+0);
        DepValue y1 = deps->at(i+1);
        if (x0 == y0 && x1 == y1) {
          return;
        }
      }
    }
  }

  // append the assertion in the correct bucket:
  deps->append(x0);
  deps->append(x1);
}
#endif // INCLUDE_JVMCI

/// Support for encoding dependencies into an nmethod:

void Dependencies::copy_to(nmethod* nm) {
  address beg = nm->dependencies_begin();
  address end = nm->dependencies_end();
  guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
  Copy::disjoint_words((HeapWord*) content_bytes(),
                       (HeapWord*) beg,
                       size_in_bytes() / sizeof(HeapWord));
  assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
}

static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
  for (int i = 0; i < narg; i++) {
    int diff = p1[i]->ident() - p2[i]->ident();
    if (diff != 0) return diff;
  }
  return 0;
}
static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 1); }
static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 2); }
static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 3); }
static int sort_dep_arg_4(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 4); }

#if INCLUDE_JVMCI
// metadata deps are sorted before object deps
static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
  for (int i = 0; i < narg; i++) {
    int diff = p1[i].sort_key() - p2[i].sort_key();
    if (diff != 0) return diff;
  }
  return 0;
}
static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
{ return sort_dep_value(p1, p2, 1); }
static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
{ return sort_dep_value(p1, p2, 2); }
static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
{ return sort_dep_value(p1, p2, 3); }
#endif // INCLUDE_JVMCI

void Dependencies::sort_all_deps() {
#if INCLUDE_JVMCI
  if (_using_dep_values) {
    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
      DepType dept = (DepType)deptv;
      GrowableArray<DepValue>* deps = _dep_values[dept];
      if (deps->length() <= 1) continue;
      switch (dep_args(dept)) {
      case 1: deps->sort(sort_dep_value_arg_1, 1); break;
      case 2: deps->sort(sort_dep_value_arg_2, 2); break;
      case 3: deps->sort(sort_dep_value_arg_3, 3); break;
      default: ShouldNotReachHere(); break;
      }
    }
    return;
  }
#endif // INCLUDE_JVMCI
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() <= 1) continue;
    switch (dep_args(dept)) {
    case 1: deps->sort(sort_dep_arg_1, 1); break;
    case 2: deps->sort(sort_dep_arg_2, 2); break;
    case 3: deps->sort(sort_dep_arg_3, 3); break;
    case 4: deps->sort(sort_dep_arg_4, 4); break;
    default: ShouldNotReachHere(); break;
    }
  }
}

size_t Dependencies::estimate_size_in_bytes() {
  size_t est_size = 100;
#if INCLUDE_JVMCI
  if (_using_dep_values) {
    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
      DepType dept = (DepType)deptv;
      GrowableArray<DepValue>* deps = _dep_values[dept];
      est_size += deps->length() * 2; // tags and argument(s)
    }
    return est_size;
  }
#endif // INCLUDE_JVMCI
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    est_size += deps->length()*2; // tags and argument(s)
  }
  return est_size;
}

ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
  switch (dept) {
  case unique_concrete_method_2:
  case unique_concrete_method_4:
    return x->as_metadata()->as_method()->holder();
  default:
    return NULL; // let NULL be NULL
  }
}

Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
  assert(must_be_in_vm(), "raw oops here");
  switch (dept) {
  case unique_concrete_method_2:
  case unique_concrete_method_4:
    assert(x->is_method(), "sanity");
    return ((Method*)x)->method_holder();
  default:
    return NULL; // let NULL be NULL
  }
}

void Dependencies::encode_content_bytes() {
  sort_all_deps();

  // cast is safe, no deps can overflow INT_MAX
  CompressedWriteStream bytes((int)estimate_size_in_bytes());

#if INCLUDE_JVMCI
  if (_using_dep_values) {
    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
      DepType dept = (DepType)deptv;
      GrowableArray<DepValue>* deps = _dep_values[dept];
      if (deps->length() == 0) continue;
      int stride = dep_args(dept);
      int ctxkj = dep_context_arg(dept); // -1 if no context arg
      assert(stride > 0, "sanity");
      for (int i = 0; i < deps->length(); i += stride) {
        jbyte code_byte = (jbyte)dept;
        int skipj = -1;
        if (ctxkj >= 0 && ctxkj+1 < stride) {
          Klass* ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
          DepValue x = deps->at(i+ctxkj+1); // following argument
          if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
            skipj = ctxkj; // we win: maybe one less oop to keep track of
            code_byte |= default_context_type_bit;
          }
        }
        bytes.write_byte(code_byte);
        for (int j = 0; j < stride; j++) {
          if (j == skipj) continue;
          DepValue v = deps->at(i+j);
          int idx = v.index();
          bytes.write_int(idx);
        }
      }
    }
  } else {
#endif // INCLUDE_JVMCI
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() == 0) continue;
    int stride = dep_args(dept);
    int ctxkj = dep_context_arg(dept); // -1 if no context arg
    assert(stride > 0, "sanity");
    for (int i = 0; i < deps->length(); i += stride) {
      jbyte code_byte = (jbyte)dept;
      int skipj = -1;
      if (ctxkj >= 0 && ctxkj+1 < stride) {
        ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
        ciBaseObject* x = deps->at(i+ctxkj+1); // following argument
        if (ctxk == ctxk_encoded_as_null(dept, x)) {
          skipj = ctxkj; // we win: maybe one less oop to keep track of
          code_byte |= default_context_type_bit;
        }
      }
      bytes.write_byte(code_byte);
      for (int j = 0; j < stride; j++) {
        if (j == skipj) continue;
        ciBaseObject* v = deps->at(i+j);
        int idx;
        if (v->is_object()) {
          idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
        } else {
          ciMetadata* meta = v->as_metadata();
          idx = _oop_recorder->find_index(meta->constant_encoding());
        }
        bytes.write_int(idx);
      }
    }
  }
#if INCLUDE_JVMCI
  }
#endif

  // write a sentinel byte to mark the end
  bytes.write_byte(end_marker);

  // round it out to a word boundary
  while (bytes.position() % sizeof(HeapWord) != 0) {
    bytes.write_byte(end_marker);
  }

  // check whether the dept byte encoding really works
  assert((jbyte)default_context_type_bit != 0, "byte overflow");

  _content_bytes = bytes.buffer();
  _size_in_bytes = bytes.position();
}


const char* Dependencies::_dep_name[TYPE_LIMIT] = {
  "end_marker",
  "evol_method",
  "leaf_type",
  "abstract_with_unique_concrete_subtype",
  "unique_concrete_method_2",
  "unique_concrete_method_4",
  "no_finalizable_subclasses",
  "call_site_target_value"
};

int Dependencies::_dep_args[TYPE_LIMIT] = {
  -1,// end_marker
   1, // evol_method m
   1, // leaf_type ctxk
   2, // abstract_with_unique_concrete_subtype ctxk, k
   2, // unique_concrete_method_2 ctxk, m
   4, // unique_concrete_method_4 ctxk, m, resolved_klass, resolved_method
   1, // no_finalizable_subclasses ctxk
   2  // call_site_target_value call_site, method_handle
};
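
// Descriptive note on the encoded form (cf. encode_content_bytes() above and
// DepStream::next() below): each record is one tag byte -- the DepType,
// optionally OR'ed with default_context_type_bit when the context klass is
// implied by the following argument and therefore elided -- followed by
// dep_args(dept) compressed-int indices into the recorded oops/metadata;
// a trailing end_marker byte (repeated up to a HeapWord boundary) ends the
// stream.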

const char* Dependencies::dep_name(Dependencies::DepType dept) {
  if (!dept_in_mask(dept, all_types)) return "?bad-dep?";
  return _dep_name[dept];
}

int Dependencies::dep_args(Dependencies::DepType dept) {
  if (!dept_in_mask(dept, all_types)) return -1;
  return _dep_args[dept];
}

void Dependencies::check_valid_dependency_type(DepType dept) {
  guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
}

Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, char** failure_detail) {
  int klass_violations = 0;
  DepType result = end_marker;
  for (Dependencies::DepStream deps(this); deps.next(); ) {
    Klass* witness = deps.check_dependency();
    if (witness != NULL) {
      if (klass_violations == 0) {
        result = deps.type();
        if (failure_detail != NULL && klass_violations == 0) {
          // Use a fixed size buffer to prevent the string stream from
          // resizing in the context of an inner resource mark.
          char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN);
          stringStream st(buffer, O_BUFLEN);
          deps.print_dependency(witness, true, &st);
          *failure_detail = st.as_string();
        }
      }
      klass_violations++;
      if (xtty == NULL) {
        // If we're not logging then a single violation is sufficient,
        // otherwise we want to log all the dependences which were
        // violated.
        break;
      }
    }
  }

  return result;
}

// for the sake of the compiler log, print out current dependencies:
void Dependencies::log_all_dependencies() {
  if (log() == NULL) return;
  ResourceMark rm;
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    int deplen = deps->length();
    if (deplen == 0) {
      continue;
    }
    int stride = dep_args(dept);
    GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
    for (int i = 0; i < deps->length(); i += stride) {
      for (int j = 0; j < stride; j++) {
        // flush out the identities before printing
        ciargs->push(deps->at(i+j));
      }
      write_dependency_to(log(), dept, ciargs);
      ciargs->clear();
    }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
  }
}

void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       GrowableArray<DepArgument>* args,
                                       Klass* witness) {
  if (log == NULL) {
    return;
  }
  ResourceMark rm;
  ciEnv* env = ciEnv::current();
  GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
  for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
    DepArgument arg = *it;
    if (arg.is_oop()) {
      ciargs->push(env->get_object(arg.oop_value()));
    } else {
      ciargs->push(env->get_metadata(arg.metadata_value()));
    }
  }
  int argslen = ciargs->length();
  Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
}

void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       GrowableArray<ciBaseObject*>* args,
                                       Klass* witness) {
  if (log == NULL) {
    return;
  }
  ResourceMark rm;
  GrowableArray<int>* argids = new GrowableArray<int>(args->length());
  for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
    ciBaseObject* obj = *it;
    if (obj->is_object()) {
      argids->push(log->identify(obj->as_object()));
    } else {
      argids->push(log->identify(obj->as_metadata()));
    }
  }
  if (witness != NULL) {
    log->begin_elem("dependency_failed");
  } else {
    log->begin_elem("dependency");
  }
  log->print(" type='%s'", dep_name(dept));
  const int ctxkj = dep_context_arg(dept); // -1 if no context arg
  if (ctxkj >= 0 && ctxkj < argids->length()) {
    log->print(" ctxk='%d'", argids->at(ctxkj));
  }
  // write remaining arguments, if any.
  for (int j = 0; j < argids->length(); j++) {
    if (j == ctxkj) continue; // already logged
    if (j == 1) {
      log->print( " x='%d'", argids->at(j));
    } else {
      log->print(" x%d='%d'", j, argids->at(j));
    }
  }
  if (witness != NULL) {
    log->object("witness", witness);
    log->stamp();
  }
  log->end_elem();
}

void Dependencies::write_dependency_to(xmlStream* xtty,
                                       DepType dept,
                                       GrowableArray<DepArgument>* args,
                                       Klass* witness) {
  if (xtty == NULL) {
    return;
  }
  Thread* thread = Thread::current();
  HandleMark rm(thread);
  ttyLocker ttyl;
  int ctxkj = dep_context_arg(dept); // -1 if no context arg
  if (witness != NULL) {
    xtty->begin_elem("dependency_failed");
  } else {
    xtty->begin_elem("dependency");
  }
  xtty->print(" type='%s'", dep_name(dept));
  if (ctxkj >= 0) {
    xtty->object("ctxk", args->at(ctxkj).metadata_value());
  }
  // write remaining arguments, if any.
  for (int j = 0; j < args->length(); j++) {
    if (j == ctxkj) continue; // already logged
    DepArgument arg = args->at(j);
    if (j == 1) {
      if (arg.is_oop()) {
        xtty->object("x", Handle(thread, arg.oop_value()));
      } else {
        xtty->object("x", arg.metadata_value());
      }
    } else {
      char xn[12]; sprintf(xn, "x%d", j);
      if (arg.is_oop()) {
        xtty->object(xn, Handle(thread, arg.oop_value()));
      } else {
        xtty->object(xn, arg.metadata_value());
      }
    }
  }
  if (witness != NULL) {
    xtty->object("witness", witness);
    xtty->stamp();
  }
  xtty->end_elem();
}

void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
                                    Klass* witness, outputStream* st) {
  ResourceMark rm;
  ttyLocker ttyl; // keep the following output all in one block
  st->print_cr("%s of type %s",
               (witness == NULL)? "Dependency": "Failed dependency",
               dep_name(dept));
  // print arguments
  int ctxkj = dep_context_arg(dept); // -1 if no context arg
  for (int j = 0; j < args->length(); j++) {
    DepArgument arg = args->at(j);
    bool put_star = false;
    if (arg.is_null()) continue;
    const char* what;
    if (j == ctxkj) {
      assert(arg.is_metadata(), "must be");
      what = "context";
      put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
    } else if (arg.is_method()) {
      what = "method ";
      put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
    } else if (arg.is_klass()) {
      what = "class ";
    } else {
      what = "object ";
    }
    st->print(" %s = %s", what, (put_star? "*": ""));
    if (arg.is_klass()) {
      st->print("%s", ((Klass*)arg.metadata_value())->external_name());
    } else if (arg.is_method()) {
      ((Method*)arg.metadata_value())->print_value_on(st);
    } else if (arg.is_oop()) {
      arg.oop_value()->print_value_on(st);
    } else {
      ShouldNotReachHere(); // Provide impl for this type.
    }

    st->cr();
  }
  if (witness != NULL) {
    bool put_star = !Dependencies::is_concrete_klass(witness);
    st->print_cr(" witness = %s%s",
                 (put_star? "*": ""),
                 witness->external_name());
  }
}

void Dependencies::DepStream::log_dependency(Klass* witness) {
  if (_deps == NULL && xtty == NULL) return; // fast cutout for runtime
  ResourceMark rm;
  const int nargs = argument_count();
  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  for (int j = 0; j < nargs; j++) {
    if (is_oop_argument(j)) {
      args->push(argument_oop(j));
    } else {
      args->push(argument(j));
    }
  }
  int argslen = args->length();
  if (_deps != NULL && _deps->log() != NULL) {
    if (ciEnv::current() != NULL) {
      Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
    } else {
      // Treat the CompileLog as an xmlstream instead
      Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
    }
  } else {
    Dependencies::write_dependency_to(xtty, type(), args, witness);
  }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
}

void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
  ResourceMark rm;
  int nargs = argument_count();
  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  for (int j = 0; j < nargs; j++) {
    if (is_oop_argument(j)) {
      args->push(argument_oop(j));
    } else {
      args->push(argument(j));
    }
  }
  int argslen = args->length();
  Dependencies::print_dependency(type(), args, witness, st);
  if (verbose) {
    if (_code != NULL) {
      st->print(" code: ");
      _code->print_value_on(st);
      st->cr();
    }
  }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
}


/// Dependency stream support (decodes dependencies from an nmethod):

#ifdef ASSERT
void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
  assert(must_be_in_vm(), "raw oops here");
  _byte_limit = byte_limit;
  _type = (DepType)(end_marker-1); // defeat "already at end" assert
  assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
}
#endif //ASSERT

bool Dependencies::DepStream::next() {
  assert(_type != end_marker, "already at end");
  if (_bytes.position() == 0 && _code != NULL
      && _code->dependencies_size() == 0) {
    // Method has no dependencies at all.
    return false;
  }
  int code_byte = (_bytes.read_byte() & 0xFF);
  if (code_byte == end_marker) {
    DEBUG_ONLY(_type = end_marker);
    return false;
  } else {
    int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
    code_byte -= ctxk_bit;
    DepType dept = (DepType)code_byte;
    _type = dept;
    Dependencies::check_valid_dependency_type(dept);
    int stride = _dep_args[dept];
    assert(stride == dep_args(dept), "sanity");
    int skipj = -1;
    if (ctxk_bit != 0) {
      skipj = 0; // currently the only context argument is at zero
      assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
    }
    for (int j = 0; j < stride; j++) {
      _xi[j] = (j == skipj)? 0: _bytes.read_int();
    }
    DEBUG_ONLY(_xi[stride] = -1); // help detect overruns
    return true;
  }
}

inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
  Metadata* o = NULL;
  if (_code != NULL) {
    o = _code->metadata_at(i);
  } else {
    o = _deps->oop_recorder()->metadata_at(i);
  }
  return o;
}

inline oop Dependencies::DepStream::recorded_oop_at(int i) {
  return (_code != NULL)
         ? _code->oop_at(i)
         : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
}

Metadata* Dependencies::DepStream::argument(int i) {
  Metadata* result = recorded_metadata_at(argument_index(i));

  if (result == NULL) { // Explicit context argument can be compressed
    int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
    if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
      result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
    }
  }

  assert(result == NULL || result->is_klass() || result->is_method(), "must be");
  return result;
}

/**
 * Returns a unique identifier for each dependency argument.
 */
uintptr_t Dependencies::DepStream::get_identifier(int i) {
  if (is_oop_argument(i)) {
    return (uintptr_t)(oopDesc*)argument_oop(i);
  } else {
    return (uintptr_t)argument(i);
  }
}

oop Dependencies::DepStream::argument_oop(int i) {
  oop result = recorded_oop_at(argument_index(i));
  assert(oopDesc::is_oop_or_null(result), "must be");
  return result;
}

InstanceKlass* Dependencies::DepStream::context_type() {
  assert(must_be_in_vm(), "raw oops here");

  // Most dependencies have an explicit context type argument.
  {
    int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
    if (ctxkj >= 0) {
      Metadata* k = argument(ctxkj);
      assert(k != NULL && k->is_klass(), "type check");
      return InstanceKlass::cast((Klass*)k);
    }
  }

  // Some dependencies are using the klass of the first object
  // argument as implicit context type.
  {
    int ctxkj = dep_implicit_context_arg(type());
    if (ctxkj >= 0) {
      Klass* k = argument_oop(ctxkj)->klass();
      assert(k != NULL, "type check");
      return InstanceKlass::cast(k);
    }
  }

  // And some dependencies don't have a context type at all,
  // e.g. evol_method.
  return NULL;
}

// ----------------- DependencySignature --------------------------------------
bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
  if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
    return false;
  }

  for (int i = 0; i < s1.args_count(); i++) {
    if (s1.arg(i) != s2.arg(i)) {
      return false;
    }
  }
  return true;
}

/// Checking dependencies

// This hierarchy walker inspects subtypes of a given type, trying to find a "bad" class which breaks a dependency.
// Such a class is called a "witness" to the broken dependency.
// While searching around, we ignore "participants", which are already known to the dependency.
class AbstractClassHierarchyWalker {
 public:
  enum { PARTICIPANT_LIMIT = 3 };

 private:
  // if non-zero, tells how many witnesses to convert to participants
  uint _record_witnesses;

  // special classes which are not allowed to be witnesses:
  Klass* _participants[PARTICIPANT_LIMIT+1];
  uint _num_participants;

#ifdef ASSERT
  uint _nof_requests; // one-shot walker
#endif // ASSERT

  static PerfCounter* _perf_find_witness_anywhere_calls_count;
  static PerfCounter* _perf_find_witness_anywhere_steps_count;
  static PerfCounter* _perf_find_witness_in_calls_count;

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes) = 0;
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type) = 0;

  AbstractClassHierarchyWalker(Klass* participant) : _record_witnesses(0), _num_participants(0)
#ifdef ASSERT
  , _nof_requests(0)
#endif // ASSERT
  {
    for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) {
      _participants[i] = NULL;
    }
    if (participant != NULL) {
      add_participant(participant);
    }
  }

  bool is_participant(Klass* k) {
    for (uint i = 0; i < _num_participants; i++) {
      if (_participants[i] == k) {
        return true;
      }
    }
    return false;
  }

  bool record_witness(Klass* witness) {
    if (_record_witnesses > 0) {
      --_record_witnesses;
      add_participant(witness);
      return false; // not a witness
    } else {
      return true; // is a witness
    }
  }

  class CountingClassHierarchyIterator : public ClassHierarchyIterator {
   private:
    jlong _nof_steps;
   public:
    CountingClassHierarchyIterator(InstanceKlass* root) : ClassHierarchyIterator(root), _nof_steps(0) {}

    void next() {
      _nof_steps++;
      ClassHierarchyIterator::next();
    }

    ~CountingClassHierarchyIterator() {
      if (UsePerfData) {
        _perf_find_witness_anywhere_steps_count->inc(_nof_steps);
      }
    }
  };

 public:
  uint num_participants() { return _num_participants; }
  Klass* participant(uint n) {
    assert(n <= _num_participants, "oob");
    if (n < _num_participants) {
      return _participants[n];
    } else {
      return NULL;
    }
  }

  void add_participant(Klass* participant) {
    assert(!is_participant(participant), "sanity");
    assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
    uint np = _num_participants++;
    _participants[np] = participant;
  }

  void record_witnesses(uint add) {
    if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT;
    assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
    _record_witnesses = add;
  }

  Klass* find_witness(InstanceKlass* context_type, KlassDepChange* changes = NULL);

  static void init();
  static void print_statistics();
};
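
// Usage note: each walker instance is one-shot (see the _nof_requests assert
// in find_witness()). A caller may invoke record_witnesses(n) beforehand so
// that up to n classes which would otherwise be reported as witnesses are
// converted into additional participants instead of failing the search.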

PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_calls_count = NULL;
PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_steps_count = NULL;
PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_in_calls_count = NULL;

void AbstractClassHierarchyWalker::init() {
  if (UsePerfData) {
    EXCEPTION_MARK;
    _perf_find_witness_anywhere_calls_count =
        PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhere", PerfData::U_Events, CHECK);
    _perf_find_witness_anywhere_steps_count =
        PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhereSteps", PerfData::U_Events, CHECK);
    _perf_find_witness_in_calls_count =
        PerfDataManager::create_counter(SUN_CI, "findWitnessIn", PerfData::U_Events, CHECK);
  }
}

Klass* AbstractClassHierarchyWalker::find_witness(InstanceKlass* context_type, KlassDepChange* changes) {
  // Current thread must be in VM (not native mode, as in CI):
  assert(must_be_in_vm(), "raw oops here");
  // Must not move the class hierarchy during this check:
  assert_locked_or_safepoint(Compile_lock);
  assert(_nof_requests++ == 0, "repeated requests are not supported");

  assert(changes == NULL || changes->involves_context(context_type), "irrelevant dependency");

  // (Note: Interfaces do not have subclasses.)
  // If it is an interface, search its direct implementors.
  // (Their subclasses are additional indirect implementors. See InstanceKlass::add_implementor().)
  if (context_type->is_interface()) {
    int nof_impls = context_type->nof_implementors();
    if (nof_impls == 0) {
      return NULL; // no implementors
    } else if (nof_impls == 1) { // unique implementor
      assert(context_type != context_type->implementor(), "not unique");
      context_type = InstanceKlass::cast(context_type->implementor());
    } else { // nof_impls >= 2
      // Avoid this case: *I.m > { A.m, C }; B.m > C
      // Here, I.m has 2 concrete implementations, but m appears unique
      // as A.m, because the search misses B.m when checking C.
      // The inherited method B.m was getting missed by the walker
      // when interface 'I' was the starting point.
      // %%% Until this is fixed more systematically, bail out.
      return context_type;
    }
  }
  assert(!context_type->is_interface(), "no interfaces allowed");

  if (changes != NULL) {
    if (UsePerfData) {
      _perf_find_witness_in_calls_count->inc();
    }
    return find_witness_in(*changes);
  } else {
    if (UsePerfData) {
      _perf_find_witness_anywhere_calls_count->inc();
    }
    return find_witness_anywhere(context_type);
  }
}

class ConcreteSubtypeFinder : public AbstractClassHierarchyWalker {
 private:
  bool is_witness(Klass* k);

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes);
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type);

 public:
  ConcreteSubtypeFinder(Klass* participant = NULL) : AbstractClassHierarchyWalker(participant) {}
};

bool ConcreteSubtypeFinder::is_witness(Klass* k) {
  if (Dependencies::is_concrete_klass(k)) {
    return record_witness(k); // concrete subtype
  } else {
    return false; // not a concrete class
  }
}

Klass* ConcreteSubtypeFinder::find_witness_in(KlassDepChange& changes) {
  // When looking for unexpected concrete types, do not look beneath expected ones:
  // * CX > CC > C' is OK, even if C' is new.
  // * CX > { CC, C' } is not OK if C' is new, and C' is the witness.
  Klass* new_type = changes.as_new_klass_change()->new_type();
  assert(!is_participant(new_type), "only old classes are participants");
  // If the new type is a subtype of a participant, we are done.
  for (uint i = 0; i < num_participants(); i++) {
    if (changes.involves_context(participant(i))) {
      // new guy is protected from this check by previous participant
      return NULL;
    }
  }
  if (is_witness(new_type)) {
    return new_type;
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

Klass* ConcreteSubtypeFinder::find_witness_anywhere(InstanceKlass* context_type) {
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    // Do not report participant types.
    if (is_participant(sub)) {
      // Don't walk beneath a participant since it hides witnesses.
      iter.skip_subclasses();
    } else if (is_witness(sub)) {
      return sub; // found a witness
    }
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

class ConcreteMethodFinder : public AbstractClassHierarchyWalker {
 private:
  Symbol* _name;
  Symbol* _signature;

  // cache of method lookups
  Method* _found_methods[PARTICIPANT_LIMIT+1];

  bool is_witness(Klass* k);

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes);
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type);

  bool witnessed_reabstraction_in_supers(Klass* k);

 public:
  ConcreteMethodFinder(Method* m, Klass* participant = NULL) : AbstractClassHierarchyWalker(participant) {
    assert(m != NULL && m->is_method(), "sanity");
    _name = m->name();
    _signature = m->signature();

    for (int i = 0; i < PARTICIPANT_LIMIT+1; i++) {
      _found_methods[i] = NULL;
    }
  }

  // Note: If n==num_participants, returns NULL.
  Method* found_method(uint n) {
    assert(n <= num_participants(), "oob");
    Method* fm = _found_methods[n];
    assert(n == num_participants() || fm != NULL, "proper usage");
    if (fm != NULL && fm->method_holder() != participant(n)) {
      // Default methods from interfaces can be added to classes. In
      // that case the holder of the method is not the class but the
      // interface where it's defined.
      assert(fm->is_default_method(), "sanity");
      return NULL;
    }
    return fm;
  }

  void add_participant(Klass* participant) {
    AbstractClassHierarchyWalker::add_participant(participant);
    _found_methods[num_participants()] = NULL;
  }

  bool record_witness(Klass* witness, Method* m) {
    _found_methods[num_participants()] = m;
    return AbstractClassHierarchyWalker::record_witness(witness);
  }

 private:
  static PerfCounter* _perf_find_witness_anywhere_calls_count;
  static PerfCounter* _perf_find_witness_anywhere_steps_count;
  static PerfCounter* _perf_find_witness_in_calls_count;

 public:
  static void init();
  static void print_statistics();
};

bool ConcreteMethodFinder::is_witness(Klass* k) {
  if (is_participant(k)) {
    return false; // do not report participant types
  }
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    // Search class hierarchy first, skipping private implementations
    // as they never override any inherited methods
    Method* m = ik->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
    if (Dependencies::is_concrete_method(m, ik)) {
      return record_witness(k, m); // concrete method found
    } else {
      // Check for re-abstraction of method
      if (!ik->is_interface() && m != NULL && m->is_abstract()) {
        // Found a matching abstract method 'm' in the class hierarchy.
        // This is fine iff 'k' is an abstract class and all concrete subtypes
        // of 'k' override 'm' and are participants of the current search.
        ConcreteSubtypeFinder wf;
        for (uint i = 0; i < num_participants(); i++) {
          Klass* p = participant(i);
          wf.add_participant(p);
        }
        Klass* w = wf.find_witness(ik);
        if (w != NULL) {
          Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
          if (!Dependencies::is_concrete_method(wm, w)) {
            // Found a concrete subtype 'w' which does not override abstract method 'm'.
            // Bail out because 'm' could be called with 'w' as receiver (leading to an
            // AbstractMethodError) and thus the method we are looking for is not unique.
            return record_witness(k, m);
          }
        }
      }
      // Check interface defaults also, if any exist.
      Array<Method*>* default_methods = ik->default_methods();
      if (default_methods != NULL) {
        Method* dm = ik->find_method(default_methods, _name, _signature);
        if (Dependencies::is_concrete_method(dm, NULL)) {
          return record_witness(k, dm); // default method found
        }
      }
      return false; // no concrete method found
    }
  } else {
    return false; // no methods to find in an array type
  }
}

Klass* ConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
  // When looking for unexpected concrete methods, look beneath expected ones, to see if there are overrides.
  // * CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
  Klass* new_type = changes.as_new_klass_change()->new_type();
  assert(!is_participant(new_type), "only old classes are participants");
  if (is_witness(new_type)) {
    return new_type;
  } else {
    // No witness found, but is_witness() doesn't detect method re-abstraction in case of spot-checking.
    if (witnessed_reabstraction_in_supers(new_type)) {
      return new_type;
    }
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

bool ConcreteMethodFinder::witnessed_reabstraction_in_supers(Klass* k) {
  if (!k->is_instance_klass()) {
    return false; // no methods to find in an array type
  } else {
    // Looking for a case when an abstract method is inherited into a concrete class.
    if (Dependencies::is_concrete_klass(k) && !k->is_interface()) {
      Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
      if (m != NULL) {
        return false; // no reabstraction possible: local method found
      }
      for (InstanceKlass* super = k->java_super(); super != NULL; super = super->java_super()) {
        m = super->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
        if (m != NULL) { // inherited method found
          if (m->is_abstract() || m->is_overpass()) {
            return record_witness(super, m); // abstract method found
          }
          return false;
        }
      }
      // Miranda.
      return true;
    }
    return false;
  }
}


Klass* ConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
  // Walk hierarchy under a context type, looking for unexpected types.
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (is_witness(sub)) {
      return sub; // found a witness
    }
  }
  // No witness found. The dependency remains unbroken.
  return NULL;
}

// For some method m and some class ctxk (subclass of method holder),
// enumerate all distinct overrides of m in concrete subclasses of ctxk.
// It relies on vtable/itable information to perform method selection on each linked subclass
// and ignores all not yet linked ones (speculatively treat them as "effectively abstract").
class LinkedConcreteMethodFinder : public AbstractClassHierarchyWalker {
 private:
  InstanceKlass* _resolved_klass;  // resolved class (JVMS-5.4.3.1)
  InstanceKlass* _declaring_klass; // the holder of resolved method (JVMS-5.4.3.3)
  int _vtable_index;               // vtable/itable index of the resolved method
  bool _do_itable_lookup;          // choose between itable and vtable lookup logic

  // cache of method lookups
  Method* _found_methods[PARTICIPANT_LIMIT+1];

  bool is_witness(Klass* k);
  Method* select_method(InstanceKlass* recv_klass);
  static int compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method, bool& is_itable_index);
  static bool is_concrete_klass(InstanceKlass* ik);

  void add_participant(Method* m, Klass* participant) {
    uint np = num_participants();
    AbstractClassHierarchyWalker::add_participant(participant);
    assert(np + 1 == num_participants(), "sanity");
    _found_methods[np] = m; // record the method for the participant
  }

  bool record_witness(Klass* witness, Method* m) {
    for (uint i = 0; i < num_participants(); i++) {
      if (found_method(i) == m) {
        return false; // already recorded
      }
    }
    // Record not yet seen method.
    _found_methods[num_participants()] = m;
    return AbstractClassHierarchyWalker::record_witness(witness);
  }

  void initialize(Method* participant) {
    for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) {
      _found_methods[i] = NULL;
    }
    if (participant != NULL) {
      add_participant(participant, participant->method_holder());
    }
  }

 protected:
  virtual Klass* find_witness_in(KlassDepChange& changes);
  virtual Klass* find_witness_anywhere(InstanceKlass* context_type);

 public:
  // In order to perform method selection, the following info is needed:
  // (1) interface or virtual call;
  // (2) vtable/itable index;
  // (3) declaring class (in case of interface call).
  //
  // It is prepared based on the results of method resolution: resolved class and resolved method (as specified in JVMS-5.4.3.3).
  // Optionally, a method which was previously determined as a unique target (uniqm) is added as a participant
  // to enable dependency spot-checking and speed up the search.
  LinkedConcreteMethodFinder(InstanceKlass* resolved_klass, Method* resolved_method, Method* uniqm = NULL) : AbstractClassHierarchyWalker(NULL) {
    assert(UseVtableBasedCHA, "required");
    assert(resolved_klass->is_linked(), "required");
    assert(resolved_method->method_holder()->is_linked(), "required");
    assert(!resolved_method->can_be_statically_bound(), "no vtable index available");

    _resolved_klass = resolved_klass;
    _declaring_klass = resolved_method->method_holder();
    _vtable_index = compute_vtable_index(resolved_klass, resolved_method,
                                         _do_itable_lookup); // out parameter
    assert(_vtable_index >= 0, "invalid vtable index");

    initialize(uniqm);
  }

  // Note: If n==num_participants, returns NULL.
  Method* found_method(uint n) {
    assert(n <= num_participants(), "oob");
    assert(participant(n) != NULL || n == num_participants(), "proper usage");
    return _found_methods[n];
  }
};

Klass* LinkedConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
  Klass* type = changes.type();

  assert(!is_participant(type), "only old classes are participants");

  if (is_witness(type)) {
    return type;
  }
  return NULL; // No witness found. The dependency remains unbroken.
}

Klass* LinkedConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (is_witness(sub)) {
      return sub;
    }
    if (sub->is_instance_klass() && !InstanceKlass::cast(sub)->is_linked()) {
      iter.skip_subclasses(); // ignore not yet linked classes
    }
  }
  return NULL; // No witness found. The dependency remains unbroken.
}

bool LinkedConcreteMethodFinder::is_witness(Klass* k) {
  if (is_participant(k)) {
    return false; // do not report participant types
  } else if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    if (is_concrete_klass(ik)) {
      Method* m = select_method(ik);
      return record_witness(ik, m);
    } else {
      return false; // ignore non-concrete holder class
    }
  } else {
    return false; // no methods to find in an array type
  }
}

Method* LinkedConcreteMethodFinder::select_method(InstanceKlass* recv_klass) {
  Method* selected_method = NULL;
  if (_do_itable_lookup) {
    assert(_declaring_klass->is_interface(), "sanity");
    bool implements_interface; // initialized by method_at_itable_or_null()
    selected_method = recv_klass->method_at_itable_or_null(_declaring_klass, _vtable_index,
                                                           implements_interface); // out parameter
    assert(implements_interface, "not implemented");
  } else {
    selected_method = recv_klass->method_at_vtable(_vtable_index);
  }
  return selected_method; // NULL when corresponding slot is empty (AbstractMethodError case)
}

int LinkedConcreteMethodFinder::compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method,
                                                     // out parameter
                                                     bool& is_itable_index) {
  if (resolved_klass->is_interface() && resolved_method->has_itable_index()) {
    is_itable_index = true;
    return resolved_method->itable_index();
  }
  // Check for default or miranda method first.
  InstanceKlass* declaring_klass = resolved_method->method_holder();
  if (!resolved_klass->is_interface() && declaring_klass->is_interface()) {
    is_itable_index = false;
    return resolved_klass->vtable_index_of_interface_method(resolved_method);
  }
  // At this point we are sure that resolved_method is virtual and not
  // a default or miranda method; therefore, it must have a valid vtable index.
  assert(resolved_method->has_vtable_index(), "");
  is_itable_index = false;
  return resolved_method->vtable_index();
}

bool LinkedConcreteMethodFinder::is_concrete_klass(InstanceKlass* ik) {
  if (!Dependencies::is_concrete_klass(ik)) {
    return false; // not concrete
  }
  if (ik->is_interface()) {
    return false; // interfaces aren't concrete
  }
  if (!ik->is_linked()) {
    return false; // not yet linked classes don't have instances
  }
  return true;
}

#ifdef ASSERT
// Assert that m is inherited into ctxk, without intervening overrides.
// (May return true even if this is not true, in corner cases where we punt.)
bool Dependencies::verify_method_context(InstanceKlass* ctxk, Method* m) {
  if (m->is_private()) {
    return false; // Quick lose. Should not happen.
  }
  if (m->method_holder() == ctxk) {
    return true; // Quick win.
  }
  if (!(m->is_public() || m->is_protected())) {
    // The override story is complex when packages get involved.
    return true; // Must punt the assertion to true.
  }
  Method* lm = ctxk->lookup_method(m->name(), m->signature());
  if (lm == NULL && ctxk->is_instance_klass()) {
    // It might be an interface method
    lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
                                                                        m->signature());
  }
  if (lm == m) {
    // Method m is inherited into ctxk.
    return true;
  }
  if (lm != NULL) {
    if (!(lm->is_public() || lm->is_protected())) {
      // Method is [package-]private, so the override story is complex.
      return true; // Must punt the assertion to true.
    }
    if (lm->is_static()) {
      // Static methods don't override non-static so punt
      return true;
    }
    if (!Dependencies::is_concrete_method(lm, ctxk) &&
        !Dependencies::is_concrete_method(m, ctxk)) {
      // They are both non-concrete
      if (lm->method_holder()->is_subtype_of(m->method_holder())) {
        // Method m is overridden by lm, but both are non-concrete.
        return true;
      }
      if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
          ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
        // Interface method defined in multiple super interfaces
        return true;
      }
    }
  }
  ResourceMark rm;
  tty->print_cr("Dependency method not found in the associated context:");
  tty->print_cr(" context = %s", ctxk->external_name());
  tty->print( " method = "); m->print_short_name(tty); tty->cr();
  if (lm != NULL) {
    tty->print( " found = "); lm->print_short_name(tty); tty->cr();
  }
  return false;
}
#endif // ASSERT
1618
1619
bool Dependencies::is_concrete_klass(Klass* k) {
1620
if (k->is_abstract()) return false;
1621
// %%% We could treat classes which are concrete but
1622
// have not yet been instantiated as virtually abstract.
1623
// This would require a deoptimization barrier on first instantiation.
1624
//if (k->is_not_instantiated()) return false;
1625
return true;
1626
}
1627
1628
bool Dependencies::is_concrete_method(Method* m, Klass* k) {
  // NULL is not a concrete method.
  if (m == NULL) {
    return false;
  }
  // Statics are irrelevant to virtual call sites.
  if (m->is_static()) {
    return false;
  }
  // Abstract methods are not concrete.
  if (m->is_abstract()) {
    return false;
  }
  // Overpass (error) methods are not concrete if k is abstract.
  if (m->is_overpass() && k != NULL) {
    return !k->is_abstract();
  }
  // Note: "true" is the conservative answer; when k == NULL the overpass
  // clause above is skipped, so we return true even if the answer would
  // otherwise depend on it.
  return true;
}

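// Worked example (hypothetical classes): for an abstract class A declaring
// an abstract method m() and a concrete subclass B overriding it,
//   is_concrete_method(A::m, A) == false  (abstract method)
//   is_concrete_method(B::m, B) == true
// and an overpass (error-throwing) method is treated as concrete only if
// the supplied klass is itself non-abstract (with no klass supplied, the
// conservative answer true is returned).
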
Klass* Dependencies::find_finalizable_subclass(InstanceKlass* ik) {
  for (ClassHierarchyIterator iter(ik); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (sub->has_finalizer() && !sub->is_interface()) {
      return sub;
    }
  }
  return NULL; // not found
}

bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
  if (k->is_abstract()) return false;
  // We could also return false if k does not yet appear to be
  // instantiated, if the VM version supports this distinction also.
  //if (k->is_not_instantiated()) return false;
  return true;
}

bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
  return k->has_finalizable_subclass();
}

// Any use of the contents (bytecodes) of a method must be
// marked by an "evol_method" dependency, if those contents
// can change. (Note: A method is always dependent on itself.)
Klass* Dependencies::check_evol_method(Method* m) {
  assert(must_be_in_vm(), "raw oops here");
  // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
  // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
  if (m->is_old()
      || m->number_of_breakpoints() > 0) {
    return m->method_holder();
  } else {
    return NULL;
  }
}

// This is a strong assertion: It is that the given type
// has no subtypes whatever. It is most useful for
// optimizing checks on reflected types or on array types.
// (Checks on types which are derived from real instances
// can be optimized more strongly than this, because we
// know that the checked type comes from a concrete type,
// and therefore we can disregard abstract types.)
Klass* Dependencies::check_leaf_type(InstanceKlass* ctxk) {
  assert(must_be_in_vm(), "raw oops here");
  assert_locked_or_safepoint(Compile_lock);
  Klass* sub = ctxk->subklass();
  if (sub != NULL) {
    return sub;
  } else if (ctxk->nof_implementors() != 0) {
    // if it is an interface, it must be unimplemented
    // (if it is not an interface, nof_implementors is always zero)
    InstanceKlass* impl = ctxk->implementor();
    assert(impl != NULL, "must be set");
    return impl;
  } else {
    return NULL;
  }
}

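// Example (hypothetical): if class C currently has no subclasses, a compiler
// may strength-reduce a subtype check against C to an exact-type comparison
// and record a leaf_type dependency on C; loading any subclass of C (or, for
// an interface, its first implementor) later produces the witness returned
// above and invalidates the dependent code.
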
// Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
// This allows the compiler to narrow occurrences of ctxk by conck,
// when dealing with the types of actual instances.
Klass* Dependencies::check_abstract_with_unique_concrete_subtype(InstanceKlass* ctxk,
                                                                 Klass* conck,
                                                                 NewKlassDepChange* changes) {
  ConcreteSubtypeFinder wf(conck);
  Klass* k = wf.find_witness(ctxk, changes);
  return k;
}


// Find the unique concrete proper subtype of ctxk, or NULL if there
// is more than one concrete proper subtype. If there are no concrete
// proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
// That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
Klass* Dependencies::find_unique_concrete_subtype(InstanceKlass* ctxk) {
  ConcreteSubtypeFinder wf(ctxk);  // Ignore ctxk when walking.
  wf.record_witnesses(1);          // Record one other witness when walking.
  Klass* wit = wf.find_witness(ctxk);
  if (wit != NULL) return NULL;    // Too many witnesses.
  Klass* conck = wf.participant(0);
  if (conck == NULL) {
    return ctxk;                   // Return ctxk as a flag for "no subtypes".
  } else {
#ifndef PRODUCT
    // Make sure the dependency mechanism will pass this discovery:
    if (VerifyDependencies) {
      // Turn off dependency tracing while actually testing deps.
      FlagSetting fs(TraceDependencies, false);
      if (!Dependencies::is_concrete_klass(ctxk)) {
        guarantee(NULL == (void *)
                  check_abstract_with_unique_concrete_subtype(ctxk, conck),
                  "verify dep.");
      }
    }
#endif //PRODUCT
    return conck;
  }
}

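// Usage sketch: when the function above returns a concrete class conck for
// an abstract ctxk, the compiler may narrow values typed as ctxk to conck
// and record an abstract_with_unique_concrete_subtype dependency, so that
// loading another concrete subtype of ctxk later invalidates the compiled
// code that relied on the narrowing.
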
// If a class (or interface) has a unique concrete method uniqm, return NULL.
// Otherwise, return a class that contains an interfering method.
Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
                                                  Method* uniqm,
                                                  NewKlassDepChange* changes) {
  // Here is a missing optimization: If uniqm->is_final(),
  // we don't really need to search beneath it for overrides.
  // This is probably not important, since we don't use dependencies
  // to track final methods. (They can't be "definalized".)
  ConcreteMethodFinder wf(uniqm, uniqm->method_holder());
  Klass* k = wf.find_witness(ctxk, changes);
  return k;
}

// Find the set of all non-abstract methods under ctxk that match m.
// (The method m must be defined or inherited in ctxk.)
// Include m itself in the set, unless it is abstract.
// If this set has exactly one element, return that element.
Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass** participant) {
  // Return NULL if m is marked old; must have been a redefined method.
  if (m->is_old()) {
    return NULL;
  }
  if (m->is_default_method()) {
    return NULL; // not supported
  }
  assert(verify_method_context(ctxk, m), "proper context");
  ConcreteMethodFinder wf(m);
  wf.record_witnesses(1);
  Klass* wit = wf.find_witness(ctxk);
  if (wit != NULL) return NULL; // Too many witnesses.
  Method* fm = wf.found_method(0); // Will be NULL if num_parts == 0.
  if (participant != NULL) {
    (*participant) = wf.participant(0);
  }
  if (Dependencies::is_concrete_method(m, ctxk)) {
    if (fm == NULL) {
      // It turns out that m was always the only implementation.
      fm = m;
    } else if (fm != m) {
      // Two conflicting implementations after all.
      // (This can happen if m is inherited into ctxk and fm overrides it.)
      return NULL;
    }
  }
#ifndef PRODUCT
  // Make sure the dependency mechanism will pass this discovery:
  if (VerifyDependencies && fm != NULL) {
    guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
              "verify dep.");
  }
#endif //PRODUCT
  return fm;
}

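// Devirtualization sketch (hypothetical classes): if ctxk is an abstract
// class Shape and the only concrete implementation of Shape::area() loaded
// so far is Circle::area(), the function above returns Circle::area(); a
// compiler may then bind the virtual call directly to it, guarded by a
// unique_concrete_method dependency that is invalidated when another
// concrete implementation is loaded.
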
// If a class (or interface) has a unique concrete method uniqm, return NULL.
// Otherwise, return a class that contains an interfering method.
Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
                                                  Method* uniqm,
                                                  Klass* resolved_klass,
                                                  Method* resolved_method,
                                                  KlassDepChange* changes) {
  assert(UseVtableBasedCHA, "required");
  assert(!ctxk->is_interface() || ctxk == resolved_klass, "sanity");
  assert(!resolved_method->can_be_statically_bound() || resolved_method == uniqm, "sanity");
  assert(resolved_klass->is_subtype_of(resolved_method->method_holder()), "sanity");

  if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
      !resolved_method->method_holder()->is_linked() ||
      resolved_method->can_be_statically_bound()) {
    // Dependency is redundant, but benign. Just keep it to avoid unnecessary recompilation.
    return NULL; // no vtable index available
  }

  LinkedConcreteMethodFinder mf(InstanceKlass::cast(resolved_klass), resolved_method, uniqm);
  return mf.find_witness(ctxk, changes);
}

// Find the set of all non-abstract methods under ctxk that match m.
// (The method m must be defined or inherited in ctxk.)
// Include m itself in the set, unless it is abstract.
// If this set has exactly one element, return that element.
// Not yet linked subclasses of ctxk are ignored since they don't have any instances yet.
// Additionally, resolved_klass and resolved_method complete the description of the call site being analyzed.
Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass* resolved_klass, Method* resolved_method) {
  // Return NULL if m is marked old; must have been a redefined method.
  if (m->is_old()) {
    return NULL;
  }
  if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
      !resolved_method->method_holder()->is_linked() ||
      resolved_method->can_be_statically_bound()) {
    return m; // nothing to do: no witness under ctxk
  }
  LinkedConcreteMethodFinder wf(InstanceKlass::cast(resolved_klass), resolved_method);
  assert(Dependencies::verify_method_context(ctxk, m), "proper context");
  wf.record_witnesses(1);
  Klass* wit = wf.find_witness(ctxk);
  if (wit != NULL) {
    return NULL; // Too many witnesses.
  }
  // p == NULL when no participants are found (wf.num_participants() == 0).
  // fm == NULL case has 2 meanings:
  // * when p == NULL: no method found;
  // * when p != NULL: AbstractMethodError-throwing method found.
  // Also, found method should always be accompanied by a participant class.
  Klass* p = wf.participant(0);
  Method* fm = wf.found_method(0);
  assert(fm == NULL || p != NULL, "no participant");
  // Normalize all error-throwing cases to NULL.
  if (fm == Universe::throw_illegal_access_error() ||
      fm == Universe::throw_no_such_method_error() ||
      !Dependencies::is_concrete_method(fm, p)) {
    fm = NULL; // error-throwing method
  }
  if (Dependencies::is_concrete_method(m, ctxk)) {
    if (p == NULL) {
      // It turns out that m was always the only implementation.
      assert(fm == NULL, "sanity");
      fm = m;
    }
  }
#ifndef PRODUCT
  // Make sure the dependency mechanism will pass this discovery:
  if (VerifyDependencies && fm != NULL) {
    guarantee(NULL == check_unique_concrete_method(ctxk, fm, resolved_klass, resolved_method),
              "verify dep.");
  }
#endif // PRODUCT
  assert(fm == NULL || !fm->is_abstract(), "sanity");
  // Old CHA conservatively reports concrete methods in abstract classes
  // irrespective of whether they have concrete subclasses or not.
  // Also, abstract root method case is not fully supported.
#ifdef ASSERT
  Klass* uniqp = NULL;
  Method* uniqm = Dependencies::find_unique_concrete_method(ctxk, m, &uniqp);
  assert(uniqm == NULL || uniqm == fm ||
         m->is_abstract() ||
         uniqm->method_holder()->is_abstract() ||
         (fm == NULL && uniqm != NULL && uniqp != NULL && !InstanceKlass::cast(uniqp)->is_linked()),
         "sanity");
#endif // ASSERT
  return fm;
}

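// Note on the variant above: unlike the (ctxk, m, &participant) overload
// earlier in this file, it reasons about the concrete vtable/itable slot
// named by (resolved_klass, resolved_method), ignores subclasses that are
// not yet linked, and folds error-throwing slots into the "no method"
// answer; it backs the unique_concrete_method_4 dependency checked further
// below.
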
Klass* Dependencies::check_has_no_finalizable_subclasses(InstanceKlass* ctxk, NewKlassDepChange* changes) {
  InstanceKlass* search_at = ctxk;
  if (changes != NULL) {
    search_at = changes->new_type(); // just look at the new bit
  }
  return find_finalizable_subclass(search_at);
}

Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
  assert(call_site != NULL, "sanity");
  assert(method_handle != NULL, "sanity");
  assert(call_site->is_a(vmClasses::CallSite_klass()), "sanity");

  if (changes == NULL) {
    // Validate all CallSites
    if (java_lang_invoke_CallSite::target(call_site) != method_handle)
      return call_site->klass(); // assertion failed
  } else {
    // Validate the given CallSite
    if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
      assert(method_handle != changes->method_handle(), "must be");
      return call_site->klass(); // assertion failed
    }
  }
  return NULL; // assertion still valid
}

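// For example, code compiled against the current target of a
// java.lang.invoke.CallSite typically records a call_site_target_value
// dependency; if the target is later replaced (e.g. via
// MutableCallSite.setTarget), the resulting CallSiteDepChange makes the
// check above return a witness and the dependent code is deoptimized.
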
void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
  if (witness != NULL) {
    if (TraceDependencies) {
      print_dependency(witness, /*verbose=*/ true);
    }
    // The following is a no-op unless logging is enabled:
    log_dependency(witness);
  }
}

Klass* Dependencies::DepStream::check_new_klass_dependency(NewKlassDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  Klass* witness = NULL;
  switch (type()) {
  case evol_method:
    witness = check_evol_method(method_argument(0));
    break;
  case leaf_type:
    witness = check_leaf_type(context_type());
    break;
  case abstract_with_unique_concrete_subtype:
    witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
    break;
  case unique_concrete_method_2:
    witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
    break;
  case unique_concrete_method_4:
    witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
    break;
  case no_finalizable_subclasses:
    witness = check_has_no_finalizable_subclasses(context_type(), changes);
    break;
  default:
    witness = NULL;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}

Klass* Dependencies::DepStream::check_klass_init_dependency(KlassInitDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  // No new types added. Only unique_concrete_method_4 is sensitive to class initialization changes.
  Klass* witness = NULL;
  switch (type()) {
  case unique_concrete_method_4:
    witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
    break;
  default:
    witness = NULL;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}

Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  if (changes != NULL) {
    if (UseVtableBasedCHA && changes->is_klass_init_change()) {
      return check_klass_init_dependency(changes->as_klass_init_change());
    } else {
      return check_new_klass_dependency(changes->as_new_klass_change());
    }
  } else {
    Klass* witness = check_new_klass_dependency(NULL);
    // check_klass_init_dependency duplicates check_new_klass_dependency checks when class hierarchy change info is absent.
    assert(witness != NULL || check_klass_init_dependency(NULL) == NULL, "missed dependency");
    return witness;
  }
}

Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  Klass* witness = NULL;
  switch (type()) {
  case call_site_target_value:
    witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
    break;
  default:
    witness = NULL;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}


Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
  // Handle klass dependency
  if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
    return check_klass_dependency(changes.as_klass_change());

  // Handle CallSite dependency
  if (changes.is_call_site_change())
    return check_call_site_dependency(changes.as_call_site_change());

  // irrelevant dependency; skip it
  return NULL;
}


void DepChange::print() {
  int nsup = 0, nint = 0;
  for (ContextStream str(*this); str.next(); ) {
    Klass* k = str.klass();
    switch (str.change_type()) {
    case Change_new_type:
      tty->print_cr(" dependee = %s", k->external_name());
      break;
    case Change_new_sub:
      if (!WizardMode) {
        ++nsup;
      } else {
        tty->print_cr(" context super = %s", k->external_name());
      }
      break;
    case Change_new_impl:
      if (!WizardMode) {
        ++nint;
      } else {
        tty->print_cr(" context interface = %s", k->external_name());
      }
      break;
    default:
      break;
    }
  }
  if (nsup + nint != 0) {
    tty->print_cr(" context supers = %d, interfaces = %d", nsup, nint);
  }
}

void DepChange::ContextStream::start() {
  Klass* type = (_changes.is_klass_change() ? _changes.as_klass_change()->type() : (Klass*) NULL);
  _change_type = (type == NULL ? NO_CHANGE : Start_Klass);
  _klass = type;
  _ti_base = NULL;
  _ti_index = 0;
  _ti_limit = 0;
}

bool DepChange::ContextStream::next() {
  switch (_change_type) {
  case Start_Klass:           // initial state; _klass is the new type
    _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
    _ti_index = 0;
    _change_type = Change_new_type;
    return true;
  case Change_new_type:
    // fall through:
    _change_type = Change_new_sub;
  case Change_new_sub:
    // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
    {
      _klass = _klass->super();
      if (_klass != NULL) {
        return true;
      }
    }
    // else set up _ti_limit and fall through:
    _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
    _change_type = Change_new_impl;
  case Change_new_impl:
    if (_ti_index < _ti_limit) {
      _klass = _ti_base->at(_ti_index++);
      return true;
    }
    // fall through:
    _change_type = NO_CHANGE;   // iterator is exhausted
  case NO_CHANGE:
    break;
  default:
    ShouldNotReachHere();
  }
  return false;
}

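// Iteration order sketch (hypothetical types): for a newly loaded class C
// extending B (which extends A) and implementing interface I, the iterator
// above yields C as Change_new_type, then B, A and the remaining
// superclasses as Change_new_sub, and finally I plus its transitive
// super-interfaces as Change_new_impl.
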
void KlassDepChange::initialize() {
  // entire transaction must be under this lock:
  assert_lock_strong(Compile_lock);

  // Mark the dependee and all of its superclasses
  // Mark transitive interfaces
  for (ContextStream str(*this); str.next(); ) {
    Klass* d = str.klass();
    assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
    InstanceKlass::cast(d)->set_is_marked_dependent(true);
  }
}

KlassDepChange::~KlassDepChange() {
  // Unmark the dependee and all of its superclasses
  // Unmark transitive interfaces
  for (ContextStream str(*this); str.next(); ) {
    Klass* d = str.klass();
    InstanceKlass::cast(d)->set_is_marked_dependent(false);
  }
}

bool KlassDepChange::involves_context(Klass* k) {
  if (k == NULL || !k->is_instance_klass()) {
    return false;
  }
  InstanceKlass* ik = InstanceKlass::cast(k);
  bool is_contained = ik->is_marked_dependent();
  assert(is_contained == type()->is_subtype_of(k),
         "correct marking of potential context types");
  return is_contained;
}

void Dependencies::print_statistics() {
  AbstractClassHierarchyWalker::print_statistics();
}

void AbstractClassHierarchyWalker::print_statistics() {
  if (UsePerfData) {
    jlong deps_find_witness_calls   = _perf_find_witness_anywhere_calls_count->get_value();
    jlong deps_find_witness_steps   = _perf_find_witness_anywhere_steps_count->get_value();
    jlong deps_find_witness_singles = _perf_find_witness_in_calls_count->get_value();

    ttyLocker ttyl;
    tty->print_cr("Dependency check (find_witness) "
                  "calls=" JLONG_FORMAT ", steps=" JLONG_FORMAT " (avg=%.1f), singles=" JLONG_FORMAT,
                  deps_find_witness_calls,
                  deps_find_witness_steps,
                  (double)deps_find_witness_steps / deps_find_witness_calls,
                  deps_find_witness_singles);
    if (xtty != NULL) {
      xtty->elem("deps_find_witness calls='" JLONG_FORMAT "' steps='" JLONG_FORMAT "' singles='" JLONG_FORMAT "'",
                 deps_find_witness_calls,
                 deps_find_witness_steps,
                 deps_find_witness_singles);
    }
  }
}

CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
  _call_site(call_site),
  _method_handle(method_handle) {
  assert(_call_site()->is_a(vmClasses::CallSite_klass()), "must be");
  assert(_method_handle.is_null() || _method_handle()->is_a(vmClasses::MethodHandle_klass()), "must be");
}

void dependencies_init() {
  AbstractClassHierarchyWalker::init();
}
