GitHub Repository: PojavLauncherTeam/openjdk-multiarch-jdk8u
Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/code/dependencies.cpp
1
/*
2
* Copyright (c) 2005, 2014, Oracle and/or its affiliates. All rights reserved.
3
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4
*
5
* This code is free software; you can redistribute it and/or modify it
6
* under the terms of the GNU General Public License version 2 only, as
7
* published by the Free Software Foundation.
8
*
9
* This code is distributed in the hope that it will be useful, but WITHOUT
10
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12
* version 2 for more details (a copy is included in the LICENSE file that
13
* accompanied this code).
14
*
15
* You should have received a copy of the GNU General Public License version
16
* 2 along with this work; if not, write to the Free Software Foundation,
17
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18
*
19
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20
* or visit www.oracle.com if you need additional information or have any
21
* questions.
22
*
23
*/
24
25
#include "precompiled.hpp"
26
#include "ci/ciArrayKlass.hpp"
27
#include "ci/ciEnv.hpp"
28
#include "ci/ciKlass.hpp"
29
#include "ci/ciMethod.hpp"
30
#include "code/dependencies.hpp"
31
#include "compiler/compileLog.hpp"
32
#include "oops/klass.hpp"
33
#include "oops/oop.inline.hpp"
34
#include "runtime/handles.hpp"
35
#include "runtime/handles.inline.hpp"
36
#include "runtime/thread.inline.hpp"
37
#include "utilities/copy.hpp"
38
39
40
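// Overview: a Dependencies object accumulates assertions the compiler makes
// about the class hierarchy and about CallSite targets (e.g. "ctxk has a
// unique concrete subtype", "uniqm is the only concrete implementation").
// The assertions are encoded into the owning nmethod (see encode_content_bytes
// and copy_to below) and re-checked through DepStream when classes are loaded
// or call sites change, so nmethods built on broken assumptions can be
// deoptimized.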
#ifdef ASSERT
41
static bool must_be_in_vm() {
42
Thread* thread = Thread::current();
43
if (thread->is_Java_thread())
44
return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
45
else
46
return true; //something like this: thread->is_VM_thread();
47
}
48
#endif //ASSERT
49
50
void Dependencies::initialize(ciEnv* env) {
51
Arena* arena = env->arena();
52
_oop_recorder = env->oop_recorder();
53
_log = env->log();
54
_dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
55
DEBUG_ONLY(_deps[end_marker] = NULL);
56
for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
57
_deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
58
}
59
_content_bytes = NULL;
60
_size_in_bytes = (size_t)-1;
61
62
assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
63
}
64
65
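// Illustrative note (not code from this file): a compiler front end that has
// devirtualized a call after CHA records that fact roughly like
//   dependencies()->assert_unique_concrete_method(ctxk, target);
// so that loading a class with another concrete implementation later
// invalidates the nmethod.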
void Dependencies::assert_evol_method(ciMethod* m) {
66
assert_common_1(evol_method, m);
67
}
68
69
void Dependencies::assert_leaf_type(ciKlass* ctxk) {
70
if (ctxk->is_array_klass()) {
71
// As a special case, support this assertion on an array type,
72
// which reduces to an assertion on its element type.
73
// Note that this cannot be done with assertions that
74
// relate to concreteness or abstractness.
75
ciType* elemt = ctxk->as_array_klass()->base_element_type();
76
if (!elemt->is_instance_klass()) return; // Ex: int[][]
77
ctxk = elemt->as_instance_klass();
78
//if (ctxk->is_final()) return; // Ex: String[][]
79
}
80
check_ctxk(ctxk);
81
assert_common_1(leaf_type, ctxk);
82
}
83
84
void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
85
check_ctxk_abstract(ctxk);
86
assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
87
}
88
89
void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
90
check_ctxk_abstract(ctxk);
91
assert_common_1(abstract_with_no_concrete_subtype, ctxk);
92
}
93
94
void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
95
check_ctxk_concrete(ctxk);
96
assert_common_1(concrete_with_no_concrete_subtype, ctxk);
97
}
98
99
void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
100
check_ctxk(ctxk);
101
assert_common_2(unique_concrete_method, ctxk, uniqm);
102
}
103
104
void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
105
check_ctxk(ctxk);
106
assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
107
}
108
109
void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
110
check_ctxk(ctxk);
111
assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
112
}
113
114
void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
115
check_ctxk(ctxk);
116
assert_common_1(no_finalizable_subclasses, ctxk);
117
}
118
119
void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
120
check_ctxk(call_site->klass());
121
assert_common_2(call_site_target_value, call_site, method_handle);
122
}
123
124
// Helper function. If we are adding a new dep. under ctxk2,
125
// try to find an old dep. under a broader* ctxk1. If there is
126
// one, merge the new dep. into it: either the old context already covers
// ctxk2, or the old entry is widened to ctxk2. Returns false if the two
// contexts are unrelated and the caller must append a separate entry.
127
bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
128
int ctxk_i, ciKlass* ctxk2) {
129
ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
130
if (ctxk2->is_subtype_of(ctxk1)) {
131
return true; // success, and no need to change
132
} else if (ctxk1->is_subtype_of(ctxk2)) {
133
// new context class fully subsumes previous one
134
deps->at_put(ctxk_i, ctxk2);
135
return true;
136
} else {
137
return false;
138
}
139
}
140
141
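// Note on storage: each _deps[dept] bucket is a flat GrowableArray in which
// one assertion occupies dep_args(dept) consecutive slots; the redundancy
// checks below therefore scan the bucket in strides.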
void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
142
assert(dep_args(dept) == 1, "sanity");
143
log_dependency(dept, x);
144
GrowableArray<ciBaseObject*>* deps = _deps[dept];
145
146
// see if the same (or a similar) dep is already recorded
147
if (note_dep_seen(dept, x)) {
148
assert(deps->find(x) >= 0, "sanity");
149
} else {
150
deps->append(x);
151
}
152
}
153
154
void Dependencies::assert_common_2(DepType dept,
155
ciBaseObject* x0, ciBaseObject* x1) {
156
assert(dep_args(dept) == 2, "sanity");
157
log_dependency(dept, x0, x1);
158
GrowableArray<ciBaseObject*>* deps = _deps[dept];
159
160
// see if the same (or a similar) dep is already recorded
161
bool has_ctxk = has_explicit_context_arg(dept);
162
if (has_ctxk) {
163
assert(dep_context_arg(dept) == 0, "sanity");
164
if (note_dep_seen(dept, x1)) {
165
// look in this bucket for redundant assertions
166
const int stride = 2;
167
for (int i = deps->length(); (i -= stride) >= 0; ) {
168
ciBaseObject* y1 = deps->at(i+1);
169
if (x1 == y1) { // same subject; check the context
170
if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
171
return;
172
}
173
}
174
}
175
}
176
} else {
177
assert(dep_implicit_context_arg(dept) == 0, "sanity");
178
if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
179
// look in this bucket for redundant assertions
180
const int stride = 2;
181
for (int i = deps->length(); (i -= stride) >= 0; ) {
182
ciBaseObject* y0 = deps->at(i+0);
183
ciBaseObject* y1 = deps->at(i+1);
184
if (x0 == y0 && x1 == y1) {
185
return;
186
}
187
}
188
}
189
}
190
191
// append the assertion in the correct bucket:
192
deps->append(x0);
193
deps->append(x1);
194
}
195
196
void Dependencies::assert_common_3(DepType dept,
197
ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
198
assert(dep_context_arg(dept) == 0, "sanity");
199
assert(dep_args(dept) == 3, "sanity");
200
log_dependency(dept, ctxk, x, x2);
201
GrowableArray<ciBaseObject*>* deps = _deps[dept];
202
203
// try to normalize an unordered pair:
204
bool swap = false;
205
switch (dept) {
206
case abstract_with_exclusive_concrete_subtypes_2:
207
swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
208
break;
209
case exclusive_concrete_methods_2:
210
swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
211
break;
212
}
213
if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
214
215
// see if the same (or a similar) dep is already recorded
216
if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
217
// look in this bucket for redundant assertions
218
const int stride = 3;
219
for (int i = deps->length(); (i -= stride) >= 0; ) {
220
ciBaseObject* y = deps->at(i+1);
221
ciBaseObject* y2 = deps->at(i+2);
222
if (x == y && x2 == y2) { // same subjects; check the context
223
if (maybe_merge_ctxk(deps, i+0, ctxk)) {
224
return;
225
}
226
}
227
}
228
}
229
// append the assertion in the correct bucket:
230
deps->append(ctxk);
231
deps->append(x);
232
deps->append(x2);
233
}
234
235
/// Support for encoding dependencies into an nmethod:
236
237
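// copy_to() moves whole HeapWords; this is legal because encode_content_bytes()
// pads the stream to a word boundary with end_marker bytes.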
void Dependencies::copy_to(nmethod* nm) {
238
address beg = nm->dependencies_begin();
239
address end = nm->dependencies_end();
240
guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
241
Copy::disjoint_words((HeapWord*) content_bytes(),
242
(HeapWord*) beg,
243
size_in_bytes() / sizeof(HeapWord));
244
assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
245
}
246
247
static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
248
for (int i = 0; i < narg; i++) {
249
int diff = p1[i]->ident() - p2[i]->ident();
250
if (diff != 0) return diff;
251
}
252
return 0;
253
}
254
static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
255
{ return sort_dep(p1, p2, 1); }
256
static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
257
{ return sort_dep(p1, p2, 2); }
258
static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
259
{ return sort_dep(p1, p2, 3); }
260
261
void Dependencies::sort_all_deps() {
262
for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
263
DepType dept = (DepType)deptv;
264
GrowableArray<ciBaseObject*>* deps = _deps[dept];
265
if (deps->length() <= 1) continue;
266
switch (dep_args(dept)) {
267
case 1: deps->sort(sort_dep_arg_1, 1); break;
268
case 2: deps->sort(sort_dep_arg_2, 2); break;
269
case 3: deps->sort(sort_dep_arg_3, 3); break;
270
default: ShouldNotReachHere();
271
}
272
}
273
}
274
275
size_t Dependencies::estimate_size_in_bytes() {
276
size_t est_size = 100;
277
for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
278
DepType dept = (DepType)deptv;
279
GrowableArray<ciBaseObject*>* deps = _deps[dept];
280
est_size += deps->length()*2; // tags and argument(s)
281
}
282
return est_size;
283
}
284
285
ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
286
switch (dept) {
287
case abstract_with_exclusive_concrete_subtypes_2:
288
return x->as_metadata()->as_klass();
289
case unique_concrete_method:
290
case exclusive_concrete_methods_2:
291
return x->as_metadata()->as_method()->holder();
292
}
293
return NULL; // let NULL be NULL
294
}
295
296
Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
297
assert(must_be_in_vm(), "raw oops here");
298
switch (dept) {
299
case abstract_with_exclusive_concrete_subtypes_2:
300
assert(x->is_klass(), "sanity");
301
return (Klass*) x;
302
case unique_concrete_method:
303
case exclusive_concrete_methods_2:
304
assert(x->is_method(), "sanity");
305
return ((Method*)x)->method_holder();
306
}
307
return NULL; // let NULL be NULL
308
}
309
310
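// Encoding produced below: for each assertion, one tag byte holding the
// DepType (with default_context_type_bit set when the context klass is implied
// by the following argument and therefore omitted), followed by one compressed
// int per remaining argument, each an index into the OopRecorder. The stream
// is terminated, and padded to a HeapWord boundary, with end_marker bytes.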
void Dependencies::encode_content_bytes() {
311
sort_all_deps();
312
313
// cast is safe, no deps can overflow INT_MAX
314
CompressedWriteStream bytes((int)estimate_size_in_bytes());
315
316
for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
317
DepType dept = (DepType)deptv;
318
GrowableArray<ciBaseObject*>* deps = _deps[dept];
319
if (deps->length() == 0) continue;
320
int stride = dep_args(dept);
321
int ctxkj = dep_context_arg(dept); // -1 if no context arg
322
assert(stride > 0, "sanity");
323
for (int i = 0; i < deps->length(); i += stride) {
324
jbyte code_byte = (jbyte)dept;
325
int skipj = -1;
326
if (ctxkj >= 0 && ctxkj+1 < stride) {
327
ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
328
ciBaseObject* x = deps->at(i+ctxkj+1); // following argument
329
if (ctxk == ctxk_encoded_as_null(dept, x)) {
330
skipj = ctxkj; // we win: maybe one less oop to keep track of
331
code_byte |= default_context_type_bit;
332
}
333
}
334
bytes.write_byte(code_byte);
335
for (int j = 0; j < stride; j++) {
336
if (j == skipj) continue;
337
ciBaseObject* v = deps->at(i+j);
338
int idx;
339
if (v->is_object()) {
340
idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
341
} else {
342
ciMetadata* meta = v->as_metadata();
343
idx = _oop_recorder->find_index(meta->constant_encoding());
344
}
345
bytes.write_int(idx);
346
}
347
}
348
}
349
350
// write a sentinel byte to mark the end
351
bytes.write_byte(end_marker);
352
353
// round it out to a word boundary
354
while (bytes.position() % sizeof(HeapWord) != 0) {
355
bytes.write_byte(end_marker);
356
}
357
358
// check whether the dept byte encoding really works
359
assert((jbyte)default_context_type_bit != 0, "byte overflow");
360
361
_content_bytes = bytes.buffer();
362
_size_in_bytes = bytes.position();
363
}
364
365
366
const char* Dependencies::_dep_name[TYPE_LIMIT] = {
367
"end_marker",
368
"evol_method",
369
"leaf_type",
370
"abstract_with_unique_concrete_subtype",
371
"abstract_with_no_concrete_subtype",
372
"concrete_with_no_concrete_subtype",
373
"unique_concrete_method",
374
"abstract_with_exclusive_concrete_subtypes_2",
375
"exclusive_concrete_methods_2",
376
"no_finalizable_subclasses",
377
"call_site_target_value"
378
};
379
380
int Dependencies::_dep_args[TYPE_LIMIT] = {
381
-1,// end_marker
382
1, // evol_method m
383
1, // leaf_type ctxk
384
2, // abstract_with_unique_concrete_subtype ctxk, k
385
1, // abstract_with_no_concrete_subtype ctxk
386
1, // concrete_with_no_concrete_subtype ctxk
387
2, // unique_concrete_method ctxk, m
388
3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
389
3, // exclusive_concrete_methods_2 ctxk, m1, m2
390
1, // no_finalizable_subclasses ctxk
391
2 // call_site_target_value call_site, method_handle
392
};
393
394
const char* Dependencies::dep_name(Dependencies::DepType dept) {
395
if (!dept_in_mask(dept, all_types)) return "?bad-dep?";
396
return _dep_name[dept];
397
}
398
399
int Dependencies::dep_args(Dependencies::DepType dept) {
400
if (!dept_in_mask(dept, all_types)) return -1;
401
return _dep_args[dept];
402
}
403
404
void Dependencies::check_valid_dependency_type(DepType dept) {
405
guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
406
}
407
408
// for the sake of the compiler log, print out current dependencies:
409
void Dependencies::log_all_dependencies() {
410
if (log() == NULL) return;
411
ResourceMark rm;
412
for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
413
DepType dept = (DepType)deptv;
414
GrowableArray<ciBaseObject*>* deps = _deps[dept];
415
int deplen = deps->length();
416
if (deplen == 0) {
417
continue;
418
}
419
int stride = dep_args(dept);
420
GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
421
for (int i = 0; i < deps->length(); i += stride) {
422
for (int j = 0; j < stride; j++) {
423
// flush out the identities before printing
424
ciargs->push(deps->at(i+j));
425
}
426
write_dependency_to(log(), dept, ciargs);
427
ciargs->clear();
428
}
429
guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
430
}
431
}
432
433
void Dependencies::write_dependency_to(CompileLog* log,
434
DepType dept,
435
GrowableArray<DepArgument>* args,
436
Klass* witness) {
437
if (log == NULL) {
438
return;
439
}
440
ResourceMark rm;
441
ciEnv* env = ciEnv::current();
442
GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
443
for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
444
DepArgument arg = *it;
445
if (arg.is_oop()) {
446
ciargs->push(env->get_object(arg.oop_value()));
447
} else {
448
ciargs->push(env->get_metadata(arg.metadata_value()));
449
}
450
}
451
int argslen = ciargs->length();
452
Dependencies::write_dependency_to(log, dept, ciargs, witness);
453
guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
454
}
455
456
void Dependencies::write_dependency_to(CompileLog* log,
457
DepType dept,
458
GrowableArray<ciBaseObject*>* args,
459
Klass* witness) {
460
if (log == NULL) {
461
return;
462
}
463
ResourceMark rm;
464
GrowableArray<int>* argids = new GrowableArray<int>(args->length());
465
for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
466
ciBaseObject* obj = *it;
467
if (obj->is_object()) {
468
argids->push(log->identify(obj->as_object()));
469
} else {
470
argids->push(log->identify(obj->as_metadata()));
471
}
472
}
473
if (witness != NULL) {
474
log->begin_elem("dependency_failed");
475
} else {
476
log->begin_elem("dependency");
477
}
478
log->print(" type='%s'", dep_name(dept));
479
const int ctxkj = dep_context_arg(dept); // -1 if no context arg
480
if (ctxkj >= 0 && ctxkj < argids->length()) {
481
log->print(" ctxk='%d'", argids->at(ctxkj));
482
}
483
// write remaining arguments, if any.
484
for (int j = 0; j < argids->length(); j++) {
485
if (j == ctxkj) continue; // already logged
486
if (j == 1) {
487
log->print( " x='%d'", argids->at(j));
488
} else {
489
log->print(" x%d='%d'", j, argids->at(j));
490
}
491
}
492
if (witness != NULL) {
493
log->object("witness", witness);
494
log->stamp();
495
}
496
log->end_elem();
497
}
498
499
void Dependencies::write_dependency_to(xmlStream* xtty,
500
DepType dept,
501
GrowableArray<DepArgument>* args,
502
Klass* witness) {
503
if (xtty == NULL) {
504
return;
505
}
506
ResourceMark rm;
507
ttyLocker ttyl;
508
int ctxkj = dep_context_arg(dept); // -1 if no context arg
509
if (witness != NULL) {
510
xtty->begin_elem("dependency_failed");
511
} else {
512
xtty->begin_elem("dependency");
513
}
514
xtty->print(" type='%s'", dep_name(dept));
515
if (ctxkj >= 0) {
516
xtty->object("ctxk", args->at(ctxkj).metadata_value());
517
}
518
// write remaining arguments, if any.
519
for (int j = 0; j < args->length(); j++) {
520
if (j == ctxkj) continue; // already logged
521
DepArgument arg = args->at(j);
522
if (j == 1) {
523
if (arg.is_oop()) {
524
xtty->object("x", arg.oop_value());
525
} else {
526
xtty->object("x", arg.metadata_value());
527
}
528
} else {
529
char xn[12]; sprintf(xn, "x%d", j);
530
if (arg.is_oop()) {
531
xtty->object(xn, arg.oop_value());
532
} else {
533
xtty->object(xn, arg.metadata_value());
534
}
535
}
536
}
537
if (witness != NULL) {
538
xtty->object("witness", witness);
539
xtty->stamp();
540
}
541
xtty->end_elem();
542
}
543
544
void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
545
Klass* witness) {
546
ResourceMark rm;
547
ttyLocker ttyl; // keep the following output all in one block
548
tty->print_cr("%s of type %s",
549
(witness == NULL)? "Dependency": "Failed dependency",
550
dep_name(dept));
551
// print arguments
552
int ctxkj = dep_context_arg(dept); // -1 if no context arg
553
for (int j = 0; j < args->length(); j++) {
554
DepArgument arg = args->at(j);
555
bool put_star = false;
556
if (arg.is_null()) continue;
557
const char* what;
558
if (j == ctxkj) {
559
assert(arg.is_metadata(), "must be");
560
what = "context";
561
put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
562
} else if (arg.is_method()) {
563
what = "method ";
564
put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
565
} else if (arg.is_klass()) {
566
what = "class ";
567
} else {
568
what = "object ";
569
}
570
tty->print(" %s = %s", what, (put_star? "*": ""));
571
if (arg.is_klass())
572
tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
573
else if (arg.is_method())
574
((Method*)arg.metadata_value())->print_value();
575
else
576
ShouldNotReachHere(); // Provide impl for this type.
577
tty->cr();
578
}
579
if (witness != NULL) {
580
bool put_star = !Dependencies::is_concrete_klass(witness);
581
tty->print_cr(" witness = %s%s",
582
(put_star? "*": ""),
583
witness->external_name());
584
}
585
}
586
587
void Dependencies::DepStream::log_dependency(Klass* witness) {
588
if (_deps == NULL && xtty == NULL) return; // fast cutout for runtime
589
ResourceMark rm;
590
const int nargs = argument_count();
591
GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
592
for (int j = 0; j < nargs; j++) {
593
if (type() == call_site_target_value) {
594
args->push(argument_oop(j));
595
} else {
596
args->push(argument(j));
597
}
598
}
599
int argslen = args->length();
600
if (_deps != NULL && _deps->log() != NULL) {
601
Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
602
} else {
603
Dependencies::write_dependency_to(xtty, type(), args, witness);
604
}
605
guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
606
}
607
608
void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
609
ResourceMark rm;
610
int nargs = argument_count();
611
GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
612
for (int j = 0; j < nargs; j++) {
613
args->push(argument(j));
614
}
615
int argslen = args->length();
616
Dependencies::print_dependency(type(), args, witness);
617
if (verbose) {
618
if (_code != NULL) {
619
tty->print(" code: ");
620
_code->print_value_on(tty);
621
tty->cr();
622
}
623
}
624
guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
625
}
626
627
628
/// Dependency stream support (decodes dependencies from an nmethod):
629
630
#ifdef ASSERT
631
void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
632
assert(must_be_in_vm(), "raw oops here");
633
_byte_limit = byte_limit;
634
_type = (DepType)(end_marker-1); // defeat "already at end" assert
635
assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
636
}
637
#endif //ASSERT
638
639
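// next() decodes one assertion, reversing encode_content_bytes(): read a tag
// byte, strip default_context_type_bit, then read one index per argument.
// A context argument omitted during encoding is stored as 0 here and is
// reconstructed lazily by argument() via ctxk_encoded_as_null().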
bool Dependencies::DepStream::next() {
640
assert(_type != end_marker, "already at end");
641
if (_bytes.position() == 0 && _code != NULL
642
&& _code->dependencies_size() == 0) {
643
// Method has no dependencies at all.
644
return false;
645
}
646
int code_byte = (_bytes.read_byte() & 0xFF);
647
if (code_byte == end_marker) {
648
DEBUG_ONLY(_type = end_marker);
649
return false;
650
} else {
651
int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
652
code_byte -= ctxk_bit;
653
DepType dept = (DepType)code_byte;
654
_type = dept;
655
Dependencies::check_valid_dependency_type(dept);
656
int stride = _dep_args[dept];
657
assert(stride == dep_args(dept), "sanity");
658
int skipj = -1;
659
if (ctxk_bit != 0) {
660
skipj = 0; // currently the only context argument is at zero
661
assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
662
}
663
for (int j = 0; j < stride; j++) {
664
_xi[j] = (j == skipj)? 0: _bytes.read_int();
665
}
666
DEBUG_ONLY(_xi[stride] = -1); // help detect overruns
667
return true;
668
}
669
}
670
671
inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
672
Metadata* o = NULL;
673
if (_code != NULL) {
674
o = _code->metadata_at(i);
675
} else {
676
o = _deps->oop_recorder()->metadata_at(i);
677
}
678
return o;
679
}
680
681
inline oop Dependencies::DepStream::recorded_oop_at(int i) {
682
return (_code != NULL)
683
? _code->oop_at(i)
684
: JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
685
}
686
687
Metadata* Dependencies::DepStream::argument(int i) {
688
Metadata* result = recorded_metadata_at(argument_index(i));
689
690
if (result == NULL) { // Explicit context argument can be compressed
691
int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
692
if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
693
result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
694
}
695
}
696
697
assert(result == NULL || result->is_klass() || result->is_method(), "must be");
698
return result;
699
}
700
701
oop Dependencies::DepStream::argument_oop(int i) {
702
oop result = recorded_oop_at(argument_index(i));
703
assert(result == NULL || result->is_oop(), "must be");
704
return result;
705
}
706
707
Klass* Dependencies::DepStream::context_type() {
708
assert(must_be_in_vm(), "raw oops here");
709
710
// Most dependencies have an explicit context type argument.
711
{
712
int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
713
if (ctxkj >= 0) {
714
Metadata* k = argument(ctxkj);
715
assert(k != NULL && k->is_klass(), "type check");
716
return (Klass*)k;
717
}
718
}
719
720
// Some dependencies are using the klass of the first object
721
// argument as implicit context type (e.g. call_site_target_value).
722
{
723
int ctxkj = dep_implicit_context_arg(type());
724
if (ctxkj >= 0) {
725
Klass* k = argument_oop(ctxkj)->klass();
726
assert(k != NULL && k->is_klass(), "type check");
727
return (Klass*) k;
728
}
729
}
730
731
// And some dependencies don't have a context type at all,
732
// e.g. evol_method.
733
return NULL;
734
}
735
736
/// Checking dependencies:
737
738
// This hierarchy walker inspects subtypes of a given type,
739
// trying to find a "bad" class which breaks a dependency.
740
// Such a class is called a "witness" to the broken dependency.
741
// While searching around, we ignore "participants", which
742
// are already known to the dependency.
743
class ClassHierarchyWalker {
744
public:
745
enum { PARTICIPANT_LIMIT = 3 };
746
747
private:
748
// optional method descriptor to check for:
749
Symbol* _name;
750
Symbol* _signature;
751
752
// special classes which are not allowed to be witnesses:
753
Klass* _participants[PARTICIPANT_LIMIT+1];
754
int _num_participants;
755
756
// cache of method lookups
757
Method* _found_methods[PARTICIPANT_LIMIT+1];
758
759
// if non-zero, tells how many witnesses to convert to participants
760
int _record_witnesses;
761
762
void initialize(Klass* participant) {
763
_record_witnesses = 0;
764
_participants[0] = participant;
765
_found_methods[0] = NULL;
766
_num_participants = 0;
767
if (participant != NULL) {
768
// Terminating NULL.
769
_participants[1] = NULL;
770
_found_methods[1] = NULL;
771
_num_participants = 1;
772
}
773
}
774
775
void initialize_from_method(Method* m) {
776
assert(m != NULL && m->is_method(), "sanity");
777
_name = m->name();
778
_signature = m->signature();
779
}
780
781
public:
782
// The walker is initialized to recognize certain methods and/or types
783
// as friendly participants.
784
ClassHierarchyWalker(Klass* participant, Method* m) {
785
initialize_from_method(m);
786
initialize(participant);
787
}
788
ClassHierarchyWalker(Method* m) {
789
initialize_from_method(m);
790
initialize(NULL);
791
}
792
ClassHierarchyWalker(Klass* participant = NULL) {
793
_name = NULL;
794
_signature = NULL;
795
initialize(participant);
796
}
797
ClassHierarchyWalker(Klass* participants[], int num_participants) {
798
_name = NULL;
799
_signature = NULL;
800
initialize(NULL);
801
for (int i = 0; i < num_participants; ++i) {
802
add_participant(participants[i]);
803
}
804
}
805
806
// This is common code for two searches: One for concrete subtypes,
807
// the other for concrete method implementations and overrides.
808
bool doing_subtype_search() {
809
return _name == NULL;
810
}
811
812
int num_participants() { return _num_participants; }
813
Klass* participant(int n) {
814
assert((uint)n <= (uint)_num_participants, "oob");
815
return _participants[n];
816
}
817
818
// Note: If n==num_participants, returns NULL.
819
Method* found_method(int n) {
820
assert((uint)n <= (uint)_num_participants, "oob");
821
Method* fm = _found_methods[n];
822
assert(n == _num_participants || fm != NULL, "proper usage");
823
if (fm != NULL && fm->method_holder() != _participants[n]) {
824
// Default methods from interfaces can be added to classes. In
825
// that case the holder of the method is not the class but the
826
// interface where it's defined.
827
assert(fm->is_default_method(), "sanity");
828
return NULL;
829
}
830
return fm;
831
}
832
833
#ifdef ASSERT
834
// Assert that m is inherited into ctxk, without intervening overrides.
835
// (May return true even if this is not true, in corner cases where we punt.)
836
bool check_method_context(Klass* ctxk, Method* m) {
837
if (m->method_holder() == ctxk)
838
return true; // Quick win.
839
if (m->is_private())
840
return false; // Quick lose. Should not happen.
841
if (!(m->is_public() || m->is_protected()))
842
// The override story is complex when packages get involved.
843
return true; // Must punt the assertion to true.
844
Klass* k = ctxk;
845
Method* lm = k->lookup_method(m->name(), m->signature());
846
if (lm == NULL && k->oop_is_instance()) {
847
// It might be an interface method
848
lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
849
m->signature());
850
}
851
if (lm == m)
852
// Method m is inherited into ctxk.
853
return true;
854
if (lm != NULL) {
855
if (!(lm->is_public() || lm->is_protected())) {
856
// Method is [package-]private, so the override story is complex.
857
return true; // Must punt the assertion to true.
858
}
859
if (lm->is_static()) {
860
// Static methods don't override non-static so punt
861
return true;
862
}
863
if ( !Dependencies::is_concrete_method(lm, k)
864
&& !Dependencies::is_concrete_method(m, ctxk)
865
&& lm->method_holder()->is_subtype_of(m->method_holder()))
866
// Method m is overridden by lm, but both are non-concrete.
867
return true;
868
}
869
ResourceMark rm;
870
tty->print_cr("Dependency method not found in the associated context:");
871
tty->print_cr(" context = %s", ctxk->external_name());
872
tty->print( " method = "); m->print_short_name(tty); tty->cr();
873
if (lm != NULL) {
874
tty->print( " found = "); lm->print_short_name(tty); tty->cr();
875
}
876
return false;
877
}
878
#endif
879
880
void add_participant(Klass* participant) {
881
assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
882
int np = _num_participants++;
883
_participants[np] = participant;
884
_participants[np+1] = NULL;
885
_found_methods[np+1] = NULL;
886
}
887
888
void record_witnesses(int add) {
889
if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT;
890
assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
891
_record_witnesses = add;
892
}
893
894
bool is_witness(Klass* k) {
895
if (doing_subtype_search()) {
896
return Dependencies::is_concrete_klass(k);
897
} else if (!k->oop_is_instance()) {
898
return false; // no methods to find in an array type
899
} else {
900
// Search class hierarchy first, skipping private implementations
901
// as they never override any inherited methods
902
Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::skip_private);
903
if (!Dependencies::is_concrete_method(m, k)) {
904
// Check for re-abstraction of method
905
if (!k->is_interface() && m != NULL && m->is_abstract()) {
906
// Found a matching abstract method 'm' in the class hierarchy.
907
// This is fine iff 'k' is an abstract class and all concrete subtypes
908
// of 'k' override 'm' and are participants of the current search.
909
ClassHierarchyWalker wf(_participants, _num_participants);
910
Klass* w = wf.find_witness_subtype(k);
911
if (w != NULL) {
912
Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::skip_private);
913
if (!Dependencies::is_concrete_method(wm, w)) {
914
// Found a concrete subtype 'w' which does not override abstract method 'm'.
915
// Bail out because 'm' could be called with 'w' as receiver (leading to an
916
// AbstractMethodError) and thus the method we are looking for is not unique.
917
_found_methods[_num_participants] = m;
918
return true;
919
}
920
}
921
}
922
// Check interface defaults also, if any exist.
923
Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
924
if (default_methods == NULL)
925
return false;
926
m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
927
if (!Dependencies::is_concrete_method(m, NULL))
928
return false;
929
}
930
_found_methods[_num_participants] = m;
931
// Note: If add_participant(k) is called,
932
// the method m will already be memoized for it.
933
return true;
934
}
935
}
936
937
bool is_participant(Klass* k) {
938
if (k == _participants[0]) {
939
return true;
940
} else if (_num_participants <= 1) {
941
return false;
942
} else {
943
return in_list(k, &_participants[1]);
944
}
945
}
946
bool ignore_witness(Klass* witness) {
947
if (_record_witnesses == 0) {
948
return false;
949
} else {
950
--_record_witnesses;
951
add_participant(witness);
952
return true;
953
}
954
}
955
static bool in_list(Klass* x, Klass** list) {
956
for (int i = 0; ; i++) {
957
Klass* y = list[i];
958
if (y == NULL) break;
959
if (y == x) return true;
960
}
961
return false; // not in list
962
}
963
964
private:
965
// the actual search method:
966
Klass* find_witness_anywhere(Klass* context_type,
967
bool participants_hide_witnesses,
968
bool top_level_call = true);
969
// the spot-checking version:
970
Klass* find_witness_in(KlassDepChange& changes,
971
Klass* context_type,
972
bool participants_hide_witnesses);
973
bool witnessed_reabstraction_in_supers(Klass* k);
974
public:
975
Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
976
assert(doing_subtype_search(), "must set up a subtype search");
977
// When looking for unexpected concrete types,
978
// do not look beneath expected ones.
979
const bool participants_hide_witnesses = true;
980
// CX > CC > C' is OK, even if C' is new.
981
// CX > { CC, C' } is not OK if C' is new, and C' is the witness.
982
if (changes != NULL) {
983
return find_witness_in(*changes, context_type, participants_hide_witnesses);
984
} else {
985
return find_witness_anywhere(context_type, participants_hide_witnesses);
986
}
987
}
988
Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
989
assert(!doing_subtype_search(), "must set up a method definer search");
990
// When looking for unexpected concrete methods,
991
// look beneath expected ones, to see if there are overrides.
992
const bool participants_hide_witnesses = true;
993
// CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
994
if (changes != NULL) {
995
return find_witness_in(*changes, context_type, !participants_hide_witnesses);
996
} else {
997
return find_witness_anywhere(context_type, !participants_hide_witnesses);
998
}
999
}
1000
};
1001
1002
#ifndef PRODUCT
1003
static int deps_find_witness_calls = 0;
1004
static int deps_find_witness_steps = 0;
1005
static int deps_find_witness_recursions = 0;
1006
static int deps_find_witness_singles = 0;
1007
static int deps_find_witness_print = 0; // set to -1 to force a final print
1008
static bool count_find_witness_calls() {
1009
if (TraceDependencies || LogCompilation) {
1010
int pcount = deps_find_witness_print + 1;
1011
bool final_stats = (pcount == 0);
1012
bool initial_call = (pcount == 1);
1013
bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1014
if (pcount < 0) pcount = 1; // crude overflow protection
1015
deps_find_witness_print = pcount;
1016
if (VerifyDependencies && initial_call) {
1017
tty->print_cr("Warning: TraceDependencies results may be inflated by VerifyDependencies");
1018
}
1019
if (occasional_print || final_stats) {
1020
// Every now and then dump a little info about dependency searching.
1021
if (xtty != NULL) {
1022
ttyLocker ttyl;
1023
xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1024
deps_find_witness_calls,
1025
deps_find_witness_steps,
1026
deps_find_witness_recursions,
1027
deps_find_witness_singles);
1028
}
1029
if (final_stats || (TraceDependencies && WizardMode)) {
1030
ttyLocker ttyl;
1031
tty->print_cr("Dependency check (find_witness) "
1032
"calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1033
deps_find_witness_calls,
1034
deps_find_witness_steps,
1035
(double)deps_find_witness_steps / deps_find_witness_calls,
1036
deps_find_witness_recursions,
1037
deps_find_witness_singles);
1038
}
1039
}
1040
return true;
1041
}
1042
return false;
1043
}
1044
#else
1045
#define count_find_witness_calls() (0)
1046
#endif //PRODUCT
1047
1048
Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1049
Klass* context_type,
1050
bool participants_hide_witnesses) {
1051
assert(changes.involves_context(context_type), "irrelevant dependency");
1052
Klass* new_type = changes.new_type();
1053
1054
(void)count_find_witness_calls();
1055
NOT_PRODUCT(deps_find_witness_singles++);
1056
1057
// Current thread must be in VM (not native mode, as in CI):
1058
assert(must_be_in_vm(), "raw oops here");
1059
// Must not move the class hierarchy during this check:
1060
assert_locked_or_safepoint(Compile_lock);
1061
1062
int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1063
if (nof_impls > 1) {
1064
// Avoid this case: *I.m > { A.m, C }; B.m > C
1065
// %%% Until this is fixed more systematically, bail out.
1066
// See corresponding comment in find_witness_anywhere.
1067
return context_type;
1068
}
1069
1070
assert(!is_participant(new_type), "only old classes are participants");
1071
if (participants_hide_witnesses) {
1072
// If the new type is a subtype of a participant, we are done.
1073
for (int i = 0; i < num_participants(); i++) {
1074
Klass* part = participant(i);
1075
if (part == NULL) continue;
1076
assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1077
"correct marking of participants, b/c new_type is unique");
1078
if (changes.involves_context(part)) {
1079
// new guy is protected from this check by previous participant
1080
return NULL;
1081
}
1082
}
1083
}
1084
1085
if (is_witness(new_type)) {
1086
if (!ignore_witness(new_type)) {
1087
return new_type;
1088
}
1089
} else if (!doing_subtype_search()) {
1090
// No witness found, but is_witness() doesn't detect method re-abstraction in case of spot-checking.
1091
if (witnessed_reabstraction_in_supers(new_type)) {
1092
return new_type;
1093
}
1094
}
1095
1096
return NULL;
1097
}
1098
1099
// Walk hierarchy under a context type, looking for unexpected types.
1100
// Do not report participant types, and recursively walk beneath
1101
// them only if participants_hide_witnesses is false.
1102
// If top_level_call is false, skip testing the context type,
1103
// because the caller has already considered it.
1104
Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1105
bool participants_hide_witnesses,
1106
bool top_level_call) {
1107
// Current thread must be in VM (not native mode, as in CI):
1108
assert(must_be_in_vm(), "raw oops here");
1109
// Must not move the class hierarchy during this check:
1110
assert_locked_or_safepoint(Compile_lock);
1111
1112
bool do_counts = count_find_witness_calls();
1113
1114
// Check the root of the sub-hierarchy first.
1115
if (top_level_call) {
1116
if (do_counts) {
1117
NOT_PRODUCT(deps_find_witness_calls++);
1118
NOT_PRODUCT(deps_find_witness_steps++);
1119
}
1120
if (is_participant(context_type)) {
1121
if (participants_hide_witnesses) return NULL;
1122
// else fall through to search loop...
1123
} else if (is_witness(context_type) && !ignore_witness(context_type)) {
1124
// The context is an abstract class or interface, to start with.
1125
return context_type;
1126
}
1127
}
1128
1129
// Now we must check each implementor and each subclass.
1130
// Use a short worklist to avoid blowing the stack.
1131
// Each worklist entry is a *chain* of subklass siblings to process.
1132
const int CHAINMAX = 100; // >= 1 + InstanceKlass::implementors_limit
1133
Klass* chains[CHAINMAX];
1134
int chaini = 0; // index into worklist
1135
Klass* chain; // scratch variable
1136
#define ADD_SUBCLASS_CHAIN(k) { \
1137
assert(chaini < CHAINMAX, "oob"); \
1138
chain = k->subklass(); \
1139
if (chain != NULL) chains[chaini++] = chain; }
1140
1141
// Look for non-abstract subclasses.
1142
// (Note: Interfaces do not have subclasses.)
1143
ADD_SUBCLASS_CHAIN(context_type);
1144
1145
// If it is an interface, search its direct implementors.
1146
// (Their subclasses are additional indirect implementors.
1147
// See InstanceKlass::add_implementor.)
1148
// (Note: nof_implementors is always zero for non-interfaces.)
1149
if (top_level_call) {
1150
int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1151
if (nof_impls > 1) {
1152
// Avoid this case: *I.m > { A.m, C }; B.m > C
1153
// Here, I.m has 2 concrete implementations, but m appears unique
1154
// as A.m, because the search misses B.m when checking C.
1155
// The inherited method B.m was getting missed by the walker
1156
// when interface 'I' was the starting point.
1157
// %%% Until this is fixed more systematically, bail out.
1158
// (Old CHA had the same limitation.)
1159
return context_type;
1160
}
1161
if (nof_impls > 0) {
1162
Klass* impl = InstanceKlass::cast(context_type)->implementor();
1163
assert(impl != NULL, "just checking");
1164
// If impl is the same as the context_type, then more than one
1165
// implementor has been seen. No exact info in this case.
1166
if (impl == context_type) {
1167
return context_type; // report an inexact witness to this sad affair
1168
}
1169
if (do_counts)
1170
{ NOT_PRODUCT(deps_find_witness_steps++); }
1171
if (is_participant(impl)) {
1172
if (!participants_hide_witnesses) {
1173
ADD_SUBCLASS_CHAIN(impl);
1174
}
1175
} else if (is_witness(impl) && !ignore_witness(impl)) {
1176
return impl;
1177
} else {
1178
ADD_SUBCLASS_CHAIN(impl);
1179
}
1180
}
1181
}
1182
1183
// Recursively process each non-trivial sibling chain.
1184
while (chaini > 0) {
1185
Klass* chain = chains[--chaini];
1186
for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1187
if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1188
if (is_participant(sub)) {
1189
if (participants_hide_witnesses) continue;
1190
// else fall through to process this guy's subclasses
1191
} else if (is_witness(sub) && !ignore_witness(sub)) {
1192
return sub;
1193
}
1194
if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1195
// Fast path. (Partially disabled if VerifyDependencies.)
1196
ADD_SUBCLASS_CHAIN(sub);
1197
} else {
1198
// Worklist overflow. Do a recursive call. Should be rare.
1199
// The recursive call will have its own worklist, of course.
1200
// (Note that sub has already been tested, so that there is
1201
// no need for the recursive call to re-test. That's handy,
1202
// since the recursive call sees sub as the context_type.)
1203
if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1204
Klass* witness = find_witness_anywhere(sub,
1205
participants_hide_witnesses,
1206
/*top_level_call=*/ false);
1207
if (witness != NULL) return witness;
1208
}
1209
}
1210
}
1211
1212
// No witness found. The dependency remains unbroken.
1213
return NULL;
1214
#undef ADD_SUBCLASS_CHAIN
1215
}
1216
1217
bool ClassHierarchyWalker::witnessed_reabstraction_in_supers(Klass* k) {
1218
if (!k->oop_is_instance()) {
1219
return false; // no methods to find in an array type
1220
} else {
1221
// Looking for a case when an abstract method is inherited into a concrete class.
1222
if (Dependencies::is_concrete_klass(k) && !k->is_interface()) {
1223
Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::skip_private);
1224
if (m != NULL) {
1225
return false; // no reabstraction possible: local method found
1226
}
1227
for (InstanceKlass* super = InstanceKlass::cast(k)->java_super(); super != NULL; super = super->java_super()) {
1228
m = super->find_instance_method(_name, _signature, Klass::skip_private);
1229
if (m != NULL) { // inherited method found
1230
if (m->is_abstract() || m->is_overpass()) {
1231
_found_methods[_num_participants] = m;
1232
return true; // abstract method found
1233
}
1234
return false;
1235
}
1236
}
1237
assert(false, "root method not found");
1238
return true;
1239
}
1240
return false;
1241
}
1242
}
1243
1244
bool Dependencies::is_concrete_klass(Klass* k) {
1245
if (k->is_abstract()) return false;
1246
// %%% We could treat classes which are concrete but
1247
// have not yet been instantiated as virtually abstract.
1248
// This would require a deoptimization barrier on first instantiation.
1249
//if (k->is_not_instantiated()) return false;
1250
return true;
1251
}
1252
1253
bool Dependencies::is_concrete_method(Method* m, Klass * k) {
1254
// NULL is not a concrete method,
1255
// statics are irrelevant to virtual call sites,
1256
// abstract methods are not concrete,
1257
// overpass (error) methods are not concrete if k is abstract
1258
//
1259
// note "true" is conservative answer --
1260
// overpass clause is false if k == NULL, implies return true if
1261
// answer depends on overpass clause.
1262
return ! ( m == NULL || m -> is_static() || m -> is_abstract() ||
1263
m->is_overpass() && k != NULL && k -> is_abstract() );
1264
}
1265
1266
1267
Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1268
if (k->is_interface()) return NULL;
1269
if (k->has_finalizer()) return k;
1270
k = k->subklass();
1271
while (k != NULL) {
1272
Klass* result = find_finalizable_subclass(k);
1273
if (result != NULL) return result;
1274
k = k->next_sibling();
1275
}
1276
return NULL;
1277
}
1278
1279
1280
bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1281
if (k->is_abstract()) return false;
1282
// We could also return false if k does not yet appear to be
1283
// instantiated, if the VM version supports this distinction also.
1284
//if (k->is_not_instantiated()) return false;
1285
return true;
1286
}
1287
1288
bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1289
return k->has_finalizable_subclass();
1290
}
1291
1292
1293
// Any use of the contents (bytecodes) of a method must be
1294
// marked by an "evol_method" dependency, if those contents
1295
// can change. (Note: A method is always dependent on itself.)
1296
Klass* Dependencies::check_evol_method(Method* m) {
1297
assert(must_be_in_vm(), "raw oops here");
1298
// Did somebody do a JVMTI RedefineClasses while our backs were turned?
1299
// Or is there now a breakpoint?
1300
// (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1301
if (m->is_old()
1302
|| m->number_of_breakpoints() > 0) {
1303
return m->method_holder();
1304
} else {
1305
return NULL;
1306
}
1307
}
1308
1309
// This is a strong assertion: It is that the given type
1310
// has no subtypes whatever. It is most useful for
1311
// optimizing checks on reflected types or on array types.
1312
// (Checks on types which are derived from real instances
1313
// can be optimized more strongly than this, because we
1314
// know that the checked type comes from a concrete type,
1315
// and therefore we can disregard abstract types.)
1316
Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1317
assert(must_be_in_vm(), "raw oops here");
1318
assert_locked_or_safepoint(Compile_lock);
1319
InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1320
Klass* sub = ctx->subklass();
1321
if (sub != NULL) {
1322
return sub;
1323
} else if (ctx->nof_implementors() != 0) {
1324
// if it is an interface, it must be unimplemented
1325
// (if it is not an interface, nof_implementors is always zero)
1326
Klass* impl = ctx->implementor();
1327
assert(impl != NULL, "must be set");
1328
return impl;
1329
} else {
1330
return NULL;
1331
}
1332
}
1333
1334
// Test the assertion that conck is the only concrete subtype* of ctxk.
1335
// The type conck itself is allowed to have further concrete subtypes.
1336
// This allows the compiler to narrow occurrences of ctxk by conck,
1337
// when dealing with the types of actual instances.
1338
Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1339
Klass* conck,
1340
KlassDepChange* changes) {
1341
ClassHierarchyWalker wf(conck);
1342
return wf.find_witness_subtype(ctxk, changes);
1343
}
1344
1345
// If a non-concrete class has no concrete subtypes, it is not (yet)
1346
// instantiatable. This can allow the compiler to make some paths go
1347
// dead, if they are gated by a test of the type.
1348
Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1349
KlassDepChange* changes) {
1350
// Find any concrete subtype, with no participants:
1351
ClassHierarchyWalker wf;
1352
return wf.find_witness_subtype(ctxk, changes);
1353
}
1354
1355
1356
// If a concrete class has no concrete subtypes, it can always be
1357
// exactly typed. This allows the use of a cheaper type test.
1358
Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1359
KlassDepChange* changes) {
1360
// Find any concrete subtype, with only the ctxk as participant:
1361
ClassHierarchyWalker wf(ctxk);
1362
return wf.find_witness_subtype(ctxk, changes);
1363
}
1364
1365
1366
// Find the unique concrete proper subtype of ctxk, or NULL if there
1367
// is more than one concrete proper subtype. If there are no concrete
1368
// proper subtypes, return ctxk itself, whether it is concrete or not.
1369
// The returned subtype is allowed to have further concrete subtypes.
1370
// That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1371
Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1372
ClassHierarchyWalker wf(ctxk); // Ignore ctxk when walking.
1373
wf.record_witnesses(1); // Record one other witness when walking.
1374
Klass* wit = wf.find_witness_subtype(ctxk);
1375
if (wit != NULL) return NULL; // Too many witnesses.
1376
Klass* conck = wf.participant(0);
1377
if (conck == NULL) {
1378
#ifndef PRODUCT
1379
// Make sure the dependency mechanism will pass this discovery:
1380
if (VerifyDependencies) {
1381
// Turn off dependency tracing while actually testing deps.
1382
FlagSetting fs(TraceDependencies, false);
1383
if (!Dependencies::is_concrete_klass(ctxk)) {
1384
guarantee(NULL ==
1385
(void *)check_abstract_with_no_concrete_subtype(ctxk),
1386
"verify dep.");
1387
} else {
1388
guarantee(NULL ==
1389
(void *)check_concrete_with_no_concrete_subtype(ctxk),
1390
"verify dep.");
1391
}
1392
}
1393
#endif //PRODUCT
1394
return ctxk; // Return ctxk as a flag for "no subtypes".
1395
} else {
1396
#ifndef PRODUCT
1397
// Make sure the dependency mechanism will pass this discovery:
1398
if (VerifyDependencies) {
1399
// Turn off dependency tracing while actually testing deps.
1400
FlagSetting fs(TraceDependencies, false);
1401
if (!Dependencies::is_concrete_klass(ctxk)) {
1402
guarantee(NULL == (void *)
1403
check_abstract_with_unique_concrete_subtype(ctxk, conck),
1404
"verify dep.");
1405
}
1406
}
1407
#endif //PRODUCT
1408
return conck;
1409
}
1410
}
1411
1412
// Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1413
// except possibly for further subtypes of k[12] themselves.
1414
// The context type must be abstract. The types k1 and k2 are themselves
1415
// allowed to have further concrete subtypes.
1416
Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1417
Klass* ctxk,
1418
Klass* k1,
1419
Klass* k2,
1420
KlassDepChange* changes) {
1421
ClassHierarchyWalker wf;
1422
wf.add_participant(k1);
1423
wf.add_participant(k2);
1424
return wf.find_witness_subtype(ctxk, changes);
1425
}
1426
1427
// Search ctxk for concrete implementations. If there are klen or fewer,
1428
// pack them into the given array and return the number.
1429
// Otherwise, return -1, meaning the given array would overflow.
1430
// (Note that a return of 0 means there are exactly no concrete subtypes.)
1431
// In this search, if ctxk is concrete, it will be reported alone.
1432
// For any type CC reported, no proper subtypes of CC will be reported.
1433
int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1434
int klen,
1435
Klass* karray[]) {
1436
ClassHierarchyWalker wf;
1437
wf.record_witnesses(klen);
1438
Klass* wit = wf.find_witness_subtype(ctxk);
1439
if (wit != NULL) return -1; // Too many witnesses.
1440
int num = wf.num_participants();
1441
assert(num <= klen, "oob");
1442
// Pack the result array with the good news.
1443
for (int i = 0; i < num; i++)
1444
karray[i] = wf.participant(i);
1445
#ifndef PRODUCT
1446
// Make sure the dependency mechanism will pass this discovery:
1447
if (VerifyDependencies) {
1448
// Turn off dependency tracing while actually testing deps.
1449
FlagSetting fs(TraceDependencies, false);
1450
switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1451
case -1: // ctxk was itself concrete
1452
guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1453
break;
1454
case 0:
1455
guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1456
"verify dep.");
1457
break;
1458
case 1:
1459
guarantee(NULL == (void *)
1460
check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1461
"verify dep.");
1462
break;
1463
case 2:
1464
guarantee(NULL == (void *)
1465
check_abstract_with_exclusive_concrete_subtypes(ctxk,
1466
karray[0],
1467
karray[1]),
1468
"verify dep.");
1469
break;
1470
default:
1471
ShouldNotReachHere(); // klen > 2 not yet supported
1472
}
1473
}
1474
#endif //PRODUCT
1475
return num;
1476
}
1477
1478
// If a class (or interface) has a unique concrete method uniqm, return NULL.
1479
// Otherwise, return a class that contains an interfering method.
1480
Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1481
KlassDepChange* changes) {
1482
// Here is a missing optimization: If uniqm->is_final(),
1483
// we don't really need to search beneath it for overrides.
1484
// This is probably not important, since we don't use dependencies
1485
// to track final methods. (They can't be "definalized".)
1486
ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1487
return wf.find_witness_definer(ctxk, changes);
1488
}
1489
1490
// Find the set of all non-abstract methods under ctxk that match m.
1491
// (The method m must be defined or inherited in ctxk.)
1492
// Include m itself in the set, unless it is abstract.
1493
// If this set has exactly one element, return that element.
1494
Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1495
ClassHierarchyWalker wf(m);
1496
assert(wf.check_method_context(ctxk, m), "proper context");
1497
wf.record_witnesses(1);
1498
Klass* wit = wf.find_witness_definer(ctxk);
1499
if (wit != NULL) return NULL; // Too many witnesses.
1500
Method* fm = wf.found_method(0); // Will be NULL if num_parts == 0.
1501
if (Dependencies::is_concrete_method(m, ctxk)) {
1502
if (fm == NULL) {
1503
// It turns out that m was always the only implementation.
1504
fm = m;
1505
} else if (fm != m) {
1506
// Two conflicting implementations after all.
1507
// (This can happen if m is inherited into ctxk and fm overrides it.)
1508
return NULL;
1509
}
1510
}
1511
#ifndef PRODUCT
1512
// Make sure the dependency mechanism will pass this discovery:
1513
if (VerifyDependencies && fm != NULL) {
1514
guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1515
"verify dep.");
1516
}
1517
#endif //PRODUCT
1518
return fm;
1519
}
1520
1521
Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1522
Method* m1,
1523
Method* m2,
1524
KlassDepChange* changes) {
1525
ClassHierarchyWalker wf(m1);
1526
wf.add_participant(m1->method_holder());
1527
wf.add_participant(m2->method_holder());
1528
return wf.find_witness_definer(ctxk, changes);
1529
}
1530
1531
Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1532
Klass* search_at = ctxk;
1533
if (changes != NULL)
1534
search_at = changes->new_type(); // just look at the new bit
1535
return find_finalizable_subclass(search_at);
1536
}
1537
1538
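// A call_site_target_value assertion claims the CallSite still targets the
// given MethodHandle; when an invokedynamic target is changed, nmethods
// compiled against the old target are invalidated.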
Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1539
assert(call_site ->is_a(SystemDictionary::CallSite_klass()), "sanity");
1540
assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity");
1541
if (changes == NULL) {
1542
// Validate all CallSites
1543
if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1544
return call_site->klass(); // assertion failed
1545
} else {
1546
// Validate the given CallSite
1547
if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1548
assert(method_handle != changes->method_handle(), "must be");
1549
return call_site->klass(); // assertion failed
1550
}
1551
}
1552
return NULL; // assertion still valid
1553
}
1554
1555
1556
void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1557
if (witness != NULL) {
1558
if (TraceDependencies) {
1559
print_dependency(witness, /*verbose=*/ true);
1560
}
1561
// The following is a no-op unless logging is enabled:
1562
log_dependency(witness);
1563
}
1564
}
1565
1566
1567
Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1568
assert_locked_or_safepoint(Compile_lock);
1569
Dependencies::check_valid_dependency_type(type());
1570
1571
Klass* witness = NULL;
1572
switch (type()) {
1573
case evol_method:
1574
witness = check_evol_method(method_argument(0));
1575
break;
1576
case leaf_type:
1577
witness = check_leaf_type(context_type());
1578
break;
1579
case abstract_with_unique_concrete_subtype:
1580
witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1581
break;
1582
case abstract_with_no_concrete_subtype:
1583
witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1584
break;
1585
case concrete_with_no_concrete_subtype:
1586
witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1587
break;
1588
case unique_concrete_method:
1589
witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1590
break;
1591
case abstract_with_exclusive_concrete_subtypes_2:
1592
witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1593
break;
1594
case exclusive_concrete_methods_2:
1595
witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1596
break;
1597
case no_finalizable_subclasses:
1598
witness = check_has_no_finalizable_subclasses(context_type(), changes);
1599
break;
1600
default:
1601
witness = NULL;
1602
break;
1603
}
1604
trace_and_log_witness(witness);
1605
return witness;
1606
}
1607
1608
1609
Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1610
assert_locked_or_safepoint(Compile_lock);
1611
Dependencies::check_valid_dependency_type(type());
1612
1613
Klass* witness = NULL;
1614
switch (type()) {
1615
case call_site_target_value:
1616
witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1617
break;
1618
default:
1619
witness = NULL;
1620
break;
1621
}
1622
trace_and_log_witness(witness);
1623
return witness;
1624
}
1625
1626
1627
Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1628
// Handle klass dependency
1629
if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1630
return check_klass_dependency(changes.as_klass_change());
1631
1632
// Handle CallSite dependency
1633
if (changes.is_call_site_change())
1634
return check_call_site_dependency(changes.as_call_site_change());
1635
1636
// irrelevant dependency; skip it
1637
return NULL;
1638
}
1639
1640
1641
void DepChange::print() {
1642
int nsup = 0, nint = 0;
1643
for (ContextStream str(*this); str.next(); ) {
1644
Klass* k = str.klass();
1645
switch (str.change_type()) {
1646
case Change_new_type:
1647
tty->print_cr(" dependee = %s", InstanceKlass::cast(k)->external_name());
1648
break;
1649
case Change_new_sub:
1650
if (!WizardMode) {
1651
++nsup;
1652
} else {
1653
tty->print_cr(" context super = %s", InstanceKlass::cast(k)->external_name());
1654
}
1655
break;
1656
case Change_new_impl:
1657
if (!WizardMode) {
1658
++nint;
1659
} else {
1660
tty->print_cr(" context interface = %s", InstanceKlass::cast(k)->external_name());
1661
}
1662
break;
1663
}
1664
}
1665
if (nsup + nint != 0) {
1666
tty->print_cr(" context supers = %d, interfaces = %d", nsup, nint);
1667
}
1668
}
1669
1670
void DepChange::ContextStream::start() {
1671
Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1672
_change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1673
_klass = new_type;
1674
_ti_base = NULL;
1675
_ti_index = 0;
1676
_ti_limit = 0;
1677
}
1678
1679
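// The iterator below is a small state machine with intentional fall-through:
// Start_Klass -> Change_new_type (the new class itself) -> Change_new_sub
// (each superclass in turn) -> Change_new_impl (each transitive interface)
// -> NO_CHANGE.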
bool DepChange::ContextStream::next() {
1680
switch (_change_type) {
1681
case Start_Klass: // initial state; _klass is the new type
1682
_ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1683
_ti_index = 0;
1684
_change_type = Change_new_type;
1685
return true;
1686
case Change_new_type:
1687
// fall through:
1688
_change_type = Change_new_sub;
1689
case Change_new_sub:
1690
// 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1691
{
1692
_klass = InstanceKlass::cast(_klass)->super();
1693
if (_klass != NULL) {
1694
return true;
1695
}
1696
}
1697
// else set up _ti_limit and fall through:
1698
_ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1699
_change_type = Change_new_impl;
1700
case Change_new_impl:
1701
if (_ti_index < _ti_limit) {
1702
_klass = _ti_base->at(_ti_index++);
1703
return true;
1704
}
1705
// fall through:
1706
_change_type = NO_CHANGE; // iterator is exhausted
1707
case NO_CHANGE:
1708
break;
1709
default:
1710
ShouldNotReachHere();
1711
}
1712
return false;
1713
}
1714
1715
void KlassDepChange::initialize() {
1716
// entire transaction must be under this lock:
1717
assert_lock_strong(Compile_lock);
1718
1719
// Mark all dependee and all its superclasses
1720
// Mark transitive interfaces
1721
for (ContextStream str(*this); str.next(); ) {
1722
Klass* d = str.klass();
1723
assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1724
InstanceKlass::cast(d)->set_is_marked_dependent(true);
1725
}
1726
}
1727
1728
KlassDepChange::~KlassDepChange() {
1729
// Unmark all dependee and all its superclasses
1730
// Unmark transitive interfaces
1731
for (ContextStream str(*this); str.next(); ) {
1732
Klass* d = str.klass();
1733
InstanceKlass::cast(d)->set_is_marked_dependent(false);
1734
}
1735
}
1736
1737
bool KlassDepChange::involves_context(Klass* k) {
1738
if (k == NULL || !k->oop_is_instance()) {
1739
return false;
1740
}
1741
InstanceKlass* ik = InstanceKlass::cast(k);
1742
bool is_contained = ik->is_marked_dependent();
1743
assert(is_contained == new_type()->is_subtype_of(k),
1744
"correct marking of potential context types");
1745
return is_contained;
1746
}
1747
1748
#ifndef PRODUCT
1749
void Dependencies::print_statistics() {
1750
if (deps_find_witness_print != 0) {
1751
// Call one final time, to flush out the data.
1752
deps_find_witness_print = -1;
1753
count_find_witness_calls();
1754
}
1755
}
1756
#endif
1757
1758