GitHub Repository: PojavLauncherTeam/mobile
Path: blob/master/src/hotspot/share/oops/instanceKlass.cpp
1
/*
2
* Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
3
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4
*
5
* This code is free software; you can redistribute it and/or modify it
6
* under the terms of the GNU General Public License version 2 only, as
7
* published by the Free Software Foundation.
8
*
9
* This code is distributed in the hope that it will be useful, but WITHOUT
10
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12
* version 2 for more details (a copy is included in the LICENSE file that
13
* accompanied this code).
14
*
15
* You should have received a copy of the GNU General Public License version
16
* 2 along with this work; if not, write to the Free Software Foundation,
17
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18
*
19
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20
* or visit www.oracle.com if you need additional information or have any
21
* questions.
22
*
23
*/
24
25
#include "precompiled.hpp"
26
#include "jvm.h"
27
#include "cds/archiveUtils.hpp"
28
#include "cds/classListWriter.hpp"
29
#include "cds/metaspaceShared.hpp"
30
#include "classfile/classFileParser.hpp"
31
#include "classfile/classFileStream.hpp"
32
#include "classfile/classLoader.hpp"
33
#include "classfile/classLoaderData.inline.hpp"
34
#include "classfile/javaClasses.hpp"
35
#include "classfile/moduleEntry.hpp"
36
#include "classfile/resolutionErrors.hpp"
37
#include "classfile/symbolTable.hpp"
38
#include "classfile/systemDictionary.hpp"
39
#include "classfile/systemDictionaryShared.hpp"
40
#include "classfile/verifier.hpp"
41
#include "classfile/vmClasses.hpp"
42
#include "classfile/vmSymbols.hpp"
43
#include "code/codeCache.hpp"
44
#include "code/dependencyContext.hpp"
45
#include "compiler/compilationPolicy.hpp"
46
#include "compiler/compileBroker.hpp"
47
#include "gc/shared/collectedHeap.inline.hpp"
48
#include "interpreter/oopMapCache.hpp"
49
#include "interpreter/rewriter.hpp"
50
#include "jvmtifiles/jvmti.h"
51
#include "logging/log.hpp"
52
#include "logging/logMessage.hpp"
53
#include "logging/logStream.hpp"
54
#include "memory/allocation.inline.hpp"
55
#include "memory/iterator.inline.hpp"
56
#include "memory/metadataFactory.hpp"
57
#include "memory/metaspaceClosure.hpp"
58
#include "memory/oopFactory.hpp"
59
#include "memory/resourceArea.hpp"
60
#include "memory/universe.hpp"
61
#include "oops/fieldStreams.inline.hpp"
62
#include "oops/constantPool.hpp"
63
#include "oops/instanceClassLoaderKlass.hpp"
64
#include "oops/instanceKlass.inline.hpp"
65
#include "oops/instanceMirrorKlass.hpp"
66
#include "oops/instanceOop.hpp"
67
#include "oops/klass.inline.hpp"
68
#include "oops/method.hpp"
69
#include "oops/oop.inline.hpp"
70
#include "oops/recordComponent.hpp"
71
#include "oops/symbol.hpp"
72
#include "prims/jvmtiExport.hpp"
73
#include "prims/jvmtiRedefineClasses.hpp"
74
#include "prims/jvmtiThreadState.hpp"
75
#include "prims/methodComparator.hpp"
76
#include "runtime/arguments.hpp"
77
#include "runtime/atomic.hpp"
78
#include "runtime/biasedLocking.hpp"
79
#include "runtime/fieldDescriptor.inline.hpp"
80
#include "runtime/handles.inline.hpp"
81
#include "runtime/javaCalls.hpp"
82
#include "runtime/mutexLocker.hpp"
83
#include "runtime/orderAccess.hpp"
84
#include "runtime/reflectionUtils.hpp"
85
#include "runtime/thread.inline.hpp"
86
#include "services/classLoadingService.hpp"
87
#include "services/threadService.hpp"
88
#include "utilities/dtrace.hpp"
89
#include "utilities/events.hpp"
90
#include "utilities/macros.hpp"
91
#include "utilities/stringUtils.hpp"
92
#ifdef COMPILER1
93
#include "c1/c1_Compiler.hpp"
94
#endif
95
#if INCLUDE_JFR
96
#include "jfr/jfrEvents.hpp"
97
#endif
98
99
100
#ifdef DTRACE_ENABLED
101
102
103
#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
104
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
105
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
106
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
107
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
108
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
109
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
110
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
111
#define DTRACE_CLASSINIT_PROBE(type, thread_type) \
112
{ \
113
char* data = NULL; \
114
int len = 0; \
115
Symbol* clss_name = name(); \
116
if (clss_name != NULL) { \
117
data = (char*)clss_name->bytes(); \
118
len = clss_name->utf8_length(); \
119
} \
120
HOTSPOT_CLASS_INITIALIZATION_##type( \
121
data, len, (void*)class_loader(), thread_type); \
122
}
123
124
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \
125
{ \
126
char* data = NULL; \
127
int len = 0; \
128
Symbol* clss_name = name(); \
129
if (clss_name != NULL) { \
130
data = (char*)clss_name->bytes(); \
131
len = clss_name->utf8_length(); \
132
} \
133
HOTSPOT_CLASS_INITIALIZATION_##type( \
134
data, len, (void*)class_loader(), thread_type, wait); \
135
}
136
137
#else // ndef DTRACE_ENABLED
138
139
#define DTRACE_CLASSINIT_PROBE(type, thread_type)
140
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
141
142
#endif // ndef DTRACE_ENABLED
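// For illustration: these probe macros are exercised later in this file, for
// example in InstanceKlass::initialize_impl():
//
//   DTRACE_CLASSINIT_PROBE(required, -1);
//   DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
//
// When DTRACE_ENABLED is not defined they expand to nothing, so the
// initialization path carries no tracing overhead.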
143
144
145
static inline bool is_class_loader(const Symbol* class_name,
146
const ClassFileParser& parser) {
147
assert(class_name != NULL, "invariant");
148
149
if (class_name == vmSymbols::java_lang_ClassLoader()) {
150
return true;
151
}
152
153
if (vmClasses::ClassLoader_klass_loaded()) {
154
const Klass* const super_klass = parser.super_klass();
155
if (super_klass != NULL) {
156
if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
157
return true;
158
}
159
}
160
}
161
return false;
162
}
163
164
// private: called to verify that k is a static member of this nest.
165
// We know that k is an instance class in the same package and hence the
166
// same classloader.
167
bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
168
assert(!is_hidden(), "unexpected hidden class");
169
if (_nest_members == NULL || _nest_members == Universe::the_empty_short_array()) {
170
if (log_is_enabled(Trace, class, nestmates)) {
171
ResourceMark rm(current);
172
log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
173
k->external_name(), this->external_name());
174
}
175
return false;
176
}
177
178
if (log_is_enabled(Trace, class, nestmates)) {
179
ResourceMark rm(current);
180
log_trace(class, nestmates)("Checking nest membership of %s in %s",
181
k->external_name(), this->external_name());
182
}
183
184
// Check for the named class in _nest_members.
185
// We don't resolve, or load, any classes.
186
for (int i = 0; i < _nest_members->length(); i++) {
187
int cp_index = _nest_members->at(i);
188
Symbol* name = _constants->klass_name_at(cp_index);
189
if (name == k->name()) {
190
log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
191
return true;
192
}
193
}
194
log_trace(class, nestmates)("- class is NOT a nest member!");
195
return false;
196
}
197
198
// Called to verify that k is a permitted subclass of this class
199
bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k) const {
200
Thread* current = Thread::current();
201
assert(k != NULL, "sanity check");
202
assert(_permitted_subclasses != NULL && _permitted_subclasses != Universe::the_empty_short_array(),
203
"unexpected empty _permitted_subclasses array");
204
205
if (log_is_enabled(Trace, class, sealed)) {
206
ResourceMark rm(current);
207
log_trace(class, sealed)("Checking for permitted subclass of %s in %s",
208
k->external_name(), this->external_name());
209
}
210
211
// Check that the class and its super are in the same module.
212
if (k->module() != this->module()) {
213
ResourceMark rm(current);
214
log_trace(class, sealed)("Check failed for same module of permitted subclass %s and sealed class %s",
215
k->external_name(), this->external_name());
216
return false;
217
}
218
219
if (!k->is_public() && !is_same_class_package(k)) {
220
ResourceMark rm(current);
221
log_trace(class, sealed)("Check failed, subclass %s not public and not in the same package as sealed class %s",
222
k->external_name(), this->external_name());
223
return false;
224
}
225
226
for (int i = 0; i < _permitted_subclasses->length(); i++) {
227
int cp_index = _permitted_subclasses->at(i);
228
Symbol* name = _constants->klass_name_at(cp_index);
229
if (name == k->name()) {
230
log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
231
return true;
232
}
233
}
234
log_trace(class, sealed)("- class is NOT a permitted subclass!");
235
return false;
236
}
237
238
// Return nest-host class, resolving, validating and saving it if needed.
239
// In cases where this is called from a thread that cannot do classloading
240
// (such as a native JIT thread) then we simply return NULL, which in turn
241
// causes the access check to return false. Such code will retry the access
242
// from a more suitable environment later. Otherwise the _nest_host is always
243
// set once this method returns.
244
// Any errors from nest-host resolution must be preserved so they can be queried
245
// from higher-level access checking code, and reported as part of access checking
246
// exceptions.
247
// VirtualMachineErrors are propagated with a NULL return.
248
// Under any conditions where the _nest_host can be set to non-NULL the resulting
249
// value of it and, if applicable, the nest host resolution/validation error,
250
// are idempotent.
251
InstanceKlass* InstanceKlass::nest_host(TRAPS) {
252
InstanceKlass* nest_host_k = _nest_host;
253
if (nest_host_k != NULL) {
254
return nest_host_k;
255
}
256
257
ResourceMark rm(THREAD);
258
259
// need to resolve and save our nest-host class.
260
if (_nest_host_index != 0) { // we have a real nest_host
261
// Before trying to resolve check if we're in a suitable context
262
bool can_resolve = THREAD->can_call_java();
263
if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
264
log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
265
this->external_name());
266
return NULL; // sentinel to say "try again from a different context"
267
}
268
269
log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
270
this->external_name(),
271
_constants->klass_name_at(_nest_host_index)->as_C_string());
272
273
Klass* k = _constants->klass_at(_nest_host_index, THREAD);
274
if (HAS_PENDING_EXCEPTION) {
275
if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
276
return NULL; // propagate VMEs
277
}
278
stringStream ss;
279
char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
280
ss.print("Nest host resolution of %s with host %s failed: ",
281
this->external_name(), target_host_class);
282
java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
283
const char* msg = ss.as_string(true /* on C-heap */);
284
constantPoolHandle cph(THREAD, constants());
285
SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
286
CLEAR_PENDING_EXCEPTION;
287
288
log_trace(class, nestmates)("%s", msg);
289
} else {
290
// A valid nest-host is an instance class in the current package that lists this
291
// class as a nest member. If any of these conditions are not met the class is
292
// its own nest-host.
293
const char* error = NULL;
294
295
// JVMS 5.4.4 indicates package check comes first
296
if (is_same_class_package(k)) {
297
// Now check actual membership. We can't be a member if our "host" is
298
// not an instance class.
299
if (k->is_instance_klass()) {
300
nest_host_k = InstanceKlass::cast(k);
301
bool is_member = nest_host_k->has_nest_member(THREAD, this);
302
if (is_member) {
303
_nest_host = nest_host_k; // save resolved nest-host value
304
305
log_trace(class, nestmates)("Resolved nest-host of %s to %s",
306
this->external_name(), k->external_name());
307
return nest_host_k;
308
} else {
309
error = "current type is not listed as a nest member";
310
}
311
} else {
312
error = "host is not an instance class";
313
}
314
} else {
315
error = "types are in different packages";
316
}
317
318
// something went wrong, so record what and log it
319
{
320
stringStream ss;
321
ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
322
this->external_name(),
323
this->class_loader_data()->loader_name_and_id(),
324
k->external_name(),
325
k->class_loader_data()->loader_name_and_id(),
326
error);
327
const char* msg = ss.as_string(true /* on C-heap */);
328
constantPoolHandle cph(THREAD, constants());
329
SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
330
log_trace(class, nestmates)("%s", msg);
331
}
332
}
333
} else {
334
log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
335
this->external_name());
336
}
337
338
// Either not in an explicit nest, or else an error occurred, so
339
// the nest-host is set to `this`. Any thread that sees this assignment
340
// will also see any setting of nest_host_error(), if applicable.
341
return (_nest_host = this);
342
}
343
344
// Dynamic nest member support: set this class's nest host to the given class.
345
// This occurs as part of the class definition, as soon as the instanceKlass
346
// has been created and doesn't require further resolution. The code:
347
// lookup().defineHiddenClass(bytes_for_X, NESTMATE);
348
// results in:
349
// class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
350
// If it has an explicit _nest_host_index or _nest_members, these will be ignored.
351
// We also know the "host" is a valid nest-host in the same package so we can
352
// assert some of those facts.
353
void InstanceKlass::set_nest_host(InstanceKlass* host) {
354
assert(is_hidden(), "must be a hidden class");
355
assert(host != NULL, "NULL nest host specified");
356
assert(_nest_host == NULL, "current class has resolved nest-host");
357
assert(nest_host_error() == NULL, "unexpected nest host resolution error exists: %s",
358
nest_host_error());
359
assert((host->_nest_host == NULL && host->_nest_host_index == 0) ||
360
(host->_nest_host == host), "proposed host is not a valid nest-host");
361
// Can't assert this as package is not set yet:
362
// assert(is_same_class_package(host), "proposed host is in wrong package");
363
364
if (log_is_enabled(Trace, class, nestmates)) {
365
ResourceMark rm;
366
const char* msg = "";
367
// a hidden class does not expect a statically defined nest-host
368
if (_nest_host_index > 0) {
369
msg = "(the NestHost attribute in the current class is ignored)";
370
} else if (_nest_members != NULL && _nest_members != Universe::the_empty_short_array()) {
371
msg = "(the NestMembers attribute in the current class is ignored)";
372
}
373
log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
374
this->external_name(),
375
host->external_name(),
376
msg);
377
}
378
// set dynamic nest host
379
_nest_host = host;
380
// Record dependency to keep nest host from being unloaded before this class.
381
ClassLoaderData* this_key = class_loader_data();
382
this_key->record_dependency(host);
383
}
384
385
// check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
386
// or we are k's nest_host - all of which is covered by comparing the two
387
// resolved_nest_hosts.
388
// Any exceptions (i.e. VMEs) are propagated.
389
bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {
390
391
assert(this != k, "this should be handled by higher-level code");
392
393
// Per JVMS 5.4.4 we first resolve and validate the current class, then
394
// the target class k.
395
396
InstanceKlass* cur_host = nest_host(CHECK_false);
397
if (cur_host == NULL) {
398
return false;
399
}
400
401
Klass* k_nest_host = k->nest_host(CHECK_false);
402
if (k_nest_host == NULL) {
403
return false;
404
}
405
406
bool access = (cur_host == k_nest_host);
407
408
ResourceMark rm(THREAD);
409
log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
410
this->external_name(),
411
access ? "" : "NOT ",
412
k->external_name());
413
return access;
414
}
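// For illustration, a sketch of the reasoning in the comment above, writing
// H(X) for X->nest_host(): the three cases reduce to a single comparison
// because a resolved nest host is always its own nest host.
//   - this and k are nestmates:  H(this) == H(k)
//   - k is our nest host:        H(this) == k and H(k) == k
//   - this is k's nest host:     H(k) == this and H(this) == this
// Each case therefore collapses to the cur_host == k_nest_host check above.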
415
416
const char* InstanceKlass::nest_host_error() {
417
if (_nest_host_index == 0) {
418
return NULL;
419
} else {
420
constantPoolHandle cph(Thread::current(), constants());
421
return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
422
}
423
}
424
425
InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
426
const int size = InstanceKlass::size(parser.vtable_size(),
427
parser.itable_size(),
428
nonstatic_oop_map_size(parser.total_oop_map_count()),
429
parser.is_interface());
430
431
const Symbol* const class_name = parser.class_name();
432
assert(class_name != NULL, "invariant");
433
ClassLoaderData* loader_data = parser.loader_data();
434
assert(loader_data != NULL, "invariant");
435
436
InstanceKlass* ik;
437
438
// Allocation
439
if (REF_NONE == parser.reference_type()) {
440
if (class_name == vmSymbols::java_lang_Class()) {
441
// mirror
442
ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
443
}
444
else if (is_class_loader(class_name, parser)) {
445
// class loader
446
ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
447
} else {
448
// normal
449
ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_kind_other);
450
}
451
} else {
452
// reference
453
ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
454
}
455
456
// Check for pending exception before adding to the loader data and incrementing
457
// class count. Can get OOM here.
458
if (HAS_PENDING_EXCEPTION) {
459
return NULL;
460
}
461
462
return ik;
463
}
464
465
466
// copy method ordering from resource area to Metaspace
467
void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
468
if (m != NULL) {
469
// allocate a new array and copy contents (memcpy?)
470
_method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
471
for (int i = 0; i < m->length(); i++) {
472
_method_ordering->at_put(i, m->at(i));
473
}
474
} else {
475
_method_ordering = Universe::the_empty_int_array();
476
}
477
}
478
479
// create a new array of vtable_indices for default methods
480
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
481
Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
482
assert(default_vtable_indices() == NULL, "only create once");
483
set_default_vtable_indices(vtable_indices);
484
return vtable_indices;
485
}
486
487
InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind, KlassID id) :
488
Klass(id),
489
_nest_members(NULL),
490
_nest_host(NULL),
491
_permitted_subclasses(NULL),
492
_record_components(NULL),
493
_static_field_size(parser.static_field_size()),
494
_nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
495
_itable_len(parser.itable_size()),
496
_nest_host_index(0),
497
_init_state(allocated),
498
_reference_type(parser.reference_type()),
499
_init_thread(NULL)
500
{
501
set_vtable_length(parser.vtable_size());
502
set_kind(kind);
503
set_access_flags(parser.access_flags());
504
if (parser.is_hidden()) set_is_hidden();
505
set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
506
false));
507
508
assert(NULL == _methods, "underlying memory not zeroed?");
509
assert(is_instance_klass(), "is layout incorrect?");
510
assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
511
512
// Set biased locking bit for all instances of this class; it will be
513
// cleared if revocation occurs too often for this type
514
if (UseBiasedLocking && BiasedLocking::enabled()) {
515
set_prototype_header(markWord::biased_locking_prototype());
516
}
517
}
518
519
void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
520
Array<Method*>* methods) {
521
if (methods != NULL && methods != Universe::the_empty_method_array() &&
522
!methods->is_shared()) {
523
for (int i = 0; i < methods->length(); i++) {
524
Method* method = methods->at(i);
525
if (method == NULL) continue; // maybe null if error processing
526
// Only want to delete methods that are not executing for RedefineClasses.
527
// The previous version will point to them so they're not totally dangling
528
assert (!method->on_stack(), "shouldn't be called with methods on stack");
529
MetadataFactory::free_metadata(loader_data, method);
530
}
531
MetadataFactory::free_array<Method*>(loader_data, methods);
532
}
533
}
534
535
void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
536
const Klass* super_klass,
537
Array<InstanceKlass*>* local_interfaces,
538
Array<InstanceKlass*>* transitive_interfaces) {
539
// Only deallocate transitive interfaces if not empty, same as super class
540
// or same as local interfaces. See code in parseClassFile.
541
Array<InstanceKlass*>* ti = transitive_interfaces;
542
if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
543
// check that the interfaces don't come from super class
544
Array<InstanceKlass*>* sti = (super_klass == NULL) ? NULL :
545
InstanceKlass::cast(super_klass)->transitive_interfaces();
546
if (ti != sti && ti != NULL && !ti->is_shared()) {
547
MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
548
}
549
}
550
551
// local interfaces can be empty
552
if (local_interfaces != Universe::the_empty_instance_klass_array() &&
553
local_interfaces != NULL && !local_interfaces->is_shared()) {
554
MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
555
}
556
}
557
558
void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
559
Array<RecordComponent*>* record_components) {
560
if (record_components != NULL && !record_components->is_shared()) {
561
for (int i = 0; i < record_components->length(); i++) {
562
RecordComponent* record_component = record_components->at(i);
563
MetadataFactory::free_metadata(loader_data, record_component);
564
}
565
MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
566
}
567
}
568
569
// This function deallocates the metadata and C heap pointers that the
570
// InstanceKlass points to.
571
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
572
573
// Orphan the mirror first, CMS thinks it's still live.
574
if (java_mirror() != NULL) {
575
java_lang_Class::set_klass(java_mirror(), NULL);
576
}
577
578
// Also remove mirror from handles
579
loader_data->remove_handle(_java_mirror);
580
581
// Need to take this class off the class loader data list.
582
loader_data->remove_class(this);
583
584
// The array_klass for this class is created later, after error handling.
585
// For class redefinition, we keep the original class so this scratch class
586
// doesn't have an array class. Either way, assert that there is nothing
587
// to deallocate.
588
assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");
589
590
// Release C heap allocated data that this points to, which includes
591
// reference counting symbol names.
592
release_C_heap_structures_internal();
593
594
deallocate_methods(loader_data, methods());
595
set_methods(NULL);
596
597
deallocate_record_components(loader_data, record_components());
598
set_record_components(NULL);
599
600
if (method_ordering() != NULL &&
601
method_ordering() != Universe::the_empty_int_array() &&
602
!method_ordering()->is_shared()) {
603
MetadataFactory::free_array<int>(loader_data, method_ordering());
604
}
605
set_method_ordering(NULL);
606
607
// default methods can be empty
608
if (default_methods() != NULL &&
609
default_methods() != Universe::the_empty_method_array() &&
610
!default_methods()->is_shared()) {
611
MetadataFactory::free_array<Method*>(loader_data, default_methods());
612
}
613
// Do NOT deallocate the default methods, they are owned by superinterfaces.
614
set_default_methods(NULL);
615
616
// default methods vtable indices can be empty
617
if (default_vtable_indices() != NULL &&
618
!default_vtable_indices()->is_shared()) {
619
MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
620
}
621
set_default_vtable_indices(NULL);
622
623
624
// This array is in Klass, but remove it with the InstanceKlass since
625
// this place would be the only caller and it can share memory with transitive
626
// interfaces.
627
if (secondary_supers() != NULL &&
628
secondary_supers() != Universe::the_empty_klass_array() &&
629
// see comments in compute_secondary_supers about the following cast
630
(address)(secondary_supers()) != (address)(transitive_interfaces()) &&
631
!secondary_supers()->is_shared()) {
632
MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
633
}
634
set_secondary_supers(NULL);
635
636
deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
637
set_transitive_interfaces(NULL);
638
set_local_interfaces(NULL);
639
640
if (fields() != NULL && !fields()->is_shared()) {
641
MetadataFactory::free_array<jushort>(loader_data, fields());
642
}
643
set_fields(NULL, 0);
644
645
// If a method from a redefined class is using this constant pool, don't
646
// delete it, yet. The new class's previous version will point to this.
647
if (constants() != NULL) {
648
assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
649
if (!constants()->is_shared()) {
650
MetadataFactory::free_metadata(loader_data, constants());
651
}
652
// Delete any cached resolution errors for the constant pool
653
SystemDictionary::delete_resolution_error(constants());
654
655
set_constants(NULL);
656
}
657
658
if (inner_classes() != NULL &&
659
inner_classes() != Universe::the_empty_short_array() &&
660
!inner_classes()->is_shared()) {
661
MetadataFactory::free_array<jushort>(loader_data, inner_classes());
662
}
663
set_inner_classes(NULL);
664
665
if (nest_members() != NULL &&
666
nest_members() != Universe::the_empty_short_array() &&
667
!nest_members()->is_shared()) {
668
MetadataFactory::free_array<jushort>(loader_data, nest_members());
669
}
670
set_nest_members(NULL);
671
672
if (permitted_subclasses() != NULL &&
673
permitted_subclasses() != Universe::the_empty_short_array() &&
674
!permitted_subclasses()->is_shared()) {
675
MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
676
}
677
set_permitted_subclasses(NULL);
678
679
// We should deallocate the Annotations instance if it's not in shared spaces.
680
if (annotations() != NULL && !annotations()->is_shared()) {
681
MetadataFactory::free_metadata(loader_data, annotations());
682
}
683
set_annotations(NULL);
684
685
if (Arguments::is_dumping_archive()) {
686
SystemDictionaryShared::remove_dumptime_info(this);
687
}
688
}
689
690
bool InstanceKlass::is_record() const {
691
return _record_components != NULL &&
692
is_final() &&
693
java_super() == vmClasses::Record_klass();
694
}
695
696
bool InstanceKlass::is_sealed() const {
697
return _permitted_subclasses != NULL &&
698
_permitted_subclasses != Universe::the_empty_short_array();
699
}
700
701
bool InstanceKlass::should_be_initialized() const {
702
return !is_initialized();
703
}
704
705
klassItable InstanceKlass::itable() const {
706
return klassItable(const_cast<InstanceKlass*>(this));
707
}
708
709
void InstanceKlass::eager_initialize(Thread *thread) {
710
if (!EagerInitialization) return;
711
712
if (this->is_not_initialized()) {
713
// abort if the class has a class initializer
714
if (this->class_initializer() != NULL) return;
715
716
// abort if it is java.lang.Object (initialization is handled in genesis)
717
Klass* super_klass = super();
718
if (super_klass == NULL) return;
719
720
// abort if the super class should be initialized
721
if (!InstanceKlass::cast(super_klass)->is_initialized()) return;
722
723
// call body to expose the this pointer
724
eager_initialize_impl();
725
}
726
}
727
728
// JVMTI spec thinks there are signers and protection domain in the
729
// instanceKlass. These accessors pretend these fields are there.
730
// The hprof specification also thinks these fields are in InstanceKlass.
731
oop InstanceKlass::protection_domain() const {
732
// return the protection_domain from the mirror
733
return java_lang_Class::protection_domain(java_mirror());
734
}
735
736
// To remove these from requires an incompatible change and CCC request.
737
objArrayOop InstanceKlass::signers() const {
738
// return the signers from the mirror
739
return java_lang_Class::signers(java_mirror());
740
}
741
742
oop InstanceKlass::init_lock() const {
743
// return the init lock from the mirror
744
oop lock = java_lang_Class::init_lock(java_mirror());
745
// Prevent reordering with any access of initialization state
746
OrderAccess::loadload();
747
assert(lock != NULL || !is_not_initialized(), // initialized or in_error state
748
"only fully initialized state can have a null lock");
749
return lock;
750
}
751
752
// Set the initialization lock to null so the object can be GC'ed. Any threads
753
// racing to get this lock will see a null lock and will not lock.
754
// That's okay because they all check for initialized state after getting
755
// the lock and return.
756
void InstanceKlass::fence_and_clear_init_lock() {
757
// make sure previous stores are all done, notably the init_state.
758
OrderAccess::storestore();
759
java_lang_Class::clear_init_lock(java_mirror());
760
assert(!is_not_initialized(), "class must be initialized now");
761
}
762
763
void InstanceKlass::eager_initialize_impl() {
764
EXCEPTION_MARK;
765
HandleMark hm(THREAD);
766
Handle h_init_lock(THREAD, init_lock());
767
ObjectLocker ol(h_init_lock, THREAD);
768
769
// abort if someone beat us to the initialization
770
if (!is_not_initialized()) return; // note: not equivalent to is_initialized()
771
772
ClassState old_state = init_state();
773
link_class_impl(THREAD);
774
if (HAS_PENDING_EXCEPTION) {
775
CLEAR_PENDING_EXCEPTION;
776
// Abort if linking the class throws an exception.
777
778
// Use a test to avoid redundantly resetting the state if there's
779
// no change. Set_init_state() asserts that state changes make
780
// progress, whereas here we might just be spinning in place.
781
if (old_state != _init_state)
782
set_init_state(old_state);
783
} else {
784
// linking successful, mark class as initialized
785
set_init_state(fully_initialized);
786
fence_and_clear_init_lock();
787
// trace
788
if (log_is_enabled(Info, class, init)) {
789
ResourceMark rm(THREAD);
790
log_info(class, init)("[Initialized %s without side effects]", external_name());
791
}
792
}
793
}
794
795
796
// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
797
// process. The step comments refer to the procedure described in that section.
798
// Note: implementation moved to static method to expose the this pointer.
799
void InstanceKlass::initialize(TRAPS) {
800
if (this->should_be_initialized()) {
801
initialize_impl(CHECK);
802
// Note: at this point the class may be initialized
803
// OR it may be in the state of being initialized
804
// in case of recursive initialization!
805
} else {
806
assert(is_initialized(), "sanity check");
807
}
808
}
809
810
811
bool InstanceKlass::verify_code(TRAPS) {
812
// 1) Verify the bytecodes
813
return Verifier::verify(this, should_verify_class(), THREAD);
814
}
815
816
void InstanceKlass::link_class(TRAPS) {
817
assert(is_loaded(), "must be loaded");
818
if (!is_linked()) {
819
link_class_impl(CHECK);
820
}
821
}
822
823
// Called to verify that a class can link during initialization, without
824
// throwing a VerifyError.
825
bool InstanceKlass::link_class_or_fail(TRAPS) {
826
assert(is_loaded(), "must be loaded");
827
if (!is_linked()) {
828
link_class_impl(CHECK_false);
829
}
830
return is_linked();
831
}
832
833
bool InstanceKlass::link_class_impl(TRAPS) {
834
if (DumpSharedSpaces && SystemDictionaryShared::has_class_failed_verification(this)) {
835
// This is for CDS dumping phase only -- we use the in_error_state to indicate that
836
// the class has failed verification. Throwing the NoClassDefFoundError here is just
837
// a convenient way to stop repeat attempts to verify the same (bad) class.
838
//
839
// Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
840
// if we are executing Java code. This is not a problem for CDS dumping phase since
841
// it doesn't execute any Java code.
842
ResourceMark rm(THREAD);
843
Exceptions::fthrow(THREAD_AND_LOCATION,
844
vmSymbols::java_lang_NoClassDefFoundError(),
845
"Class %s, or one of its supertypes, failed class initialization",
846
external_name());
847
return false;
848
}
849
// return if already verified
850
if (is_linked()) {
851
return true;
852
}
853
854
// Timing
855
// timer handles recursion
856
JavaThread* jt = THREAD;
857
858
// link super class before linking this class
859
Klass* super_klass = super();
860
if (super_klass != NULL) {
861
if (super_klass->is_interface()) { // check if super class is an interface
862
ResourceMark rm(THREAD);
863
Exceptions::fthrow(
864
THREAD_AND_LOCATION,
865
vmSymbols::java_lang_IncompatibleClassChangeError(),
866
"class %s has interface %s as super class",
867
external_name(),
868
super_klass->external_name()
869
);
870
return false;
871
}
872
873
InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
874
ik_super->link_class_impl(CHECK_false);
875
}
876
877
// link all interfaces implemented by this class before linking this class
878
Array<InstanceKlass*>* interfaces = local_interfaces();
879
int num_interfaces = interfaces->length();
880
for (int index = 0; index < num_interfaces; index++) {
881
InstanceKlass* interk = interfaces->at(index);
882
interk->link_class_impl(CHECK_false);
883
}
884
885
// in case the class is linked in the process of linking its superclasses
886
if (is_linked()) {
887
return true;
888
}
889
890
// trace only the link time for this klass that includes
891
// the verification time
892
PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
893
ClassLoader::perf_class_link_selftime(),
894
ClassLoader::perf_classes_linked(),
895
jt->get_thread_stat()->perf_recursion_counts_addr(),
896
jt->get_thread_stat()->perf_timers_addr(),
897
PerfClassTraceTime::CLASS_LINK);
898
899
// verification & rewriting
900
{
901
HandleMark hm(THREAD);
902
Handle h_init_lock(THREAD, init_lock());
903
ObjectLocker ol(h_init_lock, jt);
904
// rewritten will have been set if loader constraint error found
905
// on an earlier link attempt
906
// don't verify or rewrite if already rewritten
907
//
908
909
if (!is_linked()) {
910
if (!is_rewritten()) {
911
{
912
bool verify_ok = verify_code(THREAD);
913
if (!verify_ok) {
914
return false;
915
}
916
}
917
918
// Just in case a side-effect of verify linked this class already
919
// (which can sometimes happen since the verifier loads classes
920
// using custom class loaders, which are free to initialize things)
921
if (is_linked()) {
922
return true;
923
}
924
925
// also sets rewritten
926
rewrite_class(CHECK_false);
927
} else if (is_shared()) {
928
SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
929
}
930
931
// relocate jsrs and link methods after they are all rewritten
932
link_methods(CHECK_false);
933
934
// Initialize the vtable and interface table after
935
// methods have been rewritten since rewrite may
936
// fabricate new Method*s.
937
// also does loader constraint checking
938
//
939
// initialize_vtable and initialize_itable need to be rerun
940
// for a shared class if
941
// 1) the class is loaded by custom class loader or
942
// 2) the class is loaded by built-in class loader but failed to add archived loader constraints
943
bool need_init_table = true;
944
if (is_shared() && SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
945
need_init_table = false;
946
}
947
if (need_init_table) {
948
vtable().initialize_vtable_and_check_constraints(CHECK_false);
949
itable().initialize_itable_and_check_constraints(CHECK_false);
950
}
951
#ifdef ASSERT
952
vtable().verify(tty, true);
953
// In case itable verification is ever added.
954
// itable().verify(tty, true);
955
#endif
956
if (UseVtableBasedCHA) {
957
MutexLocker ml(THREAD, Compile_lock);
958
set_init_state(linked);
959
960
// Now flush all code that assumes the class is not linked.
961
if (Universe::is_fully_initialized()) {
962
CodeCache::flush_dependents_on(this);
963
}
964
} else {
965
set_init_state(linked);
966
}
967
if (JvmtiExport::should_post_class_prepare()) {
968
JvmtiExport::post_class_prepare(THREAD, this);
969
}
970
}
971
}
972
return true;
973
}
974
975
// Rewrite the byte codes of all of the methods of a class.
976
// The rewriter must be called exactly once. Rewriting must happen after
977
// verification but before the first method of the class is executed.
978
void InstanceKlass::rewrite_class(TRAPS) {
979
assert(is_loaded(), "must be loaded");
980
if (is_rewritten()) {
981
assert(is_shared(), "rewriting an unshared class?");
982
return;
983
}
984
Rewriter::rewrite(this, CHECK);
985
set_rewritten();
986
}
987
988
// Now relocate and link method entry points after class is rewritten.
989
// This is outside the is_rewritten flag. In case of an exception, it can be
990
// executed more than once.
991
void InstanceKlass::link_methods(TRAPS) {
992
int len = methods()->length();
993
for (int i = len-1; i >= 0; i--) {
994
methodHandle m(THREAD, methods()->at(i));
995
996
// Set up method entry points for compiler and interpreter.
997
m->link_method(m, CHECK);
998
}
999
}
1000
1001
// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1002
void InstanceKlass::initialize_super_interfaces(TRAPS) {
1003
assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1004
for (int i = 0; i < local_interfaces()->length(); ++i) {
1005
InstanceKlass* ik = local_interfaces()->at(i);
1006
1007
// Initialization is depth-first search, i.e. we start with the top of the inheritance tree
1008
// has_nonstatic_concrete_methods drives searching superinterfaces since it
1009
// means has_nonstatic_concrete_methods in its superinterface hierarchy
1010
if (ik->has_nonstatic_concrete_methods()) {
1011
ik->initialize_super_interfaces(CHECK);
1012
}
1013
1014
// Only initialize() interfaces that "declare" concrete methods.
1015
if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1016
ik->initialize(CHECK);
1017
}
1018
}
1019
}
1020
1021
void InstanceKlass::initialize_impl(TRAPS) {
1022
HandleMark hm(THREAD);
1023
1024
// Make sure klass is linked (verified) before initialization
1025
// A class could already be verified, since it has been reflected upon.
1026
link_class(CHECK);
1027
1028
DTRACE_CLASSINIT_PROBE(required, -1);
1029
1030
bool wait = false;
1031
1032
JavaThread* jt = THREAD;
1033
1034
// refer to the JVM book page 47 for description of steps
1035
// Step 1
1036
{
1037
Handle h_init_lock(THREAD, init_lock());
1038
ObjectLocker ol(h_init_lock, jt);
1039
1040
// Step 2
1041
// If we were to use wait() instead of waitInterruptibly() then
1042
// we might end up throwing IE from link/symbol resolution sites
1043
// that aren't expected to throw. This would wreak havoc. See 6320309.
1044
while (is_being_initialized() && !is_reentrant_initialization(jt)) {
1045
wait = true;
1046
jt->set_class_to_be_initialized(this);
1047
ol.wait_uninterruptibly(jt);
1048
jt->set_class_to_be_initialized(NULL);
1049
}
1050
1051
// Step 3
1052
if (is_being_initialized() && is_reentrant_initialization(jt)) {
1053
DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
1054
return;
1055
}
1056
1057
// Step 4
1058
if (is_initialized()) {
1059
DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
1060
return;
1061
}
1062
1063
// Step 5
1064
if (is_in_error_state()) {
1065
DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
1066
ResourceMark rm(THREAD);
1067
const char* desc = "Could not initialize class ";
1068
const char* className = external_name();
1069
size_t msglen = strlen(desc) + strlen(className) + 1;
1070
char* message = NEW_RESOURCE_ARRAY(char, msglen);
1071
if (NULL == message) {
1072
// Out of memory: can't create detailed error message
1073
THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
1074
} else {
1075
jio_snprintf(message, msglen, "%s%s", desc, className);
1076
THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
1077
}
1078
}
1079
1080
// Step 6
1081
set_init_state(being_initialized);
1082
set_init_thread(jt);
1083
}
1084
1085
// Step 7
1086
// Next, if C is a class rather than an interface, initialize its super class and super
1087
// interfaces.
1088
if (!is_interface()) {
1089
Klass* super_klass = super();
1090
if (super_klass != NULL && super_klass->should_be_initialized()) {
1091
super_klass->initialize(THREAD);
1092
}
1093
// If C implements any interface that declares a non-static, concrete method,
1094
// the initialization of C triggers initialization of its super interfaces.
1095
// Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
1096
// having a superinterface that declares, non-static, concrete methods
1097
if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
1098
initialize_super_interfaces(THREAD);
1099
}
1100
1101
// If any exceptions, complete abruptly, throwing the same exception as above.
1102
if (HAS_PENDING_EXCEPTION) {
1103
Handle e(THREAD, PENDING_EXCEPTION);
1104
CLEAR_PENDING_EXCEPTION;
1105
{
1106
EXCEPTION_MARK;
1107
// Locks object, set state, and notify all waiting threads
1108
set_initialization_state_and_notify(initialization_error, THREAD);
1109
CLEAR_PENDING_EXCEPTION;
1110
}
1111
DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1112
THROW_OOP(e());
1113
}
1114
}
1115
1116
1117
// Step 8
1118
{
1119
DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1120
if (class_initializer() != NULL) {
1121
// Timer includes any side effects of class initialization (resolution,
1122
// etc), but not recursive entry into call_class_initializer().
1123
PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1124
ClassLoader::perf_class_init_selftime(),
1125
ClassLoader::perf_classes_inited(),
1126
jt->get_thread_stat()->perf_recursion_counts_addr(),
1127
jt->get_thread_stat()->perf_timers_addr(),
1128
PerfClassTraceTime::CLASS_CLINIT);
1129
call_class_initializer(THREAD);
1130
} else {
1131
// The elapsed time is so small it's not worth counting.
1132
if (UsePerfData) {
1133
ClassLoader::perf_classes_inited()->inc();
1134
}
1135
call_class_initializer(THREAD);
1136
}
1137
}
1138
1139
// Step 9
1140
if (!HAS_PENDING_EXCEPTION) {
1141
set_initialization_state_and_notify(fully_initialized, CHECK);
1142
{
1143
debug_only(vtable().verify(tty, true);)
1144
}
1145
}
1146
else {
1147
// Step 10 and 11
1148
Handle e(THREAD, PENDING_EXCEPTION);
1149
CLEAR_PENDING_EXCEPTION;
1150
// JVMTI has already reported the pending exception
1151
// JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1152
JvmtiExport::clear_detected_exception(jt);
1153
{
1154
EXCEPTION_MARK;
1155
set_initialization_state_and_notify(initialization_error, THREAD);
1156
CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
1157
// JVMTI has already reported the pending exception
1158
// JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1159
JvmtiExport::clear_detected_exception(jt);
1160
}
1161
DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1162
if (e->is_a(vmClasses::Error_klass())) {
1163
THROW_OOP(e());
1164
} else {
1165
JavaCallArguments args(e);
1166
THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
1167
vmSymbols::throwable_void_signature(),
1168
&args);
1169
}
1170
}
1171
DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1172
}
1173
1174
1175
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
1176
Handle h_init_lock(THREAD, init_lock());
1177
if (h_init_lock() != NULL) {
1178
ObjectLocker ol(h_init_lock, THREAD);
1179
set_init_thread(NULL); // reset _init_thread before changing _init_state
1180
set_init_state(state);
1181
fence_and_clear_init_lock();
1182
ol.notify_all(CHECK);
1183
} else {
1184
assert(h_init_lock() != NULL, "The initialization state should never be set twice");
1185
set_init_thread(NULL); // reset _init_thread before changing _init_state
1186
set_init_state(state);
1187
}
1188
}
1189
1190
InstanceKlass* InstanceKlass::implementor() const {
1191
InstanceKlass* volatile* ik = adr_implementor();
1192
if (ik == NULL) {
1193
return NULL;
1194
} else {
1195
// This load races with inserts, and therefore needs acquire.
1196
InstanceKlass* ikls = Atomic::load_acquire(ik);
1197
if (ikls != NULL && !ikls->is_loader_alive()) {
1198
return NULL; // don't return unloaded class
1199
} else {
1200
return ikls;
1201
}
1202
}
1203
}
1204
1205
1206
void InstanceKlass::set_implementor(InstanceKlass* ik) {
1207
assert_locked_or_safepoint(Compile_lock);
1208
assert(is_interface(), "not interface");
1209
InstanceKlass* volatile* addr = adr_implementor();
1210
assert(addr != NULL, "null addr");
1211
if (addr != NULL) {
1212
Atomic::release_store(addr, ik);
1213
}
1214
}
1215
1216
int InstanceKlass::nof_implementors() const {
1217
InstanceKlass* ik = implementor();
1218
if (ik == NULL) {
1219
return 0;
1220
} else if (ik != this) {
1221
return 1;
1222
} else {
1223
return 2;
1224
}
1225
}
1226
1227
// The embedded _implementor field can only record one implementor.
1228
// When there is more than one implementor, the _implementor field
1229
// is set to the interface Klass* itself. Following are the possible
1230
// values for the _implementor field:
1231
// NULL - no implementor
1232
// implementor Klass* - one implementor
1233
// self - more than one implementor
1234
//
1235
// The _implementor field only exists for interfaces.
1236
void InstanceKlass::add_implementor(InstanceKlass* ik) {
1237
if (Universe::is_fully_initialized()) {
1238
assert_lock_strong(Compile_lock);
1239
}
1240
assert(is_interface(), "not interface");
1241
// Filter out my subinterfaces.
1242
// (Note: Interfaces are never on the subklass list.)
1243
if (ik->is_interface()) return;
1244
1245
// Filter out subclasses whose supers already implement me.
1246
// (Note: CHA must walk subclasses of direct implementors
1247
// in order to locate indirect implementors.)
1248
InstanceKlass* super_ik = ik->java_super();
1249
if (super_ik != NULL && super_ik->implements_interface(this))
1250
// We only need to check one immediate superclass, since the
1251
// implements_interface query looks at transitive_interfaces.
1252
// Any supers of the super have the same (or fewer) transitive_interfaces.
1253
return;
1254
1255
InstanceKlass* iklass = implementor();
1256
if (iklass == NULL) {
1257
set_implementor(ik);
1258
} else if (iklass != this && iklass != ik) {
1259
// There is already an implementor. Use itself as an indicator of
1260
// more than one implementor.
1261
set_implementor(this);
1262
}
1263
1264
// The implementor also implements the transitive_interfaces
1265
for (int index = 0; index < local_interfaces()->length(); index++) {
1266
local_interfaces()->at(index)->add_implementor(ik);
1267
}
1268
}
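// For illustration, a minimal sketch of how the encoding documented above can
// be decoded with the accessors in this file (compare nof_implementors()):
//
//   InstanceKlass* impl = iface->implementor();
//   if (impl == NULL) {
//     // no implementor recorded
//   } else if (impl == iface) {
//     // the field points back at the interface itself: two or more implementors
//   } else {
//     // exactly one implementor, namely impl
//   }
//
// Here 'iface' stands for any interface InstanceKlass; the name is used only
// for the example.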
1269
1270
void InstanceKlass::init_implementor() {
1271
if (is_interface()) {
1272
set_implementor(NULL);
1273
}
1274
}
1275
1276
1277
void InstanceKlass::process_interfaces() {
1278
// link this class into the implementors list of every interface it implements
1279
for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1280
assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1281
InstanceKlass* interf = local_interfaces()->at(i);
1282
assert(interf->is_interface(), "expected interface");
1283
interf->add_implementor(this);
1284
}
1285
}
1286
1287
bool InstanceKlass::can_be_primary_super_slow() const {
1288
if (is_interface())
1289
return false;
1290
else
1291
return Klass::can_be_primary_super_slow();
1292
}
1293
1294
GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1295
Array<InstanceKlass*>* transitive_interfaces) {
1296
// The secondaries are the implemented interfaces.
1297
Array<InstanceKlass*>* interfaces = transitive_interfaces;
1298
int num_secondaries = num_extra_slots + interfaces->length();
1299
if (num_secondaries == 0) {
1300
// Must share this for correct bootstrapping!
1301
set_secondary_supers(Universe::the_empty_klass_array());
1302
return NULL;
1303
} else if (num_extra_slots == 0) {
1304
// The secondary super list is exactly the same as the transitive interfaces, so
1305
// let's use it instead of making a copy.
1306
// Redefine classes has to be careful not to delete this!
1307
// We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1308
// (but it's safe to do here because we won't write into _secondary_supers from this point on).
1309
set_secondary_supers((Array<Klass*>*)(address)interfaces);
1310
return NULL;
1311
} else {
1312
// Copy transitive interfaces to a temporary growable array to be constructed
1313
// into the secondary super list with extra slots.
1314
GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1315
for (int i = 0; i < interfaces->length(); i++) {
1316
secondaries->push(interfaces->at(i));
1317
}
1318
return secondaries;
1319
}
1320
}
1321
1322
bool InstanceKlass::implements_interface(Klass* k) const {
1323
if (this == k) return true;
1324
assert(k->is_interface(), "should be an interface class");
1325
for (int i = 0; i < transitive_interfaces()->length(); i++) {
1326
if (transitive_interfaces()->at(i) == k) {
1327
return true;
1328
}
1329
}
1330
return false;
1331
}
1332
1333
bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1334
// Verify direct super interface
1335
if (this == k) return true;
1336
assert(k->is_interface(), "should be an interface class");
1337
for (int i = 0; i < local_interfaces()->length(); i++) {
1338
if (local_interfaces()->at(i) == k) {
1339
return true;
1340
}
1341
}
1342
return false;
1343
}
1344
1345
objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
1346
check_array_allocation_length(length, arrayOopDesc::max_array_length(T_OBJECT), CHECK_NULL);
1347
int size = objArrayOopDesc::object_size(length);
1348
Klass* ak = array_klass(n, CHECK_NULL);
1349
objArrayOop o = (objArrayOop)Universe::heap()->array_allocate(ak, size, length,
1350
/* do_zero */ true, CHECK_NULL);
1351
return o;
1352
}
1353
1354
instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
1355
if (TraceFinalizerRegistration) {
1356
tty->print("Registered ");
1357
i->print_value_on(tty);
1358
tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
1359
}
1360
instanceHandle h_i(THREAD, i);
1361
// Pass the handle as an argument; JavaCalls::call expects oops as jobjects
1362
JavaValue result(T_VOID);
1363
JavaCallArguments args(h_i);
1364
methodHandle mh (THREAD, Universe::finalizer_register_method());
1365
JavaCalls::call(&result, mh, &args, CHECK_NULL);
1366
return h_i();
1367
}
1368
1369
instanceOop InstanceKlass::allocate_instance(TRAPS) {
1370
bool has_finalizer_flag = has_finalizer(); // Query before possible GC
1371
int size = size_helper(); // Query before forming handle.
1372
1373
instanceOop i;
1374
1375
i = (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1376
if (has_finalizer_flag && !RegisterFinalizersAtInit) {
1377
i = register_finalizer(i, CHECK_NULL);
1378
}
1379
return i;
1380
}
1381
1382
instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1383
return instanceHandle(THREAD, allocate_instance(THREAD));
1384
}
1385
1386
void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1387
if (is_interface() || is_abstract()) {
1388
ResourceMark rm(THREAD);
1389
THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1390
: vmSymbols::java_lang_InstantiationException(), external_name());
1391
}
1392
if (this == vmClasses::Class_klass()) {
1393
ResourceMark rm(THREAD);
1394
THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1395
: vmSymbols::java_lang_IllegalAccessException(), external_name());
1396
}
1397
}
1398
1399
Klass* InstanceKlass::array_klass(int n, TRAPS) {
1400
// Need load-acquire for lock-free read
1401
if (array_klasses_acquire() == NULL) {
1402
ResourceMark rm(THREAD);
1403
JavaThread *jt = THREAD;
1404
{
1405
// Atomic creation of array_klasses
1406
MutexLocker ma(THREAD, MultiArray_lock);
1407
1408
// Check if update has already taken place
1409
if (array_klasses() == NULL) {
1410
ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
1411
// use 'release' to pair with lock-free load
1412
release_set_array_klasses(k);
1413
}
1414
}
1415
}
1416
// array_klasses() will always be set at this point
1417
ObjArrayKlass* oak = array_klasses();
1418
return oak->array_klass(n, THREAD);
1419
}
1420
1421
Klass* InstanceKlass::array_klass_or_null(int n) {
1422
// Need load-acquire for lock-free read
1423
ObjArrayKlass* oak = array_klasses_acquire();
1424
if (oak == NULL) {
1425
return NULL;
1426
} else {
1427
return oak->array_klass_or_null(n);
1428
}
1429
}
1430
1431
Klass* InstanceKlass::array_klass(TRAPS) {
1432
return array_klass(1, THREAD);
1433
}
1434
1435
Klass* InstanceKlass::array_klass_or_null() {
1436
return array_klass_or_null(1);
1437
}
1438
1439
static int call_class_initializer_counter = 0; // for debugging
1440
1441
Method* InstanceKlass::class_initializer() const {
1442
Method* clinit = find_method(
1443
vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1444
if (clinit != NULL && clinit->has_valid_initializer_flags()) {
1445
return clinit;
1446
}
1447
return NULL;
1448
}
1449
1450
void InstanceKlass::call_class_initializer(TRAPS) {
1451
if (ReplayCompiles &&
1452
(ReplaySuppressInitializers == 1 ||
1453
(ReplaySuppressInitializers >= 2 && class_loader() != NULL))) {
1454
// Hide the existence of the initializer for the purpose of replaying the compile
1455
return;
1456
}
1457
1458
methodHandle h_method(THREAD, class_initializer());
1459
assert(!is_initialized(), "we cannot initialize twice");
1460
LogTarget(Info, class, init) lt;
1461
if (lt.is_enabled()) {
1462
ResourceMark rm(THREAD);
1463
LogStream ls(lt);
1464
ls.print("%d Initializing ", call_class_initializer_counter++);
1465
name()->print_value_on(&ls);
1466
ls.print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this));
1467
}
1468
if (h_method() != NULL) {
1469
JavaCallArguments args; // No arguments
1470
JavaValue result(T_VOID);
1471
JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1472
}
1473
}
1474
1475
1476
void InstanceKlass::mask_for(const methodHandle& method, int bci,
1477
InterpreterOopMap* entry_for) {
1478
// Lazily create the _oop_map_cache at first request
1479
// Lock-free access requires load_acquire.
1480
OopMapCache* oop_map_cache = Atomic::load_acquire(&_oop_map_cache);
1481
if (oop_map_cache == NULL) {
1482
MutexLocker x(OopMapCacheAlloc_lock);
1483
// Check if _oop_map_cache was allocated while we were waiting for this lock
1484
if ((oop_map_cache = _oop_map_cache) == NULL) {
1485
oop_map_cache = new OopMapCache();
1486
// Ensure _oop_map_cache is stable, since it is examined without a lock
1487
Atomic::release_store(&_oop_map_cache, oop_map_cache);
1488
}
1489
}
1490
// _oop_map_cache is constant after init; lookup below does its own locking.
1491
oop_map_cache->lookup(method, bci, entry_for);
1492
}
1493
1494
bool InstanceKlass::contains_field_offset(int offset) {
1495
fieldDescriptor fd;
1496
return find_field_from_offset(offset, false, &fd);
1497
}
1498
1499
bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1500
for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1501
Symbol* f_name = fs.name();
1502
Symbol* f_sig = fs.signature();
1503
if (f_name == name && f_sig == sig) {
1504
fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1505
return true;
1506
}
1507
}
1508
return false;
1509
}
1510
1511
1512
Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1513
const int n = local_interfaces()->length();
1514
for (int i = 0; i < n; i++) {
1515
Klass* intf1 = local_interfaces()->at(i);
1516
assert(intf1->is_interface(), "just checking type");
1517
// search for field in current interface
1518
if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
1519
assert(fd->is_static(), "interface field must be static");
1520
return intf1;
1521
}
1522
// search for field in direct superinterfaces
1523
Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
1524
if (intf2 != NULL) return intf2;
1525
}
1526
// otherwise field lookup fails
1527
return NULL;
1528
}
1529
1530
1531
Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1532
// search order according to newest JVM spec (5.4.3.2, p.167).
1533
// 1) search for field in current klass
1534
if (find_local_field(name, sig, fd)) {
1535
return const_cast<InstanceKlass*>(this);
1536
}
1537
// 2) search for field recursively in direct superinterfaces
1538
{ Klass* intf = find_interface_field(name, sig, fd);
1539
if (intf != NULL) return intf;
1540
}
1541
// 3) apply field lookup recursively if superclass exists
1542
{ Klass* supr = super();
1543
if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
1544
}
1545
// 4) otherwise field lookup fails
1546
return NULL;
1547
}
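// Usage sketch (illustrative only; `ik`, `name_sym` and `sig_sym` are assumed
// to be a loaded InstanceKlass and two interned Symbols):
//
//   fieldDescriptor fd;
//   Klass* holder = ik->find_field(name_sym, sig_sym, &fd);
//   // holder is ik itself, a (super)interface, or a superclass, following the
//   // JVMS 5.4.3.2 order above; NULL means the field was not found.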
1548
1549
1550
Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1551
// search order according to newest JVM spec (5.4.3.2, p.167).
1552
// 1) search for field in current klass
1553
if (find_local_field(name, sig, fd)) {
1554
if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1555
}
1556
// 2) search for field recursively in direct superinterfaces
1557
if (is_static) {
1558
Klass* intf = find_interface_field(name, sig, fd);
1559
if (intf != NULL) return intf;
1560
}
1561
// 3) apply field lookup recursively if superclass exists
1562
{ Klass* supr = super();
1563
if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
1564
}
1565
// 4) otherwise field lookup fails
1566
return NULL;
1567
}
1568
1569
1570
bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1571
for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1572
if (fs.offset() == offset) {
1573
fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1574
if (fd->is_static() == is_static) return true;
1575
}
1576
}
1577
return false;
1578
}
1579
1580
1581
bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1582
Klass* klass = const_cast<InstanceKlass*>(this);
1583
while (klass != NULL) {
1584
if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
1585
return true;
1586
}
1587
klass = klass->super();
1588
}
1589
return false;
1590
}
1591
1592
1593
void InstanceKlass::methods_do(void f(Method* method)) {
1594
// Methods aren't stable until they are loaded. This can be read outside
1595
// a lock through the ClassLoaderData for profiling
1596
if (!is_loaded()) {
1597
return;
1598
}
1599
1600
int len = methods()->length();
1601
for (int index = 0; index < len; index++) {
1602
Method* m = methods()->at(index);
1603
assert(m->is_method(), "must be method");
1604
f(m);
1605
}
1606
}
1607
1608
1609
void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1610
for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1611
if (fs.access_flags().is_static()) {
1612
fieldDescriptor& fd = fs.field_descriptor();
1613
cl->do_field(&fd);
1614
}
1615
}
1616
}
1617
1618
1619
void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
1620
for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1621
if (fs.access_flags().is_static()) {
1622
fieldDescriptor& fd = fs.field_descriptor();
1623
f(&fd, mirror, CHECK);
1624
}
1625
}
1626
}
1627
1628
1629
static int compare_fields_by_offset(int* a, int* b) {
1630
return a[0] - b[0];
1631
}
1632
1633
void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1634
InstanceKlass* super = superklass();
1635
if (super != NULL) {
1636
super->do_nonstatic_fields(cl);
1637
}
1638
fieldDescriptor fd;
1639
int length = java_fields_count();
1640
// In DebugInfo nonstatic fields are sorted by offset.
1641
int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
1642
int j = 0;
1643
for (int i = 0; i < length; i += 1) {
1644
fd.reinitialize(this, i);
1645
if (!fd.is_static()) {
1646
fields_sorted[j + 0] = fd.offset();
1647
fields_sorted[j + 1] = i;
1648
j += 2;
1649
}
1650
}
1651
if (j > 0) {
1652
length = j;
1653
// _sort_Fn is defined in growableArray.hpp.
1654
qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
1655
for (int i = 0; i < length; i += 2) {
1656
fd.reinitialize(this, fields_sorted[i + 1]);
1657
assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
1658
cl->do_field(&fd);
1659
}
1660
}
1661
FREE_C_HEAP_ARRAY(int, fields_sorted);
1662
}
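// The scratch array above stores (offset, field_index) pairs back to back,
// e.g. for three nonstatic fields:
//
//   fields_sorted = { off0, idx0, off1, idx1, off2, idx2 }
//
// qsort() treats each pair as one element of size 2*sizeof(int), so the pairs
// are ordered by offset while each offset stays attached to the index used to
// reinitialize `fd` before invoking the closure.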
1663
1664
1665
void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
1666
if (array_klasses() != NULL)
1667
array_klasses()->array_klasses_do(f, THREAD);
1668
}
1669
1670
void InstanceKlass::array_klasses_do(void f(Klass* k)) {
1671
if (array_klasses() != NULL)
1672
array_klasses()->array_klasses_do(f);
1673
}
1674
1675
#ifdef ASSERT
1676
static int linear_search(const Array<Method*>* methods,
1677
const Symbol* name,
1678
const Symbol* signature) {
1679
const int len = methods->length();
1680
for (int index = 0; index < len; index++) {
1681
const Method* const m = methods->at(index);
1682
assert(m->is_method(), "must be method");
1683
if (m->signature() == signature && m->name() == name) {
1684
return index;
1685
}
1686
}
1687
return -1;
1688
}
1689
#endif
1690
1691
bool InstanceKlass::_disable_method_binary_search = false;
1692
1693
NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
1694
int len = methods->length();
1695
int l = 0;
1696
int h = len - 1;
1697
while (l <= h) {
1698
Method* m = methods->at(l);
1699
if (m->name() == name) {
1700
return l;
1701
}
1702
l++;
1703
}
1704
return -1;
1705
}
1706
1707
inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
1708
if (_disable_method_binary_search) {
1709
assert(DynamicDumpSharedSpaces, "must be");
1710
// At the final stage of dynamic dumping, the methods array may not be sorted
1711
// by ascending addresses of their names, so we can't use binary search anymore.
1712
// However, methods with the same name are still laid out consecutively inside the
1713
// methods array, so let's look for the first one that matches.
1714
return linear_search(methods, name);
1715
}
1716
1717
int len = methods->length();
1718
int l = 0;
1719
int h = len - 1;
1720
1721
// methods are sorted by ascending addresses of their names, so do binary search
1722
while (l <= h) {
1723
int mid = (l + h) >> 1;
1724
Method* m = methods->at(mid);
1725
assert(m->is_method(), "must be method");
1726
int res = m->name()->fast_compare(name);
1727
if (res == 0) {
1728
return mid;
1729
} else if (res < 0) {
1730
l = mid + 1;
1731
} else {
1732
h = mid - 1;
1733
}
1734
}
1735
return -1;
1736
}
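// Invariant sketch (illustrative addresses): the methods array is sorted by
// the address of each method's name Symbol, so equal names are contiguous:
//
//   [ "<init>"@0x10, "<init>"@0x10, "run"@0x28, "run"@0x28, "size"@0x40 ]
//
// quick_search() lands on *some* entry with the requested name; callers such
// as find_method_index() then scan neighboring entries for a matching signature.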
1737
1738
// find_method looks up the name/signature in the local methods array
1739
Method* InstanceKlass::find_method(const Symbol* name,
1740
const Symbol* signature) const {
1741
return find_method_impl(name, signature,
1742
OverpassLookupMode::find,
1743
StaticLookupMode::find,
1744
PrivateLookupMode::find);
1745
}
1746
1747
Method* InstanceKlass::find_method_impl(const Symbol* name,
1748
const Symbol* signature,
1749
OverpassLookupMode overpass_mode,
1750
StaticLookupMode static_mode,
1751
PrivateLookupMode private_mode) const {
1752
return InstanceKlass::find_method_impl(methods(),
1753
name,
1754
signature,
1755
overpass_mode,
1756
static_mode,
1757
private_mode);
1758
}
1759
1760
// find_instance_method looks up the name/signature in the local methods array
1761
// and skips over static methods
1762
Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
1763
const Symbol* name,
1764
const Symbol* signature,
1765
PrivateLookupMode private_mode) {
1766
Method* const meth = InstanceKlass::find_method_impl(methods,
1767
name,
1768
signature,
1769
OverpassLookupMode::find,
1770
StaticLookupMode::skip,
1771
private_mode);
1772
assert(((meth == NULL) || !meth->is_static()),
1773
"find_instance_method should have skipped statics");
1774
return meth;
1775
}
1776
1777
// find_instance_method looks up the name/signature in the local methods array
1778
// and skips over static methods
1779
Method* InstanceKlass::find_instance_method(const Symbol* name,
1780
const Symbol* signature,
1781
PrivateLookupMode private_mode) const {
1782
return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
1783
}
1784
1785
// Find looks up the name/signature in the local methods array
1786
// and filters on the overpass, static and private flags
1787
// This returns the first one found
1788
// note that the local methods array can have up to one overpass, one static
1789
// and one instance (private or not) with the same name/signature
1790
Method* InstanceKlass::find_local_method(const Symbol* name,
1791
const Symbol* signature,
1792
OverpassLookupMode overpass_mode,
1793
StaticLookupMode static_mode,
1794
PrivateLookupMode private_mode) const {
1795
return InstanceKlass::find_method_impl(methods(),
1796
name,
1797
signature,
1798
overpass_mode,
1799
static_mode,
1800
private_mode);
1801
}
1802
1803
// Find looks up the name/signature in the local methods array
1804
// and filters on the overpass, static and private flags
1805
// This returns the first one found
1806
// note that the local methods array can have up to one overpass, one static
1807
// and one instance (private or not) with the same name/signature
1808
Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
1809
const Symbol* name,
1810
const Symbol* signature,
1811
OverpassLookupMode overpass_mode,
1812
StaticLookupMode static_mode,
1813
PrivateLookupMode private_mode) {
1814
return InstanceKlass::find_method_impl(methods,
1815
name,
1816
signature,
1817
overpass_mode,
1818
static_mode,
1819
private_mode);
1820
}
1821
1822
Method* InstanceKlass::find_method(const Array<Method*>* methods,
1823
const Symbol* name,
1824
const Symbol* signature) {
1825
return InstanceKlass::find_method_impl(methods,
1826
name,
1827
signature,
1828
OverpassLookupMode::find,
1829
StaticLookupMode::find,
1830
PrivateLookupMode::find);
1831
}
1832
1833
Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
1834
const Symbol* name,
1835
const Symbol* signature,
1836
OverpassLookupMode overpass_mode,
1837
StaticLookupMode static_mode,
1838
PrivateLookupMode private_mode) {
1839
int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
1840
return hit >= 0 ? methods->at(hit): NULL;
1841
}
1842
1843
// true if method matches signature and conforms to skipping_X conditions.
1844
static bool method_matches(const Method* m,
1845
const Symbol* signature,
1846
bool skipping_overpass,
1847
bool skipping_static,
1848
bool skipping_private) {
1849
return ((m->signature() == signature) &&
1850
(!skipping_overpass || !m->is_overpass()) &&
1851
(!skipping_static || !m->is_static()) &&
1852
(!skipping_private || !m->is_private()));
1853
}
1854
1855
// Used directly for default_methods to find the index into the
1856
// default_vtable_indices, and indirectly by find_method
1857
// find_method_index looks in the local methods array to return the index
1858
// of the matching name/signature. If overpass methods are being ignored,
1859
// the search continues to find a potential non-overpass match. This capability
1860
// is important during method resolution to prefer a static method, for example,
1861
// over an overpass method.
1862
// There is the possibility in any _methods array to have the same name/signature
1863
// for a static method, an overpass method and a local instance method.
1864
// To correctly catch a given method, the search criteria may need
1865
// to explicitly skip the other two. For local instance methods, it
1866
// is often necessary to skip private methods
1867
int InstanceKlass::find_method_index(const Array<Method*>* methods,
1868
const Symbol* name,
1869
const Symbol* signature,
1870
OverpassLookupMode overpass_mode,
1871
StaticLookupMode static_mode,
1872
PrivateLookupMode private_mode) {
1873
const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
1874
const bool skipping_static = (static_mode == StaticLookupMode::skip);
1875
const bool skipping_private = (private_mode == PrivateLookupMode::skip);
1876
const int hit = quick_search(methods, name);
1877
if (hit != -1) {
1878
const Method* const m = methods->at(hit);
1879
1880
// Do linear search to find matching signature. First, quick check
1881
// for common case, ignoring overpasses if requested.
1882
if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1883
return hit;
1884
}
1885
1886
// search downwards through overloaded methods
1887
int i;
1888
for (i = hit - 1; i >= 0; --i) {
1889
const Method* const m = methods->at(i);
1890
assert(m->is_method(), "must be method");
1891
if (m->name() != name) {
1892
break;
1893
}
1894
if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1895
return i;
1896
}
1897
}
1898
// search upwards
1899
for (i = hit + 1; i < methods->length(); ++i) {
1900
const Method* const m = methods->at(i);
1901
assert(m->is_method(), "must be method");
1902
if (m->name() != name) {
1903
break;
1904
}
1905
if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1906
return i;
1907
}
1908
}
1909
// not found
1910
#ifdef ASSERT
1911
const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
1912
linear_search(methods, name, signature);
1913
assert(-1 == index, "binary search should have found entry %d", index);
1914
#endif
1915
}
1916
return -1;
1917
}
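// Usage sketch (illustrative; `ik`, `name_sym` and `sig_sym` assumed valid): a
// caller that wants only a genuine local instance method would skip both
// overpasses and statics:
//
//   Method* m = ik->find_local_method(name_sym, sig_sym,
//                                     Klass::OverpassLookupMode::skip,
//                                     Klass::StaticLookupMode::skip,
//                                     Klass::PrivateLookupMode::find);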
1918
1919
int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
1920
return find_method_by_name(methods(), name, end);
1921
}
1922
1923
int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
1924
const Symbol* name,
1925
int* end_ptr) {
1926
assert(end_ptr != NULL, "just checking");
1927
int start = quick_search(methods, name);
1928
int end = start + 1;
1929
if (start != -1) {
1930
while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
1931
while (end < methods->length() && (methods->at(end))->name() == name) ++end;
1932
*end_ptr = end;
1933
return start;
1934
}
1935
return -1;
1936
}
1937
1938
// uncached_lookup_method searches both the local class methods array and all
1939
// superclasses methods arrays, skipping any overpass methods in superclasses,
1940
// and possibly skipping private methods.
1941
Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
1942
const Symbol* signature,
1943
OverpassLookupMode overpass_mode,
1944
PrivateLookupMode private_mode) const {
1945
OverpassLookupMode overpass_local_mode = overpass_mode;
1946
const Klass* klass = this;
1947
while (klass != NULL) {
1948
Method* const method = InstanceKlass::cast(klass)->find_method_impl(name,
1949
signature,
1950
overpass_local_mode,
1951
StaticLookupMode::find,
1952
private_mode);
1953
if (method != NULL) {
1954
return method;
1955
}
1956
klass = klass->super();
1957
overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
1958
}
1959
return NULL;
1960
}
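// Traversal sketch (restating the loop above): for a hierarchy C -> B -> A,
// uncached_lookup_method() searches C's methods with the caller's overpass
// mode, then B's and A's with OverpassLookupMode::skip, so an overpass can
// only be returned from the class where the lookup started.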
1961
1962
#ifdef ASSERT
1963
// search through class hierarchy and return true if this class or
1964
// one of the superclasses was redefined
1965
bool InstanceKlass::has_redefined_this_or_super() const {
1966
const Klass* klass = this;
1967
while (klass != NULL) {
1968
if (InstanceKlass::cast(klass)->has_been_redefined()) {
1969
return true;
1970
}
1971
klass = klass->super();
1972
}
1973
return false;
1974
}
1975
#endif
1976
1977
// lookup a method in the default methods list then in all transitive interfaces
1978
// Do NOT return private or static methods
1979
Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
1980
Symbol* signature) const {
1981
Method* m = NULL;
1982
if (default_methods() != NULL) {
1983
m = find_method(default_methods(), name, signature);
1984
}
1985
// Look up interfaces
1986
if (m == NULL) {
1987
m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
1988
}
1989
return m;
1990
}
1991
1992
// lookup a method in all the interfaces that this class implements
1993
// Do NOT return private or static methods, new in JDK8 which are not externally visible
1994
// They should only be found in the initial InterfaceMethodRef
1995
Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
1996
Symbol* signature,
1997
DefaultsLookupMode defaults_mode) const {
1998
Array<InstanceKlass*>* all_ifs = transitive_interfaces();
1999
int num_ifs = all_ifs->length();
2000
InstanceKlass *ik = NULL;
2001
for (int i = 0; i < num_ifs; i++) {
2002
ik = all_ifs->at(i);
2003
Method* m = ik->lookup_method(name, signature);
2004
if (m != NULL && m->is_public() && !m->is_static() &&
2005
((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2006
return m;
2007
}
2008
}
2009
return NULL;
2010
}
2011
2012
/* jni_id_for_impl for jfieldIds only */
2013
JNIid* InstanceKlass::jni_id_for_impl(int offset) {
2014
MutexLocker ml(JfieldIdCreation_lock);
2015
// Retry lookup after we got the lock
2016
JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
2017
if (probe == NULL) {
2018
// Slow case, allocate new static field identifier
2019
probe = new JNIid(this, offset, jni_ids());
2020
set_jni_ids(probe);
2021
}
2022
return probe;
2023
}
2024
2025
2026
/* jni_id_for for jfieldIds only */
2027
JNIid* InstanceKlass::jni_id_for(int offset) {
2028
JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
2029
if (probe == NULL) {
2030
probe = jni_id_for_impl(offset);
2031
}
2032
return probe;
2033
}
2034
2035
u2 InstanceKlass::enclosing_method_data(int offset) const {
2036
const Array<jushort>* const inner_class_list = inner_classes();
2037
if (inner_class_list == NULL) {
2038
return 0;
2039
}
2040
const int length = inner_class_list->length();
2041
if (length % inner_class_next_offset == 0) {
2042
return 0;
2043
}
2044
const int index = length - enclosing_method_attribute_size;
2045
assert(offset < enclosing_method_attribute_size, "invalid offset");
2046
return inner_class_list->at(index + offset);
2047
}
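// Layout sketch: when an EnclosingMethod attribute is present, its two u2
// values are appended after the InnerClasses 4-tuples in _inner_classes:
//
//   [ inner, outer, name, flags, ... , enclosing_class, enclosing_method ]
//
// so a length that is an exact multiple of inner_class_next_offset means
// there is no enclosing-method data, which is what the check above tests.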
2048
2049
void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2050
u2 method_index) {
2051
Array<jushort>* inner_class_list = inner_classes();
2052
assert (inner_class_list != NULL, "_inner_classes list is not set up");
2053
int length = inner_class_list->length();
2054
if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2055
int index = length - enclosing_method_attribute_size;
2056
inner_class_list->at_put(
2057
index + enclosing_method_class_index_offset, class_index);
2058
inner_class_list->at_put(
2059
index + enclosing_method_method_index_offset, method_index);
2060
}
2061
}
2062
2063
// Lookup or create a jmethodID.
2064
// This code is called by the VMThread and JavaThreads so the
2065
// locking has to be done very carefully to avoid deadlocks
2066
// and/or other cache consistency problems.
2067
//
2068
jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) {
2069
size_t idnum = (size_t)method_h->method_idnum();
2070
jmethodID* jmeths = methods_jmethod_ids_acquire();
2071
size_t length = 0;
2072
jmethodID id = NULL;
2073
2074
// We use a double-check locking idiom here because this cache is
2075
// performance sensitive. In the normal system, this cache only
2076
// transitions from NULL to non-NULL which is safe because we use
2077
// release_set_methods_jmethod_ids() to advertise the new cache.
2078
// A partially constructed cache should never be seen by a racing
2079
// thread. We also use release_store() to save a new jmethodID
2080
// in the cache so a partially constructed jmethodID should never be
2081
// seen either. Cache reads of existing jmethodIDs proceed without a
2082
// lock, but cache writes of a new jmethodID require uniqueness and
2083
// creation of the cache itself requires no leaks so a lock is
2084
// generally acquired in those two cases.
2085
//
2086
// If the RedefineClasses() API has been used, then this cache can
2087
// grow and we'll have transitions from non-NULL to bigger non-NULL.
2088
// Cache creation requires no leaks and we require safety between all
2089
// cache accesses and freeing of the old cache so a lock is generally
2090
// acquired when the RedefineClasses() API has been used.
2091
2092
if (jmeths != NULL) {
2093
// the cache already exists
2094
if (!idnum_can_increment()) {
2095
// the cache can't grow so we can just get the current values
2096
get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2097
} else {
2098
// cache can grow so we have to be more careful
2099
if (Threads::number_of_threads() == 0 ||
2100
SafepointSynchronize::is_at_safepoint()) {
2101
// we're single threaded or at a safepoint - no locking needed
2102
get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2103
} else {
2104
MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2105
get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2106
}
2107
}
2108
}
2109
// implied else:
2110
// we need to allocate a cache so default length and id values are good
2111
2112
if (jmeths == NULL || // no cache yet
2113
length <= idnum || // cache is too short
2114
id == NULL) { // cache doesn't contain entry
2115
2116
// This function can be called by the VMThread so we have to do all
2117
// things that might block on a safepoint before grabbing the lock.
2118
// Otherwise, we can deadlock with the VMThread or have a cache
2119
// consistency issue. These vars keep track of what we might have
2120
// to free after the lock is dropped.
2121
jmethodID to_dealloc_id = NULL;
2122
jmethodID* to_dealloc_jmeths = NULL;
2123
2124
// may not allocate new_jmeths or use it if we allocate it
2125
jmethodID* new_jmeths = NULL;
2126
if (length <= idnum) {
2127
// allocate a new cache that might be used
2128
size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
2129
new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
2130
memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
2131
// cache size is stored in element[0], other elements offset by one
2132
new_jmeths[0] = (jmethodID)size;
2133
}
2134
2135
// allocate a new jmethodID that might be used
2136
jmethodID new_id = NULL;
2137
if (method_h->is_old() && !method_h->is_obsolete()) {
2138
// The method passed in is old (but not obsolete), we need to use the current version
2139
Method* current_method = method_with_idnum((int)idnum);
2140
assert(current_method != NULL, "old but not obsolete, so should exist");
2141
new_id = Method::make_jmethod_id(class_loader_data(), current_method);
2142
} else {
2143
// It is the current version of the method or an obsolete method,
2144
// use the version passed in
2145
new_id = Method::make_jmethod_id(class_loader_data(), method_h());
2146
}
2147
2148
if (Threads::number_of_threads() == 0 ||
2149
SafepointSynchronize::is_at_safepoint()) {
2150
// we're single threaded or at a safepoint - no locking needed
2151
id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2152
&to_dealloc_id, &to_dealloc_jmeths);
2153
} else {
2154
MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2155
id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2156
&to_dealloc_id, &to_dealloc_jmeths);
2157
}
2158
2159
// The lock has been dropped so we can free resources.
2160
// Free up either the old cache or the new cache if we allocated one.
2161
if (to_dealloc_jmeths != NULL) {
2162
FreeHeap(to_dealloc_jmeths);
2163
}
2164
// free up the new ID since it wasn't needed
2165
if (to_dealloc_id != NULL) {
2166
Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id);
2167
}
2168
}
2169
return id;
2170
}
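// Cache layout sketch (restating the code above): element 0 of the jmethodID
// cache stores its capacity and the id for method_idnum N lives at index N+1:
//
//   jmeths: [ (jmethodID)capacity | id_0 | id_1 | ... ]
//
// Readers go through methods_jmethod_ids_acquire(); a new id is published with
// Atomic::release_store(), so a racing reader sees either NULL or a fully
// constructed jmethodID, never a partial one.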
2171
2172
// Figure out how many jmethodIDs haven't been allocated, and make
2173
// sure space for them is pre-allocated. This makes getting all
2174
// method ids much, much faster with classes with more than 8
2175
// methods, and has a *substantial* effect on performance with jvmti
2176
// code that loads all jmethodIDs for all classes.
2177
void InstanceKlass::ensure_space_for_methodids(int start_offset) {
2178
int new_jmeths = 0;
2179
int length = methods()->length();
2180
for (int index = start_offset; index < length; index++) {
2181
Method* m = methods()->at(index);
2182
jmethodID id = m->find_jmethod_id_or_null();
2183
if (id == NULL) {
2184
new_jmeths++;
2185
}
2186
}
2187
if (new_jmeths != 0) {
2188
Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
2189
}
2190
}
2191
2192
// Common code to fetch the jmethodID from the cache or update the
2193
// cache with the new jmethodID. This function should never do anything
2194
// that causes the caller to go to a safepoint or we can deadlock with
2195
// the VMThread or have cache consistency issues.
2196
//
2197
jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
2198
size_t idnum, jmethodID new_id,
2199
jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
2200
jmethodID** to_dealloc_jmeths_p) {
2201
assert(new_id != NULL, "sanity check");
2202
assert(to_dealloc_id_p != NULL, "sanity check");
2203
assert(to_dealloc_jmeths_p != NULL, "sanity check");
2204
assert(Threads::number_of_threads() == 0 ||
2205
SafepointSynchronize::is_at_safepoint() ||
2206
JmethodIdCreation_lock->owned_by_self(), "sanity check");
2207
2208
// reacquire the cache - we are locked, single threaded or at a safepoint
2209
jmethodID* jmeths = methods_jmethod_ids_acquire();
2210
jmethodID id = NULL;
2211
size_t length = 0;
2212
2213
if (jmeths == NULL || // no cache yet
2214
(length = (size_t)jmeths[0]) <= idnum) { // cache is too short
2215
if (jmeths != NULL) {
2216
// copy any existing entries from the old cache
2217
for (size_t index = 0; index < length; index++) {
2218
new_jmeths[index+1] = jmeths[index+1];
2219
}
2220
*to_dealloc_jmeths_p = jmeths; // save old cache for later delete
2221
}
2222
release_set_methods_jmethod_ids(jmeths = new_jmeths);
2223
} else {
2224
// fetch jmethodID (if any) from the existing cache
2225
id = jmeths[idnum+1];
2226
*to_dealloc_jmeths_p = new_jmeths; // save new cache for later delete
2227
}
2228
if (id == NULL) {
2229
// No matching jmethodID in the existing cache or we have a new
2230
// cache or we just grew the cache. This cache write is done here
2231
// by the first thread to win the foot race because a jmethodID
2232
// needs to be unique once it is generally available.
2233
id = new_id;
2234
2235
// The jmethodID cache can be read while unlocked so we have to
2236
// make sure the new jmethodID is complete before installing it
2237
// in the cache.
2238
Atomic::release_store(&jmeths[idnum+1], id);
2239
} else {
2240
*to_dealloc_id_p = new_id; // save new id for later delete
2241
}
2242
return id;
2243
}
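// Contract note (restating the code above): this helper frees nothing itself.
// Whichever speculative allocation loses the race (new_id or new_jmeths) is
// handed back through *to_dealloc_id_p / *to_dealloc_jmeths_p and released by
// get_jmethod_id() only after JmethodIdCreation_lock has been dropped.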
2244
2245
2246
// Common code to get the jmethodID cache length and the jmethodID
2247
// value at index idnum if there is one.
2248
//
2249
void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
2250
size_t idnum, size_t *length_p, jmethodID* id_p) {
2251
assert(cache != NULL, "sanity check");
2252
assert(length_p != NULL, "sanity check");
2253
assert(id_p != NULL, "sanity check");
2254
2255
// cache size is stored in element[0], other elements offset by one
2256
*length_p = (size_t)cache[0];
2257
if (*length_p <= idnum) { // cache is too short
2258
*id_p = NULL;
2259
} else {
2260
*id_p = cache[idnum+1]; // fetch jmethodID (if any)
2261
}
2262
}
2263
2264
2265
// Lookup a jmethodID, NULL if not found. Do no blocking, no allocations, no handles
2266
jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2267
size_t idnum = (size_t)method->method_idnum();
2268
jmethodID* jmeths = methods_jmethod_ids_acquire();
2269
size_t length; // length assigned as debugging crumb
2270
jmethodID id = NULL;
2271
if (jmeths != NULL && // If there is a cache
2272
(length = (size_t)jmeths[0]) > idnum) { // and if it is long enough,
2273
id = jmeths[idnum+1]; // Look up the id (may be NULL)
2274
}
2275
return id;
2276
}
2277
2278
inline DependencyContext InstanceKlass::dependencies() {
2279
DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2280
return dep_context;
2281
}
2282
2283
int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) {
2284
return dependencies().mark_dependent_nmethods(changes);
2285
}
2286
2287
void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2288
dependencies().add_dependent_nmethod(nm);
2289
}
2290
2291
void InstanceKlass::remove_dependent_nmethod(nmethod* nm) {
2292
dependencies().remove_dependent_nmethod(nm);
2293
}
2294
2295
void InstanceKlass::clean_dependency_context() {
2296
dependencies().clean_unloading_dependents();
2297
}
2298
2299
#ifndef PRODUCT
2300
void InstanceKlass::print_dependent_nmethods(bool verbose) {
2301
dependencies().print_dependent_nmethods(verbose);
2302
}
2303
2304
bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2305
return dependencies().is_dependent_nmethod(nm);
2306
}
2307
#endif //PRODUCT
2308
2309
void InstanceKlass::clean_weak_instanceklass_links() {
2310
clean_implementors_list();
2311
clean_method_data();
2312
}
2313
2314
void InstanceKlass::clean_implementors_list() {
2315
assert(is_loader_alive(), "this klass should be live");
2316
if (is_interface()) {
2317
assert (ClassUnloading, "only called for ClassUnloading");
2318
for (;;) {
2319
// Use load_acquire due to competing with inserts
2320
InstanceKlass* impl = Atomic::load_acquire(adr_implementor());
2321
if (impl != NULL && !impl->is_loader_alive()) {
2322
// NULL this field, might be an unloaded instance klass or NULL
2323
InstanceKlass* volatile* iklass = adr_implementor();
2324
if (Atomic::cmpxchg(iklass, impl, (InstanceKlass*)NULL) == impl) {
2325
// Successfully unlinking implementor.
2326
if (log_is_enabled(Trace, class, unload)) {
2327
ResourceMark rm;
2328
log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2329
}
2330
return;
2331
}
2332
} else {
2333
return;
2334
}
2335
}
2336
}
2337
}
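// Note (restating the loop above): the implementor slot is cleared with a
// cmpxchg so that a concurrent insert of a live implementor is never
// overwritten; if the cmpxchg fails, the loop simply re-reads the slot and
// re-checks whether clearing is still needed.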
2338
2339
void InstanceKlass::clean_method_data() {
2340
for (int m = 0; m < methods()->length(); m++) {
2341
MethodData* mdo = methods()->at(m)->method_data();
2342
if (mdo != NULL) {
2343
MutexLocker ml(SafepointSynchronize::is_at_safepoint() ? NULL : mdo->extra_data_lock());
2344
mdo->clean_method_data(/*always_clean*/false);
2345
}
2346
}
2347
}
2348
2349
void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2350
Klass::metaspace_pointers_do(it);
2351
2352
if (log_is_enabled(Trace, cds)) {
2353
ResourceMark rm;
2354
log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name());
2355
}
2356
2357
it->push(&_annotations);
2358
it->push((Klass**)&_array_klasses);
2359
if (!is_rewritten()) {
2360
it->push(&_constants, MetaspaceClosure::_writable);
2361
} else {
2362
it->push(&_constants);
2363
}
2364
it->push(&_inner_classes);
2365
#if INCLUDE_JVMTI
2366
it->push(&_previous_versions);
2367
#endif
2368
it->push(&_methods);
2369
it->push(&_default_methods);
2370
it->push(&_local_interfaces);
2371
it->push(&_transitive_interfaces);
2372
it->push(&_method_ordering);
2373
if (!is_rewritten()) {
2374
it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2375
} else {
2376
it->push(&_default_vtable_indices);
2377
}
2378
it->push(&_fields);
2379
2380
if (itable_length() > 0) {
2381
itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2382
int method_table_offset_in_words = ioe->offset()/wordSize;
2383
int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
2384
/ itableOffsetEntry::size();
2385
2386
for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2387
if (ioe->interface_klass() != NULL) {
2388
it->push(ioe->interface_klass_addr());
2389
itableMethodEntry* ime = ioe->first_method_entry(this);
2390
int n = klassItable::method_count_for_interface(ioe->interface_klass());
2391
for (int index = 0; index < n; index ++) {
2392
it->push(ime[index].method_addr());
2393
}
2394
}
2395
}
2396
}
2397
2398
it->push(&_nest_members);
2399
it->push(&_permitted_subclasses);
2400
it->push(&_record_components);
2401
}
2402
2403
void InstanceKlass::remove_unshareable_info() {
2404
2405
if (can_be_verified_at_dumptime()) {
2406
// Set the old class bit.
2407
set_is_shared_old_klass();
2408
}
2409
2410
Klass::remove_unshareable_info();
2411
2412
if (SystemDictionaryShared::has_class_failed_verification(this)) {
2413
// Linking of classes is attempted during dumping and may fail,
2414
// but these classes are still in the dictionary and class list in CLD.
2415
// If the class has failed verification, there is nothing else to remove.
2416
return;
2417
}
2418
2419
// Reset to the 'allocated' state to prevent any premature access to
2420
// a shared class at runtime while the class is still being loaded and
2421
// restored. A class' init_state is set to 'loaded' at runtime when it's
2422
// being added to class hierarchy (see SystemDictionary::add_to_hierarchy()).
2423
_init_state = allocated;
2424
2425
{ // Otherwise this needs to take out the Compile_lock.
2426
assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2427
init_implementor();
2428
}
2429
2430
constants()->remove_unshareable_info();
2431
2432
for (int i = 0; i < methods()->length(); i++) {
2433
Method* m = methods()->at(i);
2434
m->remove_unshareable_info();
2435
}
2436
2437
// do array classes also.
2438
if (array_klasses() != NULL) {
2439
array_klasses()->remove_unshareable_info();
2440
}
2441
2442
// These are not allocated from metaspace. They are safe to set to NULL.
2443
_source_debug_extension = NULL;
2444
_dep_context = NULL;
2445
_osr_nmethods_head = NULL;
2446
#if INCLUDE_JVMTI
2447
_breakpoints = NULL;
2448
_previous_versions = NULL;
2449
_cached_class_file = NULL;
2450
_jvmti_cached_class_field_map = NULL;
2451
#endif
2452
2453
_init_thread = NULL;
2454
_methods_jmethod_ids = NULL;
2455
_jni_ids = NULL;
2456
_oop_map_cache = NULL;
2457
// clear _nest_host to ensure re-load at runtime
2458
_nest_host = NULL;
2459
init_shared_package_entry();
2460
_dep_context_last_cleaned = 0;
2461
}
2462
2463
void InstanceKlass::remove_java_mirror() {
2464
Klass::remove_java_mirror();
2465
2466
// do array classes also.
2467
if (array_klasses() != NULL) {
2468
array_klasses()->remove_java_mirror();
2469
}
2470
}
2471
2472
void InstanceKlass::init_shared_package_entry() {
2473
#if !INCLUDE_CDS_JAVA_HEAP
2474
_package_entry = NULL;
2475
#else
2476
if (!MetaspaceShared::use_full_module_graph()) {
2477
_package_entry = NULL;
2478
} else if (DynamicDumpSharedSpaces) {
2479
if (!MetaspaceShared::is_in_shared_metaspace(_package_entry)) {
2480
_package_entry = NULL;
2481
}
2482
} else {
2483
if (is_shared_unregistered_class()) {
2484
_package_entry = NULL;
2485
} else {
2486
_package_entry = PackageEntry::get_archived_entry(_package_entry);
2487
}
2488
}
2489
ArchivePtrMarker::mark_pointer((address**)&_package_entry);
2490
#endif
2491
}
2492
2493
void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
2494
PackageEntry* pkg_entry, TRAPS) {
2495
// SystemDictionary::add_to_hierarchy() sets the init_state to loaded
2496
// before the InstanceKlass is added to the SystemDictionary. Make
2497
// sure the current state is <loaded.
2498
assert(!is_loaded(), "invalid init state");
2499
set_package(loader_data, pkg_entry, CHECK);
2500
Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
2501
2502
Array<Method*>* methods = this->methods();
2503
int num_methods = methods->length();
2504
for (int index = 0; index < num_methods; ++index) {
2505
methods->at(index)->restore_unshareable_info(CHECK);
2506
}
2507
#if INCLUDE_JVMTI
2508
if (JvmtiExport::has_redefined_a_class()) {
2509
// Reinitialize vtable because RedefineClasses may have changed some
2510
// entries in this vtable for super classes so the CDS vtable might
2511
// point to old or obsolete entries. RedefineClasses doesn't fix up
2512
// vtables in the shared system dictionary, only the main one.
2513
// It also redefines the itable, so fix that up as well.
2514
// First fix any default methods that point to a super class that may
2515
// have been redefined.
2516
bool trace_name_printed = false;
2517
adjust_default_methods(&trace_name_printed);
2518
vtable().initialize_vtable();
2519
itable().initialize_itable();
2520
}
2521
#endif
2522
2523
// restore constant pool resolved references
2524
constants()->restore_unshareable_info(CHECK);
2525
2526
if (array_klasses() != NULL) {
2527
// Array classes have null protection domain.
2528
// --> see ArrayKlass::complete_create_array_klass()
2529
array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
2530
}
2531
2532
// Initialize current biased locking state.
2533
if (UseBiasedLocking && BiasedLocking::enabled()) {
2534
set_prototype_header(markWord::biased_locking_prototype());
2535
}
2536
2537
// Initialize @ValueBased class annotation
2538
if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation()) {
2539
set_is_value_based();
2540
set_prototype_header(markWord::prototype());
2541
}
2542
}
2543
2544
// Check if a class or any of its supertypes has a version older than 50.
2545
// CDS will not perform verification of old classes during dump time because
2546
// without changing the old verifier, the verification constraint cannot be
2547
// retrieved during dump time.
2548
// Verification of archived old classes will be performed during run time.
2549
bool InstanceKlass::can_be_verified_at_dumptime() const {
2550
if (major_version() < 50 /*JAVA_6_VERSION*/) {
2551
return true;
2552
}
2553
if (java_super() != NULL && java_super()->can_be_verified_at_dumptime()) {
2554
return true;
2555
}
2556
Array<InstanceKlass*>* interfaces = local_interfaces();
2557
int len = interfaces->length();
2558
for (int i = 0; i < len; i++) {
2559
if (interfaces->at(i)->can_be_verified_at_dumptime()) {
2560
return true;
2561
}
2562
}
2563
return false;
2564
}
2565
2566
void InstanceKlass::set_shared_class_loader_type(s2 loader_type) {
2567
switch (loader_type) {
2568
case ClassLoader::BOOT_LOADER:
2569
_misc_flags |= _misc_is_shared_boot_class;
2570
break;
2571
case ClassLoader::PLATFORM_LOADER:
2572
_misc_flags |= _misc_is_shared_platform_class;
2573
break;
2574
case ClassLoader::APP_LOADER:
2575
_misc_flags |= _misc_is_shared_app_class;
2576
break;
2577
default:
2578
ShouldNotReachHere();
2579
break;
2580
}
2581
}
2582
2583
void InstanceKlass::assign_class_loader_type() {
2584
ClassLoaderData *cld = class_loader_data();
2585
if (cld->is_boot_class_loader_data()) {
2586
set_shared_class_loader_type(ClassLoader::BOOT_LOADER);
2587
}
2588
else if (cld->is_platform_class_loader_data()) {
2589
set_shared_class_loader_type(ClassLoader::PLATFORM_LOADER);
2590
}
2591
else if (cld->is_system_class_loader_data()) {
2592
set_shared_class_loader_type(ClassLoader::APP_LOADER);
2593
}
2594
}
2595
2596
#if INCLUDE_JVMTI
2597
static void clear_all_breakpoints(Method* m) {
2598
m->clear_all_breakpoints();
2599
}
2600
#endif
2601
2602
void InstanceKlass::unload_class(InstanceKlass* ik) {
2603
// Release dependencies.
2604
ik->dependencies().remove_all_dependents();
2605
2606
// notify the debugger
2607
if (JvmtiExport::should_post_class_unload()) {
2608
JvmtiExport::post_class_unload(ik);
2609
}
2610
2611
// notify ClassLoadingService of class unload
2612
ClassLoadingService::notify_class_unloaded(ik);
2613
2614
if (Arguments::is_dumping_archive()) {
2615
SystemDictionaryShared::remove_dumptime_info(ik);
2616
}
2617
2618
if (log_is_enabled(Info, class, unload)) {
2619
ResourceMark rm;
2620
log_info(class, unload)("unloading class %s " INTPTR_FORMAT, ik->external_name(), p2i(ik));
2621
}
2622
2623
Events::log_class_unloading(Thread::current(), ik);
2624
2625
#if INCLUDE_JFR
2626
assert(ik != NULL, "invariant");
2627
EventClassUnload event;
2628
event.set_unloadedClass(ik);
2629
event.set_definingClassLoader(ik->class_loader_data());
2630
event.commit();
2631
#endif
2632
}
2633
2634
static void method_release_C_heap_structures(Method* m) {
2635
m->release_C_heap_structures();
2636
}
2637
2638
void InstanceKlass::release_C_heap_structures() {
2639
2640
// Clean up C heap
2641
release_C_heap_structures_internal();
2642
constants()->release_C_heap_structures();
2643
2644
// Deallocate and call destructors for MDO mutexes
2645
methods_do(method_release_C_heap_structures);
2646
}
2647
2648
void InstanceKlass::release_C_heap_structures_internal() {
2649
Klass::release_C_heap_structures();
2650
2651
// Can't release the constant pool here because the constant pool can be
2652
// deallocated separately from the InstanceKlass for default methods and
2653
// redefine classes.
2654
2655
// Deallocate oop map cache
2656
if (_oop_map_cache != NULL) {
2657
delete _oop_map_cache;
2658
_oop_map_cache = NULL;
2659
}
2660
2661
// Deallocate JNI identifiers for jfieldIDs
2662
JNIid::deallocate(jni_ids());
2663
set_jni_ids(NULL);
2664
2665
jmethodID* jmeths = methods_jmethod_ids_acquire();
2666
if (jmeths != (jmethodID*)NULL) {
2667
release_set_methods_jmethod_ids(NULL);
2668
FreeHeap(jmeths);
2669
}
2670
2671
assert(_dep_context == NULL,
2672
"dependencies should already be cleaned");
2673
2674
#if INCLUDE_JVMTI
2675
// Deallocate breakpoint records
2676
if (breakpoints() != 0x0) {
2677
methods_do(clear_all_breakpoints);
2678
assert(breakpoints() == 0x0, "should have cleared breakpoints");
2679
}
2680
2681
// deallocate the cached class file
2682
if (_cached_class_file != NULL) {
2683
os::free(_cached_class_file);
2684
_cached_class_file = NULL;
2685
}
2686
#endif
2687
2688
FREE_C_HEAP_ARRAY(char, _source_debug_extension);
2689
}
2690
2691
void InstanceKlass::set_source_debug_extension(const char* array, int length) {
2692
if (array == NULL) {
2693
_source_debug_extension = NULL;
2694
} else {
2695
// Adding one to the attribute length in order to store a null terminator
2696
// character could cause an overflow because the attribute length is
2697
// already coded with an u4 in the classfile, but in practice, it's
2698
// unlikely to happen.
2699
assert((length+1) > length, "Overflow checking");
2700
char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
2701
for (int i = 0; i < length; i++) {
2702
sde[i] = array[i];
2703
}
2704
sde[length] = '\0';
2705
_source_debug_extension = sde;
2706
}
2707
}
2708
2709
const char* InstanceKlass::signature_name() const {
2710
int hash_len = 0;
2711
char hash_buf[40];
2712
2713
// Get the internal name as a c string
2714
const char* src = (const char*) (name()->as_C_string());
2715
const int src_length = (int)strlen(src);
2716
2717
char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);
2718
2719
// Add L as type indicator
2720
int dest_index = 0;
2721
dest[dest_index++] = JVM_SIGNATURE_CLASS;
2722
2723
// Add the actual class name
2724
for (int src_index = 0; src_index < src_length; ) {
2725
dest[dest_index++] = src[src_index++];
2726
}
2727
2728
if (is_hidden()) { // Replace the last '+' with a '.'.
2729
for (int index = (int)src_length; index > 0; index--) {
2730
if (dest[index] == '+') {
2731
dest[index] = JVM_SIGNATURE_DOT;
2732
break;
2733
}
2734
}
2735
}
2736
2737
// If we have a hash, append it
2738
for (int hash_index = 0; hash_index < hash_len; ) {
2739
dest[dest_index++] = hash_buf[hash_index++];
2740
}
2741
2742
// Add the semicolon and the NULL
2743
dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
2744
dest[dest_index] = '\0';
2745
return dest;
2746
}
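// Example (illustrative names): a class named "java/lang/String" yields
// "Ljava/lang/String;". For a hidden class whose internal name is, say,
// "com/foo/Bar+0x1234", the last '+' is rewritten so the result is
// "Lcom/foo/Bar.0x1234;".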
2747
2748
ModuleEntry* InstanceKlass::module() const {
2749
if (is_hidden() &&
2750
in_unnamed_package() &&
2751
class_loader_data()->has_class_mirror_holder()) {
2752
// For a non-strong hidden class defined to an unnamed package,
2753
// its (class held) CLD will not have an unnamed module created for it.
2754
// Two choices to find the correct ModuleEntry:
2755
// 1. If hidden class is within a nest, use nest host's module
2756
// 2. Find the unnamed module off from the class loader
2757
// For now option #2 is used since a nest host is not set until
2758
// after the instance class is created in jvm_lookup_define_class().
2759
if (class_loader_data()->is_boot_class_loader_data()) {
2760
return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
2761
} else {
2762
oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
2763
assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
2764
return java_lang_Module::module_entry(module);
2765
}
2766
}
2767
2768
// Class is in a named package
2769
if (!in_unnamed_package()) {
2770
return _package_entry->module();
2771
}
2772
2773
// Class is in an unnamed package, return its loader's unnamed module
2774
return class_loader_data()->unnamed_module();
2775
}
2776
2777
void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {
2778
2779
// ensure java/ packages only loaded by boot or platform builtin loaders
2780
// not needed for shared class since CDS does not archive prohibited classes.
2781
if (!is_shared()) {
2782
check_prohibited_package(name(), loader_data, CHECK);
2783
}
2784
2785
if (is_shared() && _package_entry != NULL) {
2786
if (MetaspaceShared::use_full_module_graph() && _package_entry == pkg_entry) {
2787
// we can use the saved package
2788
assert(MetaspaceShared::is_in_shared_metaspace(_package_entry), "must be");
2789
return;
2790
} else {
2791
_package_entry = NULL;
2792
}
2793
}
2794
2795
// ClassLoader::package_from_class_name has already incremented the refcount of the symbol
2796
// it returns, so we need to decrement it when the current function exits.
2797
TempNewSymbol from_class_name =
2798
(pkg_entry != NULL) ? NULL : ClassLoader::package_from_class_name(name());
2799
2800
Symbol* pkg_name;
2801
if (pkg_entry != NULL) {
2802
pkg_name = pkg_entry->name();
2803
} else {
2804
pkg_name = from_class_name;
2805
}
2806
2807
if (pkg_name != NULL && loader_data != NULL) {
2808
2809
// Find in class loader's package entry table.
2810
_package_entry = pkg_entry != NULL ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
2811
2812
// If the package name is not found in the loader's package
2813
// entry table, it is an indication that the package has not
2814
// been defined. Consider it defined within the unnamed module.
2815
if (_package_entry == NULL) {
2816
2817
if (!ModuleEntryTable::javabase_defined()) {
2818
// Before java.base is defined during bootstrapping, define all packages in
2819
// the java.base module. If a non-java.base package is erroneously placed
2820
// in the java.base module it will be caught later when java.base
2821
// is defined by ModuleEntryTable::verify_javabase_packages check.
2822
assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL");
2823
_package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry());
2824
} else {
2825
assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL");
2826
_package_entry = loader_data->packages()->lookup(pkg_name,
2827
loader_data->unnamed_module());
2828
}
2829
2830
// A package should have been successfully created
2831
DEBUG_ONLY(ResourceMark rm(THREAD));
2832
assert(_package_entry != NULL, "Package entry for class %s not found, loader %s",
2833
name()->as_C_string(), loader_data->loader_name_and_id());
2834
}
2835
2836
if (log_is_enabled(Debug, module)) {
2837
ResourceMark rm(THREAD);
2838
ModuleEntry* m = _package_entry->module();
2839
log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
2840
external_name(),
2841
pkg_name->as_C_string(),
2842
loader_data->loader_name_and_id(),
2843
(m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
2844
}
2845
} else {
2846
ResourceMark rm(THREAD);
2847
log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
2848
external_name(),
2849
(loader_data != NULL) ? loader_data->loader_name_and_id() : "NULL",
2850
UNNAMED_MODULE);
2851
}
2852
}
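// Sketch (illustrative): for a class "com/foo/Bar" with no PackageEntry passed
// in, pkg_name becomes "com/foo"; it is looked up in the loader's package
// entry table and, if absent, created there in the loader's unnamed module
// (or in java.base while bootstrapping, before java.base is defined).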
2853
2854
// Function set_classpath_index ensures that for a non-null _package_entry
2855
// of the InstanceKlass, the entry is in the boot loader's package entry table.
2856
// It then sets the classpath_index in the package entry record.
2857
//
2858
// The classpath_index field is used to find the entry on the boot loader class
2859
// path for packages with classes loaded by the boot loader from -Xbootclasspath/a
2860
// in an unnamed module. It is also used to indicate (for all packages whose
2861
// classes are loaded by the boot loader) that at least one of the package's
2862
// classes has been loaded.
2863
void InstanceKlass::set_classpath_index(s2 path_index) {
2864
if (_package_entry != NULL) {
2865
DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
2866
assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
2867
assert(path_index != -1, "Unexpected classpath_index");
2868
_package_entry->set_classpath_index(path_index);
2869
}
2870
}
2871
2872
// different versions of is_same_class_package
2873
2874
bool InstanceKlass::is_same_class_package(const Klass* class2) const {
2875
oop classloader1 = this->class_loader();
2876
PackageEntry* classpkg1 = this->package();
2877
if (class2->is_objArray_klass()) {
2878
class2 = ObjArrayKlass::cast(class2)->bottom_klass();
2879
}
2880
2881
oop classloader2;
2882
PackageEntry* classpkg2;
2883
if (class2->is_instance_klass()) {
2884
classloader2 = class2->class_loader();
2885
classpkg2 = class2->package();
2886
} else {
2887
assert(class2->is_typeArray_klass(), "should be type array");
2888
classloader2 = NULL;
2889
classpkg2 = NULL;
2890
}
2891
2892
// Same package is determined by comparing class loader
2893
// and package entries. Both must be the same. This rule
2894
// applies even to classes that are defined in the unnamed
2895
// package, they still must have the same class loader.
2896
if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
2897
return true;
2898
}
2899
2900
return false;
2901
}
2902
2903
// return true if this class and other_class are in the same package. Classloader
2904
// and classname information is enough to determine a class's package
2905
bool InstanceKlass::is_same_class_package(oop other_class_loader,
2906
const Symbol* other_class_name) const {
2907
if (class_loader() != other_class_loader) {
2908
return false;
2909
}
2910
if (name()->fast_compare(other_class_name) == 0) {
2911
return true;
2912
}
2913
2914
{
2915
ResourceMark rm;
2916
2917
bool bad_class_name = false;
2918
TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
2919
if (bad_class_name) {
2920
return false;
2921
}
2922
// Check that package_from_class_name() returns NULL, not "", if there is no package.
2923
assert(other_pkg == NULL || other_pkg->utf8_length() > 0, "package name is empty string");
2924
2925
const Symbol* const this_package_name =
2926
this->package() != NULL ? this->package()->name() : NULL;
2927
2928
if (this_package_name == NULL || other_pkg == NULL) {
2929
// One of the two doesn't have a package. Only return true if the other
2930
// one also doesn't have a package.
2931
return this_package_name == other_pkg;
2932
}
2933
2934
// Check if package is identical
2935
return this_package_name->fast_compare(other_pkg) == 0;
2936
}
2937
}
2938
2939
static bool is_prohibited_package_slow(Symbol* class_name) {
2940
// Caller has ResourceMark
2941
int length;
2942
jchar* unicode = class_name->as_unicode(length);
2943
return (length >= 5 &&
2944
unicode[0] == 'j' &&
2945
unicode[1] == 'a' &&
2946
unicode[2] == 'v' &&
2947
unicode[3] == 'a' &&
2948
unicode[4] == '/');
2949
}
2950
2951
// Only boot and platform class loaders can define classes in "java/" packages.
2952
void InstanceKlass::check_prohibited_package(Symbol* class_name,
2953
ClassLoaderData* loader_data,
2954
TRAPS) {
2955
if (!loader_data->is_boot_class_loader_data() &&
2956
!loader_data->is_platform_class_loader_data() &&
2957
class_name != NULL && class_name->utf8_length() >= 5) {
2958
ResourceMark rm(THREAD);
2959
bool prohibited;
2960
const u1* base = class_name->base();
2961
if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
2962
prohibited = is_prohibited_package_slow(class_name);
2963
} else {
2964
char* name = class_name->as_C_string();
2965
prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
2966
}
2967
if (prohibited) {
2968
TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
2969
assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'");
2970
char* name = pkg_name->as_C_string();
2971
const char* class_loader_name = loader_data->loader_name_and_id();
2972
StringUtils::replace_no_expand(name, "/", ".");
2973
const char* msg_text1 = "Class loader (instance of): ";
2974
const char* msg_text2 = " tried to load prohibited package name: ";
2975
size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
2976
char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
2977
jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
2978
THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
2979
}
2980
}
2981
return;
2982
}
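// Example (illustrative): an application class loader defining
// "java/evil/Hack" takes the prohibited branch above and gets a
// SecurityException naming the loader and the package "java.evil", while
// "javax/foo/Bar" passes because its fifth character is not '/'.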
2983
2984
bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
2985
constantPoolHandle i_cp(THREAD, constants());
2986
for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
2987
int ioff = iter.inner_class_info_index();
2988
if (ioff != 0) {
2989
// Check to see if the name matches the class we're looking for
2990
// before attempting to find the class.
2991
if (i_cp->klass_name_at_matches(this, ioff)) {
2992
Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
2993
if (this == inner_klass) {
2994
*ooff = iter.outer_class_info_index();
2995
*noff = iter.inner_name_index();
2996
return true;
2997
}
2998
}
2999
}
3000
}
3001
return false;
3002
}
3003
3004
InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
3005
InstanceKlass* outer_klass = NULL;
3006
*inner_is_member = false;
3007
int ooff = 0, noff = 0;
3008
bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
3009
if (has_inner_classes_attr) {
3010
constantPoolHandle i_cp(THREAD, constants());
3011
if (ooff != 0) {
3012
Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
3013
outer_klass = InstanceKlass::cast(ok);
3014
*inner_is_member = true;
3015
}
3016
if (NULL == outer_klass) {
3017
// It may be a local class; try for that.
3018
int encl_method_class_idx = enclosing_method_class_index();
3019
if (encl_method_class_idx != 0) {
3020
Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
3021
outer_klass = InstanceKlass::cast(ok);
3022
*inner_is_member = false;
3023
}
3024
}
3025
}
3026
3027
// If no inner class attribute found for this class.
3028
if (NULL == outer_klass) return NULL;
3029
3030
// Throws an exception if outer klass has not declared k as an inner klass
3031
// We need evidence that each klass knows about the other, or else
3032
// the system could allow a spoof of an inner class to gain access rights.
3033
Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
3034
return outer_klass;
3035
}
3036
3037
jint InstanceKlass::compute_modifier_flags() const {
3038
jint access = access_flags().as_int();
3039
3040
// But check if it happens to be member class.
3041
InnerClassesIterator iter(this);
3042
for (; !iter.done(); iter.next()) {
3043
int ioff = iter.inner_class_info_index();
3044
// Inner class attribute can be zero, skip it.
3045
// Strange but true: JVM spec. allows null inner class refs.
3046
if (ioff == 0) continue;
3047
3048
// only look at classes that are already loaded
3049
// since we are looking for the flags for our self.
3050
Symbol* inner_name = constants()->klass_name_at(ioff);
3051
if (name() == inner_name) {
3052
// This is really a member class.
3053
access = iter.inner_access_flags();
3054
break;
3055
}
3056
}
3057
// Remember to strip ACC_SUPER bit
3058
return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
3059
}
3060
3061
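// Map the class state onto the JVMTI class status bits
// (VERIFIED/PREPARED when linked, INITIALIZED, ERROR).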
jint InstanceKlass::jvmti_class_status() const {
3062
jint result = 0;
3063
3064
if (is_linked()) {
3065
result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3066
}
3067
3068
if (is_initialized()) {
3069
assert(is_linked(), "Class status is not consistent");
3070
result |= JVMTI_CLASS_STATUS_INITIALIZED;
3071
}
3072
if (is_in_error_state()) {
3073
result |= JVMTI_CLASS_STATUS_ERROR;
3074
}
3075
return result;
3076
}
3077
3078
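// Resolve an interface method through this klass's itable. Throws AbstractMethodError
// if the itable slot is empty, and IncompatibleClassChangeError if the receiver class
// does not implement the interface at all.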
Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
3079
bool implements_interface; // initialized by method_at_itable_or_null
3080
Method* m = method_at_itable_or_null(holder, index,
3081
implements_interface); // out parameter
3082
if (m != NULL) {
3083
assert(implements_interface, "sanity");
3084
return m;
3085
} else if (implements_interface) {
3086
// Throw AbstractMethodError since corresponding itable slot is empty.
3087
THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
3088
} else {
3089
// If the interface isn't implemented by the receiver class,
3090
// the VM should throw IncompatibleClassChangeError.
3091
ResourceMark rm(THREAD);
3092
stringStream ss;
3093
bool same_module = (module() == holder->module());
3094
ss.print("Receiver class %s does not implement "
3095
"the interface %s defining the method to be called "
3096
"(%s%s%s)",
3097
external_name(), holder->external_name(),
3098
(same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
3099
(same_module) ? "" : "; ",
3100
(same_module) ? "" : holder->class_in_module_of_loader());
3101
THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
3102
}
3103
}
3104
3105
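// Walk the itable offset entries looking for 'holder'. If found, report via
// 'implements_interface' and return the method at 'index' within that interface's
// block of method entries (which may be NULL for an empty slot); otherwise return NULL.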
Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3106
klassItable itable(this);
3107
for (int i = 0; i < itable.size_offset_table(); i++) {
3108
itableOffsetEntry* offset_entry = itable.offset_entry(i);
3109
if (offset_entry->interface_klass() == holder) {
3110
implements_interface = true;
3111
itableMethodEntry* ime = offset_entry->first_method_entry(this);
3112
Method* m = ime[index].method();
3113
return m;
3114
}
3115
}
3116
implements_interface = false;
3117
return NULL; // offset entry not found
3118
}
3119
3120
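// Find the vtable index that backs the given interface method in this linked class,
// checking the default-method array first and then the miranda entries.
// Returns Method::invalid_vtable_index if no matching entry is found.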
int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
3121
assert(is_linked(), "required");
3122
assert(intf_method->method_holder()->is_interface(), "not an interface method");
3123
assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");
3124
3125
int vtable_index = Method::invalid_vtable_index;
3126
Symbol* name = intf_method->name();
3127
Symbol* signature = intf_method->signature();
3128
3129
// First check in default method array
3130
if (!intf_method->is_abstract() && default_methods() != NULL) {
3131
int index = find_method_index(default_methods(),
3132
name, signature,
3133
Klass::OverpassLookupMode::find,
3134
Klass::StaticLookupMode::find,
3135
Klass::PrivateLookupMode::find);
3136
if (index >= 0) {
3137
vtable_index = default_vtable_indices()->at(index);
3138
}
3139
}
3140
if (vtable_index == Method::invalid_vtable_index) {
3141
// get vtable_index for miranda methods
3142
klassVtable vt = vtable();
3143
vtable_index = vt.index_of_miranda(name, signature);
3144
}
3145
return vtable_index;
3146
}
3147
3148
#if INCLUDE_JVMTI
3149
// Update default_methods for RedefineClasses() for methods that are
3150
// not yet in the vtable because a subclass was being defined concurrently
3151
// with a superinterface redefinition.
3152
// Note: methods already in the vtable should have been updated via adjust_method_entries.
3153
void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
3154
// search the default_methods for uses of either obsolete or EMCP methods
3155
if (default_methods() != NULL) {
3156
for (int index = 0; index < default_methods()->length(); index ++) {
3157
Method* old_method = default_methods()->at(index);
3158
if (old_method == NULL || !old_method->is_old()) {
3159
continue; // skip uninteresting entries
3160
}
3161
assert(!old_method->is_deleted(), "default methods may not be deleted");
3162
Method* new_method = old_method->get_new_method();
3163
default_methods()->at_put(index, new_method);
3164
3165
if (log_is_enabled(Info, redefine, class, update)) {
3166
ResourceMark rm;
3167
if (!(*trace_name_printed)) {
3168
log_info(redefine, class, update)
3169
("adjust: klassname=%s default methods from name=%s",
3170
external_name(), old_method->method_holder()->external_name());
3171
*trace_name_printed = true;
3172
}
3173
log_debug(redefine, class, update, vtables)
3174
("default method update: %s(%s) ",
3175
new_method->name()->as_C_string(), new_method->signature()->as_C_string());
3176
}
3177
}
3178
}
3179
}
3180
#endif // INCLUDE_JVMTI
3181
3182
// On-stack replacement (OSR) support
3183
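// Add an OSR nmethod at the head of this klass's OSR list, raise the method's highest
// OSR compilation level if needed, and make lower-level OSR nmethods for the same bci
// not entrant.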
void InstanceKlass::add_osr_nmethod(nmethod* n) {
3184
assert_lock_strong(CompiledMethod_lock);
3185
#ifndef PRODUCT
3186
nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3187
assert(prev == NULL || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
3188
"redundant OSR recompilation detected. memory leak in CodeCache!");
3189
#endif
3190
// only one compilation can be active
3191
assert(n->is_osr_method(), "wrong kind of nmethod");
3192
n->set_osr_link(osr_nmethods_head());
3193
set_osr_nmethods_head(n);
3194
// Raise the highest osr level if necessary
3195
n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));
3196
3197
// Get rid of the osr methods for the same bci that have lower levels.
3198
for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3199
nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3200
if (inv != NULL && inv->is_in_use()) {
3201
inv->make_not_entrant();
3202
}
3203
}
3204
}
3205
3206
// Remove osr nmethod from the list. Return true if found and removed.
3207
bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3208
// This is a short non-blocking critical region, so the no-safepoint check is ok.
3209
MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock
3210
, Mutex::_no_safepoint_check_flag);
3211
assert(n->is_osr_method(), "wrong kind of nmethod");
3212
nmethod* last = NULL;
3213
nmethod* cur = osr_nmethods_head();
3214
int max_level = CompLevel_none; // Find the max comp level excluding n
3215
Method* m = n->method();
3216
// Search for match
3217
bool found = false;
3218
while(cur != NULL && cur != n) {
3219
if (m == cur->method()) {
3220
// Find max level before n
3221
max_level = MAX2(max_level, cur->comp_level());
3222
}
3223
last = cur;
3224
cur = cur->osr_link();
3225
}
3226
nmethod* next = NULL;
3227
if (cur == n) {
3228
found = true;
3229
next = cur->osr_link();
3230
if (last == NULL) {
3231
// Remove first element
3232
set_osr_nmethods_head(next);
3233
} else {
3234
last->set_osr_link(next);
3235
}
3236
}
3237
n->set_osr_link(NULL);
3238
cur = next;
3239
while (cur != NULL) {
3240
// Find max level after n
3241
if (m == cur->method()) {
3242
max_level = MAX2(max_level, cur->comp_level());
3243
}
3244
cur = cur->osr_link();
3245
}
3246
m->set_highest_osr_comp_level(max_level);
3247
return found;
3248
}
3249
3250
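// Mark every OSR nmethod in this klass's list that belongs to 'm' for deoptimization;
// returns the number of nmethods marked.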
int InstanceKlass::mark_osr_nmethods(const Method* m) {
3251
MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
3252
Mutex::_no_safepoint_check_flag);
3253
nmethod* osr = osr_nmethods_head();
3254
int found = 0;
3255
while (osr != NULL) {
3256
assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3257
if (osr->method() == m) {
3258
osr->mark_for_deoptimization();
3259
found++;
3260
}
3261
osr = osr->osr_link();
3262
}
3263
return found;
3264
}
3265
3266
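// Find an OSR nmethod for 'm' at 'bci' (or at any bci when InvocationEntryBci is passed).
// With match_level, only an exact comp_level match is returned; otherwise the best
// (highest-level) candidate at or above comp_level is returned, or NULL if none.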
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3267
MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
3268
Mutex::_no_safepoint_check_flag);
3269
nmethod* osr = osr_nmethods_head();
3270
nmethod* best = NULL;
3271
while (osr != NULL) {
3272
assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3273
// There can be a window where a C1 OSR method exists while we are waiting
3274
// for a C2 version. Once C2 completes its OSR nmethod, we will discard
3275
// the C1 version and only be able to find the C2 version. However,
3276
// while we keep overflowing at back branches in the C1 code, we don't want
3277
// to try to switch to the same code we are already running.
3278
3279
if (osr->method() == m &&
3280
(bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3281
if (match_level) {
3282
if (osr->comp_level() == comp_level) {
3283
// Found a match - return it.
3284
return osr;
3285
}
3286
} else {
3287
if (best == NULL || (osr->comp_level() > best->comp_level())) {
3288
if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
3289
// Found the best possible - return it.
3290
return osr;
3291
}
3292
best = osr;
3293
}
3294
}
3295
}
3296
osr = osr->osr_link();
3297
}
3298
3299
assert(match_level == false || best == NULL, "shouldn't pick up anything if match_level is set");
3300
if (best != NULL && best->comp_level() >= comp_level) {
3301
return best;
3302
}
3303
return NULL;
3304
}
3305
3306
// -----------------------------------------------------------------------------------------------------
3307
// Printing
3308
3309
#ifndef PRODUCT
3310
3311
#define BULLET " - "
3312
3313
static const char* state_names[] = {
3314
"allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3315
};
3316
3317
static void print_vtable(intptr_t* start, int len, outputStream* st) {
3318
for (int i = 0; i < len; i++) {
3319
intptr_t e = start[i];
3320
st->print("%d : " INTPTR_FORMAT, i, e);
3321
if (MetaspaceObj::is_valid((Metadata*)e)) {
3322
st->print(" ");
3323
((Metadata*)e)->print_value_on(st);
3324
}
3325
st->cr();
3326
}
3327
}
3328
3329
static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3330
return print_vtable(reinterpret_cast<intptr_t*>(start), len, st);
3331
}
3332
3333
void InstanceKlass::print_on(outputStream* st) const {
3334
assert(is_klass(), "must be klass");
3335
Klass::print_on(st);
3336
3337
st->print(BULLET"instance size: %d", size_helper()); st->cr();
3338
st->print(BULLET"klass size: %d", size()); st->cr();
3339
st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
3340
st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]);
3341
st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
3342
st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3343
st->print(BULLET"sub: ");
3344
Klass* sub = subklass();
3345
int n;
3346
for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
3347
if (n < MaxSubklassPrintSize) {
3348
sub->print_value_on(st);
3349
st->print(" ");
3350
}
3351
}
3352
if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize);
3353
st->cr();
3354
3355
if (is_interface()) {
3356
st->print_cr(BULLET"nof implementors: %d", nof_implementors());
3357
if (nof_implementors() == 1) {
3358
st->print_cr(BULLET"implementor: ");
3359
st->print(" ");
3360
implementor()->print_value_on(st);
3361
st->cr();
3362
}
3363
}
3364
3365
st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3366
st->print(BULLET"methods: "); methods()->print_value_on(st); st->cr();
3367
if (Verbose || WizardMode) {
3368
Array<Method*>* method_array = methods();
3369
for (int i = 0; i < method_array->length(); i++) {
3370
st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3371
}
3372
}
3373
st->print(BULLET"method ordering: "); method_ordering()->print_value_on(st); st->cr();
3374
st->print(BULLET"default_methods: "); default_methods()->print_value_on(st); st->cr();
3375
if (Verbose && default_methods() != NULL) {
3376
Array<Method*>* method_array = default_methods();
3377
for (int i = 0; i < method_array->length(); i++) {
3378
st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3379
}
3380
}
3381
if (default_vtable_indices() != NULL) {
3382
st->print(BULLET"default vtable indices: "); default_vtable_indices()->print_value_on(st); st->cr();
3383
}
3384
st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
3385
st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3386
st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
3387
if (class_loader_data() != NULL) {
3388
st->print(BULLET"class loader data: ");
3389
class_loader_data()->print_value_on(st);
3390
st->cr();
3391
}
3392
if (source_file_name() != NULL) {
3393
st->print(BULLET"source file: ");
3394
source_file_name()->print_value_on(st);
3395
st->cr();
3396
}
3397
if (source_debug_extension() != NULL) {
3398
st->print(BULLET"source debug extension: ");
3399
st->print("%s", source_debug_extension());
3400
st->cr();
3401
}
3402
st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr();
3403
st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr();
3404
st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr();
3405
st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr();
3406
{
3407
bool have_pv = false;
3408
// previous versions are linked together through the InstanceKlass
3409
for (InstanceKlass* pv_node = previous_versions();
3410
pv_node != NULL;
3411
pv_node = pv_node->previous_versions()) {
3412
if (!have_pv)
3413
st->print(BULLET"previous version: ");
3414
have_pv = true;
3415
pv_node->constants()->print_value_on(st);
3416
}
3417
if (have_pv) st->cr();
3418
}
3419
3420
if (generic_signature() != NULL) {
3421
st->print(BULLET"generic signature: ");
3422
generic_signature()->print_value_on(st);
3423
st->cr();
3424
}
3425
st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
3426
st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
3427
if (record_components() != NULL) {
3428
st->print(BULLET"record components: "); record_components()->print_value_on(st); st->cr();
3429
}
3430
st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
3431
if (java_mirror() != NULL) {
3432
st->print(BULLET"java mirror: ");
3433
java_mirror()->print_value_on(st);
3434
st->cr();
3435
} else {
3436
st->print_cr(BULLET"java mirror: NULL");
3437
}
3438
st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
3439
if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
3440
st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
3441
if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st);
3442
st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
3443
FieldPrinter print_static_field(st);
3444
((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
3445
st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
3446
FieldPrinter print_nonstatic_field(st);
3447
InstanceKlass* ik = const_cast<InstanceKlass*>(this);
3448
ik->do_nonstatic_fields(&print_nonstatic_field);
3449
3450
st->print(BULLET"non-static oop maps: ");
3451
OopMapBlock* map = start_of_nonstatic_oop_maps();
3452
OopMapBlock* end_map = map + nonstatic_oop_map_count();
3453
while (map < end_map) {
3454
st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
3455
map++;
3456
}
3457
st->cr();
3458
}
3459
3460
#endif //PRODUCT
3461
3462
void InstanceKlass::print_value_on(outputStream* st) const {
3463
assert(is_klass(), "must be klass");
3464
if (Verbose || WizardMode) access_flags().print_on(st);
3465
name()->print_value_on(st);
3466
}
3467
3468
#ifndef PRODUCT
3469
3470
void FieldPrinter::do_field(fieldDescriptor* fd) {
3471
_st->print(BULLET);
3472
if (_obj == NULL) {
3473
fd->print_on(_st);
3474
_st->cr();
3475
} else {
3476
fd->print_on_for(_st, _obj);
3477
_st->cr();
3478
}
3479
}
3480
3481
3482
void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
3483
Klass::oop_print_on(obj, st);
3484
3485
if (this == vmClasses::String_klass()) {
3486
typeArrayOop value = java_lang_String::value(obj);
3487
juint length = java_lang_String::length(obj);
3488
if (value != NULL &&
3489
value->is_typeArray() &&
3490
length <= (juint) value->length()) {
3491
st->print(BULLET"string: ");
3492
java_lang_String::print(obj, st);
3493
st->cr();
3494
if (!WizardMode) return; // that is enough
3495
}
3496
}
3497
3498
st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
3499
FieldPrinter print_field(st, obj);
3500
do_nonstatic_fields(&print_field);
3501
3502
if (this == vmClasses::Class_klass()) {
3503
st->print(BULLET"signature: ");
3504
java_lang_Class::print_signature(obj, st);
3505
st->cr();
3506
Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
3507
st->print(BULLET"fake entry for mirror: ");
3508
Metadata::print_value_on_maybe_null(st, mirrored_klass);
3509
st->cr();
3510
Klass* array_klass = java_lang_Class::array_klass_acquire(obj);
3511
st->print(BULLET"fake entry for array: ");
3512
Metadata::print_value_on_maybe_null(st, array_klass);
3513
st->cr();
3514
st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
3515
st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
3516
Klass* real_klass = java_lang_Class::as_Klass(obj);
3517
if (real_klass != NULL && real_klass->is_instance_klass()) {
3518
InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
3519
}
3520
} else if (this == vmClasses::MethodType_klass()) {
3521
st->print(BULLET"signature: ");
3522
java_lang_invoke_MethodType::print_signature(obj, st);
3523
st->cr();
3524
}
3525
}
3526
3527
bool InstanceKlass::verify_itable_index(int i) {
3528
int method_count = klassItable::method_count_for_interface(this);
3529
assert(i >= 0 && i < method_count, "index out of bounds");
3530
return true;
3531
}
3532
3533
#endif //PRODUCT
3534
3535
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
3536
st->print("a ");
3537
name()->print_value_on(st);
3538
obj->print_address_on(st);
3539
if (this == vmClasses::String_klass()
3540
&& java_lang_String::value(obj) != NULL) {
3541
ResourceMark rm;
3542
int len = java_lang_String::length(obj);
3543
int plen = (len < 24 ? len : 12);
3544
char* str = java_lang_String::as_utf8_string(obj, 0, plen);
3545
st->print(" = \"%s\"", str);
3546
if (len > plen)
3547
st->print("...[%d]", len);
3548
} else if (this == vmClasses::Class_klass()) {
3549
Klass* k = java_lang_Class::as_Klass(obj);
3550
st->print(" = ");
3551
if (k != NULL) {
3552
k->print_value_on(st);
3553
} else {
3554
const char* tname = type2name(java_lang_Class::primitive_type(obj));
3555
st->print("%s", tname ? tname : "type?");
3556
}
3557
} else if (this == vmClasses::MethodType_klass()) {
3558
st->print(" = ");
3559
java_lang_invoke_MethodType::print_signature(obj, st);
3560
} else if (java_lang_boxing_object::is_instance(obj)) {
3561
st->print(" = ");
3562
java_lang_boxing_object::print(obj, st);
3563
} else if (this == vmClasses::LambdaForm_klass()) {
3564
oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
3565
if (vmentry != NULL) {
3566
st->print(" => ");
3567
vmentry->print_value_on(st);
3568
}
3569
} else if (this == vmClasses::MemberName_klass()) {
3570
Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
3571
if (vmtarget != NULL) {
3572
st->print(" = ");
3573
vmtarget->print_value_on(st);
3574
} else {
3575
oop clazz = java_lang_invoke_MemberName::clazz(obj);
3576
oop name = java_lang_invoke_MemberName::name(obj);
3577
if (clazz != NULL) {
3578
clazz->print_value_on(st);
3579
} else {
3580
st->print("NULL");
3581
}
3582
st->print(".");
3583
if (name != NULL) {
3584
name->print_value_on(st);
3585
} else {
3586
st->print("NULL");
3587
}
3588
}
3589
}
3590
}
3591
3592
const char* InstanceKlass::internal_name() const {
3593
return external_name();
3594
}
3595
3596
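// Emit unified logging output for a class load: the class name and its source at
// Info level, plus class hierarchy, interfaces, loader and classfile checksum
// details at Debug level. Also appends the class to the class list when dumping.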
void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
3597
const ModuleEntry* module_entry,
3598
const ClassFileStream* cfs) const {
3599
log_to_classlist();
3600
3601
if (!log_is_enabled(Info, class, load)) {
3602
return;
3603
}
3604
3605
ResourceMark rm;
3606
LogMessage(class, load) msg;
3607
stringStream info_stream;
3608
3609
// Name and class hierarchy info
3610
info_stream.print("%s", external_name());
3611
3612
// Source
3613
if (cfs != NULL) {
3614
if (cfs->source() != NULL) {
3615
const char* module_name = (module_entry->name() == NULL) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
3616
if (module_name != NULL) {
3617
// When the boot loader created the stream, it didn't know the module name
3618
// yet. Let's format it now.
3619
if (cfs->from_boot_loader_modules_image()) {
3620
info_stream.print(" source: jrt:/%s", module_name);
3621
} else {
3622
info_stream.print(" source: %s", cfs->source());
3623
}
3624
} else {
3625
info_stream.print(" source: %s", cfs->source());
3626
}
3627
} else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
3628
Thread* current = Thread::current();
3629
Klass* caller = current->is_Java_thread() ?
3630
current->as_Java_thread()->security_get_caller_class(1):
3631
NULL;
3632
// caller can be NULL, for example, during a JVMTI VM_Init hook
3633
if (caller != NULL) {
3634
info_stream.print(" source: instance of %s", caller->external_name());
3635
} else {
3636
// source is unknown
3637
}
3638
} else {
3639
oop class_loader = loader_data->class_loader();
3640
info_stream.print(" source: %s", class_loader->klass()->external_name());
3641
}
3642
} else {
3643
assert(this->is_shared(), "must be");
3644
if (MetaspaceShared::is_shared_dynamic((void*)this)) {
3645
info_stream.print(" source: shared objects file (top)");
3646
} else {
3647
info_stream.print(" source: shared objects file");
3648
}
3649
}
3650
3651
msg.info("%s", info_stream.as_string());
3652
3653
if (log_is_enabled(Debug, class, load)) {
3654
stringStream debug_stream;
3655
3656
// Class hierarchy info
3657
debug_stream.print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT,
3658
p2i(this), p2i(superklass()));
3659
3660
// Interfaces
3661
if (local_interfaces() != NULL && local_interfaces()->length() > 0) {
3662
debug_stream.print(" interfaces:");
3663
int length = local_interfaces()->length();
3664
for (int i = 0; i < length; i++) {
3665
debug_stream.print(" " INTPTR_FORMAT,
3666
p2i(InstanceKlass::cast(local_interfaces()->at(i))));
3667
}
3668
}
3669
3670
// Class loader
3671
debug_stream.print(" loader: [");
3672
loader_data->print_value_on(&debug_stream);
3673
debug_stream.print("]");
3674
3675
// Classfile checksum
3676
if (cfs) {
3677
debug_stream.print(" bytes: %d checksum: %08x",
3678
cfs->length(),
3679
ClassLoader::crc32(0, (const char*)cfs->buffer(),
3680
cfs->length()));
3681
}
3682
3683
msg.debug("%s", debug_stream.as_string());
3684
}
3685
}
3686
3687
// Verification
3688
3689
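// Closure used by oop_verify_on(): visits every oop field of an object and
// guarantees that each referenced value is a valid oop or NULL.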
class VerifyFieldClosure: public BasicOopIterateClosure {
3690
protected:
3691
template <class T> void do_oop_work(T* p) {
3692
oop obj = RawAccess<>::oop_load(p);
3693
if (!oopDesc::is_oop_or_null(obj)) {
3694
tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
3695
Universe::print_on(tty);
3696
guarantee(false, "boom");
3697
}
3698
}
3699
public:
3700
virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
3701
virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
3702
};
3703
3704
void InstanceKlass::verify_on(outputStream* st) {
3705
#ifndef PRODUCT
3706
// Avoid redundant verifies; this really should be in product.
3707
if (_verify_count == Universe::verify_count()) return;
3708
_verify_count = Universe::verify_count();
3709
#endif
3710
3711
// Verify Klass
3712
Klass::verify_on(st);
3713
3714
// Verify that klass is present in ClassLoaderData
3715
guarantee(class_loader_data()->contains_klass(this),
3716
"this class isn't found in class loader data");
3717
3718
// Verify vtables
3719
if (is_linked()) {
3720
// $$$ This used to be done only for m/s collections. Doing it
3721
// always seemed a valid generalization. (DLD -- 6/00)
3722
vtable().verify(st);
3723
}
3724
3725
// Verify first subklass
3726
if (subklass() != NULL) {
3727
guarantee(subklass()->is_klass(), "should be klass");
3728
}
3729
3730
// Verify siblings
3731
Klass* super = this->super();
3732
Klass* sib = next_sibling();
3733
if (sib != NULL) {
3734
if (sib == this) {
3735
fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
3736
}
3737
3738
guarantee(sib->is_klass(), "should be klass");
3739
guarantee(sib->super() == super, "siblings should have same superklass");
3740
}
3741
3742
// Verify local interfaces
3743
if (local_interfaces()) {
3744
Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
3745
for (int j = 0; j < local_interfaces->length(); j++) {
3746
InstanceKlass* e = local_interfaces->at(j);
3747
guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
3748
}
3749
}
3750
3751
// Verify transitive interfaces
3752
if (transitive_interfaces() != NULL) {
3753
Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
3754
for (int j = 0; j < transitive_interfaces->length(); j++) {
3755
InstanceKlass* e = transitive_interfaces->at(j);
3756
guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
3757
}
3758
}
3759
3760
// Verify methods
3761
if (methods() != NULL) {
3762
Array<Method*>* methods = this->methods();
3763
for (int j = 0; j < methods->length(); j++) {
3764
guarantee(methods->at(j)->is_method(), "non-method in methods array");
3765
}
3766
for (int j = 0; j < methods->length() - 1; j++) {
3767
Method* m1 = methods->at(j);
3768
Method* m2 = methods->at(j + 1);
3769
guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
3770
}
3771
}
3772
3773
// Verify method ordering
3774
if (method_ordering() != NULL) {
3775
Array<int>* method_ordering = this->method_ordering();
3776
int length = method_ordering->length();
3777
if (JvmtiExport::can_maintain_original_method_order() ||
3778
((UseSharedSpaces || Arguments::is_dumping_archive()) && length != 0)) {
3779
guarantee(length == methods()->length(), "invalid method ordering length");
3780
jlong sum = 0;
3781
for (int j = 0; j < length; j++) {
3782
int original_index = method_ordering->at(j);
3783
guarantee(original_index >= 0, "invalid method ordering index");
3784
guarantee(original_index < length, "invalid method ordering index");
3785
sum += original_index;
3786
}
3787
// Verify sum of indices 0,1,...,length-1
3788
guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
3789
} else {
3790
guarantee(length == 0, "invalid method ordering length");
3791
}
3792
}
3793
3794
// Verify default methods
3795
if (default_methods() != NULL) {
3796
Array<Method*>* methods = this->default_methods();
3797
for (int j = 0; j < methods->length(); j++) {
3798
guarantee(methods->at(j)->is_method(), "non-method in methods array");
3799
}
3800
for (int j = 0; j < methods->length() - 1; j++) {
3801
Method* m1 = methods->at(j);
3802
Method* m2 = methods->at(j + 1);
3803
guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
3804
}
3805
}
3806
3807
// Verify JNI static field identifiers
3808
if (jni_ids() != NULL) {
3809
jni_ids()->verify(this);
3810
}
3811
3812
// Verify other fields
3813
if (constants() != NULL) {
3814
guarantee(constants()->is_constantPool(), "should be constant pool");
3815
}
3816
}
3817
3818
void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
3819
Klass::oop_verify_on(obj, st);
3820
VerifyFieldClosure blk;
3821
obj->oop_iterate(&blk);
3822
}
3823
3824
3825
// JNIid class for jfieldIDs only
3826
// Note to reviewers:
3827
// These JNI functions are just moved over to column 1 and not changed
3828
// in the compressed oops workspace.
3829
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
3830
_holder = holder;
3831
_offset = offset;
3832
_next = next;
3833
debug_only(_is_static_field_id = false;)
3834
}
3835
3836
3837
JNIid* JNIid::find(int offset) {
3838
JNIid* current = this;
3839
while (current != NULL) {
3840
if (current->offset() == offset) return current;
3841
current = current->next();
3842
}
3843
return NULL;
3844
}
3845
3846
void JNIid::deallocate(JNIid* current) {
3847
while (current != NULL) {
3848
JNIid* next = current->next();
3849
delete current;
3850
current = next;
3851
}
3852
}
3853
3854
3855
void JNIid::verify(Klass* holder) {
3856
int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
3857
int end_field_offset;
3858
end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);
3859
3860
JNIid* current = this;
3861
while (current != NULL) {
3862
guarantee(current->holder() == holder, "Invalid klass in JNIid");
3863
#ifdef ASSERT
3864
int o = current->offset();
3865
if (current->is_static_field_id()) {
3866
guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
3867
}
3868
#endif
3869
current = current->next();
3870
}
3871
}
3872
3873
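// Transition the class initialization state. Debug builds assert that the state only
// moves forward (shared classes may re-set the same state, and a reset to 'allocated'
// is allowed) and that _init_thread has been cleared before the change.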
void InstanceKlass::set_init_state(ClassState state) {
3874
#ifdef ASSERT
3875
bool good_state = is_shared() ? (_init_state <= state)
3876
: (_init_state < state);
3877
assert(good_state || state == allocated, "illegal state transition");
3878
#endif
3879
assert(_init_thread == NULL, "should be cleared before state change");
3880
_init_state = (u1)state;
3881
}
3882
3883
#if INCLUDE_JVMTI
3884
3885
// RedefineClasses() support for previous versions
3886
3887
// Globally, there is at least one previous version of a class to walk
3888
// during class unloading, which is saved because old methods in the class
3889
// are still running. Otherwise the previous version list is cleaned up.
3890
bool InstanceKlass::_has_previous_versions = false;
3891
3892
// Returns true if there are previous versions of a class for class
3893
// unloading only. Also resets the flag to false. purge_previous_version_list()
3894
// will set the flag to true if there are any left, i.e., if there's any
3895
// work to do for next time. This is to avoid the expensive code cache
3896
// walk in CLDG::clean_deallocate_lists().
3897
bool InstanceKlass::has_previous_versions_and_reset() {
3898
bool ret = _has_previous_versions;
3899
log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s",
3900
ret ? "true" : "false");
3901
_has_previous_versions = false;
3902
return ret;
3903
}
3904
3905
// Purge previous versions before adding new previous versions of the class and
3906
// during class unloading.
3907
void InstanceKlass::purge_previous_version_list() {
3908
assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
3909
assert(has_been_redefined(), "Should only be called for main class");
3910
3911
// Quick exit.
3912
if (previous_versions() == NULL) {
3913
return;
3914
}
3915
3916
// This klass has previous versions so see what we can cleanup
3917
// while it is safe to do so.
3918
3919
int deleted_count = 0; // leave debugging breadcrumbs
3920
int live_count = 0;
3921
ClassLoaderData* loader_data = class_loader_data();
3922
assert(loader_data != NULL, "should never be null");
3923
3924
ResourceMark rm;
3925
log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());
3926
3927
// previous versions are linked together through the InstanceKlass
3928
InstanceKlass* pv_node = previous_versions();
3929
InstanceKlass* last = this;
3930
int version = 0;
3931
3932
// check the previous versions list
3933
for (; pv_node != NULL; ) {
3934
3935
ConstantPool* pvcp = pv_node->constants();
3936
assert(pvcp != NULL, "cp ref was unexpectedly cleared");
3937
3938
if (!pvcp->on_stack()) {
3939
// If the constant pool isn't on stack, none of the methods
3940
// are executing. Unlink this previous_version.
3941
// The previous version InstanceKlass is on the ClassLoaderData deallocate list
3942
// so will be deallocated during the next phase of class unloading.
3943
log_trace(redefine, class, iklass, purge)
3944
("previous version " INTPTR_FORMAT " is dead.", p2i(pv_node));
3945
// For debugging purposes.
3946
pv_node->set_is_scratch_class();
3947
// Unlink from previous version list.
3948
assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
3949
InstanceKlass* next = pv_node->previous_versions();
3950
pv_node->link_previous_versions(NULL); // point next to NULL
3951
last->link_previous_versions(next);
3952
// Delete this node directly. Nothing is referring to it and we don't
3953
// want it to increase the counter for metadata to delete in CLDG.
3954
MetadataFactory::free_metadata(loader_data, pv_node);
3955
pv_node = next;
3956
deleted_count++;
3957
version++;
3958
continue;
3959
} else {
3960
log_trace(redefine, class, iklass, purge)("previous version " INTPTR_FORMAT " is alive", p2i(pv_node));
3961
assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
3962
guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
3963
live_count++;
3964
// found a previous version for next time we do class unloading
3965
_has_previous_versions = true;
3966
}
3967
3968
// next previous version
3969
last = pv_node;
3970
pv_node = pv_node->previous_versions();
3971
version++;
3972
}
3973
log_trace(redefine, class, iklass, purge)
3974
("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
3975
}
3976
3977
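// Walk the previous-version chain and mark as obsolete any EMCP method whose
// name and signature match a method that the current RedefineClasses() call has
// just made obsolete.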
void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
3978
int emcp_method_count) {
3979
int obsolete_method_count = old_methods->length() - emcp_method_count;
3980
3981
if (emcp_method_count != 0 && obsolete_method_count != 0 &&
3982
_previous_versions != NULL) {
3983
// We have a mix of obsolete and EMCP methods so we have to
3984
// clear out any matching EMCP method entries the hard way.
3985
int local_count = 0;
3986
for (int i = 0; i < old_methods->length(); i++) {
3987
Method* old_method = old_methods->at(i);
3988
if (old_method->is_obsolete()) {
3989
// only obsolete methods are interesting
3990
Symbol* m_name = old_method->name();
3991
Symbol* m_signature = old_method->signature();
3992
3993
// previous versions are linked together through the InstanceKlass
3994
int j = 0;
3995
for (InstanceKlass* prev_version = _previous_versions;
3996
prev_version != NULL;
3997
prev_version = prev_version->previous_versions(), j++) {
3998
3999
Array<Method*>* method_refs = prev_version->methods();
4000
for (int k = 0; k < method_refs->length(); k++) {
4001
Method* method = method_refs->at(k);
4002
4003
if (!method->is_obsolete() &&
4004
method->name() == m_name &&
4005
method->signature() == m_signature) {
4006
// The current RedefineClasses() call has made all EMCP
4007
// versions of this method obsolete so mark it as obsolete
4008
log_trace(redefine, class, iklass, add)
4009
("%s(%s): flush obsolete method @%d in version @%d",
4010
m_name->as_C_string(), m_signature->as_C_string(), k, j);
4011
4012
method->set_is_obsolete();
4013
break;
4014
}
4015
}
4016
4017
// The previous loop may not find a matching EMCP method, but
4018
// that doesn't mean that we can optimize and not go any
4019
// further back in the PreviousVersion generations. The EMCP
4020
// method for this generation could have already been made obsolete,
4021
// but there still may be an older EMCP method that has not
4022
// been made obsolete.
4023
}
4024
4025
if (++local_count >= obsolete_method_count) {
4026
// no more obsolete methods so bail out now
4027
break;
4028
}
4029
}
4030
}
4031
}
4032
}
4033
4034
// Save the scratch_class as the previous version if any of the methods are running.
4035
// The previous_versions are used to set breakpoints in EMCP methods and they are
4036
// also used to clean MethodData links to redefined methods that are no longer running.
4037
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
4038
int emcp_method_count) {
4039
assert(Thread::current()->is_VM_thread(),
4040
"only VMThread can add previous versions");
4041
4042
ResourceMark rm;
4043
log_trace(redefine, class, iklass, add)
4044
("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);
4045
4046
// Clean out old previous versions for this class
4047
purge_previous_version_list();
4048
4049
// Mark newly obsolete methods in remaining previous versions. An EMCP method from
4050
// a previous redefinition may be made obsolete by this redefinition.
4051
Array<Method*>* old_methods = scratch_class->methods();
4052
mark_newly_obsolete_methods(old_methods, emcp_method_count);
4053
4054
// If the constant pool for this previous version of the class
4055
// is not marked as being on the stack, then none of the methods
4056
// in this previous version of the class are on the stack so
4057
// we don't need to add this as a previous version.
4058
ConstantPool* cp_ref = scratch_class->constants();
4059
if (!cp_ref->on_stack()) {
4060
log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
4061
// For debugging purposes.
4062
scratch_class->set_is_scratch_class();
4063
scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
4064
return;
4065
}
4066
4067
// Add previous version if any methods are still running.
4068
// Set has_previous_version flag for processing during class unloading.
4069
_has_previous_versions = true;
4070
log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
4071
assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
4072
scratch_class->link_previous_versions(previous_versions());
4073
link_previous_versions(scratch_class);
4074
} // end add_previous_version()
4075
4076
#endif // INCLUDE_JVMTI
4077
4078
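// Look up a method by its idnum. The method usually sits at that slot in the methods
// array, so try it first; otherwise fall back to a linear search. Returns NULL if not found.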
Method* InstanceKlass::method_with_idnum(int idnum) {
4079
Method* m = NULL;
4080
if (idnum < methods()->length()) {
4081
m = methods()->at(idnum);
4082
}
4083
if (m == NULL || m->method_idnum() != idnum) {
4084
for (int index = 0; index < methods()->length(); ++index) {
4085
m = methods()->at(index);
4086
if (m->method_idnum() == idnum) {
4087
return m;
4088
}
4089
}
4090
// None found, return null for the caller to handle.
4091
return NULL;
4092
}
4093
return m;
4094
}
4095
4096
4097
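// Like method_with_idnum(), but matches on the original (pre-redefinition) idnum.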
Method* InstanceKlass::method_with_orig_idnum(int idnum) {
4098
if (idnum >= methods()->length()) {
4099
return NULL;
4100
}
4101
Method* m = methods()->at(idnum);
4102
if (m != NULL && m->orig_method_idnum() == idnum) {
4103
return m;
4104
}
4105
// Obsolete method idnum does not match the original idnum
4106
for (int index = 0; index < methods()->length(); ++index) {
4107
m = methods()->at(index);
4108
if (m->orig_method_idnum() == idnum) {
4109
return m;
4110
}
4111
}
4112
// None found, return null for the caller to handle.
4113
return NULL;
4114
}
4115
4116
4117
Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
4118
InstanceKlass* holder = get_klass_version(version);
4119
if (holder == NULL) {
4120
return NULL; // That version of the klass is gone; no method can be found
4121
}
4122
Method* method = holder->method_with_orig_idnum(idnum);
4123
return method;
4124
}
4125
4126
#if INCLUDE_JVMTI
4127
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
4128
return _cached_class_file;
4129
}
4130
4131
jint InstanceKlass::get_cached_class_file_len() {
4132
return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
4133
}
4134
4135
unsigned char * InstanceKlass::get_cached_class_file_bytes() {
4136
return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
4137
}
4138
#endif
4139
4140
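// A class can be stored in a CDS archive only when its loader supports sharing
// and the class is neither hidden nor defined in a patched module.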
bool InstanceKlass::is_shareable() const {
4141
#if INCLUDE_CDS
4142
ClassLoaderData* loader_data = class_loader_data();
4143
if (!SystemDictionaryShared::is_sharing_possible(loader_data)) {
4144
return false;
4145
}
4146
4147
if (is_hidden()) {
4148
return false;
4149
}
4150
4151
if (module()->is_patched()) {
4152
return false;
4153
}
4154
4155
return true;
4156
#else
4157
return false;
4158
#endif
4159
}
4160
4161
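// When class-list dumping is enabled, append this class's name to the class list file
// (not supported in exploded builds, where CDS is unavailable).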
void InstanceKlass::log_to_classlist() const {
4162
#if INCLUDE_CDS
4163
ResourceMark rm;
4164
if (ClassListWriter::is_enabled()) {
4165
if (!ClassLoader::has_jrt_entry()) {
4166
warning("DumpLoadedClassList and CDS are not supported in exploded build");
4167
DumpLoadedClassList = NULL;
4168
return;
4169
}
4170
if (is_shareable()) {
4171
ClassListWriter w;
4172
w.stream()->print_cr("%s", name()->as_C_string());
4173
w.stream()->flush();
4174
}
4175
}
4176
#endif // INCLUDE_CDS
4177
}
4178
4179
// Advance the iterator one step over the class hierarchy under the root class.
4180
// Skips subclasses if requested.
4181
void ClassHierarchyIterator::next() {
4182
assert(_current != NULL, "required");
4183
if (_visit_subclasses && _current->subklass() != NULL) {
4184
_current = _current->subklass();
4185
return; // visit next subclass
4186
}
4187
_visit_subclasses = true; // reset
4188
while (_current->next_sibling() == NULL && _current != _root) {
4189
_current = _current->superklass(); // backtrack; no more sibling subclasses left
4190
}
4191
if (_current == _root) {
4192
// Iteration is over (back at root after backtracking). Invalidate the iterator.
4193
_current = NULL;
4194
return;
4195
}
4196
_current = _current->next_sibling();
4197
return; // visit next sibling subclass
4198
}
4199
4200