Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/openjdk-multiarch-jdk8u
Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/prims/jvmtiRedefineClasses.cpp
32285 views
1
/*
2
* Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
3
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4
*
5
* This code is free software; you can redistribute it and/or modify it
6
* under the terms of the GNU General Public License version 2 only, as
7
* published by the Free Software Foundation.
8
*
9
* This code is distributed in the hope that it will be useful, but WITHOUT
10
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12
* version 2 for more details (a copy is included in the LICENSE file that
13
* accompanied this code).
14
*
15
* You should have received a copy of the GNU General Public License version
16
* 2 along with this work; if not, write to the Free Software Foundation,
17
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18
*
19
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20
* or visit www.oracle.com if you need additional information or have any
21
* questions.
22
*
23
*/
24
25
#include "precompiled.hpp"
26
#include "classfile/metadataOnStackMark.hpp"
27
#include "classfile/systemDictionary.hpp"
28
#include "classfile/verifier.hpp"
29
#include "code/codeCache.hpp"
30
#include "compiler/compileBroker.hpp"
31
#include "interpreter/oopMapCache.hpp"
32
#include "interpreter/rewriter.hpp"
33
#include "memory/gcLocker.hpp"
34
#include "memory/metadataFactory.hpp"
35
#include "memory/metaspaceShared.hpp"
36
#include "memory/universe.inline.hpp"
37
#include "oops/fieldStreams.hpp"
38
#include "oops/klassVtable.hpp"
39
#include "prims/jvmtiImpl.hpp"
40
#include "prims/jvmtiRedefineClasses.hpp"
41
#include "prims/methodComparator.hpp"
42
#include "runtime/deoptimization.hpp"
43
#include "runtime/relocator.hpp"
44
#include "utilities/bitMap.inline.hpp"
45
#include "utilities/events.hpp"
46
47
PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
48
49
// Static working state for the currently-running RedefineClasses operation.
// Only one redefinition is in progress at a time (serialized by
// lock_classes()/unlock_classes()), so statics are safe here.
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
Method** VM_RedefineClasses::_matching_old_methods = NULL;
Method** VM_RedefineClasses::_matching_new_methods = NULL;
Method** VM_RedefineClasses::_deleted_methods = NULL;
Method** VM_RedefineClasses::_added_methods = NULL;
int VM_RedefineClasses::_matching_methods_length = 0;
int VM_RedefineClasses::_deleted_methods_length = 0;
int VM_RedefineClasses::_added_methods_length = 0;
// Class currently being redefined; kept for error printing (see doit_epilogue()).
Klass* VM_RedefineClasses::_the_class_oop = NULL;
59
60
61
// Construct the VM operation; just records the request. All validation and
// class loading happens later in doit_prologue().
//   class_count     - number of entries in class_defs
//   class_defs      - caller-owned array of (jclass, bytes) definitions
//   class_load_kind - distinguishes redefine vs. retransform paths
VM_RedefineClasses::VM_RedefineClasses(jint class_count,
                                       const jvmtiClassDefinition *class_defs,
                                       JvmtiClassLoadKind class_load_kind) {
  _class_count = class_count;
  _class_defs = class_defs;
  _class_load_kind = class_load_kind;
  _res = JVMTI_ERROR_NONE;  // assume success until a phase reports otherwise
}
69
70
// Resolve a JNI jclass handle to the InstanceKlass behind its mirror.
// The handle must be non-NULL (callers have already validated it).
static inline InstanceKlass* get_ik(jclass def) {
  return InstanceKlass::cast(
      java_lang_Class::as_Klass(JNIHandles::resolve_non_null(def)));
}
74
75
// If any of the classes are being redefined, wait
76
// Parallel constant pool merging leads to indeterminate constant pools.
77
void VM_RedefineClasses::lock_classes() {
78
MutexLocker ml(RedefineClasses_lock);
79
bool has_redefined;
80
do {
81
has_redefined = false;
82
// Go through classes each time until none are being redefined.
83
for (int i = 0; i < _class_count; i++) {
84
if (get_ik(_class_defs[i].klass)->is_being_redefined()) {
85
RedefineClasses_lock->wait();
86
has_redefined = true;
87
break; // for loop
88
}
89
}
90
} while (has_redefined);
91
for (int i = 0; i < _class_count; i++) {
92
get_ik(_class_defs[i].klass)->set_is_being_redefined(true);
93
}
94
RedefineClasses_lock->notify_all();
95
}
96
97
void VM_RedefineClasses::unlock_classes() {
98
MutexLocker ml(RedefineClasses_lock);
99
for (int i = 0; i < _class_count; i++) {
100
assert(get_ik(_class_defs[i].klass)->is_being_redefined(),
101
"should be being redefined to get here");
102
get_ik(_class_defs[i].klass)->set_is_being_redefined(false);
103
}
104
RedefineClasses_lock->notify_all();
105
}
106
107
// Runs before the safepoint, in the requesting thread (the load path below
// requires a Java thread). Validates the request, claims the classes, and
// parses/loads the new class versions. Returns false (with _res set) to
// abort the VM operation, true to proceed to doit().
bool VM_RedefineClasses::doit_prologue() {
  // An empty request is trivially successful, not an error.
  if (_class_count == 0) {
    _res = JVMTI_ERROR_NONE;
    return false;
  }
  if (_class_defs == NULL) {
    _res = JVMTI_ERROR_NULL_POINTER;
    return false;
  }
  // Per-entry sanity checks mandated by the JVMTI RedefineClasses spec.
  for (int i = 0; i < _class_count; i++) {
    if (_class_defs[i].klass == NULL) {
      _res = JVMTI_ERROR_INVALID_CLASS;
      return false;
    }
    if (_class_defs[i].class_byte_count == 0) {
      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
      return false;
    }
    if (_class_defs[i].class_bytes == NULL) {
      _res = JVMTI_ERROR_NULL_POINTER;
      return false;
    }

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives and arrays cannot be redefined
    // check here so following code can assume these classes are InstanceKlass
    if (!is_modifiable_class(mirror)) {
      _res = JVMTI_ERROR_UNMODIFIABLE_CLASS;
      return false;
    }
  }

  // Start timer after all the sanity checks; not quite accurate, but
  // better than adding a bunch of stop() calls.
  RC_TIMER_START(_timer_vm_op_prologue);

  lock_classes();
  // We first load new class versions in the prologue, because somewhere down the
  // call chain it is required that the current thread is a Java thread.
  _res = load_new_class_versions(Thread::current());
  if (_res != JVMTI_ERROR_NONE) {
    // free any successfully created classes, since none are redefined
    for (int i = 0; i < _class_count; i++) {
      if (_scratch_classes[i] != NULL) {
        ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
        // Free the memory for this class at class unloading time. Not before
        // because CMS might think this is still live.
        InstanceKlass* ik = get_ik(_class_defs[i].klass);
        if (ik->get_cached_class_file() == ((InstanceKlass*)_scratch_classes[i])->get_cached_class_file()) {
          // Don't double-free cached_class_file copied from the original class if error.
          ((InstanceKlass*)_scratch_classes[i])->set_cached_class_file(NULL);
        }
        cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
      }
    }
    // Free os::malloc allocated memory in load_new_class_version.
    os::free(_scratch_classes);
    RC_TIMER_STOP(_timer_vm_op_prologue);
    // Release the per-class claims last so waiters see a consistent state.
    unlock_classes();
    return false;
  }

  // Success: classes remain locked; doit_epilogue() unlocks and frees.
  RC_TIMER_STOP(_timer_vm_op_prologue);
  return true;
}
172
173
// Executes at a safepoint: installs each scratch (new-version) class over
// its corresponding old class, then invalidates dependent compiled code.
// All loading/validation already happened in doit_prologue().
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      RC_TRACE_WITH_THREAD(0x00000001, thread,
        ("failed to remap shared readonly space to readwrite, private"));
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack(true);
  HandleMark hm(thread);   // make sure any handles created are deleted
                           // before the stack walk again.

  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
    ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
    // Free the memory for this class at class unloading time. Not before
    // because CMS might think this is still live.
    cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
    // Clear the slot so doit_prologue's error path / epilogue won't touch it.
    _scratch_classes[i] = NULL;
  }

  // Disable any dependent concurrent compilations
  SystemDictionary::notice_modification();

  // Set flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::set_has_redefined_a_class();

// check_class() is optionally called for product bits, but is
// always called for non-product bits.
#ifdef PRODUCT
  if (RC_TRACE_ENABLED(0x00004000)) {
#endif
    RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class"));
    CheckClass check_class(thread);
    ClassLoaderDataGraph::classes_do(&check_class);
#ifdef PRODUCT
  }
#endif
}
223
224
// Runs after the safepoint, back in the requesting thread: releases the
// per-class claims, frees the scratch-class array, and reports timing.
void VM_RedefineClasses::doit_epilogue() {
  unlock_classes();

  // Free os::malloc allocated memory.
  // NOTE(review): the array itself; the scratch classes it pointed to were
  // handed to their ClassLoaderData deallocate lists in doit().
  os::free(_scratch_classes);

  // Reset the_class_oop to null for error printing.
  _the_class_oop = NULL;

  if (RC_TRACE_ENABLED(0x00000004)) {
    // Used to have separate timers for "doit" and "all", but the timer
    // overhead skewed the measurements.
    jlong doit_time = _timer_rsc_phase1.milliseconds() +
                      _timer_rsc_phase2.milliseconds();
    jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;

    // jlong printed with UINT64_FORMAT; format warnings are muted by the
    // PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC at the top of this file.
    RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
      " prologue=" UINT64_FORMAT " doit=" UINT64_FORMAT, all_time,
      _timer_vm_op_prologue.milliseconds(), doit_time));
    RC_TRACE(0x00000004,
      ("redefine_single_class: phase1=" UINT64_FORMAT " phase2=" UINT64_FORMAT,
       _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
  }
}
248
249
// A class may be redefined only if it is a plain instance class:
// primitive mirrors and array classes are rejected.
bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
  // Primitive mirrors (int.class, void.class, ...) have no Klass to redefine;
  // must check first, before asking the mirror for its Klass.
  if (java_lang_Class::is_primitive(klass_mirror)) {
    return false;
  }
  Klass* k = java_lang_Class::as_Klass(klass_mirror);
  // Array classes (and a NULL Klass) are likewise not modifiable.
  return k != NULL && k->oop_is_instance();
}
261
262
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
263
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
264
// direct CP entries, there is just the current entry to append. For
265
// indirect and double-indirect CP entries, there are zero or more
266
// referenced CP entries along with the current entry to append.
267
// Indirect and double-indirect CP entries are handled by recursive
268
// calls to append_entry() as needed. The referenced CP entries are
269
// always appended to *merge_cp_p before the referee CP entry. These
270
// referenced CP entries may already exist in *merge_cp_p in which case
271
// there is nothing extra to append and only the current entry is
272
// appended.
273
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
//
// Every case follows the same pattern: copy/construct the entry at
// *merge_cp_length_p, record an index mapping when the entry landed at a
// different index than it had in scratch_cp, then bump *merge_cp_length_p.
void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    case JVM_CONSTANT_Class:
    {
      // revert the copy to JVM_CONSTANT_UnresolvedClass
      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
        scratch_cp->klass_name_at(scratch_i));

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // long/double occupy two CP slots (JVMS 4.4.5)
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through

    // These were indirect CP entries, but they have been changed into
    // Symbol*s so these entries can be directly appended.
    case JVM_CONSTANT_UnresolvedClass:  // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p, THREAD);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p,
                                                              THREAD);

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d name_ref_index change: %d to %d",
          *merge_cp_length_p, name_ref_i, new_name_ref_i));
      }
      if (new_signature_ref_i != signature_ref_i) {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Append (or find) the referenced class and NameAndType entries first
      // so their indices are known when constructing this entry.
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      const char *entry_name = NULL;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        RC_TRACE(0x00080000,
          ("%s entry@%d name_and_type_index changed: %d to %d",
          entry_name, *merge_cp_length_p, name_and_type_ref_i,
          new_name_and_type_ref_i));
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("MethodType entry@%d ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("MethodHandle entry@%d ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_InvokeDynamic:
    {
      // Index of the bootstrap specifier in the operands array
      int old_bs_i = scratch_cp->invoke_dynamic_bootstrap_specifier_index(scratch_i);
      int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
                                            merge_cp_length_p, THREAD);
      // The bootstrap method NameAndType_info index
      int old_ref_i = scratch_cp->invoke_dynamic_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_bs_i != old_bs_i) {
        RC_TRACE(0x00080000,
                 ("InvokeDynamic entry@%d bootstrap_method_attr_index change: %d to %d",
                  *merge_cp_length_p, old_bs_i, new_bs_i));
      }
      if (new_ref_i != old_ref_i) {
        RC_TRACE(0x00080000,
                 ("InvokeDynamic entry@%d name_and_type_index change: %d to %d",
                  *merge_cp_length_p, old_ref_i, new_ref_i));
      }

      (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be here, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
530
531
532
// Return the index in *merge_cp_p holding an entry equal to scratch_cp's
// entry at ref_i, appending it (recursively, via append_entry) when no
// equal entry exists yet. Records an index mapping whenever the returned
// index differs from ref_i.
int VM_RedefineClasses::find_or_append_indirect_entry(constantPoolHandle scratch_cp,
      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  // Fast path: same index already holds an equal entry in *merge_cp_p.
  // The length guard also rejects forward references into unfilled slots.
  if (ref_i < *merge_cp_length_p &&
      scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD)) {
    return ref_i;
  }

  // forward reference in *merge_cp_p or not a direct match
  int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD);
  if (found_i != 0) {
    guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
    // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
    map_index(scratch_cp, ref_i, found_i);
    return found_i;
  }

  // no match found so we have to append this entry to *merge_cp_p
  append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
  // append_entry() placed the entry at the last slot it filled.
  return *merge_cp_length_p - 1;
} // end find_or_append_indirect_entry()
559
560
561
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
562
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
563
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
564
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
// Layout per specifier in the operands array: [bootstrap method ref_index,
// argument count, argument ref_indices...], located via the offset table at
// the front of the array.
void VM_RedefineClasses::append_operand(constantPoolHandle scratch_cp, int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  // Resolve (or append) the bootstrap method's MethodHandle CP entry first.
  int old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
  int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                merge_cp_length_p, THREAD);
  if (new_ref_i != old_ref_i) {
    RC_TRACE(0x00080000,
             ("operands entry@%d bootstrap method ref_index change: %d to %d",
              _operands_cur_length, old_ref_i, new_ref_i));
  }

  Array<u2>* merge_ops = (*merge_cp_p)->operands();
  int new_bs_i = _operands_cur_length;
  // We have _operands_cur_length == 0 when the merge_cp operands is empty yet.
  // However, the operand_offset_at(0) was set in the extend_operands() call.
  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
  int argc = scratch_cp->operand_argument_count_at(old_bs_i);

  // Record where this specifier starts, then write its header fields.
  ConstantPool::operand_offset_at_put(merge_ops, _operands_cur_length, new_base);
  merge_ops->at_put(new_base++, new_ref_i);
  merge_ops->at_put(new_base++, argc);

  // Copy each bootstrap argument, translating its CP index into *merge_cp_p.
  for (int i = 0; i < argc; i++) {
    int old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
    int new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                      merge_cp_length_p, THREAD);
    merge_ops->at_put(new_base++, new_arg_ref_i);
    if (new_arg_ref_i != old_arg_ref_i) {
      RC_TRACE(0x00080000,
               ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
                _operands_cur_length, old_arg_ref_i, new_arg_ref_i));
    }
  }
  if (old_bs_i != _operands_cur_length) {
    // The bootstrap specifier in *merge_cp_p is at a different index than
    // that in scratch_cp so we need to map the index values.
    map_operand_index(old_bs_i, new_bs_i);
  }
  _operands_cur_length++;
} // end append_operand()
606
607
608
// Return the index of a bootstrap specifier in the merge_cp operands equal
// to scratch_cp's specifier at old_bs_i, appending it when no equal one
// exists. Mirrors find_or_append_indirect_entry() but for operands.
int VM_RedefineClasses::find_or_append_operand(constantPoolHandle scratch_cp,
      int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  // Fast path: the same specifier index already matches in *merge_cp_p.
  if (old_bs_i < _operands_cur_length &&
      scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i, THREAD)) {
    return old_bs_i;  // bootstrap specifier index unchanged
  }

  // forward reference in *merge_cp_p or not a direct match
  int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
                                                  _operands_cur_length, THREAD);
  if (found_i != -1) {
    guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
    // found a matching operand somewhere else in *merge_cp_p so just need a mapping
    map_operand_index(old_bs_i, found_i);
    return found_i;
  }

  // no match found so we have to append this bootstrap specifier to *merge_cp_p
  append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p, THREAD);
  // append_operand() bumped _operands_cur_length; the new specifier is last.
  return _operands_cur_length - 1;
} // end find_or_append_operand()
632
633
634
// Finish the operands merge: trim merge_cp's operands array to the number of
// specifiers actually appended, optionally trace the operand index map, and
// reset the per-merge bookkeeping state.
void VM_RedefineClasses::finalize_operands_merge(constantPoolHandle merge_cp, TRAPS) {
  if (merge_cp->operands() == NULL) {
    // Nothing was merged; no operands state to finalize.
    return;
  }
  // Shrink the merge_cp operands
  merge_cp->shrink_operands(_operands_cur_length, CHECK);

  if (RC_TRACE_ENABLED(0x00040000)) {
    // don't want to loop unless we are tracing
    int mapped = 0;
    for (int old_i = 1; old_i < _operands_index_map_p->length(); old_i++) {
      int new_i = _operands_index_map_p->at(old_i);
      if (new_i != -1) {
        RC_TRACE_WITH_THREAD(0x00040000, THREAD,
          ("operands_index_map[%d]: old=%d new=%d", mapped, old_i, new_i));
        mapped++;
      }
    }
  }

  // Clean-up
  _operands_index_map_p = NULL;
  _operands_cur_length = 0;
  _operands_index_map_count = 0;
} // end finalize_operands_merge()
658
659
660
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
661
instanceKlassHandle the_class,
662
instanceKlassHandle scratch_class) {
663
int i;
664
665
// Check superclasses, or rather their names, since superclasses themselves can be
666
// requested to replace.
667
// Check for NULL superclass first since this might be java.lang.Object
668
if (the_class->super() != scratch_class->super() &&
669
(the_class->super() == NULL || scratch_class->super() == NULL ||
670
the_class->super()->name() !=
671
scratch_class->super()->name())) {
672
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
673
}
674
675
// Check if the number, names and order of directly implemented interfaces are the same.
676
// I think in principle we should just check if the sets of names of directly implemented
677
// interfaces are the same, i.e. the order of declaration (which, however, if changed in the
678
// .java file, also changes in .class file) should not matter. However, comparing sets is
679
// technically a bit more difficult, and, more importantly, I am not sure at present that the
680
// order of interfaces does not matter on the implementation level, i.e. that the VM does not
681
// rely on it somewhere.
682
Array<Klass*>* k_interfaces = the_class->local_interfaces();
683
Array<Klass*>* k_new_interfaces = scratch_class->local_interfaces();
684
int n_intfs = k_interfaces->length();
685
if (n_intfs != k_new_interfaces->length()) {
686
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
687
}
688
for (i = 0; i < n_intfs; i++) {
689
if (k_interfaces->at(i)->name() !=
690
k_new_interfaces->at(i)->name()) {
691
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
692
}
693
}
694
695
// Check whether class is in the error init state.
696
if (the_class->is_in_error_state()) {
697
// TBD #5057930: special error code is needed in 1.6
698
return JVMTI_ERROR_INVALID_CLASS;
699
}
700
701
// Check whether class modifiers are the same.
702
jushort old_flags = (jushort) the_class->access_flags().get_flags();
703
jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
704
if (old_flags != new_flags) {
705
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
706
}
707
708
// Check if the number, names, types and order of fields declared in these classes
709
// are the same.
710
JavaFieldStream old_fs(the_class);
711
JavaFieldStream new_fs(scratch_class);
712
for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
713
// access
714
old_flags = old_fs.access_flags().as_short();
715
new_flags = new_fs.access_flags().as_short();
716
if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
717
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
718
}
719
// offset
720
if (old_fs.offset() != new_fs.offset()) {
721
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
722
}
723
// name and signature
724
Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
725
Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
726
Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
727
Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
728
if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
729
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
730
}
731
}
732
733
// If both streams aren't done then we have a differing number of
734
// fields.
735
if (!old_fs.done() || !new_fs.done()) {
736
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
737
}
738
739
// Do a parallel walk through the old and new methods. Detect
740
// cases where they match (exist in both), have been added in
741
// the new methods, or have been deleted (exist only in the
742
// old methods). The class file parser places methods in order
743
// by method name, but does not order overloaded methods by
744
// signature. In order to determine what fate befell the methods,
745
// this code places the overloaded new methods that have matching
746
// old methods in the same order as the old methods and places
747
// new overloaded methods at the end of overloaded methods of
748
// that name. The code for this order normalization is adapted
749
// from the algorithm used in InstanceKlass::find_method().
750
// Since we are swapping out of order entries as we find them,
751
// we only have to search forward through the overloaded methods.
752
// Methods which are added and have the same name as an existing
753
// method (but different signature) will be put at the end of
754
// the methods with that name, and the name mismatch code will
755
// handle them.
756
Array<Method*>* k_old_methods(the_class->methods());
757
Array<Method*>* k_new_methods(scratch_class->methods());
758
int n_old_methods = k_old_methods->length();
759
int n_new_methods = k_new_methods->length();
760
Thread* thread = Thread::current();
761
762
int ni = 0;
763
int oi = 0;
764
while (true) {
765
Method* k_old_method;
766
Method* k_new_method;
767
enum { matched, added, deleted, undetermined } method_was = undetermined;
768
769
if (oi >= n_old_methods) {
770
if (ni >= n_new_methods) {
771
break; // we've looked at everything, done
772
}
773
// New method at the end
774
k_new_method = k_new_methods->at(ni);
775
method_was = added;
776
} else if (ni >= n_new_methods) {
777
// Old method, at the end, is deleted
778
k_old_method = k_old_methods->at(oi);
779
method_was = deleted;
780
} else {
781
// There are more methods in both the old and new lists
782
k_old_method = k_old_methods->at(oi);
783
k_new_method = k_new_methods->at(ni);
784
if (k_old_method->name() != k_new_method->name()) {
785
// Methods are sorted by method name, so a mismatch means added
786
// or deleted
787
if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
788
method_was = added;
789
} else {
790
method_was = deleted;
791
}
792
} else if (k_old_method->signature() == k_new_method->signature()) {
793
// Both the name and signature match
794
method_was = matched;
795
} else {
796
// The name matches, but the signature doesn't, which means we have to
797
// search forward through the new overloaded methods.
798
int nj; // outside the loop for post-loop check
799
for (nj = ni + 1; nj < n_new_methods; nj++) {
800
Method* m = k_new_methods->at(nj);
801
if (k_old_method->name() != m->name()) {
802
// reached another method name so no more overloaded methods
803
method_was = deleted;
804
break;
805
}
806
if (k_old_method->signature() == m->signature()) {
807
// found a match so swap the methods
808
k_new_methods->at_put(ni, m);
809
k_new_methods->at_put(nj, k_new_method);
810
k_new_method = m;
811
method_was = matched;
812
break;
813
}
814
}
815
816
if (nj >= n_new_methods) {
817
// reached the end without a match; so method was deleted
818
method_was = deleted;
819
}
820
}
821
}
822
823
switch (method_was) {
824
case matched:
825
// methods match, be sure modifiers do too
826
old_flags = (jushort) k_old_method->access_flags().get_flags();
827
new_flags = (jushort) k_new_method->access_flags().get_flags();
828
if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
829
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
830
}
831
{
832
u2 new_num = k_new_method->method_idnum();
833
u2 old_num = k_old_method->method_idnum();
834
if (new_num != old_num) {
835
Method* idnum_owner = scratch_class->method_with_idnum(old_num);
836
if (idnum_owner != NULL) {
837
// There is already a method assigned this idnum -- switch them
838
// Take current and original idnum from the new_method
839
idnum_owner->set_method_idnum(new_num);
840
idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
841
}
842
// Take current and original idnum from the old_method
843
k_new_method->set_method_idnum(old_num);
844
k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum());
845
if (thread->has_pending_exception()) {
846
return JVMTI_ERROR_OUT_OF_MEMORY;
847
}
848
}
849
}
850
RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
851
k_new_method->name_and_sig_as_C_string(), ni,
852
k_old_method->name_and_sig_as_C_string(), oi));
853
// advance to next pair of methods
854
++oi;
855
++ni;
856
break;
857
case added:
858
// method added, see if it is OK
859
new_flags = (jushort) k_new_method->access_flags().get_flags();
860
if ((new_flags & JVM_ACC_PRIVATE) == 0
861
// hack: private should be treated as final, but alas
862
|| (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
863
) {
864
// new methods must be private
865
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
866
}
867
{
868
u2 num = the_class->next_method_idnum();
869
if (num == ConstMethod::UNSET_IDNUM) {
870
// cannot add any more methods
871
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
872
}
873
u2 new_num = k_new_method->method_idnum();
874
Method* idnum_owner = scratch_class->method_with_idnum(num);
875
if (idnum_owner != NULL) {
876
// There is already a method assigned this idnum -- switch them
877
// Take current and original idnum from the new_method
878
idnum_owner->set_method_idnum(new_num);
879
idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
880
}
881
k_new_method->set_method_idnum(num);
882
k_new_method->set_orig_method_idnum(num);
883
if (thread->has_pending_exception()) {
884
return JVMTI_ERROR_OUT_OF_MEMORY;
885
}
886
}
887
RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
888
k_new_method->name_and_sig_as_C_string(), ni));
889
++ni; // advance to next new method
890
break;
891
case deleted:
892
// method deleted, see if it is OK
893
old_flags = (jushort) k_old_method->access_flags().get_flags();
894
if ((old_flags & JVM_ACC_PRIVATE) == 0
895
// hack: private should be treated as final, but alas
896
|| (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
897
) {
898
// deleted methods must be private
899
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
900
}
901
RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
902
k_old_method->name_and_sig_as_C_string(), oi));
903
++oi; // advance to next old method
904
break;
905
default:
906
ShouldNotReachHere();
907
}
908
}
909
910
return JVMTI_ERROR_NONE;
911
}
912
913
914
// Find new constant pool index value for old constant pool index value
915
// by seaching the index map. Returns zero (0) if there is no mapped
916
// value for the old constant pool index.
917
int VM_RedefineClasses::find_new_index(int old_index) {
918
if (_index_map_count == 0) {
919
// map is empty so nothing can be found
920
return 0;
921
}
922
923
if (old_index < 1 || old_index >= _index_map_p->length()) {
924
// The old_index is out of range so it is not mapped. This should
925
// not happen in regular constant pool merging use, but it can
926
// happen if a corrupt annotation is processed.
927
return 0;
928
}
929
930
int value = _index_map_p->at(old_index);
931
if (value == -1) {
932
// the old_index is not mapped
933
return 0;
934
}
935
936
return value;
937
} // end find_new_index()
938
939
940
// Find new bootstrap specifier index value for old bootstrap specifier index
941
// value by seaching the index map. Returns unused index (-1) if there is
942
// no mapped value for the old bootstrap specifier index.
943
int VM_RedefineClasses::find_new_operand_index(int old_index) {
944
if (_operands_index_map_count == 0) {
945
// map is empty so nothing can be found
946
return -1;
947
}
948
949
if (old_index == -1 || old_index >= _operands_index_map_p->length()) {
950
// The old_index is out of range so it is not mapped.
951
// This should not happen in regular constant pool merging use.
952
return -1;
953
}
954
955
int value = _operands_index_map_p->at(old_index);
956
if (value == -1) {
957
// the old_index is not mapped
958
return -1;
959
}
960
961
return value;
962
} // end find_new_operand_index()
963
964
965
// Returns true if the current mismatch is due to a resolved/unresolved
966
// class pair. Otherwise, returns false.
967
bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
968
int index1, constantPoolHandle cp2, int index2) {
969
970
jbyte t1 = cp1->tag_at(index1).value();
971
if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
972
return false; // wrong entry type; not our special case
973
}
974
975
jbyte t2 = cp2->tag_at(index2).value();
976
if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
977
return false; // wrong entry type; not our special case
978
}
979
980
if (t1 == t2) {
981
return false; // not a mismatch; not our special case
982
}
983
984
char *s1 = cp1->klass_name_at(index1)->as_C_string();
985
char *s2 = cp2->klass_name_at(index2)->as_C_string();
986
if (strcmp(s1, s2) != 0) {
987
return false; // strings don't match; not our special case
988
}
989
990
return true; // made it through the gauntlet; this is our special case
991
} // end is_unresolved_class_mismatch()
992
993
994
// Parse, verify, and prepare the new (scratch) class versions for every
// class being redefined. On success each parsed class is recorded in
// _scratch_classes; on failure the partially-filled array is left for
// the caller to deallocate. Returns a JVMTI error code describing the
// first failure, or JVMTI_ERROR_NONE.
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (Klass**)
    os::malloc(sizeof(Klass*) * _class_count, mtClass);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = NULL;
  }

  ResourceMark rm(THREAD);

  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // state can only be NULL if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(THREAD);
    instanceKlassHandle the_class(THREAD, get_ik(_class_defs[i].klass));
    Symbol* the_class_sym = the_class->name();

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), _class_load_kind,
      os::available_memory() >> 10));

    // Wrap the caller-supplied class bytes in a stream for the parser.
    ClassFileStream st((u1*) _class_defs[i].class_bytes,
      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(&the_class, _class_load_kind);

    Klass* k = SystemDictionary::parse_stream(the_class_sym,
                                                the_class_loader,
                                                protection_domain,
                                                &st,
                                                THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    instanceKlassHandle scratch_class (THREAD, k);

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = k;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
        ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;

      // Map the parser's exception type to the matching JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
      Verifier::verify(
        scratch_class, Verifier::ThrowException, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
          ("verify_byte_codes post merge-CP exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite bytecodes to use the constant pool cache, then link the
    // rewritten methods; link_methods is skipped if rewriting failed.
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), os::available_memory() >> 10));
  }

  return JVMTI_ERROR_NONE;
}
1195
1196
1197
// Map old_index to new_index as needed. scratch_cp is only needed
1198
// for RC_TRACE() calls.
1199
void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
1200
int old_index, int new_index) {
1201
if (find_new_index(old_index) != 0) {
1202
// old_index is already mapped
1203
return;
1204
}
1205
1206
if (old_index == new_index) {
1207
// no mapping is needed
1208
return;
1209
}
1210
1211
_index_map_p->at_put(old_index, new_index);
1212
_index_map_count++;
1213
1214
RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
1215
scratch_cp->tag_at(old_index).value(), old_index, new_index));
1216
} // end map_index()
1217
1218
1219
// Map old_index to new_index as needed.
1220
void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
1221
if (find_new_operand_index(old_index) != -1) {
1222
// old_index is already mapped
1223
return;
1224
}
1225
1226
if (old_index == new_index) {
1227
// no mapping is needed
1228
return;
1229
}
1230
1231
_operands_index_map_p->at_put(old_index, new_index);
1232
_operands_index_map_count++;
1233
1234
RC_TRACE(0x00040000, ("mapped bootstrap specifier at index %d to %d", old_index, new_index));
1235
} // end map_index()
1236
1237
1238
// Merge old_cp and scratch_cp and return the results of the merge via
// merge_cp_p. The number of entries in *merge_cp_p is returned via
// merge_cp_length_p. The entries in old_cp occupy the same locations
// in *merge_cp_p. Also creates a map of indices from entries in
// scratch_cp to the corresponding entry in *merge_cp_p. Index map
// entries are only created for entries in scratch_cp that occupy a
// different location in *merged_cp_p.
// Returns false when one of the robustness checks on the caller's
// arguments fails; exceptions from CHECK_0 also produce a false return.
bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  if (merge_cp_p == NULL) {
    assert(false, "caller must provide scratch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == NULL) {
    assert(false, "caller must provide scratch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
    scratch_cp->length()));

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i; // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        (*merge_cp_p)->unresolved_klass_at_put(old_i,
          old_cp->klass_name_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        old_i++;
        break;

      default:
        // just copy the entry to *merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        break;
      }
    } // end for each old_cp entry

    // copy the old bootstrap-specifier operands and append scratch_cp's
    ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_0);
    (*merge_cp_p)->extend_operands(scratch_cp, CHECK_0);

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));

  int scratch_i; // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
        scratch_i, CHECK_0);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
        *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
        CHECK_0);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }
  }

  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
    *merge_cp_length_p, scratch_i, _index_map_count));

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }

    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
      *merge_cp_length_p, scratch_i, _index_map_count));
  }
  // resolve any operand (bootstrap specifier) mappings recorded above
  finalize_operands_merge(*merge_cp_p, THREAD);

  return true;
} // end merge_constant_pools()
1431
1432
1433
// Scoped object to clean up the constant pool(s) created for merging.
// The destructor always puts the merged pool on the loader's deallocate
// list; the scratch pool is added only if the caller registered it via
// add_scratch_cp() (i.e. after it was replaced by the merged pool).
class MergeCPCleaner {
  ClassLoaderData* _loader_data;  // owner of the deallocate list
  ConstantPool* _cp;              // merged constant pool, always cleaned up
  ConstantPool* _scratch_cp;      // optional scratch pool, cleaned if set
 public:
  MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
                 _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
  ~MergeCPCleaner() {
    _loader_data->add_to_deallocate_list(_cp);
    if (_scratch_cp != NULL) {
      _loader_data->add_to_deallocate_list(_scratch_cp);
    }
  }
  // Register the replaced scratch pool for deferred deallocation.
  void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
};
1449
1450
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool.
// Returns JVMTI_ERROR_NONE on success, JVMTI_ERROR_OUT_OF_MEMORY if
// allocation fails, or JVMTI_ERROR_INTERNAL if the merge or the
// reference rewriting fails.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing. It
  // should be marked safe before discarding it. Even though
  // garbage, if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pool
  merge_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  // reset the index maps; -1 marks "not mapped" entries
  _index_map_count = 0;
  _index_map_p = new intArray(scratch_cp->length(), -1);

  _operands_cur_length = ConstantPool::operand_array_length(old_cp->operands());
  _operands_index_map_count = 0;
  _operands_index_map_p = new intArray(
    ConstantPool::operand_array_length(scratch_cp->operands()), -1);

  // reference to the cp holder is needed for copy_operands()
  merge_cp->set_pool_holder(scratch_class());
  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  merge_cp->set_pool_holder(NULL);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (RC_TRACE_ENABLED(0x00040000)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
            ("index_map[%d]: old=%d new=%d", count, i, value));
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class, THREAD)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1586
1587
1588
// Rewrite constant pool references in klass scratch_class.
// Called after the merged constant pool has been installed; walks every
// structure in scratch_class that embeds constant pool indices and remaps
// each one via find_new_index() (a return of 0 means "index unchanged").
// Returns false as soon as any sub-rewrite fails so the caller can report
// JVMTI_ERROR_INTERNAL.
bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
       TRAPS) {

  // rewrite constant pool references in the methods:
  if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the class_annotations:
  if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the fields_annotations:
  if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_annotations:
  if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_parameter_annotations:
  if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
         THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_default_annotations:
  if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
         THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the class_type_annotations:
  if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the fields_type_annotations:
  if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_type_annotations:
  if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // There can be type annotations in the Code part of a method_info attribute.
  // These annotations are not accessible, even by reflection.
  // Currently they are not even parsed by the ClassFileParser.
  // If runtime access is added they will also need to be rewritten.

  // rewrite source file name index:
  // find_new_index() == 0 means the old index is still valid, so only
  // update when a remapping exists.
  u2 source_file_name_idx = scratch_class->source_file_name_index();
  if (source_file_name_idx != 0) {
    u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
    if (new_source_file_name_idx != 0) {
      scratch_class->set_source_file_name_index(new_source_file_name_idx);
    }
  }

  // rewrite class generic signature index:
  u2 generic_signature_index = scratch_class->generic_signature_index();
  if (generic_signature_index != 0) {
    u2 new_generic_signature_index = find_new_index(generic_signature_index);
    if (new_generic_signature_index != 0) {
      scratch_class->set_generic_signature_index(new_generic_signature_index);
    }
  }

  return true;
} // end rewrite_cp_refs()
1673
1674
// Rewrite constant pool references in the methods.
// Iterates the klass's method array; each method is rewritten in place
// unless an ldc -> ldc_w expansion forces reallocation, in which case the
// replacement Method* is stored back into the array. Returns false (with
// any pending exception cleared) if any method rewrite throws.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
       instanceKlassHandle scratch_class, TRAPS) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == NULL || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
      // even in the case of an exception. original method is on the
      // deallocation list.
      methods->at_put(i, new_method());
    }
    // NOTE: the at_put above runs before the exception check on purpose —
    // the replacement must be recorded even when insert_space_at threw.
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string()));
      // Need to clear pending exception here as the super caller sets
      // the JVMTI_ERROR_INTERNAL if the returned value is false.
      CLEAR_PENDING_EXCEPTION;
      return false;
    }
  }

  return true;
}
1710
1711
1712
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// Walks the method's bytecodes and remaps every constant pool index
// through find_new_index(). An ldc whose new index no longer fits in one
// byte is expanded to ldc_w via the Relocator, which allocates a new
// Method*; that replacement is returned through new_method_p (it stays a
// null handle when the method was rewritten in place).
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a No_Safepoint_Verifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  No_Safepoint_Verifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc carries a one-byte constant pool index
        int cp_index = *(bcp + 1);
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
              bcp, cp_index, new_index));
            *(bcp + 1) = new_index;
          } else {
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
              Bytecodes::name(c), bcp, cp_index, new_index));
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // insert_space_at() can allocate (and so safepoint); the
              // verifier must be paused for the duration of the call.
              Pause_No_Safepoint_Verifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokedynamic  : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
            bcp, cp_index, new_index));
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
    }
  } // end for each bytecode

  // We also need to rewrite the parameter name indexes, if there is
  // method parameter data present
  if(method->has_method_parameters()) {
    const int len = method->method_parameters_length();
    MethodParametersElement* elem = method->method_parameters_start();

    for (int i = 0; i < len; i++) {
      const u2 cp_index = elem[i].name_cp_index;
      const u2 new_cp_index = find_new_index(cp_index);
      if (new_cp_index != 0) {
        elem[i].name_cp_index = new_cp_index;
      }
    }
  }
} // end rewrite_cp_refs_in_method()
1856
1857
1858
// Rewrite constant pool references in the class_annotations field.
1859
bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
1860
instanceKlassHandle scratch_class, TRAPS) {
1861
1862
AnnotationArray* class_annotations = scratch_class->class_annotations();
1863
if (class_annotations == NULL || class_annotations->length() == 0) {
1864
// no class_annotations so nothing to do
1865
return true;
1866
}
1867
1868
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1869
("class_annotations length=%d", class_annotations->length()));
1870
1871
int byte_i = 0; // byte index into class_annotations
1872
return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
1873
THREAD);
1874
}
1875
1876
1877
// Rewrite constant pool references in an annotations typeArray. This
// "structure" is adapted from the RuntimeVisibleAnnotations_attribute
// that is described in section 4.8.15 of the 2nd-edition of the VM spec:
//
// annotations_typeArray {
//   u2 num_annotations;
//   annotation annotations[num_annotations];
// }
//
// byte_i_ref is a cursor into annotations_typeArray; it is advanced past
// everything consumed here (and by the nested annotation parser) so the
// caller can continue at the following structure. Returns false if the
// array is truncated or contains a malformed annotation.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 2) > annotations_typeArray->length()) {
    // not enough room for num_annotations field
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for num_annotations field"));
    return false;
  }

  // annotation data is stored in big-endian (Java) byte order
  u2 num_annotations = Bytes::get_Java_u2((address)
                         annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("num_annotations=%d", num_annotations));

  int calc_num_annotations = 0;
  for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
    if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
           byte_i_ref, THREAD)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("bad annotation_struct at %d", calc_num_annotations));
      // propagate failure back to caller
      return false;
    }
  }
  assert(num_annotations == calc_num_annotations, "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotations_typeArray()
1917
1918
1919
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// Remaps type_index and every element_name_index in place, then recurses
// into each element_value. byte_i_ref is advanced past the whole struct.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for annotation_struct"));
    return false;
  }

  // remaps in place and advances byte_i_ref by 2
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "mapped old type_index=%d", THREAD);

  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("type_index=%d  num_element_value_pairs=%d", type_index,
    num_element_value_pairs));

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for element_name_index"));
      return false;
    }

    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old element_name_index=%d", THREAD);

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("element_name_index=%d", element_name_index));

    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
           byte_i_ref, THREAD)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("bad element_value at %d", calc_num_element_value_pairs));
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
1983
1984
1985
// Rewrite a constant pool reference at the current position in
// annotations_typeArray if needed. Returns the original constant
// pool reference if a rewrite was not needed or the new constant
// pool reference if a rewrite was needed.
//
// trace_mesg must contain a single %d conversion for the old index; the
// pragmas below silence the -Wformat-nonliteral warning that a runtime
// format string would otherwise trigger. byte_i_ref always advances by 2.
PRAGMA_DIAG_PUSH
PRAGMA_FORMAT_NONLITERAL_IGNORED
u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
     AnnotationArray* annotations_typeArray, int &byte_i_ref,
     const char * trace_mesg, TRAPS) {

  address cp_index_addr = (address)
    annotations_typeArray->adr_at(byte_i_ref);
  u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
  u2 new_cp_index = find_new_index(old_cp_index);
  if (new_cp_index != 0) {
    RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
    // write back in Java (big-endian) byte order, matching the read above
    Bytes::put_Java_u2(cp_index_addr, new_cp_index);
    old_cp_index = new_cp_index;
  }
  byte_i_ref += 2;
  return old_cp_index;
}
PRAGMA_DIAG_POP
2008
2009
2010
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// Dispatches on the tag byte, remaps whichever union member carries
// constant pool indices, and recurses for nested annotations ('@') and
// arrays ('['). byte_i_ref is advanced past the whole element_value.
// Returns false on a truncated array or an unknown tag.
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a tag"));
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case 'B':  // byte
    case 'C':  // char
    case 'D':  // double
    case 'F':  // float
    case 'I':  // int
    case 'J':  // long
    case 'S':  // short
    case 'Z':  // boolean

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a const_value_index"));
        return false;
      }

      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "mapped old const_value_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("const_value_index=%d", const_value_index));
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a enum_const_value"));
        return false;
      }

      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "mapped old type_name_index=%d", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old const_name_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_name_index=%d  const_name_index=%d", type_name_index,
        const_name_index));
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a class_info_index"));
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old class_info_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("class_info_index=%d", class_info_index));
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case '[':
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a num_values field"));
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));

      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
            ("bad nested element_value at %d", calc_num_values));
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
2167
2168
2169
// Rewrite constant pool references in a fields_annotations field.
2170
bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
2171
instanceKlassHandle scratch_class, TRAPS) {
2172
2173
Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
2174
2175
if (fields_annotations == NULL || fields_annotations->length() == 0) {
2176
// no fields_annotations so nothing to do
2177
return true;
2178
}
2179
2180
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2181
("fields_annotations length=%d", fields_annotations->length()));
2182
2183
for (int i = 0; i < fields_annotations->length(); i++) {
2184
AnnotationArray* field_annotations = fields_annotations->at(i);
2185
if (field_annotations == NULL || field_annotations->length() == 0) {
2186
// this field does not have any annotations so skip it
2187
continue;
2188
}
2189
2190
int byte_i = 0; // byte index into field_annotations
2191
if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
2192
THREAD)) {
2193
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2194
("bad field_annotations at %d", i));
2195
// propagate failure back to caller
2196
return false;
2197
}
2198
}
2199
2200
return true;
2201
} // end rewrite_cp_refs_in_fields_annotations()
2202
2203
2204
// Rewrite constant pool references in a methods_annotations field.
2205
bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
2206
instanceKlassHandle scratch_class, TRAPS) {
2207
2208
for (int i = 0; i < scratch_class->methods()->length(); i++) {
2209
Method* m = scratch_class->methods()->at(i);
2210
AnnotationArray* method_annotations = m->constMethod()->method_annotations();
2211
2212
if (method_annotations == NULL || method_annotations->length() == 0) {
2213
// this method does not have any annotations so skip it
2214
continue;
2215
}
2216
2217
int byte_i = 0; // byte index into method_annotations
2218
if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
2219
THREAD)) {
2220
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2221
("bad method_annotations at %d", i));
2222
// propagate failure back to caller
2223
return false;
2224
}
2225
}
2226
2227
return true;
2228
} // end rewrite_cp_refs_in_methods_annotations()
2229
2230
2231
// Rewrite constant pool references in a methods_parameter_annotations
// field. This "structure" is adapted from the
// RuntimeVisibleParameterAnnotations_attribute described in section
// 4.8.17 of the 2nd-edition of the VM spec:
//
// methods_parameter_annotations_typeArray {
//   u1 num_parameters;
//   {
//     u2 num_annotations;
//     annotation annotations[num_annotations];
//   } parameter_annotations[num_parameters];
// }
//
bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
       instanceKlassHandle scratch_class, TRAPS) {

  for (int i = 0; i < scratch_class->methods()->length(); i++) {
    Method* m = scratch_class->methods()->at(i);
    AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
    if (method_parameter_annotations == NULL
        || method_parameter_annotations->length() == 0) {
      // this method does not have any parameter annotations so skip it
      continue;
    }

    // NOTE(review): this check can never fire — length() == 0 was already
    // handled by the continue above — but it is kept for defensiveness.
    if (method_parameter_annotations->length() < 1) {
      // not enough room for a num_parameters field
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for a num_parameters field at %d", i));
      return false;
    }

    int byte_i = 0;  // byte index into method_parameter_annotations

    u1 num_parameters = method_parameter_annotations->at(byte_i);
    byte_i++;

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("num_parameters=%d", num_parameters));

    // one annotations_typeArray per declared parameter, back to back
    int calc_num_parameters = 0;
    for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
      if (!rewrite_cp_refs_in_annotations_typeArray(
             method_parameter_annotations, byte_i, THREAD)) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("bad method_parameter_annotations at %d", calc_num_parameters));
        // propagate failure back to caller
        return false;
      }
    }
    assert(num_parameters == calc_num_parameters, "sanity check");
  }

  return true;
} // end rewrite_cp_refs_in_methods_parameter_annotations()
2286
2287
2288
// Rewrite constant pool references in a methods_default_annotations
2289
// field. This "structure" is adapted from the AnnotationDefault_attribute
2290
// that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2291
//
2292
// methods_default_annotations_typeArray {
2293
// element_value default_value;
2294
// }
2295
//
2296
bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2297
instanceKlassHandle scratch_class, TRAPS) {
2298
2299
for (int i = 0; i < scratch_class->methods()->length(); i++) {
2300
Method* m = scratch_class->methods()->at(i);
2301
AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
2302
if (method_default_annotations == NULL
2303
|| method_default_annotations->length() == 0) {
2304
// this method does not have any default annotations so skip it
2305
continue;
2306
}
2307
2308
int byte_i = 0; // byte index into method_default_annotations
2309
2310
if (!rewrite_cp_refs_in_element_value(
2311
method_default_annotations, byte_i, THREAD)) {
2312
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2313
("bad default element_value at %d", i));
2314
// propagate failure back to caller
2315
return false;
2316
}
2317
}
2318
2319
return true;
2320
} // end rewrite_cp_refs_in_methods_default_annotations()
2321
2322
2323
// Rewrite constant pool references in a class_type_annotations field.
2324
bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
2325
instanceKlassHandle scratch_class, TRAPS) {
2326
2327
AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
2328
if (class_type_annotations == NULL || class_type_annotations->length() == 0) {
2329
// no class_type_annotations so nothing to do
2330
return true;
2331
}
2332
2333
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2334
("class_type_annotations length=%d", class_type_annotations->length()));
2335
2336
int byte_i = 0; // byte index into class_type_annotations
2337
return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
2338
byte_i, "ClassFile", THREAD);
2339
} // end rewrite_cp_refs_in_class_type_annotations()
2340
2341
2342
// Rewrite constant pool references in a fields_type_annotations field.
2343
bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(
2344
instanceKlassHandle scratch_class, TRAPS) {
2345
2346
Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
2347
if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) {
2348
// no fields_type_annotations so nothing to do
2349
return true;
2350
}
2351
2352
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2353
("fields_type_annotations length=%d", fields_type_annotations->length()));
2354
2355
for (int i = 0; i < fields_type_annotations->length(); i++) {
2356
AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
2357
if (field_type_annotations == NULL || field_type_annotations->length() == 0) {
2358
// this field does not have any annotations so skip it
2359
continue;
2360
}
2361
2362
int byte_i = 0; // byte index into field_type_annotations
2363
if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
2364
byte_i, "field_info", THREAD)) {
2365
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2366
("bad field_type_annotations at %d", i));
2367
// propagate failure back to caller
2368
return false;
2369
}
2370
}
2371
2372
return true;
2373
} // end rewrite_cp_refs_in_fields_type_annotations()
2374
2375
2376
// Rewrite constant pool references in a methods_type_annotations field.
2377
bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
2378
instanceKlassHandle scratch_class, TRAPS) {
2379
2380
for (int i = 0; i < scratch_class->methods()->length(); i++) {
2381
Method* m = scratch_class->methods()->at(i);
2382
AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
2383
2384
if (method_type_annotations == NULL || method_type_annotations->length() == 0) {
2385
// this method does not have any annotations so skip it
2386
continue;
2387
}
2388
2389
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2390
("methods type_annotations length=%d", method_type_annotations->length()));
2391
2392
int byte_i = 0; // byte index into method_type_annotations
2393
if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
2394
byte_i, "method_info", THREAD)) {
2395
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2396
("bad method_type_annotations at %d", i));
2397
// propagate failure back to caller
2398
return false;
2399
}
2400
}
2401
2402
return true;
2403
} // end rewrite_cp_refs_in_methods_type_annotations()
2404
2405
2406
// Rewrite constant pool references in a type_annotations
// field. This "structure" is adapted from the
// RuntimeVisibleTypeAnnotations_attribute described in
// section 4.7.20 of the Java SE 8 Edition of the VM spec:
//
// type_annotations_typeArray {
//   u2              num_annotations;
//   type_annotation annotations[num_annotations];
// }
//
// location_mesg names the enclosing structure ("ClassFile", "field_info",
// "method_info") and is only used in trace output. Unlike the plain
// annotations parser, this one also requires the cursor to land exactly
// on the end of the array, catching over- or under-consumption.
bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
       const char * location_mesg, TRAPS) {

  if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
    // not enough room for num_annotations field
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for num_annotations field"));
    return false;
  }

  u2 num_annotations = Bytes::get_Java_u2((address)
                         type_annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("num_type_annotations=%d", num_annotations));

  int calc_num_annotations = 0;
  for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
    if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
           byte_i_ref, location_mesg, THREAD)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("bad type_annotation_struct at %d", calc_num_annotations));
      // propagate failure back to caller
      return false;
    }
  }
  assert(num_annotations == calc_num_annotations, "sanity check");

  if (byte_i_ref != type_annotations_typeArray->length()) {
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("read wrong amount of bytes at end of processing "
       "type_annotations_typeArray (%d of %d bytes were read)",
       byte_i_ref, type_annotations_typeArray->length()));
    return false;
  }

  return true;
} // end rewrite_cp_refs_in_type_annotations_typeArray()
2456
2457
2458
// Rewrite constant pool references in a type_annotation
2459
// field. This "structure" is adapted from the
2460
// RuntimeVisibleTypeAnnotations_attribute described in
2461
// section 4.7.20 of the Java SE 8 Edition of the VM spec:
2462
//
2463
// type_annotation {
2464
// u1 target_type;
2465
// union {
2466
// type_parameter_target;
2467
// supertype_target;
2468
// type_parameter_bound_target;
2469
// empty_target;
2470
// method_formal_parameter_target;
2471
// throws_target;
2472
// localvar_target;
2473
// catch_target;
2474
// offset_target;
2475
// type_argument_target;
2476
// } target_info;
2477
// type_path target_path;
2478
// annotation anno;
2479
// }
2480
//
2481
bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
2482
AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2483
const char * location_mesg, TRAPS) {
2484
2485
if (!skip_type_annotation_target(type_annotations_typeArray,
2486
byte_i_ref, location_mesg, THREAD)) {
2487
return false;
2488
}
2489
2490
if (!skip_type_annotation_type_path(type_annotations_typeArray,
2491
byte_i_ref, THREAD)) {
2492
return false;
2493
}
2494
2495
if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray,
2496
byte_i_ref, THREAD)) {
2497
return false;
2498
}
2499
2500
return true;
2501
} // end rewrite_cp_refs_in_type_annotation_struct()
2502
2503
2504
// Read, verify and skip over the target_type and target_info part
2505
// so that rewriting can continue in the later parts of the struct.
2506
//
2507
// u1 target_type;
2508
// union {
2509
// type_parameter_target;
2510
// supertype_target;
2511
// type_parameter_bound_target;
2512
// empty_target;
2513
// method_formal_parameter_target;
2514
// throws_target;
2515
// localvar_target;
2516
// catch_target;
2517
// offset_target;
2518
// type_argument_target;
2519
// } target_info;
2520
//
2521
bool VM_RedefineClasses::skip_type_annotation_target(
2522
AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2523
const char * location_mesg, TRAPS) {
2524
2525
if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
2526
// not enough room for a target_type let alone the rest of a type_annotation
2527
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2528
("length() is too small for a target_type"));
2529
return false;
2530
}
2531
2532
u1 target_type = type_annotations_typeArray->at(byte_i_ref);
2533
byte_i_ref += 1;
2534
RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("target_type=0x%.2x", target_type));
2535
RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("location=%s", location_mesg));
2536
2537
// Skip over target_info
2538
switch (target_type) {
2539
case 0x00:
2540
// kind: type parameter declaration of generic class or interface
2541
// location: ClassFile
2542
case 0x01:
2543
// kind: type parameter declaration of generic method or constructor
2544
// location: method_info
2545
2546
{
2547
// struct:
2548
// type_parameter_target {
2549
// u1 type_parameter_index;
2550
// }
2551
//
2552
if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
2553
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2554
("length() is too small for a type_parameter_target"));
2555
return false;
2556
}
2557
2558
u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
2559
byte_i_ref += 1;
2560
2561
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2562
("type_parameter_target: type_parameter_index=%d",
2563
type_parameter_index));
2564
} break;
2565
2566
case 0x10:
2567
// kind: type in extends clause of class or interface declaration
2568
// (including the direct superclass of an anonymous class declaration),
2569
// or in implements clause of interface declaration
2570
// location: ClassFile
2571
2572
{
2573
// struct:
2574
// supertype_target {
2575
// u2 supertype_index;
2576
// }
2577
//
2578
if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2579
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2580
("length() is too small for a supertype_target"));
2581
return false;
2582
}
2583
2584
u2 supertype_index = Bytes::get_Java_u2((address)
2585
type_annotations_typeArray->adr_at(byte_i_ref));
2586
byte_i_ref += 2;
2587
2588
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2589
("supertype_target: supertype_index=%d", supertype_index));
2590
} break;
2591
2592
case 0x11:
2593
// kind: type in bound of type parameter declaration of generic class or interface
2594
// location: ClassFile
2595
case 0x12:
2596
// kind: type in bound of type parameter declaration of generic method or constructor
2597
// location: method_info
2598
2599
{
2600
// struct:
2601
// type_parameter_bound_target {
2602
// u1 type_parameter_index;
2603
// u1 bound_index;
2604
// }
2605
//
2606
if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2607
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2608
("length() is too small for a type_parameter_bound_target"));
2609
return false;
2610
}
2611
2612
u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
2613
byte_i_ref += 1;
2614
u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
2615
byte_i_ref += 1;
2616
2617
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2618
("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d",
2619
type_parameter_index, bound_index));
2620
} break;
2621
2622
case 0x13:
2623
// kind: type in field declaration
2624
// location: field_info
2625
case 0x14:
2626
// kind: return type of method, or type of newly constructed object
2627
// location: method_info
2628
case 0x15:
2629
// kind: receiver type of method or constructor
2630
// location: method_info
2631
2632
{
2633
// struct:
2634
// empty_target {
2635
// }
2636
//
2637
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2638
("empty_target"));
2639
} break;
2640
2641
case 0x16:
2642
// kind: type in formal parameter declaration of method, constructor, or lambda expression
2643
// location: method_info
2644
2645
{
2646
// struct:
2647
// formal_parameter_target {
2648
// u1 formal_parameter_index;
2649
// }
2650
//
2651
if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
2652
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2653
("length() is too small for a formal_parameter_target"));
2654
return false;
2655
}
2656
2657
u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
2658
byte_i_ref += 1;
2659
2660
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2661
("formal_parameter_target: formal_parameter_index=%d",
2662
formal_parameter_index));
2663
} break;
2664
2665
case 0x17:
2666
// kind: type in throws clause of method or constructor
2667
// location: method_info
2668
2669
{
2670
// struct:
2671
// throws_target {
2672
// u2 throws_type_index
2673
// }
2674
//
2675
if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2676
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2677
("length() is too small for a throws_target"));
2678
return false;
2679
}
2680
2681
u2 throws_type_index = Bytes::get_Java_u2((address)
2682
type_annotations_typeArray->adr_at(byte_i_ref));
2683
byte_i_ref += 2;
2684
2685
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2686
("throws_target: throws_type_index=%d", throws_type_index));
2687
} break;
2688
2689
case 0x40:
2690
// kind: type in local variable declaration
2691
// location: Code
2692
case 0x41:
2693
// kind: type in resource variable declaration
2694
// location: Code
2695
2696
{
2697
// struct:
2698
// localvar_target {
2699
// u2 table_length;
2700
// struct {
2701
// u2 start_pc;
2702
// u2 length;
2703
// u2 index;
2704
// } table[table_length];
2705
// }
2706
//
2707
if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2708
// not enough room for a table_length let alone the rest of a localvar_target
2709
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2710
("length() is too small for a localvar_target table_length"));
2711
return false;
2712
}
2713
2714
u2 table_length = Bytes::get_Java_u2((address)
2715
type_annotations_typeArray->adr_at(byte_i_ref));
2716
byte_i_ref += 2;
2717
2718
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2719
("localvar_target: table_length=%d", table_length));
2720
2721
int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
2722
int table_size = table_length * table_struct_size;
2723
2724
if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
2725
// not enough room for a table
2726
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2727
("length() is too small for a table array of length %d", table_length));
2728
return false;
2729
}
2730
2731
// Skip over table
2732
byte_i_ref += table_size;
2733
} break;
2734
2735
case 0x42:
2736
// kind: type in exception parameter declaration
2737
// location: Code
2738
2739
{
2740
// struct:
2741
// catch_target {
2742
// u2 exception_table_index;
2743
// }
2744
//
2745
if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2746
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2747
("length() is too small for a catch_target"));
2748
return false;
2749
}
2750
2751
u2 exception_table_index = Bytes::get_Java_u2((address)
2752
type_annotations_typeArray->adr_at(byte_i_ref));
2753
byte_i_ref += 2;
2754
2755
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2756
("catch_target: exception_table_index=%d", exception_table_index));
2757
} break;
2758
2759
case 0x43:
2760
// kind: type in instanceof expression
2761
// location: Code
2762
case 0x44:
2763
// kind: type in new expression
2764
// location: Code
2765
case 0x45:
2766
// kind: type in method reference expression using ::new
2767
// location: Code
2768
case 0x46:
2769
// kind: type in method reference expression using ::Identifier
2770
// location: Code
2771
2772
{
2773
// struct:
2774
// offset_target {
2775
// u2 offset;
2776
// }
2777
//
2778
if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2779
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2780
("length() is too small for a offset_target"));
2781
return false;
2782
}
2783
2784
u2 offset = Bytes::get_Java_u2((address)
2785
type_annotations_typeArray->adr_at(byte_i_ref));
2786
byte_i_ref += 2;
2787
2788
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2789
("offset_target: offset=%d", offset));
2790
} break;
2791
2792
case 0x47:
2793
// kind: type in cast expression
2794
// location: Code
2795
case 0x48:
2796
// kind: type argument for generic constructor in new expression or
2797
// explicit constructor invocation statement
2798
// location: Code
2799
case 0x49:
2800
// kind: type argument for generic method in method invocation expression
2801
// location: Code
2802
case 0x4A:
2803
// kind: type argument for generic constructor in method reference expression using ::new
2804
// location: Code
2805
case 0x4B:
2806
// kind: type argument for generic method in method reference expression using ::Identifier
2807
// location: Code
2808
2809
{
2810
// struct:
2811
// type_argument_target {
2812
// u2 offset;
2813
// u1 type_argument_index;
2814
// }
2815
//
2816
if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
2817
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2818
("length() is too small for a type_argument_target"));
2819
return false;
2820
}
2821
2822
u2 offset = Bytes::get_Java_u2((address)
2823
type_annotations_typeArray->adr_at(byte_i_ref));
2824
byte_i_ref += 2;
2825
u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
2826
byte_i_ref += 1;
2827
2828
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2829
("type_argument_target: offset=%d, type_argument_index=%d",
2830
offset, type_argument_index));
2831
} break;
2832
2833
default:
2834
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2835
("unknown target_type"));
2836
#ifdef ASSERT
2837
ShouldNotReachHere();
2838
#endif
2839
return false;
2840
}
2841
2842
return true;
2843
} // end skip_type_annotation_target()
2844
2845
2846
// Read, verify and skip over the type_path part so that rewriting
2847
// can continue in the later parts of the struct.
2848
//
2849
// type_path {
2850
// u1 path_length;
2851
// {
2852
// u1 type_path_kind;
2853
// u1 type_argument_index;
2854
// } path[path_length];
2855
// }
2856
//
2857
bool VM_RedefineClasses::skip_type_annotation_type_path(
2858
AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) {
2859
2860
if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
2861
// not enough room for a path_length let alone the rest of the type_path
2862
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2863
("length() is too small for a type_path"));
2864
return false;
2865
}
2866
2867
u1 path_length = type_annotations_typeArray->at(byte_i_ref);
2868
byte_i_ref += 1;
2869
2870
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2871
("type_path: path_length=%d", path_length));
2872
2873
int calc_path_length = 0;
2874
for (; calc_path_length < path_length; calc_path_length++) {
2875
if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
2876
// not enough room for a path
2877
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2878
("length() is too small for path entry %d of %d",
2879
calc_path_length, path_length));
2880
return false;
2881
}
2882
2883
u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
2884
byte_i_ref += 1;
2885
u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
2886
byte_i_ref += 1;
2887
2888
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2889
("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
2890
calc_path_length, type_path_kind, type_argument_index));
2891
2892
if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
2893
// not enough room for a path
2894
RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2895
("inconsistent type_path values"));
2896
return false;
2897
}
2898
}
2899
assert(path_length == calc_path_length, "sanity check");
2900
2901
return true;
2902
} // end skip_type_annotation_type_path()
2903
2904
2905
// Rewrite constant pool references in the method's stackmap table.
2906
// These "structures" are adapted from the StackMapTable_attribute that
2907
// is described in section 4.8.4 of the 6.0 version of the VM spec
2908
// (dated 2005.10.26):
2909
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
2910
//
2911
// stack_map {
2912
// u2 number_of_entries;
2913
// stack_map_frame entries[number_of_entries];
2914
// }
2915
//
2916
void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
2917
methodHandle method, TRAPS) {
2918
2919
if (!method->has_stackmap_table()) {
2920
return;
2921
}
2922
2923
AnnotationArray* stackmap_data = method->stackmap_data();
2924
address stackmap_p = (address)stackmap_data->adr_at(0);
2925
address stackmap_end = stackmap_p + stackmap_data->length();
2926
2927
assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
2928
u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
2929
stackmap_p += 2;
2930
2931
RC_TRACE_WITH_THREAD(0x04000000, THREAD,
2932
("number_of_entries=%u", number_of_entries));
2933
2934
// walk through each stack_map_frame
2935
u2 calc_number_of_entries = 0;
2936
for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
2937
// The stack_map_frame structure is a u1 frame_type followed by
2938
// 0 or more bytes of data:
2939
//
2940
// union stack_map_frame {
2941
// same_frame;
2942
// same_locals_1_stack_item_frame;
2943
// same_locals_1_stack_item_frame_extended;
2944
// chop_frame;
2945
// same_frame_extended;
2946
// append_frame;
2947
// full_frame;
2948
// }
2949
2950
assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
2951
// The Linux compiler does not like frame_type to be u1 or u2. It
2952
// issues the following warning for the first if-statement below:
2953
//
2954
// "warning: comparison is always true due to limited range of data type"
2955
//
2956
u4 frame_type = *stackmap_p;
2957
stackmap_p++;
2958
2959
// same_frame {
2960
// u1 frame_type = SAME; /* 0-63 */
2961
// }
2962
if (frame_type >= 0 && frame_type <= 63) {
2963
// nothing more to do for same_frame
2964
}
2965
2966
// same_locals_1_stack_item_frame {
2967
// u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
2968
// verification_type_info stack[1];
2969
// }
2970
else if (frame_type >= 64 && frame_type <= 127) {
2971
rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2972
calc_number_of_entries, frame_type, THREAD);
2973
}
2974
2975
// reserved for future use
2976
else if (frame_type >= 128 && frame_type <= 246) {
2977
// nothing more to do for reserved frame_types
2978
}
2979
2980
// same_locals_1_stack_item_frame_extended {
2981
// u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
2982
// u2 offset_delta;
2983
// verification_type_info stack[1];
2984
// }
2985
else if (frame_type == 247) {
2986
stackmap_p += 2;
2987
rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2988
calc_number_of_entries, frame_type, THREAD);
2989
}
2990
2991
// chop_frame {
2992
// u1 frame_type = CHOP; /* 248-250 */
2993
// u2 offset_delta;
2994
// }
2995
else if (frame_type >= 248 && frame_type <= 250) {
2996
stackmap_p += 2;
2997
}
2998
2999
// same_frame_extended {
3000
// u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
3001
// u2 offset_delta;
3002
// }
3003
else if (frame_type == 251) {
3004
stackmap_p += 2;
3005
}
3006
3007
// append_frame {
3008
// u1 frame_type = APPEND; /* 252-254 */
3009
// u2 offset_delta;
3010
// verification_type_info locals[frame_type - 251];
3011
// }
3012
else if (frame_type >= 252 && frame_type <= 254) {
3013
assert(stackmap_p + 2 <= stackmap_end,
3014
"no room for offset_delta");
3015
stackmap_p += 2;
3016
u1 len = frame_type - 251;
3017
for (u1 i = 0; i < len; i++) {
3018
rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3019
calc_number_of_entries, frame_type, THREAD);
3020
}
3021
}
3022
3023
// full_frame {
3024
// u1 frame_type = FULL_FRAME; /* 255 */
3025
// u2 offset_delta;
3026
// u2 number_of_locals;
3027
// verification_type_info locals[number_of_locals];
3028
// u2 number_of_stack_items;
3029
// verification_type_info stack[number_of_stack_items];
3030
// }
3031
else if (frame_type == 255) {
3032
assert(stackmap_p + 2 + 2 <= stackmap_end,
3033
"no room for smallest full_frame");
3034
stackmap_p += 2;
3035
3036
u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
3037
stackmap_p += 2;
3038
3039
for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
3040
rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3041
calc_number_of_entries, frame_type, THREAD);
3042
}
3043
3044
// Use the largest size for the number_of_stack_items, but only get
3045
// the right number of bytes.
3046
u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
3047
stackmap_p += 2;
3048
3049
for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
3050
rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3051
calc_number_of_entries, frame_type, THREAD);
3052
}
3053
}
3054
} // end while there is a stack_map_frame
3055
assert(number_of_entries == calc_number_of_entries, "sanity check");
3056
} // end rewrite_cp_refs_in_stack_map_table()
3057
3058
3059
// Rewrite constant pool references in the verification type info
3060
// portion of the method's stackmap table. These "structures" are
3061
// adapted from the StackMapTable_attribute that is described in
3062
// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
3063
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
3064
//
3065
// The verification_type_info structure is a u1 tag followed by 0 or
3066
// more bytes of data:
3067
//
3068
// union verification_type_info {
3069
// Top_variable_info;
3070
// Integer_variable_info;
3071
// Float_variable_info;
3072
// Long_variable_info;
3073
// Double_variable_info;
3074
// Null_variable_info;
3075
// UninitializedThis_variable_info;
3076
// Object_variable_info;
3077
// Uninitialized_variable_info;
3078
// }
3079
//
3080
void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
3081
address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3082
u1 frame_type, TRAPS) {
3083
3084
assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
3085
u1 tag = *stackmap_p_ref;
3086
stackmap_p_ref++;
3087
3088
switch (tag) {
3089
// Top_variable_info {
3090
// u1 tag = ITEM_Top; /* 0 */
3091
// }
3092
// verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
3093
case 0: // fall through
3094
3095
// Integer_variable_info {
3096
// u1 tag = ITEM_Integer; /* 1 */
3097
// }
3098
case ITEM_Integer: // fall through
3099
3100
// Float_variable_info {
3101
// u1 tag = ITEM_Float; /* 2 */
3102
// }
3103
case ITEM_Float: // fall through
3104
3105
// Double_variable_info {
3106
// u1 tag = ITEM_Double; /* 3 */
3107
// }
3108
case ITEM_Double: // fall through
3109
3110
// Long_variable_info {
3111
// u1 tag = ITEM_Long; /* 4 */
3112
// }
3113
case ITEM_Long: // fall through
3114
3115
// Null_variable_info {
3116
// u1 tag = ITEM_Null; /* 5 */
3117
// }
3118
case ITEM_Null: // fall through
3119
3120
// UninitializedThis_variable_info {
3121
// u1 tag = ITEM_UninitializedThis; /* 6 */
3122
// }
3123
case ITEM_UninitializedThis:
3124
// nothing more to do for the above tag types
3125
break;
3126
3127
// Object_variable_info {
3128
// u1 tag = ITEM_Object; /* 7 */
3129
// u2 cpool_index;
3130
// }
3131
case ITEM_Object:
3132
{
3133
assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
3134
u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
3135
u2 new_cp_index = find_new_index(cpool_index);
3136
if (new_cp_index != 0) {
3137
RC_TRACE_WITH_THREAD(0x04000000, THREAD,
3138
("mapped old cpool_index=%d", cpool_index));
3139
Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3140
cpool_index = new_cp_index;
3141
}
3142
stackmap_p_ref += 2;
3143
3144
RC_TRACE_WITH_THREAD(0x04000000, THREAD,
3145
("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
3146
frame_type, cpool_index));
3147
} break;
3148
3149
// Uninitialized_variable_info {
3150
// u1 tag = ITEM_Uninitialized; /* 8 */
3151
// u2 offset;
3152
// }
3153
case ITEM_Uninitialized:
3154
assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3155
stackmap_p_ref += 2;
3156
break;
3157
3158
default:
3159
RC_TRACE_WITH_THREAD(0x04000000, THREAD,
3160
("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
3161
ShouldNotReachHere();
3162
break;
3163
} // end switch (tag)
3164
} // end rewrite_cp_refs_in_verification_type_info()
3165
3166
3167
// Change the constant pool associated with klass scratch_class to
3168
// scratch_cp. If shrink is true, then scratch_cp_length elements
3169
// are copied from scratch_cp to a smaller constant pool and the
3170
// smaller constant pool is associated with scratch_class.
3171
void VM_RedefineClasses::set_new_constant_pool(
3172
ClassLoaderData* loader_data,
3173
instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
3174
int scratch_cp_length, TRAPS) {
3175
assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3176
3177
// scratch_cp is a merged constant pool and has enough space for a
3178
// worst case merge situation. We want to associate the minimum
3179
// sized constant pool with the klass to save space.
3180
ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3181
constantPoolHandle smaller_cp(THREAD, cp);
3182
3183
// preserve version() value in the smaller copy
3184
int version = scratch_cp->version();
3185
assert(version != 0, "sanity check");
3186
smaller_cp->set_version(version);
3187
3188
// attach klass to new constant pool
3189
// reference to the cp holder is needed for copy_operands()
3190
smaller_cp->set_pool_holder(scratch_class());
3191
3192
scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
3193
if (HAS_PENDING_EXCEPTION) {
3194
// Exception is handled in the caller
3195
loader_data->add_to_deallocate_list(smaller_cp());
3196
return;
3197
}
3198
scratch_cp = smaller_cp;
3199
3200
// attach new constant pool to klass
3201
scratch_class->set_constants(scratch_cp());
3202
3203
int i; // for portability
3204
3205
// update each field in klass to use new constant pool indices as needed
3206
for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
3207
jshort cur_index = fs.name_index();
3208
jshort new_index = find_new_index(cur_index);
3209
if (new_index != 0) {
3210
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3211
("field-name_index change: %d to %d", cur_index, new_index));
3212
fs.set_name_index(new_index);
3213
}
3214
cur_index = fs.signature_index();
3215
new_index = find_new_index(cur_index);
3216
if (new_index != 0) {
3217
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3218
("field-signature_index change: %d to %d", cur_index, new_index));
3219
fs.set_signature_index(new_index);
3220
}
3221
cur_index = fs.initval_index();
3222
new_index = find_new_index(cur_index);
3223
if (new_index != 0) {
3224
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3225
("field-initval_index change: %d to %d", cur_index, new_index));
3226
fs.set_initval_index(new_index);
3227
}
3228
cur_index = fs.generic_signature_index();
3229
new_index = find_new_index(cur_index);
3230
if (new_index != 0) {
3231
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3232
("field-generic_signature change: %d to %d", cur_index, new_index));
3233
fs.set_generic_signature_index(new_index);
3234
}
3235
} // end for each field
3236
3237
// Update constant pool indices in the inner classes info to use
3238
// new constant indices as needed. The inner classes info is a
3239
// quadruple:
3240
// (inner_class_info, outer_class_info, inner_name, inner_access_flags)
3241
InnerClassesIterator iter(scratch_class);
3242
for (; !iter.done(); iter.next()) {
3243
int cur_index = iter.inner_class_info_index();
3244
if (cur_index == 0) {
3245
continue; // JVM spec. allows null inner class refs so skip it
3246
}
3247
int new_index = find_new_index(cur_index);
3248
if (new_index != 0) {
3249
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3250
("inner_class_info change: %d to %d", cur_index, new_index));
3251
iter.set_inner_class_info_index(new_index);
3252
}
3253
cur_index = iter.outer_class_info_index();
3254
new_index = find_new_index(cur_index);
3255
if (new_index != 0) {
3256
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3257
("outer_class_info change: %d to %d", cur_index, new_index));
3258
iter.set_outer_class_info_index(new_index);
3259
}
3260
cur_index = iter.inner_name_index();
3261
new_index = find_new_index(cur_index);
3262
if (new_index != 0) {
3263
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3264
("inner_name change: %d to %d", cur_index, new_index));
3265
iter.set_inner_name_index(new_index);
3266
}
3267
} // end for each inner class
3268
3269
// Attach each method in klass to the new constant pool and update
3270
// to use new constant pool indices as needed:
3271
Array<Method*>* methods = scratch_class->methods();
3272
for (i = methods->length() - 1; i >= 0; i--) {
3273
methodHandle method(THREAD, methods->at(i));
3274
method->set_constants(scratch_cp());
3275
3276
int new_index = find_new_index(method->name_index());
3277
if (new_index != 0) {
3278
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3279
("method-name_index change: %d to %d", method->name_index(),
3280
new_index));
3281
method->set_name_index(new_index);
3282
}
3283
new_index = find_new_index(method->signature_index());
3284
if (new_index != 0) {
3285
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3286
("method-signature_index change: %d to %d",
3287
method->signature_index(), new_index));
3288
method->set_signature_index(new_index);
3289
}
3290
new_index = find_new_index(method->generic_signature_index());
3291
if (new_index != 0) {
3292
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3293
("method-generic_signature_index change: %d to %d",
3294
method->generic_signature_index(), new_index));
3295
method->set_generic_signature_index(new_index);
3296
}
3297
3298
// Update constant pool indices in the method's checked exception
3299
// table to use new constant indices as needed.
3300
int cext_length = method->checked_exceptions_length();
3301
if (cext_length > 0) {
3302
CheckedExceptionElement * cext_table =
3303
method->checked_exceptions_start();
3304
for (int j = 0; j < cext_length; j++) {
3305
int cur_index = cext_table[j].class_cp_index;
3306
int new_index = find_new_index(cur_index);
3307
if (new_index != 0) {
3308
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3309
("cext-class_cp_index change: %d to %d", cur_index, new_index));
3310
cext_table[j].class_cp_index = (u2)new_index;
3311
}
3312
} // end for each checked exception table entry
3313
} // end if there are checked exception table entries
3314
3315
// Update each catch type index in the method's exception table
3316
// to use new constant pool indices as needed. The exception table
3317
// holds quadruple entries of the form:
3318
// (beg_bci, end_bci, handler_bci, klass_index)
3319
3320
ExceptionTable ex_table(method());
3321
int ext_length = ex_table.length();
3322
3323
for (int j = 0; j < ext_length; j ++) {
3324
int cur_index = ex_table.catch_type_index(j);
3325
int new_index = find_new_index(cur_index);
3326
if (new_index != 0) {
3327
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3328
("ext-klass_index change: %d to %d", cur_index, new_index));
3329
ex_table.set_catch_type_index(j, new_index);
3330
}
3331
} // end for each exception table entry
3332
3333
// Update constant pool indices in the method's local variable
3334
// table to use new constant indices as needed. The local variable
3335
// table hold sextuple entries of the form:
3336
// (start_pc, length, name_index, descriptor_index, signature_index, slot)
3337
int lvt_length = method->localvariable_table_length();
3338
if (lvt_length > 0) {
3339
LocalVariableTableElement * lv_table =
3340
method->localvariable_table_start();
3341
for (int j = 0; j < lvt_length; j++) {
3342
int cur_index = lv_table[j].name_cp_index;
3343
int new_index = find_new_index(cur_index);
3344
if (new_index != 0) {
3345
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3346
("lvt-name_cp_index change: %d to %d", cur_index, new_index));
3347
lv_table[j].name_cp_index = (u2)new_index;
3348
}
3349
cur_index = lv_table[j].descriptor_cp_index;
3350
new_index = find_new_index(cur_index);
3351
if (new_index != 0) {
3352
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3353
("lvt-descriptor_cp_index change: %d to %d", cur_index,
3354
new_index));
3355
lv_table[j].descriptor_cp_index = (u2)new_index;
3356
}
3357
cur_index = lv_table[j].signature_cp_index;
3358
new_index = find_new_index(cur_index);
3359
if (new_index != 0) {
3360
RC_TRACE_WITH_THREAD(0x00080000, THREAD,
3361
("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
3362
lv_table[j].signature_cp_index = (u2)new_index;
3363
}
3364
} // end for each local variable table entry
3365
} // end if there are local variable table entries
3366
3367
rewrite_cp_refs_in_stack_map_table(method, THREAD);
3368
} // end for each method
3369
} // end set_new_constant_pool()
3370
3371
3372
// Unevolving classes may point to methods of the_class directly
// from their constant pool caches, itables, and/or vtables. We
// use the ClassLoaderDataGraph::classes_do() facility and this helper
// to fix up these pointers.

// Adjust cpools and vtables closure.
// Called once per loaded Klass at a safepoint; fixes any cached
// Method* references that still point at the old (pre-redefine)
// methods of _the_class_oop.
void VM_RedefineClasses::AdjustCpoolCacheAndVtable::do_klass(Klass* k) {

  // This is a very busy routine. We don't want too much tracing
  // printed out.
  bool trace_name_printed = false;
  InstanceKlass *the_class = InstanceKlass::cast(_the_class_oop);

  // Very noisy: only enable this call if you are trying to determine
  // that a specific class gets found by this routine.
  // RC_TRACE macro has an embedded ResourceMark
  // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
  //   ("adjust check: name=%s", k->external_name()));
  // trace_name_printed = true;

  // If the class being redefined is java.lang.Object, we need to fix all
  // array class vtables also
  if (k->oop_is_array() && _the_class_oop == SystemDictionary::Object_klass()) {
    k->vtable()->adjust_method_entries(the_class, &trace_name_printed);

  } else if (k->oop_is_instance()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // HotSpot specific optimization! HotSpot does not currently
    // support delegation from the bootstrap class loader to a
    // user-defined class loader. This means that if the bootstrap
    // class loader is the initiating class loader, then it will also
    // be the defining class loader. This also means that classes
    // loaded by the bootstrap class loader cannot refer to classes
    // loaded by a user-defined class loader. Note: a user-defined
    // class loader can delegate to the bootstrap class loader.
    //
    // If the current class being redefined has a user-defined class
    // loader as its defining class loader, then we can skip all
    // classes loaded by the bootstrap class loader.
    bool is_user_defined =
           InstanceKlass::cast(_the_class_oop)->class_loader() != NULL;
    if (is_user_defined && ik->class_loader() == NULL) {
      return;
    }

    // Fix the vtable embedded in the_class and subclasses of the_class,
    // if one exists. We discard scratch_class and we don't keep an
    // InstanceKlass around to hold obsolete methods so we don't have
    // any other InstanceKlass embedded vtables to update. The vtable
    // holds the Method*s for virtual (but not final) methods.
    // Default methods, or concrete methods in interfaces are stored
    // in the vtable, so if an interface changes we need to check
    // adjust_method_entries() for every InstanceKlass, which will also
    // adjust the default method vtable indices.
    // We also need to adjust any default method entries that are
    // not yet in the vtable, because the vtable setup is in progress.
    // This must be done after we adjust the default_methods and
    // default_vtable_indices for methods already in the vtable.
    // If redefining Unsafe, walk all the vtables looking for entries.
    if (ik->vtable_length() > 0 && (_the_class_oop->is_interface()
        || _the_class_oop == SystemDictionary::misc_Unsafe_klass()
        || ik->is_subtype_of(_the_class_oop))) {
      // ik->vtable() creates a wrapper object; rm cleans it up
      ResourceMark rm(_thread);

      ik->vtable()->adjust_method_entries(the_class, &trace_name_printed);
      ik->adjust_default_methods(the_class, &trace_name_printed);
    }

    // If the current class has an itable and we are either redefining an
    // interface or if the current class is a subclass of the_class, then
    // we potentially have to fix the itable. If we are redefining an
    // interface, then we have to call adjust_method_entries() for
    // every InstanceKlass that has an itable since there isn't a
    // subclass relationship between an interface and an InstanceKlass.
    // If redefining Unsafe, walk all the itables looking for entries.
    if (ik->itable_length() > 0 && (_the_class_oop->is_interface()
        || _the_class_oop == SystemDictionary::misc_Unsafe_klass()
        || ik->is_subclass_of(_the_class_oop))) {
      // ik->itable() creates a wrapper object; rm cleans it up
      ResourceMark rm(_thread);

      ik->itable()->adjust_method_entries(the_class, &trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // methods in the_class. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is the_class, then new_cp
    // has already been attached to the_class and old_cp has already
    // been added as a previous version. The new_cp doesn't have any
    // cached references to old methods so it doesn't need to be
    // updated. We can simply start with the previous version(s) in
    // that case.
    constantPoolHandle other_cp;
    ConstantPoolCache* cp_cache;

    if (ik != _the_class_oop) {
      // this klass' constant pool cache may need adjustment
      other_cp = constantPoolHandle(ik->constants());
      cp_cache = other_cp->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(the_class, &trace_name_printed);
      }
    }

    // the previous versions' constant pool caches may need adjustment
    for (InstanceKlass* pv_node = ik->previous_versions();
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      cp_cache = pv_node->constants()->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(pv_node, &trace_name_printed);
      }
    }
  }
}
3495
3496
void VM_RedefineClasses::update_jmethod_ids() {
3497
for (int j = 0; j < _matching_methods_length; ++j) {
3498
Method* old_method = _matching_old_methods[j];
3499
jmethodID jmid = old_method->find_jmethod_id_or_null();
3500
if (jmid != NULL) {
3501
// There is a jmethodID, change it to point to the new method
3502
methodHandle new_method_h(_matching_new_methods[j]);
3503
Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
3504
assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
3505
"should be replaced");
3506
}
3507
}
3508
}
3509
3510
// Classify every old method as either EMCP (Equivalent Modulo Constant
// Pool) or obsolete, and mark it accordingly: EMCP methods are marked
// "old", obsolete methods are marked "old" and "obsolete" (deleted
// methods additionally "deleted"). Returns the EMCP method count,
// which the caller passes to add_previous_version().
int VM_RedefineClasses::check_methods_and_mark_as_obsolete() {
  int emcp_method_count = 0;
  int obsolete_count = 0;
  int old_index = 0;
  for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
    Method* old_method = _matching_old_methods[j];
    Method* new_method = _matching_new_methods[j];
    Method* old_array_method;

    // Maintain an old_index into the _old_methods array by skipping
    // deleted methods
    while ((old_array_method = _old_methods->at(old_index)) != old_method) {
      ++old_index;
    }

    if (MethodComparator::methods_EMCP(old_method, new_method)) {
      // The EMCP definition from JSR-163 requires the bytecodes to be
      // the same with the exception of constant pool indices which may
      // differ. However, the constants referred to by those indices
      // must be the same.
      //
      // We use methods_EMCP() for comparison since constant pool
      // merging can remove duplicate constant pool entries that were
      // present in the old method and removed from the rewritten new
      // method. A faster binary comparison function would consider the
      // old and new methods to be different when they are actually
      // EMCP.
      //
      // The old and new methods are EMCP and you would think that we
      // could get rid of one of them here and now and save some space.
      // However, the concept of EMCP only considers the bytecodes and
      // the constant pool entries in the comparison. Other things,
      // e.g., the line number table (LNT) or the local variable table
      // (LVT) don't count in the comparison. So the new (and EMCP)
      // method can have a new LNT that we need so we can't just
      // overwrite the new method with the old method.
      //
      // When this routine is called, we have already attached the new
      // methods to the_class so the old methods are effectively
      // overwritten. However, if an old method is still executing,
      // then the old method cannot be collected until sometime after
      // the old method call has returned. So the overwriting of old
      // methods by new methods will save us space except for those
      // (hopefully few) old methods that are still executing.
      //
      // A method refers to a ConstMethod* and this presents another
      // possible avenue to space savings. The ConstMethod* in the
      // new method contains possibly new attributes (LNT, LVT, etc).
      // At first glance, it seems possible to save space by replacing
      // the ConstMethod* in the old method with the ConstMethod*
      // from the new method. The old and new methods would share the
      // same ConstMethod* and we would save the space occupied by
      // the old ConstMethod*. However, the ConstMethod* contains
      // a back reference to the containing method. Sharing the
      // ConstMethod* between two methods could lead to confusion in
      // the code that uses the back reference. This would lead to
      // brittle code that could be broken in non-obvious ways now or
      // in the future.
      //
      // Another possibility is to copy the ConstMethod* from the new
      // method to the old method and then overwrite the new method with
      // the old method. Since the ConstMethod* contains the bytecodes
      // for the method embedded in the oop, this option would change
      // the bytecodes out from under any threads executing the old
      // method and make the thread's bcp invalid. Since EMCP requires
      // that the bytecodes be the same modulo constant pool indices, it
      // is straight forward to compute the correct new bcp in the new
      // ConstMethod* from the old bcp in the old ConstMethod*. The
      // time consuming part would be searching all the frames in all
      // of the threads to find all of the calls to the old method.
      //
      // It looks like we will have to live with the limited savings
      // that we get from effectively overwriting the old methods
      // when the new methods are attached to the_class.

      // Count number of methods that are EMCP. The method will be marked
      // old but not obsolete if it is EMCP.
      emcp_method_count++;

      // An EMCP method is _not_ obsolete. An obsolete method has a
      // different jmethodID than the current method. An EMCP method
      // has the same jmethodID as the current method. Having the
      // same jmethodID for all EMCP versions of a method allows for
      // a consistent view of the EMCP methods regardless of which
      // EMCP method you happen to have in hand. For example, a
      // breakpoint set in one EMCP method will work for all EMCP
      // versions of the method including the current one.
    } else {
      // mark obsolete methods as such
      old_method->set_is_obsolete();
      obsolete_count++;

      // obsolete methods need a unique idnum so they become new entries in
      // the jmethodID cache in InstanceKlass
      assert(old_method->method_idnum() == new_method->method_idnum(), "must match");
      u2 num = InstanceKlass::cast(_the_class_oop)->next_method_idnum();
      if (num != ConstMethod::UNSET_IDNUM) {
        old_method->set_method_idnum(num);
      }

      // With tracing we try not to "yack" too much. The position of
      // this trace assumes there are fewer obsolete methods than
      // EMCP methods.
      RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
                            old_method->name()->as_C_string(),
                            old_method->signature()->as_C_string()));
    }
    old_method->set_is_old();
  }
  // Deleted methods have no new counterpart; mark each one fully.
  for (int i = 0; i < _deleted_methods_length; ++i) {
    Method* old_method = _deleted_methods[i];

    assert(!old_method->has_vtable_index(),
           "cannot delete methods with vtable entries");;

    // Mark all deleted methods as old, obsolete and deleted
    old_method->set_is_deleted();
    old_method->set_is_old();
    old_method->set_is_obsolete();
    ++obsolete_count;
    // With tracing we try not to "yack" too much. The position of
    // this trace assumes there are fewer obsolete methods than
    // EMCP methods.
    RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
                          old_method->name()->as_C_string(),
                          old_method->signature()->as_C_string()));
  }
  // Every old method is accounted for as either EMCP or obsolete.
  assert((emcp_method_count + obsolete_count) == _old_methods->length(),
         "sanity check");
  RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", emcp_method_count,
                        obsolete_count));
  return emcp_method_count;
}
3643
3644
// This internal class transfers the native function registration from old methods
// to new methods. It is designed to handle both the simple case of unchanged
// native methods and the complex cases of native method prefixes being added and/or
// removed.
// It expects only to be used during the VM_RedefineClasses op (a safepoint).
//
// This class is used after the new methods have been installed in "the_class".
//
// So, for example, the following must be handled. Where 'm' is a method and
// a number followed by an underscore is a prefix.
//
//                                      Old Name    New Name
// Simple transfer to new method        m       ->  m
// Add prefix                           m       ->  1_m
// Remove prefix                        1_m     ->  m
// Simultaneous add of prefixes         m       ->  3_2_1_m
// Simultaneous removal of prefixes     3_2_1_m ->  m
// Simultaneous add and remove          1_m     ->  2_m
// Same, caused by prefix removal only  3_2_1_m ->  3_2_m
//
class TransferNativeFunctionRegistration {
 private:
  instanceKlassHandle the_class;  // class whose methods are being transferred
  int prefix_count;               // number of agent-registered native prefixes
  char** prefixes;                // the prefixes, from JvmtiExport

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilent to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //    (1) without the prefix.
  //    (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the (possibly prefixed) native Method* or NULL if none matches.
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                   Symbol* signature) {
    // probe() only finds symbols that already exist; a missing symbol
    // means no method with this name can exist either.
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      Method* method = the_class()->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // Resource-allocated; freed by the caller's ResourceMark.
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  // Returns a pointer into the method's own UTF-8 name buffer; no copy.
  char* method_name_without_prefixes(Method* method) {
    Symbol* name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        name_str += prefix_len;
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  Method* strip_and_search_for_new_native(Method* method) {
    ResourceMark rm;
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(Method** old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      Method* old_method = old_methods[j];

      // Only methods that actually have a bound native function need
      // their registration carried over.
      if (old_method->is_native() && old_method->has_native_function()) {
        Method* new_method = strip_and_search_for_new_native(old_method);
        if (new_method != NULL) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
                              !Method::native_bind_event_is_interesting);
        }
      }
    }
  }
};
3769
3770
// Don't lose the association between a native method and its JNI function.
3771
void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
3772
TransferNativeFunctionRegistration transfer(the_class);
3773
transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
3774
transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
3775
}
3776
3777
// Deoptimize all compiled code that depends on this class.
//
// If the can_redefine_classes capability is obtained in the onload
// phase then the compiler has recorded all dependencies from startup.
// In that case we need only deoptimize and throw away all compiled code
// that depends on the class.
//
// If can_redefine_classes is obtained sometime after the onload
// phase then the dependency information may be incomplete. In that case
// the first call to RedefineClasses causes all compiled code to be
// thrown away. As can_redefine_classes has been obtained then
// all future compilations will record dependencies so second and
// subsequent calls to RedefineClasses need only throw away code
// that depends on the class.
//
void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
  assert_locked_or_safepoint(Compile_lock);

  // All dependencies have been recorded from startup or this is a second or
  // subsequent use of RedefineClasses
  if (JvmtiExport::all_dependencies_are_recorded()) {
    // Targeted flush: only code with a recorded evolution dependency
    // on this class is deoptimized.
    Universe::flush_evol_dependents_on(k_h);
  } else {
    // Conservative flush: dependency info may be incomplete, so throw
    // away everything once, then record dependencies from here on.
    CodeCache::mark_all_nmethods_for_deoptimization();

    ResourceMark rm(THREAD);
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant
    CodeCache::make_marked_nmethods_not_entrant();

    // From now on we know that the dependency information is complete
    JvmtiExport::set_all_dependencies_are_recorded(true);
  }
}
3815
3816
// Partition the old and new method arrays into three resource-allocated
// sets: matching (same name and signature in both versions), deleted
// (only in the old class) and added (only in the new class). A single
// linear merge pass is used; this appears to rely on both arrays being
// name-sorted (see the fast_compare() merge step) — the "added
// overloaded" comment below indicates added overloads have already
// been moved to the end by earlier processing.
void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  Method* old_method;
  Method* new_method;

  // Worst-case sizing: every old method could match or be deleted,
  // every new method could be added.
  _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _added_methods        = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
  _deleted_methods      = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());

  _matching_methods_length = 0;
  _deleted_methods_length  = 0;
  _added_methods_length    = 0;

  int nj = 0;
  int oj = 0;
  while (true) {
    if (oj >= _old_methods->length()) {
      if (nj >= _new_methods->length()) {
        break; // we've looked at everything, done
      }
      // New method at the end
      new_method = _new_methods->at(nj);
      _added_methods[_added_methods_length++] = new_method;
      ++nj;
    } else if (nj >= _new_methods->length()) {
      // Old method, at the end, is deleted
      old_method = _old_methods->at(oj);
      _deleted_methods[_deleted_methods_length++] = old_method;
      ++oj;
    } else {
      old_method = _old_methods->at(oj);
      new_method = _new_methods->at(nj);
      if (old_method->name() == new_method->name()) {
        if (old_method->signature() == new_method->signature()) {
          _matching_old_methods[_matching_methods_length  ] = old_method;
          _matching_new_methods[_matching_methods_length++] = new_method;
          ++nj;
          ++oj;
        } else {
          // added overloaded have already been moved to the end,
          // so this is a deleted overloaded method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      } else { // names don't match
        if (old_method->name()->fast_compare(new_method->name()) > 0) {
          // new method
          _added_methods[_added_methods_length++] = new_method;
          ++nj;
        } else {
          // deleted method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      }
    }
  }
  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
}
3876
3877
3878
// Exchange the Annotations* between the two class versions: the_class
// picks up the new annotations while scratch_class keeps the old ones.
void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class,
                  instanceKlassHandle scratch_class) {
  // Classic three-step swap through a saved pointer.
  Annotations* saved_annotations = the_class->annotations();
  the_class->set_annotations(scratch_class->annotations());
  scratch_class->set_annotations(saved_annotations);
}
3885
3886
3887
// Install the redefinition of a class:
// - house keeping (flushing breakpoints and caches, deoptimizing
//   dependent compiled code)
// - replacing parts in the_class with parts from scratch_class
// - adding a weak reference to track the obsolete but interesting
//   parts of the_class
// - adjusting constant pool caches and vtables in other classes
//   that refer to methods in the_class. These adjustments use the
//   ClassLoaderDataGraph::classes_do() facility which only allows
//   a helper method to be specified. The interesting parameters
//   that we would like to pass to the helper method are saved in
//   static global fields in the VM operation.
// NOTE: the statement order below is load-bearing; runs at a safepoint.
void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
       Klass* scratch_class_oop, TRAPS) {

  HandleMark hm(THREAD);   // make sure handles from this call are freed
  RC_TIMER_START(_timer_rsc_phase1);

  instanceKlassHandle scratch_class(THREAD, scratch_class_oop);
  instanceKlassHandle the_class(THREAD, get_ik(the_jclass));

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class());

  // Deoptimize all compiled code that depends on this class
  flush_dependent_code(the_class, THREAD);

  // Stash the method arrays in the VM-op fields used by the helpers below.
  _old_methods = the_class->methods();
  _new_methods = scratch_class->methods();
  _the_class_oop = the_class();
  compute_added_deleted_matching_methods();
  update_jmethod_ids();

  // Attach new constant pool to the original klass. The original
  // klass still refers to the old constant pool (for now).
  scratch_class->constants()->set_pool_holder(the_class());

#if 0
  // In theory, with constant pool merging in place we should be able
  // to save space by using the new, merged constant pool in place of
  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  // the klass version we are replacing now and any constant pool(s) in
  // previous versions of klass. Nice theory, doesn't work in practice.
  // When this code is enabled, even simple programs throw NullPointer
  // exceptions. I'm guessing that this is caused by some constant pool
  // cache difference between the new, merged constant pool and the
  // constant pool that was just being used by the klass. I'm keeping
  // this code around to archive the idea, but the code has to remain
  // disabled for now.

  // Attach each old method to the new constant pool. This can be
  // done here since we are past the bytecode verification and
  // constant pool optimization phases.
  for (int i = _old_methods->length() - 1; i >= 0; i--) {
    Method* method = _old_methods->at(i);
    method->set_constants(scratch_class->constants());
  }

  {
    // walk all previous versions of the klass
    InstanceKlass *ik = (InstanceKlass *)the_class();
    PreviousVersionWalker pvw(ik);
    instanceKlassHandle ikh;
    do {
      ikh = pvw.next_previous_version();
      if (!ikh.is_null()) {
        ik = ikh();

        // attach previous version of klass to the new constant pool
        ik->set_constants(scratch_class->constants());

        // Attach each method in the previous version of klass to the
        // new constant pool
        Array<Method*>* prev_methods = ik->methods();
        for (int i = prev_methods->length() - 1; i >= 0; i--) {
          Method* method = prev_methods->at(i);
          method->set_constants(scratch_class->constants());
        }
      }
    } while (!ikh.is_null());
  }
#endif

  // Replace methods and constantpool
  the_class->set_methods(_new_methods);
  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
                                                // and to be able to undo operation easily.

  Array<int>* old_ordering = the_class->method_ordering();
  the_class->set_method_ordering(scratch_class->method_ordering());
  scratch_class->set_method_ordering(old_ordering);

  ConstantPool* old_constants = the_class->constants();
  the_class->set_constants(scratch_class->constants());
  scratch_class->set_constants(old_constants);  // See the previous comment.
#if 0
  // We are swapping the guts of "the new class" with the guts of "the
  // class". Since the old constant pool has just been attached to "the
  // new class", it seems logical to set the pool holder in the old
  // constant pool also. However, doing this will change the observable
  // class hierarchy for any old methods that are still executing. A
  // method can query the identity of its "holder" and this query uses
  // the method's constant pool link to find the holder. The change in
  // holding class from "the class" to "the new class" can confuse
  // things.
  //
  // Setting the old constant pool's holder will also cause
  // verification done during vtable initialization below to fail.
  // During vtable initialization, the vtable's class is verified to be
  // a subtype of the method's holder. The vtable's class is "the
  // class" and the method's holder is gotten from the constant pool
  // link in the method itself. For "the class"'s directly implemented
  // methods, the method holder is "the class" itself (as gotten from
  // the new constant pool). The check works fine in this case. The
  // check also works fine for methods inherited from super classes.
  //
  // Miranda methods are a little more complicated. A miranda method is
  // provided by an interface when the class implementing the interface
  // does not provide its own method. These interfaces are implemented
  // internally as an InstanceKlass. These special instanceKlasses
  // share the constant pool of the class that "implements" the
  // interface. By sharing the constant pool, the method holder of a
  // miranda method is the class that "implements" the interface. In a
  // non-redefine situation, the subtype check works fine. However, if
  // the old constant pool's pool holder is modified, then the check
  // fails because there is no class hierarchy relationship between the
  // vtable's class and "the new class".

  old_constants->set_pool_holder(scratch_class());
#endif

  // track number of methods that are EMCP for add_previous_version() call below
  int emcp_method_count = check_methods_and_mark_as_obsolete();
  transfer_old_native_function_registrations(the_class);

  // The class file bytes from before any retransformable agents mucked
  // with them was cached on the scratch class, move to the_class.
  // Note: we still want to do this if nothing needed caching since it
  // should get cleared in the_class too.
  if (the_class->get_cached_class_file() == 0) {
    // the_class doesn't have a cache yet so copy it
    the_class->set_cached_class_file(scratch_class->get_cached_class_file());
  }
  else if (scratch_class->get_cached_class_file() !=
           the_class->get_cached_class_file()) {
    // The same class can be present twice in the scratch classes list or there
    // are multiple concurrent RetransformClasses calls on different threads.
    // In such cases we have to deallocate scratch_class cached_class_file.
    os::free(scratch_class->get_cached_class_file());
  }

  // NULL out in scratch class to not delete twice. The class to be redefined
  // always owns these bytes.
  scratch_class->set_cached_class_file(NULL);

  // Replace inner_classes
  Array<u2>* old_inner_classes = the_class->inner_classes();
  the_class->set_inner_classes(scratch_class->inner_classes());
  scratch_class->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  {
    ResourceMark rm(THREAD);
    // no exception should happen here since we explicitly
    // do not check loader constraints.
    // compare_and_normalize_class_versions has already checked:
    //  - classloaders unchanged, signatures unchanged
    //  - all instanceKlasses for redefined classes reused & contents updated
    the_class->vtable()->initialize_vtable(false, THREAD);
    the_class->itable()->initialize_itable(false, THREAD);
    assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");
  }

  // Leave arrays of jmethodIDs and itable index cache unchanged

  // Copy the "source file name" attribute from new class version
  the_class->set_source_file_name_index(
    scratch_class->source_file_name_index());

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(
    scratch_class->source_debug_extension(),
    scratch_class->source_debug_extension() == NULL ? 0 :
    (int)strlen(scratch_class->source_debug_extension()));

  // Use of javac -g could be different in the old and the new
  if (scratch_class->access_flags().has_localvariable_table() !=
      the_class->access_flags().has_localvariable_table()) {

    AccessFlags flags = the_class->access_flags();
    if (scratch_class->access_flags().has_localvariable_table()) {
      flags.set_has_localvariable_table();
    } else {
      flags.clear_has_localvariable_table();
    }
    the_class->set_access_flags(flags);
  }

  swap_annotations(the_class, scratch_class);

  // Replace minor version number of class file
  u2 old_minor_version = the_class->minor_version();
  the_class->set_minor_version(scratch_class->minor_version());
  scratch_class->set_minor_version(old_minor_version);

  // Replace major version number of class file
  u2 old_major_version = the_class->major_version();
  the_class->set_major_version(scratch_class->major_version());
  scratch_class->set_major_version(old_major_version);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx  = the_class->enclosing_method_class_index();
  u2 old_method_idx = the_class->enclosing_method_method_index();
  the_class->set_enclosing_method_indices(
    scratch_class->enclosing_method_class_index(),
    scratch_class->enclosing_method_method_index());
  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);

  the_class->set_has_been_redefined();

  // keep track of previous versions of this class
  the_class->add_previous_version(scratch_class, emcp_method_count);

  RC_TIMER_STOP(_timer_rsc_phase1);
  RC_TIMER_START(_timer_rsc_phase2);

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved class.
  AdjustCpoolCacheAndVtable adjust_cpool_cache_and_vtable(THREAD);
  ClassLoaderDataGraph::classes_do(&adjust_cpool_cache_and_vtable);

  // JSR-292 support
  MemberNameTable* mnt = the_class->member_names();
  if (mnt != NULL) {
    bool trace_name_printed = false;
    mnt->adjust_method_entries(the_class(), &trace_name_printed);
  }

  if (the_class->oop_map_cache() != NULL) {
    // Flush references to any obsolete methods from the oop map cache
    // so that obsolete methods are not pinned.
    the_class->oop_map_cache()->flush_obsolete_entries();
  }

  // increment the classRedefinedCount field in the_class and in any
  // direct and indirect subclasses of the_class
  increment_class_counter((InstanceKlass *)the_class(), THREAD);

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE_WITH_THREAD(0x00000001, THREAD,
    ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
    the_class->external_name(),
    java_lang_Class::classRedefinedCount(the_class->java_mirror()),
    os::available_memory() >> 10));

  {
    ResourceMark rm(THREAD);
    Events::log_redefinition(THREAD, "redefined class name=%s, count=%d",
                             the_class->external_name(),
                             java_lang_Class::classRedefinedCount(the_class->java_mirror()));

  }
  RC_TIMER_STOP(_timer_rsc_phase2);
} // end redefine_single_class()
4153
4154
4155
// Increment the classRedefinedCount field in the specific InstanceKlass
4156
// and in all direct and indirect subclasses.
4157
void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
4158
oop class_mirror = ik->java_mirror();
4159
Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
4160
int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
4161
java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
4162
4163
if (class_oop != _the_class_oop) {
4164
// _the_class_oop count is printed at end of redefine_single_class()
4165
RC_TRACE_WITH_THREAD(0x00000008, THREAD,
4166
("updated count in subclass=%s to %d", ik->external_name(), new_count));
4167
}
4168
4169
for (Klass *subk = ik->subklass(); subk != NULL;
4170
subk = subk->next_sibling()) {
4171
if (subk->oop_is_instance()) {
4172
// Only update instanceKlasses
4173
InstanceKlass *subik = (InstanceKlass*)subk;
4174
// recursively do subclasses of the current subclass
4175
increment_class_counter(subik, THREAD);
4176
}
4177
}
4178
}
4179
4180
// Verify that no old or obsolete Method* survived redefinition in this
// class's vtable, itable, or constant pool cache; guarantee()-fail if any did.
void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
  bool found_old_method = false;

  // Both array and instance classes have vtables.
  // a vtable should never contain old or obsolete methods
  ResourceMark rm(_thread);
  if (k->vtable_length() > 0 &&
      !k->vtable()->check_no_old_or_obsolete_entries()) {
    if (RC_TRACE_ENABLED(0x00004000)) {
      RC_TRACE_WITH_THREAD(0x00004000, _thread,
        ("klassVtable::check_no_old_or_obsolete_entries failure"
         " -- OLD or OBSOLETE method found -- class: %s",
         k->signature_name()));
      k->vtable()->dump_vtable();
    }
    found_old_method = true;
  }

  if (k->oop_is_instance()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // an itable should never contain old or obsolete methods
    if (ik->itable_length() > 0 &&
        !ik->itable()->check_no_old_or_obsolete_entries()) {
      if (RC_TRACE_ENABLED(0x00004000)) {
        RC_TRACE_WITH_THREAD(0x00004000, _thread,
          ("klassItable::check_no_old_or_obsolete_entries failure"
           " -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name()));
        ik->itable()->dump_itable();
      }
      found_old_method = true;
    }

    // the constant pool cache should never contain non-deleted old or obsolete methods
    if (ik->constants() != NULL &&
        ik->constants()->cache() != NULL &&
        !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
      if (RC_TRACE_ENABLED(0x00004000)) {
        RC_TRACE_WITH_THREAD(0x00004000, _thread,
          ("cp-cache::check_no_old_or_obsolete_entries failure"
           " -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name()));
        ik->constants()->cache()->dump_cache();
      }
      found_old_method = true;
    }
  }

  // print and fail guarantee if old methods are found.
  if (found_old_method) {
    if (RC_TRACE_ENABLED(0x00004000)) {
      dump_methods();
    } else {
      tty->print_cr("INFO: use the '-XX:TraceRedefineClasses=16384' option "
        "to see more info about the following guarantee() failure.");
    }
    guarantee(false, "OLD and/or OBSOLETE method(s) found");
  }
}
4241
4242
4243
void VM_RedefineClasses::dump_methods() {
4244
int j;
4245
RC_TRACE(0x00004000, ("_old_methods --"));
4246
for (j = 0; j < _old_methods->length(); ++j) {
4247
Method* m = _old_methods->at(j);
4248
RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index()));
4249
m->access_flags().print_on(tty);
4250
tty->print(" -- ");
4251
m->print_name(tty);
4252
tty->cr();
4253
}
4254
RC_TRACE(0x00004000, ("_new_methods --"));
4255
for (j = 0; j < _new_methods->length(); ++j) {
4256
Method* m = _new_methods->at(j);
4257
RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index()));
4258
m->access_flags().print_on(tty);
4259
tty->print(" -- ");
4260
m->print_name(tty);
4261
tty->cr();
4262
}
4263
RC_TRACE(0x00004000, ("_matching_(old/new)_methods --"));
4264
for (j = 0; j < _matching_methods_length; ++j) {
4265
Method* m = _matching_old_methods[j];
4266
RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index()));
4267
m->access_flags().print_on(tty);
4268
tty->print(" -- ");
4269
m->print_name(tty);
4270
tty->cr();
4271
m = _matching_new_methods[j];
4272
RC_TRACE_NO_CR(0x00004000, (" (%5d) ", m->vtable_index()));
4273
m->access_flags().print_on(tty);
4274
tty->cr();
4275
}
4276
RC_TRACE(0x00004000, ("_deleted_methods --"));
4277
for (j = 0; j < _deleted_methods_length; ++j) {
4278
Method* m = _deleted_methods[j];
4279
RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index()));
4280
m->access_flags().print_on(tty);
4281
tty->print(" -- ");
4282
m->print_name(tty);
4283
tty->cr();
4284
}
4285
RC_TRACE(0x00004000, ("_added_methods --"));
4286
for (j = 0; j < _added_methods_length; ++j) {
4287
Method* m = _added_methods[j];
4288
RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index()));
4289
m->access_flags().print_on(tty);
4290
tty->print(" -- ");
4291
m->print_name(tty);
4292
tty->cr();
4293
}
4294
}
4295
4296
// Augment the generic VM_Operation crash-report line with the name of the
// class being redefined, when one is known.
void VM_RedefineClasses::print_on_error(outputStream* st) const {
  VM_Operation::print_on_error(st);
  if (_the_class_oop == NULL) {
    return;  // nothing class-specific to report
  }
  // external_name() allocates in the resource area.
  ResourceMark rm;
  st->print_cr(", redefining class %s", _the_class_oop->external_name());
}
4303
4304