GitHub Repository: PojavLauncherTeam/openjdk-multiarch-jdk8u
Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/oops/method.cpp

/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/debugInfoRec.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/bytecodeTracer.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/oopMapCache.hpp"
#include "memory/gcLocker.hpp"
#include "memory/generation.hpp"
#include "memory/heapInspection.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/oopFactory.hpp"
#include "oops/constMethod.hpp"
#include "oops/methodData.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "prims/nativeLookup.hpp"
#include "runtime/arguments.hpp"
#include "runtime/compilationPolicy.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/orderAccess.inline.hpp"
#include "runtime/relocator.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "utilities/quickSort.hpp"
#include "utilities/xmlstream.hpp"

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

// Implementation of Method

Method* Method::allocate(ClassLoaderData* loader_data,
                         int byte_code_size,
                         AccessFlags access_flags,
                         InlineTableSizes* sizes,
                         ConstMethod::MethodType method_type,
                         TRAPS) {
  assert(!access_flags.is_native() || byte_code_size == 0,
         "native methods should not contain byte codes");
  ConstMethod* cm = ConstMethod::allocate(loader_data,
                                          byte_code_size,
                                          sizes,
                                          method_type,
                                          CHECK_NULL);

  int size = Method::size(access_flags.is_native());

  return new (loader_data, size, false, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, size);
}

Method::Method(ConstMethod* xconst, AccessFlags access_flags, int size) {
  No_Safepoint_Verifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_method_size(size);
  set_intrinsic_id(vmIntrinsics::_none);
  set_jfr_towrite(false);
  set_force_inline(false);
  set_hidden(false);
  set_dont_inline(false);
  set_has_injected_profile(false);
  set_running_emcp(false);
  set_method_data(NULL);
  clear_method_counters();
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(NULL); // sets i2i entry and from_int
  set_adapter_entry(NULL);
  clear_code(false /* don't need a lock */); // from_c/from_i get set to c2i/i2i

  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(NULL);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
}

// Release Method*. The nmethod will be gone when we get here because
// we've walked the code cache.
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  clear_jmethod_id(loader_data);
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(NULL);
  MetadataFactory::free_metadata(loader_data, method_data());
  set_method_data(NULL);
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  // The nmethod will be gone when we get here.
  if (code() != NULL) _code = NULL;
}

address Method::get_i2c_entry() {
  assert(_adapter != NULL, "must have");
  return _adapter->get_i2c_entry();
}

address Method::get_c2i_entry() {
  assert(_adapter != NULL, "must have");
  return _adapter->get_c2i_entry();
}

address Method::get_c2i_unverified_entry() {
  assert(_adapter != NULL, "must have");
  return _adapter->get_c2i_unverified_entry();
}

char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}

char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
  char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
  strcpy(dest, klass_name);
  dest[klass_name_len] = '.';
  strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
  strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
  dest[len] = 0;
  return dest;
}
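
// Illustrative example (not from the original source): for java.lang.String.charAt
// this produces "java.lang.String.charAt(I)C" -- the external class name, a '.',
// the method name, then the raw type signature with no separator.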

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}

int Method::fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS) {
  // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
  // access exception table
  ExceptionTable table(mh());
  int length = table.length();
  // iterate through all entries sequentially
  constantPoolHandle pool(THREAD, mh->constants());
  for (int i = 0; i < length; i ++) {
    //reacquire the table in case a GC happened
    ExceptionTable table(mh());
    int beg_bci = table.start_pc(i);
    int end_bci = table.end_pc(i);
    assert(beg_bci <= end_bci, "inconsistent exception table");
    if (beg_bci <= throw_bci && throw_bci < end_bci) {
      // exception handler bci range covers throw_bci => investigate further
      int handler_bci = table.handler_pc(i);
      int klass_index = table.catch_type_index(i);
      if (klass_index == 0) {
        return handler_bci;
      } else if (ex_klass.is_null()) {
        return handler_bci;
      } else {
        // we know the exception class => get the constraint class
        // this may require loading of the constraint class; if verification
        // fails or some other exception occurs, return handler_bci
        Klass* k = pool->klass_at(klass_index, CHECK_(handler_bci));
        KlassHandle klass = KlassHandle(THREAD, k);
        assert(klass.not_null(), "klass not loaded");
        if (ex_klass->is_subtype_of(klass())) {
          return handler_bci;
        }
      }
    }
  }

  return -1;
}

void Method::mask_for(int bci, InterpreterOopMap* mask) {

  Thread* myThread = Thread::current();
  methodHandle h_this(myThread, this);
#ifdef ASSERT
  bool has_capability = myThread->is_VM_thread() ||
                        myThread->is_ConcurrentGC_thread() ||
                        myThread->is_GC_task_thread();

  if (!has_capability) {
    if (!VerifyStack && !VerifyLastFrame) {
      // verify stack calls this outside VM thread
      warning("oopmap should only be accessed by the "
              "VM, GC task or CMS threads (or during debugging)");
      InterpreterOopMap local_mask;
      method_holder()->mask_for(h_this, bci, &local_mask);
      local_mask.print();
    }
  }
#endif
  method_holder()->mask_for(h_this, bci, mask);
  return;
}


int Method::bci_from(address bcp) const {
#ifdef ASSERT
  { ResourceMark rm;
    assert(is_native() && bcp == code_base() || contains(bcp) || is_error_reported(),
           err_msg("bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s", bcp, name_and_sig_as_C_string()));
  }
#endif
  return bcp - code_base();
}


// Return (int)bcx if it appears to be a valid BCI.
// Return bci_from((address)bcx) if it appears to be a valid BCP.
// Return -1 otherwise.
// Used by profiling code, when invalid data is a possibility.
// The caller is responsible for validating the Method* itself.
int Method::validate_bci_from_bcx(intptr_t bcx) const {
  // keep bci as -1 if not a valid bci
  int bci = -1;
  if (bcx == 0 || (address)bcx == code_base()) {
    // code_size() may return 0 and we allow 0 here
    // the method may be native
    bci = 0;
  } else if (frame::is_bci(bcx)) {
    if (bcx < code_size()) {
      bci = (int)bcx;
    }
  } else if (contains((address)bcx)) {
    bci = (address)bcx - code_base();
  }
  // Assert that if we have dodged any asserts, bci is negative.
  assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
  return bci;
}

address Method::bcp_from(int bci) const {
  assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), err_msg("illegal bci: %d", bci));
  address bcp = code_base() + bci;
  assert(is_native() && bcp == code_base() || contains(bcp), "bcp doesn't belong to this method");
  return bcp;
}


int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_size_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_object_size(header_size() + extra_words);
}


Symbol* Method::klass_name() const {
  Klass* k = method_holder();
  assert(k->is_klass(), "must be klass");
  InstanceKlass* ik = (InstanceKlass*) k;
  return ik->name();
}


// Attempt to return method oop to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.

void Method::remove_unshareable_info() {
  unlink_method();
}

void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}

void Method::set_itable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite()) {
    // At runtime initialize_itable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context. The dumptime
    // itable index should be the same as the runtime index.
    assert(_vtable_index == itable_index_max - index,
           "archived itable index is different from runtime index");
    return; // don't write into the shared class
  } else {
    _vtable_index = itable_index_max - index;
  }
  assert(valid_itable_index(), "");
}




bool Method::was_executed_more_than(int n) {
  // Invocation counter is reset when the Method* is compiled.
  // If the method has compiled code we therefore assume it has
  // been executed more than n times.
  if (is_accessor() || is_empty_method() || (code() != NULL)) {
    // interpreter doesn't bump invocation counter of trivial methods
    // compiler does not bump invocation counter of compiled methods
    return true;
  }
  else if ((method_counters() != NULL &&
            method_counters()->invocation_counter()->carry()) ||
           (method_data() != NULL &&
            method_data()->invocation_counter()->carry())) {
    // The carry bit is set when the counter overflows and causes
    // a compilation to occur. We don't know how many times
    // the counter has been reset, so we simply assume it has
    // been executed more than n times.
    return true;
  } else {
    return invocation_count() > n;
  }
}

#ifndef PRODUCT
void Method::print_invocation_count() {
  if (is_static()) tty->print("static ");
  if (is_final()) tty->print("final ");
  if (is_synchronized()) tty->print("synchronized ");
  if (is_native()) tty->print("native ");
  method_holder()->name()->print_symbol_on(tty);
  tty->print(".");
  name()->print_symbol_on(tty);
  signature()->print_symbol_on(tty);

  if (WizardMode) {
    // dump the size of the byte codes
    tty->print(" {%d}", code_size());
  }
  tty->cr();

  tty->print_cr (" interpreter_invocation_count: %8d ", interpreter_invocation_count());
  tty->print_cr (" invocation_counter: %8d ", invocation_count());
  tty->print_cr (" backedge_counter: %8d ", backedge_count());
  if (CountCompiledCalls) {
    tty->print_cr (" compiled_invocation_count: %8d ", compiled_invocation_count());
  }

}
#endif

// Build a MethodData* object to hold information about this method
// collected in the interpreter.
void Method::build_interpreter_method_data(methodHandle method, TRAPS) {
  // Do not profile method if current thread holds the pending list lock,
  // which avoids deadlock for acquiring the MethodData_lock.
  if (InstanceRefKlass::owns_pending_list_lock((JavaThread*)THREAD)) {
    return;
  }

  // Grab a lock here to prevent multiple
  // MethodData*s from being created.
  MutexLocker ml(MethodData_lock, THREAD);
  if (method->method_data() == NULL) {
    ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
    MethodData* method_data = MethodData::allocate(loader_data, method, CHECK);
    method->set_method_data(method_data);
    if (PrintMethodData && (Verbose || WizardMode)) {
      ResourceMark rm(THREAD);
      tty->print("build_interpreter_method_data for ");
      method->print_name(tty);
      tty->cr();
      // At the end of the run, the MDO, full of data, will be dumped.
    }
  }
}

MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
  methodHandle mh(m);
  ClassLoaderData* loader_data = mh->method_holder()->class_loader_data();
  MethodCounters* counters = MethodCounters::allocate(loader_data, CHECK_NULL);
  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(loader_data, counters);
  }
  return mh->method_counters();
}

void Method::cleanup_inline_caches() {
  // The current system doesn't use inline caches in the interpreter
  // => nothing to do (keep this method around for future use)
}


int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
  return extra_stack_entries() * Interpreter::stackElementSize;
}


void Method::compute_size_of_parameters(Thread *thread) {
  ArgumentSizeComputer asc(signature());
  set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));
}

BasicType Method::result_type() const {
  ResultTypeFinder rtf(signature());
  return rtf.type();
}


bool Method::is_empty_method() const {
  return code_size() == 1
      && *code_base() == Bytecodes::_return;
}
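
// Illustrative example (not from the original source): an empty body such as
// `void m() {}` compiles to the single bytecode `return`, so code_size() is 1
// and the one byte at code_base() is Bytecodes::_return.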


bool Method::is_vanilla_constructor() const {
  // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
  // which only calls the superclass vanilla constructor and possibly does stores of
  // zero constants to local fields:
  //
  //   aload_0
  //   invokespecial
  //   indexbyte1
  //   indexbyte2
  //
  // followed by an (optional) sequence of:
  //
  //   aload_0
  //   aconst_null / iconst_0 / fconst_0 / dconst_0
  //   putfield
  //   indexbyte1
  //   indexbyte2
  //
  // followed by:
  //
  //   return

  assert(name() == vmSymbols::object_initializer_name(), "Should only be called for default constructors");
  assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
  int size = code_size();
  // Check if size matches
  if (size == 0 || size % 5 != 0) return false;
  address cb = code_base();
  int last = size - 1;
  if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
    // Does not call superclass default constructor
    return false;
  }
  // Check optional sequence
  for (int i = 4; i < last; i += 5) {
    if (cb[i] != Bytecodes::_aload_0) return false;
    if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
    if (cb[i+2] != Bytecodes::_putfield) return false;
  }
  return true;
}


bool Method::compute_has_loops_flag() {
  BytecodeStream bcs(this);
  Bytecodes::Code bc;

  while ((bc = bcs.next()) >= 0) {
    switch( bc ) {
      case Bytecodes::_ifeq:
      case Bytecodes::_ifnull:
      case Bytecodes::_iflt:
      case Bytecodes::_ifle:
      case Bytecodes::_ifne:
      case Bytecodes::_ifnonnull:
      case Bytecodes::_ifgt:
      case Bytecodes::_ifge:
      case Bytecodes::_if_icmpeq:
      case Bytecodes::_if_icmpne:
      case Bytecodes::_if_icmplt:
      case Bytecodes::_if_icmpgt:
      case Bytecodes::_if_icmple:
      case Bytecodes::_if_icmpge:
      case Bytecodes::_if_acmpeq:
      case Bytecodes::_if_acmpne:
      case Bytecodes::_goto:
      case Bytecodes::_jsr:
        if( bcs.dest() < bcs.next_bci() ) _access_flags.set_has_loops();
        break;

      case Bytecodes::_goto_w:
      case Bytecodes::_jsr_w:
        if( bcs.dest_w() < bcs.next_bci() ) _access_flags.set_has_loops();
        break;
    }
  }
  _access_flags.set_loops_flag_init();
  return _access_flags.has_loops();
}
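
// Illustrative note (not from the original source): the dest() < next_bci()
// tests above detect backward branches. For example, `while (c) { ... }`
// ends with a goto (or conditional branch) whose target bci is smaller than
// the bci of the following instruction, which is what marks the loop.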

bool Method::is_final_method(AccessFlags class_access_flags) const {
  // or "does_not_require_vtable_entry"
  // default method or overpass can occur, is not final (reuses vtable entry)
  // private methods get vtable entries for backward class compatibility.
  if (is_overpass() || is_default_method()) return false;
  return is_final() || class_access_flags.is_final();
}

bool Method::is_final_method() const {
  return is_final_method(method_holder()->access_flags());
}

bool Method::is_default_method() const {
  if (method_holder() != NULL &&
      method_holder()->is_interface() &&
      !is_abstract()) {
    return true;
  } else {
    return false;
  }
}

bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {
  if (is_final_method(class_access_flags)) return true;
#ifdef ASSERT
  ResourceMark rm;
  bool is_nonv = (vtable_index() == nonvirtual_vtable_index);
  if (class_access_flags.is_interface()) {
    assert(is_nonv == is_static(), err_msg("is_nonv=%s", name_and_sig_as_C_string()));
  }
#endif
  assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");
  return vtable_index() == nonvirtual_vtable_index;
}

bool Method::can_be_statically_bound() const {
  return can_be_statically_bound(method_holder()->access_flags());
}

bool Method::is_accessor() const {
  if (code_size() != 5) return false;
  if (size_of_parameters() != 1) return false;
  if (java_code_at(0) != Bytecodes::_aload_0 ) return false;
  if (java_code_at(1) != Bytecodes::_getfield) return false;
  if (java_code_at(4) != Bytecodes::_areturn &&
      java_code_at(4) != Bytecodes::_ireturn ) return false;
  return true;
}
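
// Illustrative example (not from the original source): a plain getter such as
// `int getX() { return _x; }` compiles to exactly the five bytes matched
// above: aload_0 (1 byte), getfield #index (3 bytes), ireturn (1 byte), with
// the receiver as the single parameter.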

bool Method::is_constant_getter() const {
  int last_index = code_size() - 1;
  // Check if the first 1-3 bytecodes are a constant push
  // and the last bytecode is a return.
  return (2 <= code_size() && code_size() <= 4 &&
          Bytecodes::is_const(java_code_at(0)) &&
          Bytecodes::length_for(java_code_at(0)) == last_index &&
          Bytecodes::is_return(java_code_at(last_index)));
}
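
// Illustrative example (not from the original source): `int two() { return 2; }`
// compiles to iconst_2; ireturn (code_size 2) and `int k() { return 100; }` to
// bipush 100; ireturn (code_size 3); both shapes pass the test above.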

bool Method::is_initializer() const {
  return is_object_initializer() || is_static_initializer();
}

bool Method::has_valid_initializer_flags() const {
  return (is_static() ||
          method_holder()->major_version() < 51);
}

bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}

bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}

objArrayHandle Method::resolved_checked_exceptions_impl(Method* this_oop, TRAPS) {
  int length = this_oop->checked_exceptions_length();
  if (length == 0) { // common case
    return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());
  } else {
    methodHandle h_this(THREAD, this_oop);
    objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
    objArrayHandle mirrors (THREAD, m_oop);
    for (int i = 0; i < length; i++) {
      CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
      assert(k->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
      mirrors->obj_at_put(i, k->java_mirror());
    }
    return mirrors;
  }
};


int Method::line_number_from_bci(int bci) const {
  if (bci == SynchronizationEntryBCI) bci = 0;
  assert(bci == 0 || 0 <= bci && bci < code_size(), "illegal bci");
  int best_bci = 0;
  int best_line = -1;

  if (has_linenumber_table()) {
    // The line numbers are a short array of 2-tuples [start_pc, line_number].
    // Not necessarily sorted and not necessarily one-to-one.
    CompressedLineNumberReadStream stream(compressed_linenumber_table());
    while (stream.read_pair()) {
      if (stream.bci() == bci) {
        // perfect match
        return stream.line();
      } else {
        // update best_bci/line
        if (stream.bci() < bci && stream.bci() >= best_bci) {
          best_bci = stream.bci();
          best_line = stream.line();
        }
      }
    }
  }
  return best_line;
}
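
// Illustrative example (not from the original source): for a table holding
// the pairs (0,10), (8,12), (4,11) in that order, a query for bci 6 finds no
// exact match and returns 11 -- the line of the largest start_pc at or below 6.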


bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
  if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
    Thread *thread = Thread::current();
    Symbol* klass_name = constants()->klass_name_at(klass_index);
    Handle loader(thread, method_holder()->class_loader());
    Handle prot (thread, method_holder()->protection_domain());
    return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
  } else {
    return true;
  }
}


bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
  int klass_index = constants()->klass_ref_index_at(refinfo_index);
  if (must_be_resolved) {
    // Make sure klass is resolved in constantpool.
    if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
  }
  return is_klass_loaded_by_klass_index(klass_index);
}


void Method::set_native_function(address function, bool post_event_flag) {
  assert(function != NULL, "use clear_native_function to unregister natives");
  assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
  address* native_function = native_function_addr();

  // We can see racers trying to place the same native function into place. Once
  // is plenty.
  address current = *native_function;
  if (current == function) return;
  if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
      function != NULL) {
    // native_method_throw_unsatisfied_link_error_entry() should only
    // be passed when post_event_flag is false.
    assert(function !=
           SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
           "post_event_flag mis-match");

    // post the bind event, and possibly change the bind function
    JvmtiExport::post_native_method_bind(this, &function);
  }
  *native_function = function;
  // This function can be called more than once. We must make sure that we always
  // use the latest registered method -> check if a stub already has been generated.
  // If so, we have to make it not_entrant.
  nmethod* nm = code(); // Put it into local variable to guard against concurrent updates
  if (nm != NULL) {
    nm->make_not_entrant();
  }
}


bool Method::has_native_function() const {
  if (is_method_handle_intrinsic())
    return false; // special-cased in SharedRuntime::generate_native_wrapper
  address func = native_function();
  return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
}


void Method::clear_native_function() {
  // Note: is_method_handle_intrinsic() is allowed here.
  set_native_function(
    SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
    !native_bind_event_is_interesting);
  clear_code();
}

address Method::critical_native_function() {
  methodHandle mh(this);
  return NativeLookup::lookup_critical_entry(mh);
}


void Method::set_signature_handler(address handler) {
  address* signature_handler = signature_handler_addr();
  *signature_handler = handler;
}


void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
  if (PrintCompilation && report) {
    ttyLocker ttyl;
    tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
    if (comp_level == CompLevel_all) {
      tty->print("all levels ");
    } else {
      tty->print("levels ");
      for (int i = (int)CompLevel_none; i <= comp_level; i++) {
        tty->print("%d ", i);
      }
    }
    this->print_short_name(tty);
    int size = this->code_size();
    if (size > 0) {
      tty->print(" (%d bytes)", size);
    }
    if (reason != NULL) {
      tty->print(" %s", reason);
    }
    tty->cr();
  }
  if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
    ttyLocker ttyl;
    xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",
                     os::current_thread_id(), is_osr, comp_level);
    if (reason != NULL) {
      xtty->print(" reason=\'%s\'", reason);
    }
    xtty->method(this);
    xtty->stamp();
    xtty->end_elem();
  }
}

bool Method::is_always_compilable() const {
  // Generated adapters must be compiled
  if (is_method_handle_intrinsic() && is_synthetic()) {
    assert(!is_not_c1_compilable(), "sanity check");
    assert(!is_not_c2_compilable(), "sanity check");
    return true;
  }

  return false;
}

bool Method::is_not_compilable(int comp_level) const {
  if (number_of_breakpoints() > 0)
    return true;
  if (is_always_compilable())
    return false;
  if (comp_level == CompLevel_any)
    return is_not_c1_compilable() || is_not_c2_compilable();
  if (is_c1_compile(comp_level))
    return is_not_c1_compilable();
  if (is_c2_compile(comp_level))
    return is_not_c2_compilable();
  return false;
}

// call this when compiler finds that this method is not compilable
void Method::set_not_compilable(int comp_level, bool report, const char* reason) {
  if (is_always_compilable()) {
    // Don't mark a method which should be always compilable
    return;
  }
  print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
  if (comp_level == CompLevel_all) {
    set_not_c1_compilable();
    set_not_c2_compilable();
  } else {
    if (is_c1_compile(comp_level))
      set_not_c1_compilable();
    if (is_c2_compile(comp_level))
      set_not_c2_compilable();
  }
  CompilationPolicy::policy()->disable_compilation(this);
  assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");
}

bool Method::is_not_osr_compilable(int comp_level) const {
  if (is_not_compilable(comp_level))
    return true;
  if (comp_level == CompLevel_any)
    return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
  if (is_c1_compile(comp_level))
    return is_not_c1_osr_compilable();
  if (is_c2_compile(comp_level))
    return is_not_c2_osr_compilable();
  return false;
}

void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
  print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
  if (comp_level == CompLevel_all) {
    set_not_c1_osr_compilable();
    set_not_c2_osr_compilable();
  } else {
    if (is_c1_compile(comp_level))
      set_not_c1_osr_compilable();
    if (is_c2_compile(comp_level))
      set_not_c2_osr_compilable();
  }
  CompilationPolicy::policy()->disable_compilation(this);
  assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
}

// Revert to using the interpreter and clear out the nmethod
void Method::clear_code(bool acquire_lock /* = true */) {
  MutexLockerEx pl(acquire_lock ? Patching_lock : NULL, Mutex::_no_safepoint_check_flag);
  // this may be NULL if c2i adapters have not been made yet,
  // which should only happen at allocate time
  if (_adapter == NULL) {
    _from_compiled_entry = NULL;
  } else {
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = NULL;
}

// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  _code = NULL;
  _i2i_entry = NULL;
  _from_interpreted_entry = NULL;
  if (is_native()) {
    *native_function_addr() = NULL;
    set_signature_handler(NULL);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)
  _adapter = NULL;
  _from_compiled_entry = NULL;

  // In case of DumpSharedSpaces, _method_data should always be NULL.
  //
  // During runtime (!DumpSharedSpaces), when we are cleaning a
  // shared class that failed to load, this->link_method() may
  // have already been called (before an exception happened), so
  // this->_method_data may not be NULL.
  assert(!DumpSharedSpaces || _method_data == NULL, "unexpected method data?");

  set_method_data(NULL);
  clear_method_counters();
}

// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
void Method::link_method(methodHandle h_method, TRAPS) {
  // If the code cache is full, we may reenter this function for the
  // leftover methods that weren't linked.
  if (_i2i_entry != NULL) return;

  assert(_adapter == NULL, "init'd to NULL" );
  assert( _code == NULL, "nothing compiled yet" );

  // Setup interpreter entrypoint
  assert(this == h_method(), "wrong h_method()" );
  address entry = Interpreter::entry_for_method(h_method);
  assert(entry != NULL, "interpreter entry must be non-null");
  // Sets both _i2i_entry and _from_interpreted_entry
  set_interpreter_entry(entry);

  // Don't overwrite already registered native entries.
  if (is_native() && !has_native_function()) {
    set_native_function(
      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
      !native_bind_event_is_interesting);
  }

  // Setup compiler entrypoint. This is made eagerly, so we do not need
  // special handling of vtables. An alternative is to make adapters more
  // lazily by calling make_adapter() from from_compiled_entry() for the
  // normal calls. For vtable calls life gets more complicated. When a
  // call-site goes mega-morphic we need adapters in all methods which can be
  // called from the vtable. We need adapters on such methods that get loaded
  // later. Ditto for mega-morphic itable calls. If this proves to be a
  // problem we'll make these lazily later.
  (void) make_adapters(h_method, CHECK);

  // ONLY USE the h_method now as make_adapter may have blocked

}

address Method::make_adapters(methodHandle mh, TRAPS) {
  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == NULL ) {
    THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}

void Method::restore_unshareable_info(TRAPS) {
  // Since restore_unshareable_info can be called more than once for a method, don't
  // redo any work. If this field is restored, there is nothing to do.
  if (_from_compiled_entry == NULL) {
    // restore method's vtable by calling a virtual function
    restore_vtable();

    methodHandle mh(THREAD, this);
    link_method(mh, CHECK);
  }
}


// The verified_code_entry() must be called when an invoke is resolved
// on this method.

// It returns the compiled code entry point, after asserting not null.
// This function is called after potential safepoints so that nmethod
// or adapter that it points to is still live and valid.
// This function must not hit a safepoint!
address Method::verified_code_entry() {
  debug_only(No_Safepoint_Verifier nsv;)
  assert(_from_compiled_entry != NULL, "must be set");
  return _from_compiled_entry;
}

// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
// (could be racing a deopt).
// Not inline to avoid circular ref.
bool Method::check_code() const {
  // cached in a register or local. There's a race on the value of the field.
  nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
  return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
}

// Install compiled code. Instantly it can execute.
void Method::set_code(methodHandle mh, nmethod *code) {
  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
  assert( code, "use clear_code to remove code" );
  assert( mh->check_code(), "" );

  guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");

  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.
  mh->_code = code; // Assign before allowing compiled code to exec

  int comp_level = code->comp_level();
  // In theory there could be a race here. In practice it is unlikely
  // and not worth worrying about.
  if (comp_level > mh->highest_comp_level()) {
    mh->set_highest_comp_level(comp_level);
  }

  OrderAccess::storestore();
#ifdef SHARK
  mh->_from_interpreted_entry = code->insts_begin();
#else //!SHARK
  mh->_from_compiled_entry = code->verified_entry_point();
  OrderAccess::storestore();
  // Instantly compiled code can execute.
  if (!mh->is_method_handle_intrinsic())
    mh->_from_interpreted_entry = mh->get_i2c_entry();
#endif //!SHARK
}


bool Method::is_overridden_in(Klass* k) const {
  InstanceKlass* ik = InstanceKlass::cast(k);

  if (ik->is_interface()) return false;

  // If method is an interface, we skip it - except if it
  // is a miranda method
  if (method_holder()->is_interface()) {
    // Check that method is not a miranda method
    if (ik->lookup_method(name(), signature()) == NULL) {
      // No implementation exists - so it is a miranda method
      return false;
    }
    return true;
  }

  assert(ik->is_subclass_of(method_holder()), "should be subklass");
  assert(ik->vtable() != NULL, "vtable should exist");
  if (!has_vtable_index()) {
    return false;
  } else {
    Method* vt_m = ik->method_at_vtable(vtable_index());
    return vt_m != this;
  }
}


// give advice about whether this Method* should be cached or not
bool Method::should_not_be_cached() const {
  if (is_old()) {
    // This method has been redefined. It is either EMCP or obsolete
    // and we don't want to cache it because that would pin the method
    // down and prevent it from being collectible if and when it
    // finishes executing.
    return true;
  }

  // caching this method should be just fine
  return false;
}


/**
 * Returns true if this is one of the specially treated methods for
 * security related stack walks (like Reflection.getCallerClass).
 */
bool Method::is_ignored_by_security_stack_walk() const {
  const bool use_new_reflection = JDK_Version::is_gte_jdk14x_version() && UseNewReflection;

  if (intrinsic_id() == vmIntrinsics::_invoke) {
    // This is Method.invoke() -- ignore it
    return true;
  }
  if (use_new_reflection &&
      method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {
    // This is an auxiliary frame -- ignore it
    return true;
  }
  if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {
    // This is an internal adapter frame for method handles -- ignore it
    return true;
  }
  return false;
}


// Constant pool structure for invoke methods:
enum {
  _imcp_invoke_name = 1, // utf8: 'invokeExact', etc.
  _imcp_invoke_signature, // utf8: (variable Symbol*)
  _imcp_limit
};

// Test if this method is an MH adapter frame generated by Java code.
// Cf. java/lang/invoke/InvokerBytecodeGenerator
bool Method::is_compiled_lambda_form() const {
  return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;
}

// Test if this method is an internal MH primitive method.
bool Method::is_method_handle_intrinsic() const {
  vmIntrinsics::ID iid = intrinsic_id();
  return (MethodHandles::is_signature_polymorphic(iid) &&
          MethodHandles::is_signature_polymorphic_intrinsic(iid));
}

bool Method::has_member_arg() const {
  vmIntrinsics::ID iid = intrinsic_id();
  return (MethodHandles::is_signature_polymorphic(iid) &&
          MethodHandles::has_member_arg(iid));
}

// Make an instance of a signature-polymorphic internal MH primitive.
methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
                                                  Symbol* signature,
                                                  TRAPS) {
  ResourceMark rm;
  methodHandle empty;

  KlassHandle holder = SystemDictionary::MethodHandle_klass();
  Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);
  assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
  if (TraceMethodHandles) {
    tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
  }

  // invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
  name->increment_refcount();
  signature->increment_refcount();

  int cp_length = _imcp_limit;
  ClassLoaderData* loader_data = holder->class_loader_data();
  constantPoolHandle cp;
  {
    ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
    cp = constantPoolHandle(THREAD, cp_oop);
  }
  cp->set_pool_holder(InstanceKlass::cast(holder()));
  cp->symbol_at_put(_imcp_invoke_name, name);
  cp->symbol_at_put(_imcp_invoke_signature, signature);
  cp->set_has_preresolution();

  // decide on access bits: public or not?
  int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
  bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
  if (must_be_static) flags_bits |= JVM_ACC_STATIC;
  assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");

  methodHandle m;
  {
    InlineTableSizes sizes;
    Method* m_oop = Method::allocate(loader_data, 0,
                                     accessFlags_from(flags_bits), &sizes,
                                     ConstMethod::NORMAL, CHECK_(empty));
    m = methodHandle(THREAD, m_oop);
  }
  m->set_constants(cp());
  m->set_name_index(_imcp_invoke_name);
  m->set_signature_index(_imcp_invoke_signature);
  assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
  assert(m->signature() == signature, "");
  ResultTypeFinder rtf(signature);
  m->constMethod()->set_result_type(rtf.type());
  m->compute_size_of_parameters(THREAD);
  m->init_intrinsic_id();
  assert(m->is_method_handle_intrinsic(), "");
#ifdef ASSERT
  if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id())) m->print();
  assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");
  assert(m->intrinsic_id() == iid, "correctly predicted iid");
#endif //ASSERT

  // Finally, set up its entry points.
  assert(m->can_be_statically_bound(), "");
  m->set_vtable_index(Method::nonvirtual_vtable_index);
  m->link_method(m, CHECK_(empty));

  if (TraceMethodHandles && (Verbose || WizardMode))
    m->print_on(tty);

  return m;
}

Klass* Method::check_non_bcp_klass(Klass* klass) {
  if (klass != NULL && klass->class_loader() != NULL) {
    if (klass->oop_is_objArray())
      klass = ObjArrayKlass::cast(klass)->bottom_klass();
    return klass;
  }
  return NULL;
}


methodHandle Method::clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
                                         u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
  // Code below does not work for native methods - they should never get rewritten anyway
  assert(!m->is_native(), "cannot rewrite native methods");
  // Allocate new Method*
  AccessFlags flags = m->access_flags();

  ConstMethod* cm = m->constMethod();
  int checked_exceptions_len = cm->checked_exceptions_length();
  int localvariable_len = cm->localvariable_table_length();
  int exception_table_len = cm->exception_table_length();
  int method_parameters_len = cm->method_parameters_length();
  int method_annotations_len = cm->method_annotations_length();
  int parameter_annotations_len = cm->parameter_annotations_length();
  int type_annotations_len = cm->type_annotations_length();
  int default_annotations_len = cm->default_annotations_length();

  InlineTableSizes sizes(
      localvariable_len,
      new_compressed_linenumber_size,
      exception_table_len,
      checked_exceptions_len,
      method_parameters_len,
      cm->generic_signature_index(),
      method_annotations_len,
      parameter_annotations_len,
      type_annotations_len,
      default_annotations_len,
      0);

  ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
  Method* newm_oop = Method::allocate(loader_data,
                                      new_code_length,
                                      flags,
                                      &sizes,
                                      m->method_type(),
                                      CHECK_(methodHandle()));
  methodHandle newm (THREAD, newm_oop);
  int new_method_size = newm->method_size();

  // Create a shallow copy of Method part, but be careful to preserve the new ConstMethod*
  ConstMethod* newcm = newm->constMethod();
  int new_const_method_size = newm->constMethod()->size();

  memcpy(newm(), m(), sizeof(Method));

  // Create shallow copy of ConstMethod.
  memcpy(newcm, m->constMethod(), sizeof(ConstMethod));

  // Reset correct method/const method, method size, and parameter info
  newm->set_constMethod(newcm);
  newm->constMethod()->set_code_size(new_code_length);
  newm->constMethod()->set_constMethod_size(new_const_method_size);
  newm->set_method_size(new_method_size);
  assert(newm->code_size() == new_code_length, "check");
  assert(newm->method_parameters_length() == method_parameters_len, "check");
  assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
  assert(newm->exception_table_length() == exception_table_len, "check");
  assert(newm->localvariable_table_length() == localvariable_len, "check");
  // Copy new byte codes
  memcpy(newm->code_base(), new_code, new_code_length);
  // Copy line number table
  if (new_compressed_linenumber_size > 0) {
    memcpy(newm->compressed_linenumber_table(),
           new_compressed_linenumber_table,
           new_compressed_linenumber_size);
  }
  // Copy method_parameters
  if (method_parameters_len > 0) {
    memcpy(newm->method_parameters_start(),
           m->method_parameters_start(),
           method_parameters_len * sizeof(MethodParametersElement));
  }
  // Copy checked_exceptions
  if (checked_exceptions_len > 0) {
    memcpy(newm->checked_exceptions_start(),
           m->checked_exceptions_start(),
           checked_exceptions_len * sizeof(CheckedExceptionElement));
  }
  // Copy exception table
  if (exception_table_len > 0) {
    memcpy(newm->exception_table_start(),
           m->exception_table_start(),
           exception_table_len * sizeof(ExceptionTableElement));
  }
  // Copy local variable number table
  if (localvariable_len > 0) {
    memcpy(newm->localvariable_table_start(),
           m->localvariable_table_start(),
           localvariable_len * sizeof(LocalVariableTableElement));
  }
  // Copy stackmap table
  if (m->has_stackmap_table()) {
    int code_attribute_length = m->stackmap_data()->length();
    Array<u1>* stackmap_data =
      MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);
    memcpy((void*)stackmap_data->adr_at(0),
           (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
    newm->set_stackmap_data(stackmap_data);
  }

  // copy annotations over to new method
  newcm->copy_annotations_from(cm);
  return newm;
}

vmSymbols::SID Method::klass_id_for_intrinsics(Klass* holder) {
  // if loader is not the default loader (i.e., != NULL), we can't know the intrinsics
  // because we are not loading from core libraries
  // exception: the AES intrinsics come from lib/ext/sunjce_provider.jar
  // which does not use the class default class loader so we check for its loader here
  InstanceKlass* ik = InstanceKlass::cast(holder);
  if ((ik->class_loader() != NULL) && !SystemDictionary::is_ext_class_loader(ik->class_loader())) {
    return vmSymbols::NO_SID; // regardless of name, no intrinsics here
  }

  // see if the klass name is well-known:
  Symbol* klass_name = ik->name();
  return vmSymbols::find_sid(klass_name);
}

void Method::init_intrinsic_id() {
  assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
  const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
  assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
  assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");

  // the klass name is well-known:
  vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());
  assert(klass_id != vmSymbols::NO_SID, "caller responsibility");

  // ditto for method and signature:
  vmSymbols::SID name_id = vmSymbols::find_sid(name());
  if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
      && name_id == vmSymbols::NO_SID)
    return;
  vmSymbols::SID sig_id = vmSymbols::find_sid(signature());
  if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
      && sig_id == vmSymbols::NO_SID) return;
  jshort flags = access_flags().as_short();

  vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
  if (id != vmIntrinsics::_none) {
    set_intrinsic_id(id);
    return;
  }

  // A few slightly irregular cases:
  switch (klass_id) {
    case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):
      // Second chance: check in regular Math.
      switch (name_id) {
        case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):
        case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):
        case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):
          // pretend it is the corresponding method in the non-strict class:
          klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
          id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
          break;
      }
      break;

    // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.
    case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
      if (!is_native()) break;
      id = MethodHandles::signature_polymorphic_name_id(method_holder(), name());
      if (is_static() != MethodHandles::is_signature_polymorphic_static(id))
        id = vmIntrinsics::_none;
      break;
  }

  if (id != vmIntrinsics::_none) {
    // Set up its iid. It is an alias method.
    set_intrinsic_id(id);
    return;
  }
}

// These two methods are static since a GC may move the Method
bool Method::load_signature_classes(methodHandle m, TRAPS) {
  if (THREAD->is_Compiler_thread()) {
    // There is nothing useful this routine can do from within the Compile thread.
    // Hopefully, the signature contains only well-known classes.
    // We could scan for this and return true/false, but the caller won't care.
    return false;
  }
  bool sig_is_loaded = true;
  Handle class_loader(THREAD, m->method_holder()->class_loader());
  Handle protection_domain(THREAD, m->method_holder()->protection_domain());
  ResourceMark rm(THREAD);
  Symbol* signature = m->signature();
  for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
    if (ss.is_object()) {
      Symbol* sym = ss.as_symbol(CHECK_(false));
      Symbol* name = sym;
      Klass* klass = SystemDictionary::resolve_or_null(name, class_loader,
                                                       protection_domain, THREAD);
      // We are loading classes eagerly. If a ClassNotFoundException or
      // a LinkageError was generated, be sure to ignore it.
      if (HAS_PENDING_EXCEPTION) {
        if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||
            PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {
          CLEAR_PENDING_EXCEPTION;
        } else {
          return false;
        }
      }
      if( klass == NULL) { sig_is_loaded = false; }
    }
  }
  return sig_is_loaded;
}

bool Method::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {
  Handle class_loader(THREAD, m->method_holder()->class_loader());
  Handle protection_domain(THREAD, m->method_holder()->protection_domain());
  ResourceMark rm(THREAD);
  Symbol* signature = m->signature();
  for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
    if (ss.type() == T_OBJECT) {
      Symbol* name = ss.as_symbol_or_null();
      if (name == NULL) return true;
      Klass* klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);
      if (klass == NULL) return true;
    }
  }
  return false;
}

// Exposed so field engineers can debug VM
void Method::print_short_name(outputStream* st) {
  ResourceMark rm;
#ifdef PRODUCT
  st->print(" %s::", method_holder()->external_name());
#else
  st->print(" %s::", method_holder()->internal_name());
#endif
  name()->print_symbol_on(st);
  if (WizardMode) signature()->print_symbol_on(st);
  else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
    MethodHandles::print_as_basic_type_signature_on(st, signature(), true);
}

// Comparer for sorting an object array containing
// Method*s.
static int method_comparator(Method* a, Method* b) {
  return a->name()->fast_compare(b->name());
}

// This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
// default_methods also uses this without the ordering for fast find_method
void Method::sort_methods(Array<Method*>* methods, bool idempotent, bool set_idnums) {
  int length = methods->length();
  if (length > 1) {
    {
      No_Safepoint_Verifier nsv;
      QuickSort::sort<Method*>(methods->data(), length, method_comparator, idempotent);
    }
    // Reset method ordering
    if (set_idnums) {
      for (int i = 0; i < length; i++) {
        Method* m = methods->at(i);
        m->set_method_idnum(i);
        m->set_orig_method_idnum(i);
      }
    }
  }
}

//-----------------------------------------------------------------------------------
// Non-product code unless JVM/TI needs it

#if !defined(PRODUCT) || INCLUDE_JVMTI
class SignatureTypePrinter : public SignatureTypeNames {
 private:
  outputStream* _st;
  bool _use_separator;

  void type_name(const char* name) {
    if (_use_separator) _st->print(", ");
    _st->print("%s", name);
    _use_separator = true;
  }

 public:
  SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
    _st = st;
    _use_separator = false;
  }

  void print_parameters() { _use_separator = false; iterate_parameters(); }
  void print_returntype() { _use_separator = false; iterate_returntype(); }
};


void Method::print_name(outputStream* st) {
  Thread *thread = Thread::current();
  ResourceMark rm(thread);
  SignatureTypePrinter sig(signature(), st);
  st->print("%s ", is_static() ? "static" : "virtual");
  sig.print_returntype();
  st->print(" %s.", method_holder()->internal_name());
  name()->print_symbol_on(st);
  st->print("(");
  sig.print_parameters();
  st->print(")");
}
#endif // !PRODUCT || INCLUDE_JVMTI


//-----------------------------------------------------------------------------------
// Non-product code

#ifndef PRODUCT
void Method::print_codes_on(outputStream* st) const {
  print_codes_on(0, code_size(), st);
}

void Method::print_codes_on(int from, int to, outputStream* st) const {
  Thread *thread = Thread::current();
  ResourceMark rm(thread);
  methodHandle mh (thread, (Method*)this);
  BytecodeStream s(mh);
  s.set_interval(from, to);
  BytecodeTracer::set_closure(BytecodeTracer::std_closure());
  while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);
}
#endif // not PRODUCT


// Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
// between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
// we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
// as end-of-stream terminator.
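//
// Illustrative example (not from the original source): a pair with bci delta 4
// and line delta 2 encodes as the single byte (4 << 3) | 2 == 0x22, matching
// the decoder below (_bci += next >> 3; _line += next & 0x7). Larger deltas,
// e.g. a bci delta of 40, take the 0xFF escape followed by two signed ints;
// and since the all-ones byte is reserved as the escape, the delta pair
// (31, 7) must also take the escape path.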

void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {
  // bci and line number do not compress into a single byte.
1520
// Write out escape character and use regular compression for bci and line number.
1521
write_byte((jubyte)0xFF);
1522
write_signed_int(bci_delta);
1523
write_signed_int(line_delta);
1524
}
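
// Worked example of the pair encoding above (illustrative, not VM code); the
// decoder below confirms the bci delta lives in the high 5 bits and the line
// delta in the low 3 bits of a packed byte:
//
//   (bci_delta = 9,  line_delta = 2)  ->  (9 << 3) | 2 == 0x4A     one byte
//   (bci_delta = 40, line_delta = 1)  ->  0xFF escape, then both deltas
//                                         via write_signed_int()
//   (bci_delta = 31, line_delta = 7)  ->  would pack to 0xFF, which collides
//                                         with the escape, so it also takes
//                                         the regular path
//   end of table                      ->  0x00 terminator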

// See comment in method.hpp which explains why this exists.
#if defined(_M_AMD64) && _MSC_VER >= 1400
#pragma optimize("", off)
void CompressedLineNumberWriteStream::write_pair(int bci, int line) {
  write_pair_inline(bci, line);
}
#pragma optimize("", on)
#endif

CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
  _bci = 0;
  _line = 0;
}


bool CompressedLineNumberReadStream::read_pair() {
  jubyte next = read_byte();
  // Check for terminator
  if (next == 0) return false;
  if (next == 0xFF) {
    // Escape character, regular compression used
    _bci  += read_signed_int();
    _line += read_signed_int();
  } else {
    // Single byte compression used
    _bci  += next >> 3;
    _line += next & 0x7;
  }
  return true;
}
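
// Walking a whole table with the reader above (illustrative; this mirrors the
// Verbose branch of Method::print_on() later in this file):
//
//   CompressedLineNumberReadStream stream(compressed_linenumber_table());
//   while (stream.read_pair()) {
//     tty->print_cr("bci %d maps to line %d", stream.bci(), stream.line());
//   }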


Bytecodes::Code Method::orig_bytecode_at(int bci) const {
  BreakpointInfo* bp = method_holder()->breakpoints();
  for (; bp != NULL; bp = bp->next()) {
    if (bp->match(this, bci)) {
      return bp->orig_bytecode();
    }
  }
  {
    ResourceMark rm;
    fatal(err_msg("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci));
  }
  return Bytecodes::_shouldnotreachhere;
}

void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
  assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
  BreakpointInfo* bp = method_holder()->breakpoints();
  for (; bp != NULL; bp = bp->next()) {
    if (bp->match(this, bci)) {
      bp->set_orig_bytecode(code);
      // and continue, in case there is more than one
    }
  }
}

void Method::set_breakpoint(int bci) {
  InstanceKlass* ik = method_holder();
  BreakpointInfo* bp = new BreakpointInfo(this, bci);
  bp->set_next(ik->breakpoints());
  ik->set_breakpoints(bp);
  // do this last:
  bp->set(this);
}

static void clear_matches(Method* m, int bci) {
  InstanceKlass* ik = m->method_holder();
  BreakpointInfo* prev_bp = NULL;
  BreakpointInfo* next_bp;
  for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
    next_bp = bp->next();
    // A bci value of -1 is used to delete all breakpoints in method m (see clear_all_breakpoints).
    if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
      // do this first:
      bp->clear(m);
      // unhook it
      if (prev_bp != NULL)
        prev_bp->set_next(next_bp);
      else
        ik->set_breakpoints(next_bp);
      delete bp;
      // When a class is redefined, JVMTI sets a breakpoint in all versions of EMCP
      // methods at the same location, so there can be multiple matching
      // (method_index, bci) BreakpointInfo nodes in the list. For a clear_breakpoint
      // request we delete just one of them and keep the BreakpointInfo of all other
      // method versions for future clear_breakpoint requests.
      // A bci value of -1 clears all breakpoints (see clear_all_breakpoints), which is
      // called when the class is unloaded. In that case we delete the breakpoint
      // information for all versions of the method. We may not correctly restore the
      // original bytecode in every method version, but that is OK: the class is being
      // unloaded, so those methods will never run again.
      if (bci >= 0) {
        break;
      }
    } else {
      // This one is a keeper.
      prev_bp = bp;
    }
  }
}

void Method::clear_breakpoint(int bci) {
  assert(bci >= 0, "");
  clear_matches(this, bci);
}

void Method::clear_all_breakpoints() {
  clear_matches(this, -1);
}


int Method::invocation_count() {
  MethodCounters* mcs = method_counters();
  if (TieredCompilation) {
    MethodData* const mdo = method_data();
    if (((mcs != NULL) ? mcs->invocation_counter()->carry() : false) ||
        ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
      return InvocationCounter::count_limit;
    } else {
      return ((mcs != NULL) ? mcs->invocation_counter()->count() : 0) +
             ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
    }
  } else {
    return (mcs == NULL) ? 0 : mcs->invocation_counter()->count();
  }
}

int Method::backedge_count() {
  MethodCounters* mcs = method_counters();
  if (TieredCompilation) {
    MethodData* const mdo = method_data();
    if (((mcs != NULL) ? mcs->backedge_counter()->carry() : false) ||
        ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
      return InvocationCounter::count_limit;
    } else {
      return ((mcs != NULL) ? mcs->backedge_counter()->count() : 0) +
             ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
    }
  } else {
    return (mcs == NULL) ? 0 : mcs->backedge_counter()->count();
  }
}
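
// Note on the saturation above (illustrative): InvocationCounter sets a carry
// bit once its count overflows, after which the raw count is no longer
// meaningful. Both accessors therefore pin the result at
// InvocationCounter::count_limit instead of summing stale values from
// MethodCounters and MethodData.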

int Method::highest_comp_level() const {
  const MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    return mcs->highest_comp_level();
  } else {
    return CompLevel_none;
  }
}

int Method::highest_osr_comp_level() const {
  const MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    return mcs->highest_osr_comp_level();
  } else {
    return CompLevel_none;
  }
}

void Method::set_highest_comp_level(int level) {
  MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    mcs->set_highest_comp_level(level);
  }
}

void Method::set_highest_osr_comp_level(int level) {
  MethodCounters* mcs = method_counters();
  if (mcs != NULL) {
    mcs->set_highest_osr_comp_level(level);
  }
}

BreakpointInfo::BreakpointInfo(Method* m, int bci) {
  _bci = bci;
  _name_index = m->name_index();
  _signature_index = m->signature_index();
  _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
  if (_orig_bytecode == Bytecodes::_breakpoint)
    _orig_bytecode = m->orig_bytecode_at(_bci);
  _next = NULL;
}

void BreakpointInfo::set(Method* method) {
#ifdef ASSERT
  {
    Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
    if (code == Bytecodes::_breakpoint)
      code = method->orig_bytecode_at(_bci);
    assert(orig_bytecode() == code, "original bytecode must be the same");
  }
#endif
  Thread* thread = Thread::current();
  *method->bcp_from(_bci) = Bytecodes::_breakpoint;
  method->incr_number_of_breakpoints(thread);
  SystemDictionary::notice_modification();
  {
    // Deoptimize all dependents on this method
    HandleMark hm(thread);
    methodHandle mh(thread, method);
    Universe::flush_dependents_on_method(mh);
  }
}

void BreakpointInfo::clear(Method* method) {
  *method->bcp_from(_bci) = orig_bytecode();
  assert(method->number_of_breakpoints() > 0, "must not go negative");
  method->decr_number_of_breakpoints(Thread::current());
}
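
// Round trip of the machinery above (illustrative): Method::set_breakpoint(bci)
// links a new BreakpointInfo onto the holder's list and BreakpointInfo::set()
// patches the bytecode at bci to Bytecodes::_breakpoint; the runtime later
// recovers the original instruction via Method::orig_bytecode_at(bci), and
// Method::clear_breakpoint(bci) restores the bytecode and unlinks the node.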

// jmethodID handling

// This is a block-allocating object, sort of like JNIHandleBlock, only a
// lot simpler. There aren't many of these, they aren't long, and they are
// rarely deleted, so we can get away with some suboptimal things.
// It's allocated on the CHeap because once we allocate a jmethodID, we can
// never get rid of it.
// It would be nice to be able to parameterize the number of methods for
// the null_class_loader, but then we'd have to turn this and ClassLoaderData
// into templates.

// I feel like this brain-dead class should exist somewhere in the STL.

class JNIMethodBlock : public CHeapObj<mtClass> {
  enum { number_of_methods = 8 };

  Method* _methods[number_of_methods];
  int _top;
  JNIMethodBlock* _next;
 public:
  static Method* const _free_method;

  JNIMethodBlock() : _top(0), _next(NULL) {
    for (int i = 0; i < number_of_methods; i++) _methods[i] = _free_method;
  }

  Method** add_method(Method* m) {
    if (_top < number_of_methods) {
      // top points to the next free entry.
      int i = _top;
      _methods[i] = m;
      _top++;
      return &_methods[i];
    } else if (_top == number_of_methods) {
      // if the next free entry ran off the block see if there's a free entry
      for (int i = 0; i < number_of_methods; i++) {
        if (_methods[i] == _free_method) {
          _methods[i] = m;
          return &_methods[i];
        }
      }
      // Only check each block once for frees. They're very unlikely.
      // Increment top past the end of the block.
      _top++;
    }
    // need to allocate a next block.
    if (_next == NULL) {
      _next = new JNIMethodBlock();
    }
    return _next->add_method(m);
  }

  bool contains(Method** m) {
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        if (&(b->_methods[i]) == m) {
          return true;
        }
      }
    }
    return false; // not found
  }

  // Doesn't really destroy it, just marks it as free so it can be reused.
  void destroy_method(Method** m) {
#ifdef ASSERT
    assert(contains(m), "should be a methodID");
#endif // ASSERT
    *m = _free_method;
  }
  void clear_method(Method* m) {
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        if (b->_methods[i] == m) {
          b->_methods[i] = NULL;
          return;
        }
      }
    }
    // not found
  }

  // During class unloading the methods are cleared, which is different
  // from being freed.
  void clear_all_methods() {
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        b->_methods[i] = NULL;
      }
    }
  }
#ifndef PRODUCT
  int count_methods() {
    // count all allocated methods
    int count = 0;
    for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
      for (int i = 0; i < number_of_methods; i++) {
        if (b->_methods[i] != _free_method) count++;
      }
    }
    return count;
  }
#endif // PRODUCT
};

// Something that can't be mistaken for an address or a markOop
Method* const JNIMethodBlock::_free_method = (Method*)55;

// Add a method id to the jmethod_ids
jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) {
  ClassLoaderData* cld = loader_data;

  if (!SafepointSynchronize::is_at_safepoint()) {
    // Have to add jmethod_ids() to class loader data thread-safely.
    // Also have to add the method to the list safely, which the cld lock
    // protects as well.
    MutexLockerEx ml(cld->metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (cld->jmethod_ids() == NULL) {
      cld->set_jmethod_ids(new JNIMethodBlock());
    }
    // jmethodID is a pointer to Method*
    return (jmethodID)cld->jmethod_ids()->add_method(m);
  } else {
    // At safepoint, we are single threaded and can set this.
    if (cld->jmethod_ids() == NULL) {
      cld->set_jmethod_ids(new JNIMethodBlock());
    }
    // jmethodID is a pointer to Method*
    return (jmethodID)cld->jmethod_ids()->add_method(m);
  }
}
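
// Illustrative (not VM code): a jmethodID is simply the address of a _methods
// slot in a JNIMethodBlock, so resolving one back to a Method* is a double
// dereference, e.g.
//
//   jmethodID mid    = Method::make_jmethod_id(cld, m);  // returns &slot, slot == m
//   Method* resolved = *((Method**)mid);                 // what resolve_jmethod_id() does
//
// This is also why change_method_associated_with_jmethod_id() below can swap
// the underlying Method* without invalidating the handle.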

// Mark a jmethodID as free. This is called when there is a data race in
// InstanceKlass while creating the jmethodID cache.
void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
  ClassLoaderData* cld = loader_data;
  Method** ptr = (Method**)m;
  assert(cld->jmethod_ids() != NULL, "should have method handles");
  cld->jmethod_ids()->destroy_method(ptr);
}

void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
  // Can't assert the method_holder is the same because the new method has the
  // scratch method holder.
  assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
           == new_method->method_holder()->class_loader(),
         "changing to a different class loader");
  // Just change the method in place, jmethodID pointer doesn't change.
  *((Method**)jmid) = new_method;
}

bool Method::is_method_id(jmethodID mid) {
  Method* m = resolve_jmethod_id(mid);
  if (m == NULL) {
    return false;
  }
  InstanceKlass* ik = m->method_holder();
  if (ik == NULL) {
    return false;
  }
  ClassLoaderData* cld = ik->class_loader_data();
  if (cld->jmethod_ids() == NULL) return false;
  return (cld->jmethod_ids()->contains((Method**)mid));
}

Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
  if (mid == NULL) return NULL;
  if (!Method::is_method_id(mid)) {
    return NULL;
  }
  Method* o = resolve_jmethod_id(mid);
  if (o == NULL || o == JNIMethodBlock::_free_method || !((Metadata*)o)->is_method()) {
    return NULL;
  }
  return o;
}

void Method::set_on_stack(const bool value) {
  // Set both the method itself and its constant pool. The constant pool being
  // on the stack means that some method referring to it is also on the stack.
  constants()->set_on_stack(value);

  bool succeeded = _access_flags.set_on_stack(value);
  if (value && succeeded) {
    MetadataOnStackMark::record(this, Thread::current());
  }
}

void Method::clear_jmethod_id(ClassLoaderData* loader_data) {
  loader_data->jmethod_ids()->clear_method(this);
}

// Called when the class loader is unloaded to make all methods weak.
void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
  loader_data->jmethod_ids()->clear_all_methods();
}

bool Method::has_method_vptr(const void* ptr) {
  Method m;
  // This assumes that the vtbl pointer is the first word of a C++ object.
  // This assumption is also in universe.cpp patch_klass_vtble
  void* vtbl2 = dereference_vptr((const void*)&m);
  void* this_vtbl = dereference_vptr(ptr);
  return vtbl2 == this_vtbl;
}
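
// Illustrative (not VM code): given the stated assumption that the vtbl
// pointer is the first word of a C++ object, the check above boils down to
//
//   *(void**)ptr == *(void**)&m   // same vtable pointer => plausibly a Method*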

// Check that this pointer is valid by checking that the vtbl pointer matches
bool Method::is_valid_method() const {
  if (this == NULL) {
    return false;
  } else if (!is_metaspace_object()) {
    return false;
  } else {
    return has_method_vptr((const void*)this);
  }
}

#ifndef PRODUCT
void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
  out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
}
#endif // PRODUCT


// Printing

#ifndef PRODUCT

void Method::print_on(outputStream* st) const {
  ResourceMark rm;
  assert(is_method(), "must be method");
  st->print_cr("%s", internal_name());
  // get the effect of PrintOopAddress, always, for methods:
  st->print_cr(" - this oop: " INTPTR_FORMAT, (intptr_t)this);
  st->print(" - method holder: "); method_holder()->print_value_on(st); st->cr();
  st->print(" - constants: " INTPTR_FORMAT " ", (address)constants());
  constants()->print_value_on(st); st->cr();
  st->print(" - access: 0x%x ", access_flags().as_int()); access_flags().print_on(st); st->cr();
  st->print(" - name: "); name()->print_value_on(st); st->cr();
  st->print(" - signature: "); signature()->print_value_on(st); st->cr();
  st->print_cr(" - max stack: %d", max_stack());
  st->print_cr(" - max locals: %d", max_locals());
  st->print_cr(" - size of params: %d", size_of_parameters());
  st->print_cr(" - method size: %d", method_size());
  if (intrinsic_id() != vmIntrinsics::_none)
    st->print_cr(" - intrinsic id: %d %s", intrinsic_id(), vmIntrinsics::name_at(intrinsic_id()));
  if (highest_comp_level() != CompLevel_none)
    st->print_cr(" - highest level: %d", highest_comp_level());
  st->print_cr(" - vtable index: %d", _vtable_index);
  st->print_cr(" - i2i entry: " INTPTR_FORMAT, interpreter_entry());
  st->print(" - adapters: ");
  AdapterHandlerEntry* a = ((Method*)this)->adapter();
  if (a == NULL)
    st->print_cr(INTPTR_FORMAT, a);
  else
    a->print_adapter_on(st);
  st->print_cr(" - compiled entry " INTPTR_FORMAT, from_compiled_entry());
  st->print_cr(" - code size: %d", code_size());
  if (code_size() != 0) {
    st->print_cr(" - code start: " INTPTR_FORMAT, code_base());
    st->print_cr(" - code end (excl): " INTPTR_FORMAT, code_base() + code_size());
  }
  if (method_data() != NULL) {
    st->print_cr(" - method data: " INTPTR_FORMAT, (address)method_data());
  }
  st->print_cr(" - checked ex length: %d", checked_exceptions_length());
  if (checked_exceptions_length() > 0) {
    CheckedExceptionElement* table = checked_exceptions_start();
    st->print_cr(" - checked ex start: " INTPTR_FORMAT, table);
    if (Verbose) {
      for (int i = 0; i < checked_exceptions_length(); i++) {
        st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
      }
    }
  }
  if (has_linenumber_table()) {
    u_char* table = compressed_linenumber_table();
    st->print_cr(" - linenumber start: " INTPTR_FORMAT, table);
    if (Verbose) {
      CompressedLineNumberReadStream stream(table);
      while (stream.read_pair()) {
        st->print_cr(" - line %d: %d", stream.line(), stream.bci());
      }
    }
  }
  st->print_cr(" - localvar length: %d", localvariable_table_length());
  if (localvariable_table_length() > 0) {
    LocalVariableTableElement* table = localvariable_table_start();
    st->print_cr(" - localvar start: " INTPTR_FORMAT, table);
    if (Verbose) {
      for (int i = 0; i < localvariable_table_length(); i++) {
        int bci = table[i].start_bci;
        int len = table[i].length;
        const char* name = constants()->printable_name_at(table[i].name_cp_index);
        const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
        int slot = table[i].slot;
        st->print_cr(" - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
      }
    }
  }
  if (code() != NULL) {
    st->print(" - compiled code: ");
    code()->print_value_on(st);
  }
  if (is_native()) {
    st->print_cr(" - native function: " INTPTR_FORMAT, native_function());
    st->print_cr(" - signature handler: " INTPTR_FORMAT, signature_handler());
  }
}

#endif //PRODUCT

void Method::print_value_on(outputStream* st) const {
  assert(is_method(), "must be method");
  st->print("%s", internal_name());
  print_address_on(st);
  st->print(" ");
  name()->print_value_on(st);
  st->print(" ");
  signature()->print_value_on(st);
  st->print(" in ");
  method_holder()->print_value_on(st);
  if (WizardMode) st->print("#%d", _vtable_index);
  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
  if (WizardMode && code() != NULL) st->print(" ((nmethod*)%p)", code());
}

#if INCLUDE_SERVICES
// Size Statistics
void Method::collect_statistics(KlassSizeStats *sz) const {
  int mysize = sz->count(this);
  sz->_method_bytes += mysize;
  sz->_method_all_bytes += mysize;
  sz->_rw_bytes += mysize;

  if (constMethod()) {
    constMethod()->collect_statistics(sz);
  }
  if (method_data()) {
    method_data()->collect_statistics(sz);
  }
}
#endif // INCLUDE_SERVICES

// Verification

void Method::verify_on(outputStream* st) {
  guarantee(is_method(), "object must be method");
  guarantee(constants()->is_constantPool(), "should be constant pool");
  guarantee(constMethod()->is_constMethod(), "should be ConstMethod*");
  MethodData* md = method_data();
  guarantee(md == NULL ||
            md->is_methodData(), "should be method data");
}