Path: blob/jdk8u272-b10-aarch32-20201026/hotspot/src/share/vm/oops/instanceMirrorKlass.cpp
48693 views
/*
 * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/oopFactory.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/instanceOop.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_ALL_GCS
#include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionManager.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif // INCLUDE_ALL_GCS

// InstanceMirrorKlass describes java.lang.Class instances (mirrors).  The
// static fields of the mirrored class are embedded at the tail of the mirror
// object (see instance_size()/allocate_instance() below), so every GC
// iteration routine here must visit those embedded static oop fields in
// addition to the ordinary instance fields handled by InstanceKlass.

// Byte offset from the start of a mirror to its embedded static-field area;
// set up elsewhere during bootstrap (0 until initialized).
int InstanceMirrorKlass::_offset_of_static_fields = 0;

// Debug-only assertion helpers passed into the iteration macros below.  Each
// decodes the (possibly narrow) oop at *p and, when it is non-NULL, checks
// that it lies in the expected heap region.  In product builds they compile
// to nothing.
#ifdef ASSERT
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
template <class T> void assert_nothing(T *p) {}

#else
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT

// Apply do_oop (with p bound to each slot) to `count` consecutive oop slots
// of type T starting at start_p, running assert_fn on each slot first.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* p         = (T*)(start_p);             \
  T* const end = p + (count);               \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}

// Same as above, but the iteration is clipped to the [low, high) bounds
// (used for MemRegion-limited iteration, e.g. card scanning).
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,             \
  do_oop, assert_fn)                        \
{                                           \
  T* const l = (T*)(low);                   \
  T* const h = (T*)(high);                  \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,   \
         "bounded region must be properly aligned"); \
  T* p = (T*)(start_p);                     \
  T* end = p + (count);                     \
  if (p < l) p = l;                         \
  if (end > h) end = h;                     \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}


#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                        do_oop, assert_fn) \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}

// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                                do_oop, assert_fn)         \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}


// Serial mark-sweep marking: follow the regular instance fields, the
// mirror's klass field, and the embedded static oop fields.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      MarkSweep::follow_class_loader(klass->class_loader_data());
    } else {
      MarkSweep::follow_klass(klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push every embedded static oop field of the mirrored class.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    MarkSweep::mark_and_push(p),                                              \
    assert_is_in_closed_subset)
}

#if INCLUDE_ALL_GCS
// Parallel compact marking: same structure as the serial variant above, but
// routed through PSParallelCompact with the per-worker compaction manager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());
    } else {
      PSParallelCompact::follow_klass(cm, klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::mark_and_push(cm, p),                                  \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS

// Serial mark-sweep pointer adjustment; returns the object's size in words.
// Size is captured before any pointers are adjusted.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    MarkSweep::adjust_pointer(p),                                             \
    assert_nothing)
  return size;
}

// Expansion fragments used inside the oop_oop_iterate definitions below:
// apply the closure to every embedded static oop field, then return the
// object's size.  `obj` and `closure` are expected to be in scope at the
// expansion site.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)        \
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    (closure)->do_oop##nv_suffix(p),                                          \
    assert_is_in_closed_subset)                                               \
  return oop_size(obj);                                                       \

// Bounded variant: only static fields inside MemRegion `mr` are visited.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr) \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    mr.start(), mr.end(),                                                     \
    (closure)->do_oop##nv_suffix(p),                                          \
    assert_is_in_closed_subset)                                               \
  return oop_size(obj);                                                       \


// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.

#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)       \
                                                                                  \
int InstanceMirrorKlass::                                                         \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                    \
  /* Get size before changing pointers */                                         \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);  \
                                                                                  \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                        \
                                                                                  \
  if_do_metadata_checked(closure, nv_suffix) {                                    \
    Klass* klass = java_lang_Class::as_Klass(obj);                                \
    /* We'll get NULL for primitive mirrors. */                                   \
    if (klass != NULL) {                                                          \
      closure->do_klass##nv_suffix(klass);                                        \
    }                                                                             \
  }                                                                               \
                                                                                  \
  if (UseCompressedOops) {                                                        \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);       \
  } else {                                                                        \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);             \
  }                                                                               \
}

#if INCLUDE_ALL_GCS
// Backwards variant (used by some collectors); note it does NOT visit the
// mirror's klass via do_klass, unlike the forward variant above.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                  \
int InstanceMirrorKlass::                                                         \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {          \
  /* Get size before changing pointers */                                         \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);  \
                                                                                  \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);              \
                                                                                  \
  if (UseCompressedOops) {                                                        \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);       \
  } else {                                                                        \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);             \
  }                                                                               \
}
#endif // INCLUDE_ALL_GCS


// MemRegion-bounded variant: only fields (and the klass, when obj itself is
// inside mr) within the region are visited.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)     \
                                                                                  \
int InstanceMirrorKlass::                                                         \
oop_oop_iterate##nv_suffix##_m(oop obj,                                           \
                               OopClosureType* closure,                           \
                               MemRegion mr) {                                    \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);  \
                                                                                  \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);                \
                                                                                  \
  if_do_metadata_checked(closure, nv_suffix) {                                    \
    if (mr.contains(obj)) {                                                       \
      Klass* klass = java_lang_Class::as_Klass(obj);                              \
      /* We'll get NULL for primitive mirrors. */                                 \
      if (klass != NULL) {                                                        \
        closure->do_klass##nv_suffix(klass);                                      \
      }                                                                           \
    }                                                                             \
  }                                                                               \
                                                                                  \
  if (UseCompressedOops) {                                                        \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr);\
  } else {                                                                        \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);      \
  }                                                                               \
}

// Instantiate the iterate functions for every closure type.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)

#if INCLUDE_ALL_GCS
// Parallel scavenge: push scavengeable static-field slots onto the
// promotion manager's work queue.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}

// Parallel compact pointer update; returns the object's size in words.
// Size is captured before any pointers are updated.
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
#endif // INCLUDE_ALL_GCS

// Size (in words) of a mirror for class k: the fixed java.lang.Class part
// plus room for k's static fields when k is a (non-NULL) instance class.
int InstanceMirrorKlass::instance_size(KlassHandle k) {
  if (k() != NULL && k->oop_is_instance()) {
    return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
  }
  return size_helper();
}

// Allocate a new mirror for class k.  Mirrors are variable sized, so the
// computed size is recorded in the mirror itself for later oop_size() calls.
instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
  // Query before forming handle.
  int size = instance_size(k);
  KlassHandle h_k(THREAD, this);
  instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);

  // Since mirrors can be variable sized because of the static fields, store
  // the size in the mirror itself.
  java_lang_Class::set_oop_size(i, size);

  return i;
}

// Read back the per-object size stored by allocate_instance().
int InstanceMirrorKlass::oop_size(oop obj) const {
  return java_lang_Class::oop_size(obj);
}

// Number of embedded static oop fields in this mirror; 0 for primitive
// mirrors (NULL klass) and for non-instance klasses.
int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
  Klass* k = java_lang_Class::as_Klass(obj);
  if (k != NULL && k->oop_is_instance()) {
    return InstanceKlass::cast(k)->static_oop_field_count();
  }
  return 0;
}