// Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/oops/instanceMirrorKlass.cpp
// (32285 views — code-viewer scrape metadata, kept as a comment)
/*1* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*22*/2324#include "precompiled.hpp"25#include "classfile/javaClasses.hpp"26#include "classfile/systemDictionary.hpp"27#include "gc_implementation/shared/markSweep.inline.hpp"28#include "gc_interface/collectedHeap.inline.hpp"29#include "memory/genOopClosures.inline.hpp"30#include "memory/iterator.inline.hpp"31#include "memory/oopFactory.hpp"32#include "oops/instanceKlass.hpp"33#include "oops/instanceMirrorKlass.hpp"34#include "oops/instanceOop.hpp"35#include "oops/oop.inline.hpp"36#include "oops/symbol.hpp"37#include "runtime/handles.inline.hpp"38#include "utilities/macros.hpp"39#if INCLUDE_ALL_GCS40#include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"41#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"42#include "gc_implementation/g1/g1OopClosures.inline.hpp"43#include "gc_implementation/g1/g1RemSet.inline.hpp"44#include "gc_implementation/g1/heapRegionManager.inline.hpp"45#include 
"gc_implementation/parNew/parOopClosures.inline.hpp"46#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"47#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"48#include "gc_implementation/shenandoah/shenandoahOopClosures.inline.hpp"49#include "oops/oop.pcgc.inline.hpp"50#endif // INCLUDE_ALL_GCS5152int InstanceMirrorKlass::_offset_of_static_fields = 0;5354#ifdef ASSERT55template <class T> void assert_is_in(T *p) {56T heap_oop = oopDesc::load_heap_oop(p);57if (!oopDesc::is_null(heap_oop)) {58oop o = oopDesc::decode_heap_oop_not_null(heap_oop);59assert(Universe::heap()->is_in(o), "should be in heap");60}61}62template <class T> void assert_is_in_closed_subset(T *p) {63T heap_oop = oopDesc::load_heap_oop(p);64if (!oopDesc::is_null(heap_oop)) {65oop o = oopDesc::decode_heap_oop_not_null(heap_oop);66assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");67}68}69template <class T> void assert_is_in_reserved(T *p) {70T heap_oop = oopDesc::load_heap_oop(p);71if (!oopDesc::is_null(heap_oop)) {72oop o = oopDesc::decode_heap_oop_not_null(heap_oop);73assert(Universe::heap()->is_in_reserved(o), "should be in reserved");74}75}76template <class T> void assert_nothing(T *p) {}7778#else79template <class T> void assert_is_in(T *p) {}80template <class T> void assert_is_in_closed_subset(T *p) {}81template <class T> void assert_is_in_reserved(T *p) {}82template <class T> void assert_nothing(T *p) {}83#endif // ASSERT8485#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \86T, start_p, count, do_oop, \87assert_fn) \88{ \89T* p = (T*)(start_p); \90T* const end = p + (count); \91while (p < end) { \92(assert_fn)(p); \93do_oop; \94++p; \95} \96}9798#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \99T, start_p, count, low, high, \100do_oop, assert_fn) \101{ \102T* const l = (T*)(low); \103T* const h = (T*)(high); \104assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \105mask_bits((intptr_t)h, sizeof(T)-1) == 0, 
\106"bounded region must be properly aligned"); \107T* p = (T*)(start_p); \108T* end = p + (count); \109if (p < l) p = l; \110if (end > h) end = h; \111while (p < end) { \112(assert_fn)(p); \113do_oop; \114++p; \115} \116}117118119#define InstanceMirrorKlass_OOP_ITERATE(start_p, count, \120do_oop, assert_fn) \121{ \122if (UseCompressedOops) { \123InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \124start_p, count, \125do_oop, assert_fn) \126} else { \127InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop, \128start_p, count, \129do_oop, assert_fn) \130} \131}132133// The following macros call specialized macros, passing either oop or134// narrowOop as the specialization type. These test the UseCompressedOops135// flag.136#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \137do_oop, assert_fn) \138{ \139if (UseCompressedOops) { \140InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop, \141start_p, count, \142low, high, \143do_oop, assert_fn) \144} else { \145InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop, \146start_p, count, \147low, high, \148do_oop, assert_fn) \149} \150}151152153void InstanceMirrorKlass::oop_follow_contents(oop obj) {154InstanceKlass::oop_follow_contents(obj);155156// Follow the klass field in the mirror.157Klass* klass = java_lang_Class::as_Klass(obj);158if (klass != NULL) {159// An anonymous class doesn't have its own class loader, so the call160// to follow_klass will mark and push its java mirror instead of the161// class loader. When handling the java mirror for an anonymous class162// we need to make sure its class loader data is claimed, this is done163// by calling follow_class_loader explicitly. 
For non-anonymous classes164// the call to follow_class_loader is made when the class loader itself165// is handled.166if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {167MarkSweep::follow_class_loader(klass->class_loader_data());168} else {169MarkSweep::follow_klass(klass);170}171} else {172// If klass is NULL then this a mirror for a primitive type.173// We don't have to follow them, since they are handled as strong174// roots in Universe::oops_do.175assert(java_lang_Class::is_primitive(obj), "Sanity check");176}177178InstanceMirrorKlass_OOP_ITERATE( \179start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \180MarkSweep::mark_and_push(p), \181assert_is_in_closed_subset)182}183184#if INCLUDE_ALL_GCS185void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,186oop obj) {187InstanceKlass::oop_follow_contents(cm, obj);188189// Follow the klass field in the mirror.190Klass* klass = java_lang_Class::as_Klass(obj);191if (klass != NULL) {192// An anonymous class doesn't have its own class loader, so the call193// to follow_klass will mark and push its java mirror instead of the194// class loader. When handling the java mirror for an anonymous class195// we need to make sure its class loader data is claimed, this is done196// by calling follow_class_loader explicitly. 
For non-anonymous classes197// the call to follow_class_loader is made when the class loader itself198// is handled.199if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {200PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());201} else {202PSParallelCompact::follow_klass(cm, klass);203}204} else {205// If klass is NULL then this a mirror for a primitive type.206// We don't have to follow them, since they are handled as strong207// roots in Universe::oops_do.208assert(java_lang_Class::is_primitive(obj), "Sanity check");209}210211InstanceMirrorKlass_OOP_ITERATE( \212start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \213PSParallelCompact::mark_and_push(cm, p), \214assert_is_in)215}216#endif // INCLUDE_ALL_GCS217218int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {219int size = oop_size(obj);220InstanceKlass::oop_adjust_pointers(obj);221222InstanceMirrorKlass_OOP_ITERATE( \223start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \224MarkSweep::adjust_pointer(p), \225assert_nothing)226return size;227}228229#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix) \230InstanceMirrorKlass_OOP_ITERATE( \231start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \232(closure)->do_oop##nv_suffix(p), \233assert_is_in_closed_subset) \234return oop_size(obj); \235236#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr) \237InstanceMirrorKlass_BOUNDED_OOP_ITERATE( \238start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \239mr.start(), mr.end(), \240(closure)->do_oop##nv_suffix(p), \241assert_is_in_closed_subset) \242return oop_size(obj); \243244245// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for246// all closures. 
Macros calling macros above for each oop size.247248#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix) \249\250int InstanceMirrorKlass:: \251oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \252/* Get size before changing pointers */ \253SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \254\255InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure); \256\257if_do_metadata_checked(closure, nv_suffix) { \258Klass* klass = java_lang_Class::as_Klass(obj); \259/* We'll get NULL for primitive mirrors. */ \260if (klass != NULL) { \261closure->do_klass##nv_suffix(klass); \262} \263} \264\265if (UseCompressedOops) { \266InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix); \267} else { \268InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix); \269} \270}271272#if INCLUDE_ALL_GCS273#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \274\275int InstanceMirrorKlass:: \276oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) { \277/* Get size before changing pointers */ \278SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \279\280InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \281\282if (UseCompressedOops) { \283InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix); \284} else { \285InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix); \286} \287}288#endif // INCLUDE_ALL_GCS289290291#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \292\293int InstanceMirrorKlass:: \294oop_oop_iterate##nv_suffix##_m(oop obj, \295OopClosureType* closure, \296MemRegion mr) { \297SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \298\299InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr); \300\301if_do_metadata_checked(closure, nv_suffix) { \302if (mr.contains(obj)) { \303Klass* klass = 
java_lang_Class::as_Klass(obj); \304/* We'll get NULL for primitive mirrors. */ \305if (klass != NULL) { \306closure->do_klass##nv_suffix(klass); \307} \308} \309} \310\311if (UseCompressedOops) { \312InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr); \313} else { \314InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr); \315} \316}317318ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)319ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)320#if INCLUDE_ALL_GCS321ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)322ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)323#endif // INCLUDE_ALL_GCS324ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)325ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)326327#if INCLUDE_ALL_GCS328void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {329// Note that we don't have to follow the mirror -> klass pointer, since all330// klasses that are dirty will be scavenged when we iterate over the331// ClassLoaderData objects.332333InstanceKlass::oop_push_contents(pm, obj);334InstanceMirrorKlass_OOP_ITERATE( \335start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\336if (PSScavenge::should_scavenge(p)) { \337pm->claim_or_forward_depth(p); \338}, \339assert_nothing )340}341342int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {343int size = oop_size(obj);344InstanceKlass::oop_update_pointers(cm, obj);345346InstanceMirrorKlass_OOP_ITERATE( \347start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\348PSParallelCompact::adjust_pointer(p), \349assert_nothing)350return size;351}352#endif // INCLUDE_ALL_GCS353354int InstanceMirrorKlass::instance_size(KlassHandle k) {355if (k() != NULL && k->oop_is_instance()) {356return align_object_size(size_helper() + 
InstanceKlass::cast(k())->static_field_size());357}358return size_helper();359}360361instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {362// Query before forming handle.363int size = instance_size(k);364KlassHandle h_k(THREAD, this);365instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);366367// Since mirrors can be variable sized because of the static fields, store368// the size in the mirror itself.369java_lang_Class::set_oop_size(i, size);370371return i;372}373374int InstanceMirrorKlass::oop_size(oop obj) const {375return java_lang_Class::oop_size(obj);376}377378int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {379Klass* k = java_lang_Class::as_Klass(obj);380if (k != NULL && k->oop_is_instance()) {381return InstanceKlass::cast(k)->static_oop_field_count();382}383return 0;384}385386387