Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/openj9
Path: blob/master/runtime/gc_glue_java/GlobalCollectorDelegate.cpp
5985 views
1
/*******************************************************************************
2
* Copyright (c) 2017, 2021 IBM Corp. and others
3
*
4
* This program and the accompanying materials are made available under
5
* the terms of the Eclipse Public License 2.0 which accompanies this
6
* distribution and is available at https://www.eclipse.org/legal/epl-2.0/
7
* or the Apache License, Version 2.0 which accompanies this distribution and
8
* is available at https://www.apache.org/licenses/LICENSE-2.0.
9
*
10
* This Source Code may also be made available under the following
11
* Secondary Licenses when the conditions for such availability set
12
* forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
13
* General Public License, version 2 with the GNU Classpath
14
* Exception [1] and GNU General Public License, version 2 with the
15
* OpenJDK Assembly Exception [2].
16
*
17
* [1] https://www.gnu.org/software/classpath/license.html
18
* [2] http://openjdk.java.net/legal/assembly-exception.html
19
*
20
* SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
21
*******************************************************************************/
22
23
#include "j9.h"
24
#include "j9cfg.h"
25
#include "j9consts.h"
26
#include "j9nonbuilder.h"
27
#include "j9nongenerated.h"
28
#include "mmprivatehook.h"
29
#include "ModronAssertions.h"
30
#include "omrgcconsts.h"
31
#include "omrhookable.h"
32
33
#include "ClassHeapIterator.hpp"
34
#include "ClassLoaderIterator.hpp"
35
#include "ClassLoaderManager.hpp"
36
#include "ClassLoaderSegmentIterator.hpp"
37
#include "ClassUnloadStats.hpp"
38
#include "ConfigurationDelegate.hpp"
39
#include "EnvironmentBase.hpp"
40
#include "FinalizerSupport.hpp"
41
#include "FrequentObjectsStats.hpp"
42
#include "GCExtensionsBase.hpp"
43
#include "GCObjectEvents.hpp"
44
#include "GlobalCollectorDelegate.hpp"
45
#include "Heap.hpp"
46
#include "HeapRegionDescriptorStandard.hpp"
47
#include "HeapRegionIteratorStandard.hpp"
48
#include "MarkingDelegate.hpp"
49
#include "MarkingScheme.hpp"
50
#include "MemorySubSpace.hpp"
51
#include "ObjectModel.hpp"
52
#include "ParallelGlobalGC.hpp"
53
#include "ParallelHeapWalker.hpp"
54
#if defined(OMR_ENV_DATA64) && defined(OMR_GC_FULL_POINTERS)
55
#include "ReadBarrierVerifier.hpp"
56
#endif /* defined(OMR_ENV_DATA64) && defined(OMR_GC_FULL_POINTERS) */
57
#include "ReferenceChainWalkerMarkMap.hpp"
58
#include "ReferenceObjectList.hpp"
59
#include "ScavengerJavaStats.hpp"
60
#include "StandardAccessBarrier.hpp"
61
#include "VMThreadListIterator.hpp"
62
63
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
/**
 * Heap-walk callback that fixes a single object, but only when its class is marked as
 * dying. Other dead objects whose classes remain valid (i.e. are not about to be
 * unloaded) are left alone (see CMVC 122959 for the rationale behind this function).
 */
static void
fixObjectIfClassDying(OMR_VMThread *omrVMThread, MM_HeapRegionDescriptor *region, omrobjectptr_t object, void *userData)
{
	/* An object whose class is being unloaded cannot be left as dark matter, so abandon it */
	bool const compressed = OMRVMTHREAD_COMPRESS_OBJECT_REFERENCES(omrVMThread);
	uintptr_t classFlags = J9CLASS_FLAGS(J9GC_J9OBJECT_CLAZZ_CMP(object, compressed));
	if (0 != (classFlags & J9AccClassDying)) {
		uintptr_t consumedBytes = MM_GCExtensions::getExtensions(omrVMThread)->objectModel.getConsumedSizeInBytesWithHeader(object);
		region->getSubSpace()->abandonHeapChunk(object, ((U_8*)object) + consumedBytes);
		/* userData points at a uintptr_t running count of fixed-up dead objects */
		uintptr_t *fixedObjectCounter = (uintptr_t *)userData;
		*fixedObjectCounter += 1;
	}
}
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
83
84
bool
85
MM_GlobalCollectorDelegate::initialize(MM_EnvironmentBase *env, MM_GlobalCollector *globalCollector, MM_MarkingScheme *markingScheme)
86
{
87
_markingScheme = markingScheme;
88
_globalCollector = globalCollector;
89
_javaVM = (J9JavaVM*)env->getLanguageVM();
90
_extensions = MM_GCExtensions::getExtensions(env);
91
92
/* This delegate is used primarily by MM_ParallelGlobalGC but is declared in base MM_GlobalCollector
93
* class which is base class for MM_IncrementalGlobalGC (balanced) and MM_RealtimeGC (realtime). The
94
* only MM_GlobalCollector methods (postCollect and isTimeForGlobalGCKickoff) that use this
95
* delegate do not use _globalCollector or _markingScheme, so these are required to be NULLed for the
96
* balanced and realtime GC policies (for safety), and not NULL for standard GC policies.
97
*/
98
Assert_MM_true((NULL != _globalCollector) == _extensions->isStandardGC());
99
Assert_MM_true((NULL != _markingScheme) == _extensions->isStandardGC());
100
101
/* Balanced and realtime polices will instantiate their own access barrier */
102
if (_extensions->isStandardGC()) {
103
104
#if defined(OMR_ENV_DATA64) && defined(OMR_GC_FULL_POINTERS)
105
if (1 == _extensions->fvtest_enableReadBarrierVerification) {
106
_extensions->accessBarrier = MM_ReadBarrierVerifier::newInstance(env, _markingScheme);
107
} else
108
#endif /* defined(OMR_ENV_DATA64) && defined(OMR_GC_FULL_POINTERS) */
109
{
110
_extensions->accessBarrier = MM_StandardAccessBarrier::newInstance(env, _markingScheme);
111
}
112
113
if (NULL == _extensions->accessBarrier) {
114
return false;
115
}
116
}
117
118
return true;
119
}
120
121
void
122
MM_GlobalCollectorDelegate::tearDown(MM_EnvironmentBase *env)
123
{
124
if (_extensions->isStandardGC() && (NULL != _extensions->accessBarrier)) {
125
_extensions->accessBarrier->kill(env);
126
_extensions->accessBarrier = NULL;
127
}
128
}
129
130
/**
 * Main-thread prologue for a global collection: resets per-cycle Java mark stats,
 * snapshots JNI critical-region state, and decides whether dynamic class unloading
 * will be attempted this cycle.
 */
void
MM_GlobalCollectorDelegate::mainThreadGarbageCollectStarted(MM_EnvironmentBase *env)
{
	/* Clear the java specific mark stats */
	_extensions->markJavaStats.clear();

#if defined(J9VM_GC_MODRON_COMPACTION)
	/* Snapshot the JNI critical region count; checkIfCompactionShouldBePrevented() consults it later this cycle */
	_criticalSectionCount = MM_StandardAccessBarrier::getJNICriticalRegionCount(_extensions);
#endif /* J9VM_GC_MODRON_COMPACTION */

#if defined(J9VM_GC_MODRON_SCAVENGER)
	if (_extensions->scavengerEnabled) {
		/* clear scavenger stats for correcting the ownableSynchronizerObjects stats, only in generational gc */
		_extensions->scavengerJavaStats.clearOwnableSynchronizerCounts();
	}
#endif /* defined(J9VM_GC_MODRON_SCAVENGER) */

#if defined(J9VM_GC_FINALIZATION)
	/* this should not be set by the GC since it is used by components in order to record that they performed some operation which will require that we do some finalization */
	_finalizationRequired = false;
#endif /* J9VM_GC_FINALIZATION */

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	bool forceUnloading = false;

	/* Set the dynamic class unloading flag based on command line and runtime state */
	switch (_extensions->dynamicClassUnloading) {
	case MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_NEVER:
		_extensions->runtimeCheckDynamicClassUnloading = false;
		forceUnloading = false;
		break;
	case MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_ALWAYS:
		_extensions->runtimeCheckDynamicClassUnloading = true;
		forceUnloading = true;
		break;
	case MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_ON_CLASS_LOADER_CHANGES:
		/* Unload when this is an aggressive GC, or when the class loader manager judges it is time */
		forceUnloading = env->_cycleState->_gcCode.isAggressiveGC();
		_extensions->runtimeCheckDynamicClassUnloading = forceUnloading || _extensions->classLoaderManager->isTimeForClassUnloading(env);
		break;
	default:
		break;
	}

	if (_extensions->runtimeCheckDynamicClassUnloading) {
		/* request collector enter classUnloadMutex if possible (if forceUnloading is set - always)*/
		/* on failure to acquire, class unloading is skipped for this cycle */
		_extensions->runtimeCheckDynamicClassUnloading = enterClassUnloadMutex(env, forceUnloading);
	}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
}
179
180
/**
 * Main-thread epilogue for a global collection: verifies all reference object lists
 * were drained during marking, releases the class unload mutex if it was taken, and
 * fixes/flushes heap state left behind by dead class loaders.
 *
 * @param compactedThisCycle true if a compaction ran this cycle (the heap is then
 *        already walkable and only the undead segment cache needs flushing)
 */
void
MM_GlobalCollectorDelegate::mainThreadGarbageCollectFinished(MM_EnvironmentBase *env, bool compactedThisCycle)
{
	/* Check that all reference object lists are empty:
	 * lists must be processed at Mark and nothing should be flushed after
	 */
	UDATA listCount = _extensions->gcThreadCount;
	MM_HeapRegionDescriptorStandard *region = NULL;
	GC_HeapRegionIteratorStandard regionIterator(_extensions->heap->getHeapRegionManager());
	while(NULL != (region = regionIterator.nextRegion())) {
		/* check all lists for regions, they should be empty */
		MM_HeapRegionDescriptorStandardExtension *regionExtension = MM_ConfigurationDelegate::getHeapRegionDescriptorStandardExtension(env, region);
		for (UDATA i = 0; i < listCount; i++) {
			MM_ReferenceObjectList *list = &regionExtension->_referenceObjectLists[i];
			Assert_MM_true(list->isWeakListEmpty());
			Assert_MM_true(list->isSoftListEmpty());
			Assert_MM_true(list->isPhantomListEmpty());
		}
	}

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	MM_MarkingDelegate::clearClassLoadersScannedFlag(env);

	/* If we allowed class unloading during this gc, we must release the classUnloadMutex */
	if (_extensions->runtimeCheckDynamicClassUnloading) {
		exitClassUnloadMutex(env);
	}

	/* make sure that we are going to get at least some number of bytes back since this will otherwise waste time in monitor operations and potentially get exclusive in order to do nothing */
	J9VMThread *vmThread = (J9VMThread *)env->getLanguageVMThread();
	uintptr_t reclaimableMemory = _extensions->classLoaderManager->reclaimableMemory();
	if (reclaimableMemory > 0) {
		if (!compactedThisCycle) {
			/* For explicit (System) GCs always reclaim; otherwise only once the dead class loader cache threshold is exceeded */
			bool isExplicitGC = env->_cycleState->_gcCode.isExplicitGC();
			if (isExplicitGC || (reclaimableMemory > _extensions->deadClassLoaderCacheSize)) {
				/* fix the heap */
				Trc_MM_DoFixHeapForUnload_Entry(vmThread, MEMORY_TYPE_RAM);
				MM_ParallelGlobalGC *parallelGlobalCollector = (MM_ParallelGlobalGC *)_globalCollector;
				UDATA fixedObjectCount = parallelGlobalCollector->fixHeapForWalk(env, MEMORY_TYPE_RAM, FIXUP_CLASS_UNLOADING, fixObjectIfClassDying);
				if (0 < fixedObjectCount) {
					Trc_MM_DoFixHeapForUnload_Exit(vmThread, fixedObjectCount);
				} else {
					Trc_MM_DoFixHeapForUnload_ExitNotNeeded(vmThread);
				}
				/* now flush the cached segments */
				Trc_MM_FlushUndeadSegments_Entry(vmThread, isExplicitGC ? "SystemGC" : "Dead Class Loader Cache Full");
				_extensions->classLoaderManager->flushUndeadSegments(env);
				Trc_MM_FlushUndeadSegments_Exit(vmThread);
			}
		} else {
#if defined(J9VM_GC_MODRON_COMPACTION)
			/* Compaction already left the heap walkable; only the undead segment cache needs flushing */
			Trc_MM_FlushUndeadSegments_Entry(vmThread, "Compaction");
			_extensions->classLoaderManager->flushUndeadSegments(env);
			Trc_MM_FlushUndeadSegments_Exit(vmThread);
#else
			/* compactedThisCycle cannot be true when compaction support is not compiled in */
			Assert_MM_unreachable();
#endif /* defined(J9VM_GC_MODRON_COMPACTION) */
		}
	}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
}
241
242
/**
 * Runs after marking completes: performs dynamic class unloading (bracketed by the
 * class-unloading start/end hooks and tracepoints) when it was enabled for this cycle,
 * then wakes the finalizer thread if any unloaded class loader requires finalization.
 */
void
MM_GlobalCollectorDelegate::postMarkProcessing(MM_EnvironmentBase *env)
{
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	/* Non-zero only if mainThreadGarbageCollectStarted() successfully entered the class unload mutex */
	if (_extensions->runtimeCheckDynamicClassUnloading != 0) {
		PORT_ACCESS_FROM_ENVIRONMENT(env);
		OMR_VMThread *vmThread = env->getOmrVMThread();
		Trc_MM_ClassUnloadingStart((J9VMThread *)vmThread->_language_vmthread);
		TRIGGER_J9HOOK_MM_PRIVATE_CLASS_UNLOADING_START(
			_extensions->privateHookInterface,
			vmThread,
			j9time_hires_clock(),
			J9HOOK_MM_PRIVATE_CLASS_UNLOADING_START);

		unloadDeadClassLoaders(env);

		/* Report the stats populated by unloadDeadClassLoaders() via tracepoint and public hook */
		MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;
		Trc_MM_ClassUnloadingEnd((J9VMThread *)vmThread->_language_vmthread,
								classUnloadStats->_classLoaderUnloadedCount,
								classUnloadStats->_classesUnloadedCount);
		TRIGGER_J9HOOK_MM_CLASS_UNLOADING_END(
			_extensions->hookInterface,
			(J9VMThread *)vmThread->_language_vmthread,
			j9time_hires_clock(),
			J9HOOK_MM_CLASS_UNLOADING_END,
			classUnloadStats->_endTime - classUnloadStats->_startTime,
			classUnloadStats->_classLoaderUnloadedCount,
			classUnloadStats->_classesUnloadedCount,
			classUnloadStats->_classUnloadMutexQuiesceTime,
			classUnloadStats->_endSetupTime - classUnloadStats->_startSetupTime,
			classUnloadStats->_endScanTime - classUnloadStats->_startScanTime,
			classUnloadStats->_endPostTime - classUnloadStats->_startPostTime);

		/* If there was dynamic class unloading checks during the run, record the new number of class
		 * loaders last seen during a DCU pass
		 */
		_extensions->classLoaderManager->setLastUnloadNumOfClassLoaders();
		_extensions->classLoaderManager->setLastUnloadNumOfAnonymousClasses();
	}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

#if defined(J9VM_GC_FINALIZATION)
	/* _finalizationRequired may have been set by unloadDeadClassLoaders() above */
	if (_finalizationRequired) {
		/* Signal the finalizer */
		omrthread_monitor_enter(_javaVM->finalizeMainMonitor);
		_javaVM->finalizeMainFlags |= J9_FINALIZE_FLAGS_MAIN_WAKE_UP;
		omrthread_monitor_notify_all(_javaVM->finalizeMainMonitor);
		omrthread_monitor_exit(_javaVM->finalizeMainMonitor);
	}
#endif /* J9VM_GC_FINALIZATION */
}
293
294
bool
295
MM_GlobalCollectorDelegate::isAllowUserHeapWalk()
296
{
297
/* Enable only if required for debugging. */
298
return (0 != (_javaVM->requiredDebugAttributes & J9VM_DEBUG_ATTRIBUTE_ALLOW_USER_HEAP_WALK));
299
}
300
301
void
302
MM_GlobalCollectorDelegate::prepareHeapForWalk(MM_EnvironmentBase *env)
303
{
304
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
305
/* Clear the appropriate flags of all classLoaders */
306
GC_ClassLoaderIterator classLoaderIterator(_javaVM->classLoaderBlocks);
307
J9ClassLoader *classLoader;
308
while((classLoader = classLoaderIterator.nextSlot()) != NULL) {
309
classLoader->gcFlags &= ~J9_GC_CLASS_LOADER_SCANNED;
310
}
311
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
312
}
313
314
#if defined(OMR_ENV_DATA64) && defined(OMR_GC_FULL_POINTERS)
void
MM_GlobalCollectorDelegate::poisonSlots(MM_EnvironmentBase *env)
{
	/* The cast presumes read barrier verification is enabled, i.e. initialize()
	 * created the access barrier as an MM_ReadBarrierVerifier. */
	MM_ReadBarrierVerifier *readBarrierVerifier = (MM_ReadBarrierVerifier *)_extensions->accessBarrier;
	readBarrierVerifier->poisonSlots(env);
}

void
MM_GlobalCollectorDelegate::healSlots(MM_EnvironmentBase *env)
{
	/* Counterpart of poisonSlots(); same cast assumption applies. */
	MM_ReadBarrierVerifier *readBarrierVerifier = (MM_ReadBarrierVerifier *)_extensions->accessBarrier;
	readBarrierVerifier->healSlots(env);
}
#endif /* defined(OMR_ENV_DATA64) && defined(OMR_GC_FULL_POINTERS) */
327
328
bool
329
MM_GlobalCollectorDelegate::heapAddRange(MM_EnvironmentBase *env, MM_MemorySubSpace *subspace, UDATA size, void *lowAddress, void *highAddress)
330
{
331
if(NULL != _extensions->referenceChainWalkerMarkMap) {
332
return _extensions->referenceChainWalkerMarkMap->heapAddRange(env, size, lowAddress, highAddress);
333
}
334
return true;
335
}
336
337
bool
338
MM_GlobalCollectorDelegate::heapRemoveRange(MM_EnvironmentBase *env, MM_MemorySubSpace *subspace, UDATA size, void *lowAddress, void *highAddress, void *lowValidAddress, void *highValidAddress)
339
{
340
if (NULL != _extensions->referenceChainWalkerMarkMap) {
341
return _extensions->referenceChainWalkerMarkMap->heapRemoveRange(env, size, lowAddress, highAddress, lowValidAddress, highValidAddress);
342
}
343
return true;
344
}
345
346
/**
 * Decides whether enough class loaders / anonymous classes have been loaded since the
 * last dynamic-class-unloading pass to justify kicking off a global GC.
 *
 * @return true when the weighted count of recently loaded anonymous classes plus newly
 *         created class loaders reaches the configured kickoff threshold; always false
 *         when dynamic class unloading is disabled or compiled out.
 */
bool
MM_GlobalCollectorDelegate::isTimeForGlobalGCKickoff()
{
	bool result = false;

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	uintptr_t numClassLoaderBlocks = pool_numElements(_javaVM->classLoaderBlocks);
	uintptr_t numAnonymousClasses = _javaVM->anonClassCount;

	Trc_MM_GlobalCollector_isTimeForGlobalGCKickoff_Entry(
		_extensions->dynamicClassUnloading,
		numClassLoaderBlocks,
		_extensions->dynamicClassUnloadingKickoffThreshold,
		_extensions->classLoaderManager->getLastUnloadNumOfClassLoaders());

	Trc_MM_GlobalCollector_isTimeForGlobalGCKickoff_anonClasses(
		numAnonymousClasses,
		_extensions->classLoaderManager->getLastUnloadNumOfAnonymousClasses(),
		_extensions->classUnloadingAnonymousClassWeight
	);

	/* The anonymous class count is monotonic between unload passes */
	Assert_MM_true(numAnonymousClasses >= _extensions->classLoaderManager->getLastUnloadNumOfAnonymousClasses());

	if ((0 != _extensions->dynamicClassUnloadingKickoffThreshold) && (_extensions->dynamicClassUnloading != MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_NEVER)) {
		/* Anonymous classes are weighted, since they are cheaper to unload than full class loaders */
		uintptr_t recentlyLoaded = (uintptr_t) ((numAnonymousClasses - _extensions->classLoaderManager->getLastUnloadNumOfAnonymousClasses()) * _extensions->classUnloadingAnonymousClassWeight);
		/* todo aryoung: getLastUnloadNumOfClassLoaders() includes the class loaders which
		 * were unloaded but still required finalization when the last classUnloading occured.
		 * This means that the threshold check is wrong when there are classes which require finalization.
		 * Temporarily make sure that we do not create a negative recently loaded.
		 */
		if (numClassLoaderBlocks > _extensions->classLoaderManager->getLastUnloadNumOfClassLoaders()) {
			recentlyLoaded += (numClassLoaderBlocks - _extensions->classLoaderManager->getLastUnloadNumOfClassLoaders());
		}
		result = recentlyLoaded >= _extensions->dynamicClassUnloadingKickoffThreshold;
	}

	Trc_MM_GlobalCollector_isTimeForGlobalGCKickoff_Exit(result ? "true" : "false");
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */

	return result;
}
387
388
void
389
MM_GlobalCollectorDelegate::postCollect(MM_EnvironmentBase* env, MM_MemorySubSpace* subSpace)
390
{
391
/* update the dynamic soft reference age based on the free space in the oldest generation after this collection so we know how many to clear next time */
392
MM_Heap* heap = (MM_Heap*)_extensions->heap;
393
uintptr_t heapSize = heap->getActiveMemorySize(MEMORY_TYPE_OLD);
394
uintptr_t freeSize = heap->getApproximateActiveFreeMemorySize(MEMORY_TYPE_OLD);
395
double percentFree = ((double)freeSize) / ((double)heapSize);
396
_extensions->dynamicMaxSoftReferenceAge = (uintptr_t)(percentFree * (double)(_extensions->maxSoftReferenceAge));
397
Assert_MM_true(_extensions->dynamicMaxSoftReferenceAge <= _extensions->maxSoftReferenceAge);
398
}
399
400
#if defined(J9VM_GC_MODRON_COMPACTION)
CompactPreventedReason
MM_GlobalCollectorDelegate::checkIfCompactionShouldBePrevented(MM_EnvironmentBase *env)
{
	/* Objects pinned by active JNI critical regions cannot be moved, so compaction
	 * must be prevented while any such region is held (count snapshotted at cycle start). */
	return (0 < _criticalSectionCount) ? COMPACT_PREVENTED_CRITICAL_REGIONS : COMPACT_PREVENTED_NONE;
}
#endif /* J9VM_GC_MODRON_COMPACTION */
414
415
416
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
bool
MM_GlobalCollectorDelegate::enterClassUnloadMutex(MM_EnvironmentBase *env, bool force)
{
	MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;
	bool entered = true;

	if (force) {
		/* Blocking acquisition: wait for the mutex and record the quiesce time */
		classUnloadStats->_classUnloadMutexQuiesceTime = _extensions->classLoaderManager->enterClassUnloadMutex(env);
	} else {
		/* Opportunistic acquisition: give up (class unloading is skipped) rather than wait */
		classUnloadStats->_classUnloadMutexQuiesceTime = J9CONST64(0);
		entered = _extensions->classLoaderManager->tryEnterClassUnloadMutex(env);
	}

	return entered;
}
432
433
/**
 * Releases the class unload mutex acquired via enterClassUnloadMutex().
 */
void
MM_GlobalCollectorDelegate::exitClassUnloadMutex(MM_EnvironmentBase *env)
{
	_extensions->classLoaderManager->exitClassUnloadMutex(env);
}
438
439
/**
 * Performs the actual dead class loader unloading pass: identifies unmarked class
 * loaders, cleans them up, reclaims their memory segments, and records timing stats
 * for the setup / scan / post phases. May set _finalizationRequired when unloaded
 * loaders need finalizer processing. Caller must hold the class unload mutex.
 */
void
MM_GlobalCollectorDelegate::unloadDeadClassLoaders(MM_EnvironmentBase *env)
{
	Trc_MM_ParallelGlobalGC_unloadDeadClassLoaders_entry(env->getLanguageVMThread());
	PORT_ACCESS_FROM_ENVIRONMENT(env);
	MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;

	/* The list of classLoaders to be unloaded by cleanUpClassLoadersEnd is rooted in unloadLink */

	/* set the vmState whilst we're unloading classes */
	UDATA vmState = env->pushVMstate(OMRVMSTATE_GC_CLEANING_METADATA);

	/* Count the classes we're unloading and perform class-specific clean up work for each unloading class.
	 * If we're unloading any classes, perform common class-unloading clean up.
	 */
	classUnloadStats->_startTime = j9time_hires_clock();
	classUnloadStats->_startSetupTime = classUnloadStats->_startTime;

	/* Setup phase: find dead class loaders using the mark map and begin cleanup */
	J9ClassLoader *classLoadersUnloadedList = _extensions->classLoaderManager->identifyClassLoadersToUnload(env, _markingScheme->getMarkMap(), classUnloadStats);
	_extensions->classLoaderManager->cleanUpClassLoadersStart(env, classLoadersUnloadedList, _markingScheme->getMarkMap(), classUnloadStats);

	classUnloadStats->_endSetupTime = j9time_hires_clock();
	classUnloadStats->_startScanTime = classUnloadStats->_endSetupTime;

	/* The list of classLoaders to be unloaded by cleanUpClassLoadersEnd is rooted in unloadLink */
	J9ClassLoader *unloadLink = NULL;
	J9MemorySegment *reclaimedSegments = NULL;
	/* Scan phase: may set _finalizationRequired (consumed later by postMarkProcessing) */
	_extensions->classLoaderManager->cleanUpClassLoaders(env, classLoadersUnloadedList, &reclaimedSegments, &unloadLink, &_finalizationRequired);

	/* Free the class memory segments associated with dead classLoaders, unload (free) the dead classLoaders that don't
	 * require finalization, and perform any final clean up after the dead classLoaders are gone.
	 */
	classUnloadStats->_endScanTime = j9time_hires_clock();
	classUnloadStats->_startPostTime = classUnloadStats->_endScanTime;

	/* enqueue all the segments we just salvaged from the dead class loaders for delayed free (this work was historically attributed in the unload end operation so it goes after the timer start) */
	_extensions->classLoaderManager->enqueueUndeadClassSegments(reclaimedSegments);
	_extensions->classLoaderManager->cleanUpClassLoadersEnd(env, unloadLink);

	classUnloadStats->_endPostTime = j9time_hires_clock();
	classUnloadStats->_endTime = classUnloadStats->_endPostTime;

	env->popVMstate(vmState);

	Trc_MM_ParallelGlobalGC_unloadDeadClassLoaders_exit(env->getLanguageVMThread());
}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
486
487
488