GitHub Repository: PojavLauncherTeam/openj9
Path: blob/master/runtime/gc_glue_java/MetronomeDelegate.cpp
/*******************************************************************************
 * Copyright (c) 2019, 2022 IBM Corp. and others
 *
 * This program and the accompanying materials are made available under
 * the terms of the Eclipse Public License 2.0 which accompanies this
 * distribution and is available at https://www.eclipse.org/legal/epl-2.0/
 * or the Apache License, Version 2.0 which accompanies this distribution and
 * is available at https://www.apache.org/licenses/LICENSE-2.0.
 *
 * This Source Code may also be made available under the following
 * Secondary Licenses when the conditions for such availability set
 * forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
 * General Public License, version 2 with the GNU Classpath
 * Exception [1] and GNU General Public License, version 2 with the
 * OpenJDK Assembly Exception [2].
 *
 * [1] https://www.gnu.org/software/classpath/license.html
 * [2] http://openjdk.java.net/legal/assembly-exception.html
 *
 * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
 *******************************************************************************/

#include "MetronomeDelegate.hpp"

#if defined(J9VM_GC_REALTIME)

#include "omr.h"

#include "ClassHeapIterator.hpp"
#include "ClassLoaderIterator.hpp"
#include "ClassLoaderLinkedListIterator.hpp"
#include "ClassLoaderManager.hpp"
#include "ClassLoaderSegmentIterator.hpp"
#include "EnvironmentRealtime.hpp"
#include "FinalizableClassLoaderBuffer.hpp"
#include "FinalizableObjectBuffer.hpp"
#include "FinalizableReferenceBuffer.hpp"
#include "FinalizeListManager.hpp"
#include "FinalizerSupport.hpp"
#include "GCExtensionsBase.hpp"
#include "Heap.hpp"
#include "HeapRegionDescriptorRealtime.hpp"
#include "MetronomeAlarmThread.hpp"
#include "JNICriticalRegion.hpp"
#include "OwnableSynchronizerObjectBufferRealtime.hpp"
#include "OwnableSynchronizerObjectList.hpp"
#include "RealtimeAccessBarrier.hpp"
#include "RealtimeGC.hpp"
#include "RealtimeMarkingScheme.hpp"
#include "RealtimeMarkingSchemeRootMarker.hpp"
#include "RealtimeMarkingSchemeRootClearer.hpp"
#include "RealtimeMarkTask.hpp"
#include "RealtimeRootScanner.hpp"
#include "ReferenceObjectBufferRealtime.hpp"
#include "ReferenceObjectList.hpp"
#include "Scheduler.hpp"
#include "UnfinalizedObjectBufferRealtime.hpp"
#include "UnfinalizedObjectList.hpp"

void
61
MM_MetronomeDelegate::yieldWhenRequested(MM_EnvironmentBase *env)
62
{
63
MM_GCExtensionsBase *ext = env->getExtensions();
64
UDATA accessMask;
65
MM_Scheduler *sched = (MM_Scheduler *)ext->dispatcher;
66
if (sched->_mode != MM_Scheduler::MUTATOR) {
67
MM_JNICriticalRegion::releaseAccess((J9VMThread *)env->getOmrVMThread()->_language_vmthread, &accessMask);
68
while (sched->_mode != MM_Scheduler::MUTATOR) {
69
omrthread_sleep(10);
70
}
71
MM_JNICriticalRegion::reacquireAccess((J9VMThread *)env->getOmrVMThread()->_language_vmthread, accessMask);
72
}
73
}
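/*
 * Descriptive note (added for clarity, not in the original source): while the scheduler
 * is not in MUTATOR mode, the calling thread releases its JNI critical access, polls in
 * 10ms sleeps until the scheduler returns to MUTATOR mode, and then reacquires the
 * access mask it previously released.
 */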
74
75
/**
76
* C entrypoint for the newly created alarm thread.
77
*/
78
int J9THREAD_PROC
79
MM_MetronomeDelegate::metronomeAlarmThreadWrapper(void* userData)
80
{
81
MM_MetronomeAlarmThread *alarmThread = (MM_MetronomeAlarmThread *)userData;
82
J9JavaVM *javaVM = (J9JavaVM *)alarmThread->getScheduler()->_extensions->getOmrVM()->_language_vm;
83
PORT_ACCESS_FROM_JAVAVM(javaVM);
84
uintptr_t rc;
85
86
j9sig_protect(MM_MetronomeDelegate::signalProtectedFunction, (void*)userData,
87
javaVM->internalVMFunctions->structuredSignalHandlerVM, javaVM,
88
J9PORT_SIG_FLAG_SIGALLSYNC | J9PORT_SIG_FLAG_MAY_CONTINUE_EXECUTION,
89
&rc);
90
91
omrthread_monitor_enter(alarmThread->_mutex);
92
alarmThread->_alarmThreadActive = MM_MetronomeAlarmThread::ALARM_THREAD_SHUTDOWN;
93
omrthread_monitor_notify(alarmThread->_mutex);
94
omrthread_exit(alarmThread->_mutex);
95
96
return 0;
97
}
98
99
uintptr_t
100
MM_MetronomeDelegate::signalProtectedFunction(J9PortLibrary *privatePortLibrary, void* userData)
101
{
102
MM_MetronomeAlarmThread *alarmThread = (MM_MetronomeAlarmThread *)userData;
103
J9JavaVM *javaVM = (J9JavaVM *)alarmThread->getScheduler()->_extensions->getOmrVM()->_language_vm;
104
J9VMThread *vmThread = NULL;
105
MM_EnvironmentRealtime *env = NULL;
106
107
if (JNI_OK != (javaVM->internalVMFunctions->attachSystemDaemonThread(javaVM, &vmThread, "GC Alarm"))) {
108
return 0;
109
}
110
111
env = MM_EnvironmentRealtime::getEnvironment(vmThread->omrVMThread);
112
113
alarmThread->run(env);
114
115
javaVM->internalVMFunctions->DetachCurrentThread((JavaVM*)javaVM);
116
117
return 0;
118
}
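/*
 * Illustrative sketch (not part of the original source): metronomeAlarmThreadWrapper()
 * is the C entrypoint of the alarm thread, with the MM_MetronomeAlarmThread instance
 * passed as the argument. Thread creation is expected to look roughly like the
 * following, assuming the usual omrthread_create() parameters (handle, stack size,
 * priority, suspend flag, entrypoint, argument); the names used here are hypothetical:
 *
 *   omrthread_t alarmOSThread = NULL;
 *   intptr_t createRC = omrthread_create(&alarmOSThread, 0, J9THREAD_PRIORITY_NORMAL, 0,
 *           MM_MetronomeDelegate::metronomeAlarmThreadWrapper, (void *)alarmThread);
 *   if (0 != createRC) {
 *       // thread creation failed; the alarm thread will not run
 *   }
 */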
119
120
void
121
MM_MetronomeDelegate::clearGCStats()
122
{
123
_extensions->markJavaStats.clear();
124
}
125
126
void
127
MM_MetronomeDelegate::clearGCStatsEnvironment(MM_EnvironmentRealtime *env)
128
{
129
env->_markStats.clear();
130
env->getGCEnvironment()->_markJavaStats.clear();
131
env->_workPacketStats.clear();
132
}
133
134
void
135
MM_MetronomeDelegate::mergeGCStats(MM_EnvironmentRealtime *env)
136
{
137
GC_Environment *gcEnv = env->getGCEnvironment();
138
139
MM_GlobalGCStats *finalGCStats= &_extensions->globalGCStats;
140
finalGCStats->markStats.merge(&env->_markStats);
141
_extensions->markJavaStats.merge(&gcEnv->_markJavaStats);
142
finalGCStats->workPacketStats.merge(&env->_workPacketStats);
143
}
144
145
uintptr_t
146
MM_MetronomeDelegate::getSplitArraysProcessed(MM_EnvironmentRealtime *env)
147
{
148
GC_Environment *gcEnv = env->getGCEnvironment();
149
return gcEnv->_markJavaStats.splitArraysProcessed;
150
}
151
152
bool
153
MM_MetronomeDelegate::initialize(MM_EnvironmentBase *env)
154
{
155
_scheduler = _realtimeGC->_sched;
156
_markingScheme = _realtimeGC->getMarkingScheme();
157
158
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
159
_unmarkedImpliesClasses = false;
160
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
161
_unmarkedImpliesCleared = false;
162
_unmarkedImpliesStringsCleared = false;
163
164
/* allocate and initialize the global reference object lists */
165
if (!allocateAndInitializeReferenceObjectLists(env)){
166
return false;
167
}
168
169
/* allocate and initialize the global unfinalized object lists */
170
if (!allocateAndInitializeUnfinalizedObjectLists(env)) {
171
return false;
172
}
173
174
/* allocate and initialize the global ownable synchronizer object lists */
175
if (!allocateAndInitializeOwnableSynchronizerObjectLists(env)) {
176
return false;
177
}
178
179
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
180
if (!_extensions->dynamicClassUnloadingThresholdForced) {
181
_extensions->dynamicClassUnloadingThreshold = 1;
182
}
183
if (!_extensions->dynamicClassUnloadingKickoffThresholdForced) {
184
_extensions->dynamicClassUnloadingKickoffThreshold = 0;
185
}
186
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
187
188
/* Create the appropriate access barrier for Metronome */
189
MM_RealtimeAccessBarrier *accessBarrier = NULL;
190
accessBarrier = allocateAccessBarrier(env);
191
if (NULL == accessBarrier) {
192
return false;
193
}
194
MM_GCExtensions::getExtensions(_javaVM)->accessBarrier = (MM_ObjectAccessBarrier *)accessBarrier;
195
196
_javaVM->realtimeHeapMapBasePageRounded = _markingScheme->_markMap->getHeapMapBaseRegionRounded();
197
_javaVM->realtimeHeapMapBits = _markingScheme->_markMap->getHeapMapBits();
198
199
return true;
200
}
201
202
bool
203
MM_MetronomeDelegate::allocateAndInitializeReferenceObjectLists(MM_EnvironmentBase *env)
204
{
205
const UDATA listCount = getReferenceObjectListCount(env);
206
Assert_MM_true(0 < listCount);
207
_extensions->referenceObjectLists = (MM_ReferenceObjectList *)env->getForge()->allocate((sizeof(MM_ReferenceObjectList) * listCount), MM_AllocationCategory::FIXED, J9_GET_CALLSITE());
208
if (NULL == _extensions->referenceObjectLists) {
209
return false;
210
}
211
for (UDATA index = 0; index < listCount; index++) {
212
new(&_extensions->referenceObjectLists[index]) MM_ReferenceObjectList();
213
}
214
return true;
215
}
216
217
bool
218
MM_MetronomeDelegate::allocateAndInitializeUnfinalizedObjectLists(MM_EnvironmentBase *env)
219
{
220
const UDATA listCount = getUnfinalizedObjectListCount(env);
221
Assert_MM_true(0 < listCount);
222
MM_UnfinalizedObjectList *unfinalizedObjectLists = (MM_UnfinalizedObjectList *)env->getForge()->allocate((sizeof(MM_UnfinalizedObjectList) * listCount), MM_AllocationCategory::FIXED, J9_GET_CALLSITE());
223
if (NULL == unfinalizedObjectLists) {
224
return false;
225
}
226
for (UDATA index = 0; index < listCount; index++) {
227
new(&unfinalizedObjectLists[index]) MM_UnfinalizedObjectList();
228
/* Add each list to the global list. We need to maintain the doubly linked list
 * to ensure uniformity with SE/Balanced.
 */
231
MM_UnfinalizedObjectList *previousUnfinalizedObjectList = (0 == index) ? NULL : &unfinalizedObjectLists[index-1];
232
MM_UnfinalizedObjectList *nextUnfinalizedObjectList = ((listCount - 1) == index) ? NULL : &unfinalizedObjectLists[index+1];
233
234
unfinalizedObjectLists[index].setNextList(nextUnfinalizedObjectList);
235
unfinalizedObjectLists[index].setPreviousList(previousUnfinalizedObjectList);
236
}
237
_extensions->unfinalizedObjectLists = unfinalizedObjectLists;
238
return true;
239
}
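/*
 * Resulting layout (illustrative, added for clarity; not in the original source).
 * For listCount == 3 the array elements are chained into a NULL-terminated doubly
 * linked list, mirroring the SE/Balanced configurations:
 *
 *   NULL <- [0] <-> [1] <-> [2] -> NULL
 *
 * with setPreviousList()/setNextList() supplying the backward/forward links.
 */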
240
241
bool
242
MM_MetronomeDelegate::allocateAndInitializeOwnableSynchronizerObjectLists(MM_EnvironmentBase *env)
243
{
244
const UDATA listCount = getOwnableSynchronizerObjectListCount(env);
245
Assert_MM_true(0 < listCount);
246
MM_OwnableSynchronizerObjectList *ownableSynchronizerObjectLists = (MM_OwnableSynchronizerObjectList *)env->getForge()->allocate((sizeof(MM_OwnableSynchronizerObjectList) * listCount), MM_AllocationCategory::FIXED, J9_GET_CALLSITE());
247
if (NULL == ownableSynchronizerObjectLists) {
248
return false;
249
}
250
for (UDATA index = 0; index < listCount; index++) {
251
new(&ownableSynchronizerObjectLists[index]) MM_OwnableSynchronizerObjectList();
252
/* Add each list to the global list. We need to maintain the doubly linked list
 * to ensure uniformity with SE/Balanced.
 */
255
MM_OwnableSynchronizerObjectList *previousOwnableSynchronizerObjectList = (0 == index) ? NULL : &ownableSynchronizerObjectLists[index-1];
256
MM_OwnableSynchronizerObjectList *nextOwnableSynchronizerObjectList = ((listCount - 1) == index) ? NULL : &ownableSynchronizerObjectLists[index+1];
257
258
ownableSynchronizerObjectLists[index].setNextList(nextOwnableSynchronizerObjectList);
259
ownableSynchronizerObjectLists[index].setPreviousList(previousOwnableSynchronizerObjectList);
260
}
261
_extensions->setOwnableSynchronizerObjectLists(ownableSynchronizerObjectLists);
262
return true;
263
}
264
265
void
266
MM_MetronomeDelegate::tearDown(MM_EnvironmentBase *env)
267
{
268
if (NULL != _extensions->referenceObjectLists) {
269
env->getForge()->free(_extensions->referenceObjectLists);
270
_extensions->referenceObjectLists = NULL;
271
}
272
273
if (NULL != _extensions->unfinalizedObjectLists) {
274
env->getForge()->free(_extensions->unfinalizedObjectLists);
275
_extensions->unfinalizedObjectLists = NULL;
276
}
277
278
if (NULL != _extensions->getOwnableSynchronizerObjectLists()) {
279
env->getForge()->free(_extensions->getOwnableSynchronizerObjectLists());
280
_extensions->setOwnableSynchronizerObjectLists(NULL);
281
}
282
283
if (NULL != _extensions->accessBarrier) {
284
_extensions->accessBarrier->kill(env);
285
_extensions->accessBarrier = NULL;
286
}
287
288
_javaVM->realtimeHeapMapBits = NULL;
289
}
290
291
void
292
MM_MetronomeDelegate::mainSetupForGC(MM_EnvironmentBase *env)
293
{
294
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
295
/* Set the dynamic class unloading flag based on command line and runtime state */
296
switch (_extensions->dynamicClassUnloading) {
297
case MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_NEVER:
298
_extensions->runtimeCheckDynamicClassUnloading = false;
299
break;
300
case MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_ALWAYS:
301
_extensions->runtimeCheckDynamicClassUnloading = true;
302
break;
303
case MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_ON_CLASS_LOADER_CHANGES:
304
_extensions->runtimeCheckDynamicClassUnloading = (_extensions->aggressive || _extensions->classLoaderManager->isTimeForClassUnloading(env));
305
break;
306
default:
307
break;
308
}
309
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
310
311
#if defined(J9VM_GC_FINALIZATION)
312
_finalizationRequired = false;
313
#endif /* J9VM_GC_FINALIZATION */
314
}
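/*
 * Mode mapping performed above (descriptive, added for clarity):
 *   DYNAMIC_CLASS_UNLOADING_NEVER                   -> runtimeCheckDynamicClassUnloading = false
 *   DYNAMIC_CLASS_UNLOADING_ALWAYS                  -> runtimeCheckDynamicClassUnloading = true
 *   DYNAMIC_CLASS_UNLOADING_ON_CLASS_LOADER_CHANGES -> true when the GC is aggressive or the
 *                                                      ClassLoaderManager says it is time to unload
 */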
315
316
void
317
MM_MetronomeDelegate::mainCleanupAfterGC(MM_EnvironmentBase *env)
318
{
319
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
320
/* flush the dead class segments if their size exceeds the CacheSize mark.
321
* Heap fixup should have been completed in this cycle.
322
*/
323
if (_extensions->classLoaderManager->reclaimableMemory() > _extensions->deadClassLoaderCacheSize) {
324
Trc_MM_FlushUndeadSegments_Entry(env->getLanguageVMThread(), "Non-zero reclaimable memory available");
325
_extensions->classLoaderManager->flushUndeadSegments(env);
326
Trc_MM_FlushUndeadSegments_Exit(env->getLanguageVMThread());
327
}
328
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
329
}
330
331
void
332
MM_MetronomeDelegate::incrementalCollectStart(MM_EnvironmentRealtime *env)
333
{
334
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
335
_dynamicClassUnloadingEnabled = ((_extensions->runtimeCheckDynamicClassUnloading != 0) ? true : false);
336
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
337
}
338
339
void
340
MM_MetronomeDelegate::incrementalCollect(MM_EnvironmentRealtime *env)
341
{
342
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
343
PORT_ACCESS_FROM_ENVIRONMENT(env);
344
_dynamicClassUnloadingEnabled = ((_extensions->runtimeCheckDynamicClassUnloading != 0) ? true : false);
345
346
if (_extensions->runtimeCheckDynamicClassUnloading != 0) {
347
MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;
348
_realtimeGC->setCollectorUnloadingClassLoaders();
349
reportClassUnloadingStart(env);
350
classUnloadStats->_startTime = j9time_hires_clock();
351
unloadDeadClassLoaders(env);
352
classUnloadStats->_endTime = j9time_hires_clock();
353
reportClassUnloadingEnd(env);
354
355
/* If there were dynamic class unloading checks during the run, record the new number of class
 * loaders last seen during a DCU pass
 */
358
_extensions->classLoaderManager->setLastUnloadNumOfClassLoaders();
359
_extensions->classLoaderManager->setLastUnloadNumOfAnonymousClasses();
360
}
361
362
/* Handling of classes done. Return back to "mark if necessary" mode */
363
_unmarkedImpliesClasses = false;
364
365
/* Clear the appropriate flags of all classLoaders */
366
GC_ClassLoaderIterator classLoaderIterator(_javaVM->classLoaderBlocks);
367
J9ClassLoader *classLoader;
368
while((classLoader = classLoaderIterator.nextSlot()) != NULL) {
369
classLoader->gcFlags &= ~J9_GC_CLASS_LOADER_SCANNED;
370
}
371
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
372
373
/* If the J9VM_DEBUG_ATTRIBUTE_ALLOW_USER_HEAP_WALK flag is set,
374
* or if we are about to unload classes and free class memory segments
375
* then fix the heap so that it can be walked by debugging tools
376
*/
377
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
378
bool fixupForClassUnload = (_extensions->classLoaderManager->reclaimableMemory() > _extensions->deadClassLoaderCacheSize);
379
#else /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
380
bool fixupForClassUnload = false;
381
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
382
if (J9VM_DEBUG_ATTRIBUTE_ALLOW_USER_HEAP_WALK == (((J9JavaVM *)env->getLanguageVM())->requiredDebugAttributes & J9VM_DEBUG_ATTRIBUTE_ALLOW_USER_HEAP_WALK)
383
|| fixupForClassUnload) {
384
_realtimeGC->_fixHeapForWalk = true;
385
}
386
}
387
388
void
389
MM_MetronomeDelegate::doAuxiliaryGCWork(MM_EnvironmentBase *env)
390
{
391
#if defined(J9VM_GC_FINALIZATION)
392
if(isFinalizationRequired()) {
393
omrthread_monitor_enter(_javaVM->finalizeMainMonitor);
394
_javaVM->finalizeMainFlags |= J9_FINALIZE_FLAGS_MAIN_WAKE_UP;
395
omrthread_monitor_notify_all(_javaVM->finalizeMainMonitor);
396
omrthread_monitor_exit(_javaVM->finalizeMainMonitor);
397
}
398
#endif /* J9VM_GC_FINALIZATION */
399
}
400
401
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
402
void
403
MM_MetronomeDelegate::processDyingClasses(MM_EnvironmentRealtime *env, UDATA* classUnloadCountResult, UDATA* anonymousClassUnloadCountResult, UDATA* classLoaderUnloadCountResult, J9ClassLoader** classLoaderUnloadListResult)
404
{
405
J9ClassLoader *classLoader = NULL;
406
J9VMThread *vmThread = (J9VMThread *)env->getLanguageVMThread();
407
UDATA classUnloadCount = 0;
408
UDATA anonymousClassUnloadCount = 0;
409
UDATA classLoaderUnloadCount = 0;
410
J9ClassLoader *unloadLink = NULL;
411
J9Class *classUnloadList = NULL;
412
J9Class *anonymousClassUnloadList = NULL;
413
414
/*
 * Verify that the boolean array class has been marked. This assertion ensures the correctness
 * of an optimization in ClassIteratorClassSlots that only checks booleanArrayClass interfaces,
 * since all array classes share the same ITable.
 */
419
Assert_MM_true(_markingScheme->isMarked(_javaVM->booleanArrayClass->classObject));
420
421
/*
422
* Walk anonymous classes and set unmarked as dying
423
*
424
* Do this walk before classloaders to be unloaded walk to create list of anonymous classes to be unloaded and use it
425
* as sublist to continue to build general list of classes to be unloaded
426
*
427
* Anonymous classes suppose to be allocated one per segment
428
* This is not relevant here however becomes important at segment removal time
429
*/
430
anonymousClassUnloadList = addDyingClassesToList(env, _javaVM->anonClassLoader, false, anonymousClassUnloadList, &anonymousClassUnloadCount);
431
432
/* class unload list includes anonymous class unload list */
433
classUnloadList = anonymousClassUnloadList;
434
classUnloadCount += anonymousClassUnloadCount;
435
436
GC_ClassLoaderLinkedListIterator classLoaderIterator(env, _extensions->classLoaderManager);
437
while(NULL != (classLoader = (J9ClassLoader *)classLoaderIterator.nextSlot())) {
438
if (0 == (classLoader->gcFlags & J9_GC_CLASS_LOADER_DEAD)) {
439
Assert_MM_true(NULL == classLoader->unloadLink);
440
if(_markingScheme->isMarked(classLoader->classLoaderObject) ) {
441
classLoader->gcFlags &= ~J9_GC_CLASS_LOADER_SCANNED;
442
} else {
443
/* Anonymous classloader should not be unloaded */
444
Assert_MM_true(0 == (classLoader->flags & J9CLASSLOADER_ANON_CLASS_LOADER));
445
446
classLoaderUnloadCount += 1;
447
classLoader->gcFlags |= J9_GC_CLASS_LOADER_DEAD;
448
449
/* add this loader to the linked list of loaders being unloaded in this cycle */
450
classLoader->unloadLink = unloadLink;
451
unloadLink = classLoader;
452
453
classUnloadList = addDyingClassesToList(env, classLoader, true, classUnloadList, &classUnloadCount);
454
}
455
}
456
yieldFromClassUnloading(env);
457
}
458
459
if (0 != classUnloadCount) {
460
/* Call classes unload hook */
461
TRIGGER_J9HOOK_VM_CLASSES_UNLOAD(_javaVM->hookInterface, vmThread, classUnloadCount, classUnloadList);
462
yieldFromClassUnloading(env);
463
}
464
465
if (0 != anonymousClassUnloadCount) {
466
/* Call anonymous classes unload hook */
467
TRIGGER_J9HOOK_VM_ANON_CLASSES_UNLOAD(_javaVM->hookInterface, vmThread, anonymousClassUnloadCount, anonymousClassUnloadList);
468
yieldFromClassUnloading(env);
469
}
470
471
if (0 != classLoaderUnloadCount) {
472
/* Call classloader unload hook */
473
TRIGGER_J9HOOK_VM_CLASS_LOADERS_UNLOAD(_javaVM->hookInterface, vmThread, unloadLink);
474
yieldFromClassUnloading(env);
475
}
476
477
/* Ensure that the VM has an accurate anonymous class count */
478
_javaVM->anonClassCount -= anonymousClassUnloadCount;
479
480
*classUnloadCountResult = classUnloadCount;
481
*anonymousClassUnloadCountResult = anonymousClassUnloadCount;
482
*classLoaderUnloadCountResult = classLoaderUnloadCount;
483
*classLoaderUnloadListResult = unloadLink;
484
}
485
486
J9Class *
487
MM_MetronomeDelegate::addDyingClassesToList(MM_EnvironmentRealtime *env, J9ClassLoader * classLoader, bool setAll, J9Class *classUnloadListStart, UDATA *classUnloadCountResult)
488
{
489
J9VMThread *vmThread = (J9VMThread *)env->getLanguageVMThread();
490
J9Class *classUnloadList = classUnloadListStart;
491
UDATA classUnloadCount = 0;
492
493
if (NULL != classLoader) {
494
GC_ClassLoaderSegmentIterator segmentIterator(classLoader, MEMORY_TYPE_RAM_CLASS);
495
J9MemorySegment *segment = NULL;
496
while(NULL != (segment = segmentIterator.nextSegment())) {
497
GC_ClassHeapIterator classHeapIterator(_javaVM, segment);
498
J9Class *clazz = NULL;
499
while(NULL != (clazz = classHeapIterator.nextClass())) {
500
501
J9CLASS_EXTENDED_FLAGS_CLEAR(clazz, J9ClassGCScanned);
502
503
J9Object *classObject = clazz->classObject;
504
if (setAll || !_markingScheme->isMarked(classObject)) {
505
506
/* with setAll all classes must be unmarked */
507
Assert_MM_true(!_markingScheme->isMarked(classObject));
508
509
classUnloadCount += 1;
510
511
/* Remove the class from the subclass traversal list */
512
_extensions->classLoaderManager->removeFromSubclassHierarchy(env, clazz);
513
/* Mark class as dying */
514
clazz->classDepthAndFlags |= J9AccClassDying;
515
516
/* Call class unload hook */
517
Trc_MM_cleanUpClassLoadersStart_triggerClassUnload(env->getLanguageVMThread(),clazz,
518
(UDATA) J9UTF8_LENGTH(J9ROMCLASS_CLASSNAME(clazz->romClass)),
519
J9UTF8_DATA(J9ROMCLASS_CLASSNAME(clazz->romClass)));
520
TRIGGER_J9HOOK_VM_CLASS_UNLOAD(_javaVM->hookInterface, vmThread, clazz);
521
522
/* add class to dying anonymous classes link list */
523
clazz->gcLink = classUnloadList;
524
classUnloadList = clazz;
525
}
526
}
527
}
528
}
529
530
*classUnloadCountResult += classUnloadCount;
531
return classUnloadList;
532
}
533
534
/**
 * Free class loaders which are being unloaded during this GC cycle. Also remove all
 * dead classes from the traversal list.
 * @note The traversal code belongs in its own function, or possibly in processDyingClasses.
 * It is currently here for historic reasons.
 *
 * @param deadClassLoaders Linked list of class loaders dying during this GC cycle
 */
542
void
543
MM_MetronomeDelegate::processUnlinkedClassLoaders(MM_EnvironmentBase *envModron, J9ClassLoader *deadClassLoaders)
544
{
545
MM_EnvironmentRealtime *env = MM_EnvironmentRealtime::getEnvironment(envModron);
546
J9ClassLoader *unloadLink = deadClassLoaders;
547
J9VMThread *vmThread = (J9VMThread *)env->getLanguageVMThread();
548
J9JavaVM *javaVM = (J9JavaVM *)env->getLanguageVM();
549
550
/* Remove dead classes from the traversal list (if necessary) */
551
J9Class *jlObject = J9VMJAVALANGOBJECT_OR_NULL(javaVM);
552
J9Class *previousClass = jlObject;
553
J9Class *nextClass = (NULL != jlObject) ? jlObject->subclassTraversalLink : jlObject;
554
while ((NULL != nextClass) && (jlObject != nextClass)) {
555
if (J9CLASS_FLAGS(nextClass) & J9AccClassDying) {
556
while ((NULL != nextClass->subclassTraversalLink) && (jlObject != nextClass) && (J9CLASS_FLAGS(nextClass) & 0x08000000)) {
557
nextClass = nextClass->subclassTraversalLink;
558
}
559
previousClass->subclassTraversalLink = nextClass;
560
}
561
previousClass = nextClass;
562
nextClass = nextClass->subclassTraversalLink;
563
/* TODO CRGTMP Do we need to yield here? Is yielding safe? */
564
}
565
566
/* Free memory for dead classloaders */
567
while (NULL != unloadLink) {
568
J9ClassLoader *nextUnloadLink = unloadLink->unloadLink;
569
_javaVM->internalVMFunctions->freeClassLoader(unloadLink, _javaVM, vmThread, 1);
570
unloadLink = nextUnloadLink;
571
yieldFromClassUnloading(env);
572
}
573
}
574
575
void
576
MM_MetronomeDelegate::updateClassUnloadStats(MM_EnvironmentBase *env, UDATA classUnloadCount, UDATA anonymousClassUnloadCount, UDATA classLoaderUnloadCount)
577
{
578
MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;
579
580
/* TODO CRGTMP move global stats into super class implementation once it is created */
581
classUnloadStats->_classesUnloadedCount = classUnloadCount;
582
classUnloadStats->_anonymousClassesUnloadedCount = anonymousClassUnloadCount;
583
classUnloadStats->_classLoaderUnloadedCount = classLoaderUnloadCount;
584
585
/* Record increment stats */
586
_extensions->globalGCStats.metronomeStats.classesUnloadedCount = classUnloadCount;
587
_extensions->globalGCStats.metronomeStats.anonymousClassesUnloadedCount = anonymousClassUnloadCount;
588
_extensions->globalGCStats.metronomeStats.classLoaderUnloadedCount = classLoaderUnloadCount;
589
}
590
591
/**
 * Unload class loaders that are no longer referenced. If a class loader has shared
 * libraries open, place it on the finalize queue instead of freeing it.
 */
596
void
597
MM_MetronomeDelegate::unloadDeadClassLoaders(MM_EnvironmentBase *envModron)
598
{
599
MM_EnvironmentRealtime *env = MM_EnvironmentRealtime::getEnvironment(envModron);
600
J9ClassLoader *unloadLink = NULL;
601
UDATA classUnloadCount = 0;
602
UDATA anonymousClassUnloadCount = 0;
603
UDATA classLoaderUnloadCount = 0;
604
J9ClassLoader *classLoadersUnloadedList = NULL;
605
J9MemorySegment *reclaimedSegments = NULL;
606
607
/* set the vmState whilst we're unloading classes */
608
UDATA vmState = env->pushVMstate(OMRVMSTATE_GC_CLEANING_METADATA);
609
610
lockClassUnloadMonitor(env);
611
612
processDyingClasses(env, &classUnloadCount, &anonymousClassUnloadCount, &classLoaderUnloadCount, &classLoadersUnloadedList);
613
614
/* cleanup segments in anonymous classloader */
615
_extensions->classLoaderManager->cleanUpSegmentsInAnonymousClassLoader(env, &reclaimedSegments);
616
617
/* enqueue all the segments we just salvaged from the dead class loaders for delayed free (this work was historically attributed in the unload end operation so it goes after the timer start) */
618
_extensions->classLoaderManager->enqueueUndeadClassSegments(reclaimedSegments);
619
620
yieldFromClassUnloading(env);
621
622
GC_FinalizableClassLoaderBuffer buffer(_extensions);
623
624
while (NULL != classLoadersUnloadedList) {
625
/* fetch the next loader immediately, since we will re-use the unloadLink in this loop */
626
J9ClassLoader* classLoader = classLoadersUnloadedList;
627
classLoadersUnloadedList = classLoader->unloadLink;
628
629
Assert_MM_true(0 == (classLoader->gcFlags & J9_GC_CLASS_LOADER_SCANNED));
630
Assert_MM_true(J9_GC_CLASS_LOADER_DEAD == (classLoader->gcFlags & J9_GC_CLASS_LOADER_DEAD));
631
Assert_MM_true(0 == (classLoader->gcFlags & (J9_GC_CLASS_LOADER_UNLOADING | J9_GC_CLASS_LOADER_ENQ_UNLOAD)));
632
633
/* Class loader died this collection, so do cleanup work */
634
reclaimedSegments = NULL;
635
636
/* Perform classLoader-specific clean up work, including freeing the classLoader's class hash table and
637
* class path entries.
638
*/
639
_javaVM->internalVMFunctions->cleanUpClassLoader((J9VMThread *)env->getLanguageVMThread(), classLoader);
640
/* free any ROM classes now and enqueue any RAM classes */
641
_extensions->classLoaderManager->cleanUpSegmentsAlongClassLoaderLink(_javaVM, classLoader->classSegments, &reclaimedSegments);
642
/* we are taking responsibility for cleaning these here so free them */
643
classLoader->classSegments = NULL;
644
/* enqueue all the segments we just salvaged from the dead class loaders for delayed free (this work was historically attributed in the unload end operation so it goes after the timer start) */
645
_extensions->classLoaderManager->enqueueUndeadClassSegments(reclaimedSegments);
646
647
/* Remove this classloader slot */
648
_extensions->classLoaderManager->unlinkClassLoader(classLoader);
649
650
#if defined(J9VM_GC_FINALIZATION)
651
/* Determine if the classLoader needs to be enqueued for finalization (for shared library unloading),
652
* otherwise add it to the list of classLoaders to be unloaded by cleanUpClassLoadersEnd.
653
*/
654
if(((NULL != classLoader->sharedLibraries)
655
&& (0 != pool_numElements(classLoader->sharedLibraries)))
656
|| (_extensions->fvtest_forceFinalizeClassLoaders)) {
657
/* Attempt to enqueue the class loader for the finalizer */
658
buffer.add(env, classLoader);
659
classLoader->gcFlags |= J9_GC_CLASS_LOADER_ENQ_UNLOAD;
660
_finalizationRequired = true;
661
} else
662
#endif /* J9VM_GC_FINALIZATION */
663
{
664
/* Add the classLoader to the list of classLoaders to unloaded by cleanUpClassLoadersEnd */
665
classLoader->unloadLink = unloadLink;
666
unloadLink = classLoader;
667
}
668
yieldFromClassUnloading(env);
669
}
670
671
buffer.flush(env);
672
673
updateClassUnloadStats(env, classUnloadCount, anonymousClassUnloadCount, classLoaderUnloadCount);
674
675
processUnlinkedClassLoaders(env, unloadLink);
676
677
unlockClassUnloadMonitor(env);
678
679
env->popVMstate(vmState);
680
}
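/*
 * Phase summary for unloadDeadClassLoaders() (descriptive, added for clarity):
 *   1. processDyingClasses() flags dead classes/loaders and builds the unload list.
 *   2. Segments salvaged from the anonymous class loader are enqueued as undead segments.
 *   3. Each dead class loader is cleaned up and either queued for finalization (open shared
 *      libraries or fvtest_forceFinalizeClassLoaders) or linked for immediate unloading.
 *   4. Class unload stats are updated and processUnlinkedClassLoaders() frees the remainder,
 *      yielding between steps via yieldFromClassUnloading().
 */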
681
682
/**
683
* Check to see if it is time to yield. If it is time to yield the GC
684
* must release the classUnloadMonitor before yielding. Once the GC
685
* comes back from the yield it is required to acquire the classUnloadMonitor
686
* again.
687
*/
688
void
689
MM_MetronomeDelegate::yieldFromClassUnloading(MM_EnvironmentRealtime *env)
690
{
691
if (_realtimeGC->shouldYield(env)) {
692
unlockClassUnloadMonitor(env);
693
_realtimeGC->yield(env);
694
lockClassUnloadMonitor(env);
695
}
696
}
697
698
/**
 * The GC is required to hold the classUnloadMonitor while it is unloading classes.
 * This ensures that the JIT will abort any ongoing compilations.
 */
702
void
703
MM_MetronomeDelegate::lockClassUnloadMonitor(MM_EnvironmentRealtime *env)
704
{
705
/* Grab the classUnloadMonitor so that the JIT and the GC will not interfere with each other */
706
#if defined(J9VM_JIT_CLASS_UNLOAD_RWMONITOR)
707
if (0 != omrthread_rwmutex_try_enter_write(_javaVM->classUnloadMutex)) {
708
#else
709
if (0 != omrthread_monitor_try_enter(_javaVM->classUnloadMutex)) {
710
#endif /* J9VM_JIT_CLASS_UNLOAD_RWMONITOR */
711
/* Failed to acquire the monitor, so interrupt the JIT. This will allow the GC
 * to continue unloading classes.
 */
714
TRIGGER_J9HOOK_MM_INTERRUPT_COMPILATION(_extensions->hookInterface, (J9VMThread *)env->getLanguageVMThread());
715
#if defined(J9VM_JIT_CLASS_UNLOAD_RWMONITOR)
716
omrthread_rwmutex_enter_write(_javaVM->classUnloadMutex);
717
#else
718
omrthread_monitor_enter(_javaVM->classUnloadMutex);
719
#endif /* J9VM_JIT_CLASS_UNLOAD_RWMONITOR */
720
}
721
}
722
723
/**
724
* Release the classUnloadMonitor. This will allow the JIT to compile new methods.
725
*/
726
void
727
MM_MetronomeDelegate::unlockClassUnloadMonitor(MM_EnvironmentRealtime *env)
728
{
729
#if defined(J9VM_JIT_CLASS_UNLOAD_RWMONITOR)
730
omrthread_rwmutex_exit_write(_javaVM->classUnloadMutex);
731
#else
732
omrthread_monitor_exit(_javaVM->classUnloadMutex);
733
#endif /* J9VM_JIT_CLASS_UNLOAD_RWMONITOR */
734
}
735
736
void
737
MM_MetronomeDelegate::reportClassUnloadingStart(MM_EnvironmentBase *env)
738
{
739
PORT_ACCESS_FROM_ENVIRONMENT(env);
740
Trc_MM_ClassUnloadingStart(env->getLanguageVMThread());
741
742
TRIGGER_J9HOOK_MM_PRIVATE_CLASS_UNLOADING_START(
743
_extensions->privateHookInterface,
744
env->getOmrVMThread(),
745
j9time_hires_clock(),
746
J9HOOK_MM_PRIVATE_CLASS_UNLOADING_START);
747
}
748
749
void
750
MM_MetronomeDelegate::reportClassUnloadingEnd(MM_EnvironmentBase *env)
751
{
752
PORT_ACCESS_FROM_ENVIRONMENT(env);
753
MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;
754
755
Trc_MM_ClassUnloadingEnd(env->getLanguageVMThread(),
756
classUnloadStats->_classLoaderUnloadedCount,
757
classUnloadStats->_classesUnloadedCount);
758
759
TRIGGER_J9HOOK_MM_CLASS_UNLOADING_END(
760
_extensions->hookInterface,
761
(J9VMThread *)env->getLanguageVMThread(),
762
j9time_hires_clock(),
763
J9HOOK_MM_CLASS_UNLOADING_END,
764
classUnloadStats->_endTime - classUnloadStats->_startTime,
765
classUnloadStats->_classLoaderUnloadedCount,
766
classUnloadStats->_classesUnloadedCount,
767
classUnloadStats->_classUnloadMutexQuiesceTime,
768
classUnloadStats->_endSetupTime - classUnloadStats->_startSetupTime,
769
classUnloadStats->_endScanTime - classUnloadStats->_startScanTime,
770
classUnloadStats->_endPostTime - classUnloadStats->_startPostTime);
771
}
772
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
773
774
void
775
MM_MetronomeDelegate::reportSyncGCEnd(MM_EnvironmentBase *env)
776
{
777
OMRPORT_ACCESS_FROM_ENVIRONMENT(env);
778
UDATA approximateFreeMemorySize = _extensions->heap->getApproximateActiveFreeMemorySize();
779
#if defined(OMR_GC_DYNAMIC_CLASS_UNLOADING)
780
MM_ClassUnloadStats *classUnloadStats = &_extensions->globalGCStats.classUnloadStats;
781
UDATA classLoaderUnloadCount = classUnloadStats->_classLoaderUnloadedCount;
782
UDATA classUnloadCount = classUnloadStats->_classesUnloadedCount;
783
UDATA anonymousClassUnloadCount = classUnloadStats->_anonymousClassesUnloadedCount;
784
#else /* defined(OMR_GC_DYNAMIC_CLASS_UNLOADING) */
785
UDATA classLoaderUnloadCount = 0;
786
UDATA classUnloadCount = 0;
787
UDATA anonymousClassUnloadCount = 0;
788
#endif /* defined(OMR_GC_DYNAMIC_CLASS_UNLOADING) */
789
UDATA weakReferenceCount = _extensions->markJavaStats._weakReferenceStats._cleared;
790
UDATA softReferenceCount = _extensions->markJavaStats._softReferenceStats._cleared;
791
UDATA maxSoftReferenceAge = _extensions->getMaxSoftReferenceAge();
792
UDATA softReferenceAge = _extensions->getDynamicMaxSoftReferenceAge();
793
UDATA phantomReferenceCount = _extensions->markJavaStats._phantomReferenceStats._cleared;
794
UDATA finalizerCount = _extensions->globalGCStats.metronomeStats.getWorkPacketOverflowCount();
795
UDATA packetOverflowCount = _extensions->globalGCStats.metronomeStats.getWorkPacketOverflowCount();
796
UDATA objectOverflowCount = _extensions->globalGCStats.metronomeStats.getObjectOverflowCount();
797
798
Trc_MM_SynchGCEnd(env->getLanguageVMThread(),
799
approximateFreeMemorySize,
800
0,
801
classLoaderUnloadCount,
802
classUnloadCount,
803
weakReferenceCount,
804
softReferenceCount,
805
maxSoftReferenceAge,
806
softReferenceAge,
807
phantomReferenceCount,
808
finalizerCount,
809
packetOverflowCount,
810
objectOverflowCount
811
);
812
813
TRIGGER_J9HOOK_MM_PRIVATE_METRONOME_SYNCHRONOUS_GC_END(_extensions->privateHookInterface,
814
env->getOmrVMThread(), omrtime_hires_clock(),
815
J9HOOK_MM_PRIVATE_METRONOME_SYNCHRONOUS_GC_END,
816
approximateFreeMemorySize,
817
0,
818
classLoaderUnloadCount,
819
classUnloadCount,
820
anonymousClassUnloadCount,
821
weakReferenceCount,
822
softReferenceCount,
823
maxSoftReferenceAge,
824
softReferenceAge,
825
phantomReferenceCount,
826
finalizerCount,
827
packetOverflowCount,
828
objectOverflowCount
829
);
830
}
831
/**
832
* Factory method for creating the access barrier. Note that the default realtime access barrier
833
* doesn't handle the RTSJ checks.
834
*/
835
MM_RealtimeAccessBarrier*
836
MM_MetronomeDelegate::allocateAccessBarrier(MM_EnvironmentBase *env)
837
{
838
return MM_RealtimeAccessBarrier::newInstance(env);
839
}
840
841
/**
842
* Iterates over all threads and enables the double barrier for each thread by setting the
843
* remembered set fragment index to the reserved index.
844
*/
845
void
846
MM_MetronomeDelegate::enableDoubleBarrier(MM_EnvironmentBase *env)
847
{
848
MM_GCExtensions* extensions = MM_GCExtensions::getExtensions(env);
849
MM_RealtimeAccessBarrier* realtimeAccessBarrier = (MM_RealtimeAccessBarrier*)extensions->accessBarrier;
850
GC_VMThreadListIterator vmThreadListIterator(_javaVM);
851
852
/* First, enable the global double barrier flag so new threads will have the double barrier enabled. */
853
realtimeAccessBarrier->setDoubleBarrierActive();
854
while(J9VMThread* thread = vmThreadListIterator.nextVMThread()) {
855
/* Second, enable the double barrier on all threads individually. */
856
realtimeAccessBarrier->setDoubleBarrierActiveOnThread(MM_EnvironmentBase::getEnvironment(thread->omrVMThread));
857
}
858
}
859
860
/**
861
* Disables the double barrier for the specified thread.
862
*/
863
void
864
MM_MetronomeDelegate::disableDoubleBarrierOnThread(MM_EnvironmentBase* env, OMR_VMThread* vmThread)
865
{
866
/* This gets called on a per thread basis as threads get scanned. */
867
MM_GCExtensions* extensions = MM_GCExtensions::getExtensions(env);
868
MM_RealtimeAccessBarrier* realtimeAccessBarrier = (MM_RealtimeAccessBarrier*)extensions->accessBarrier;
869
realtimeAccessBarrier->setDoubleBarrierInactiveOnThread(MM_EnvironmentBase::getEnvironment(vmThread));
870
}
871
872
/**
873
* Disables the global double barrier flag. This should be called after all threads have been scanned
874
* and disableDoubleBarrierOnThread has been called on each of them.
875
*/
876
void
877
MM_MetronomeDelegate::disableDoubleBarrier(MM_EnvironmentBase* env)
878
{
879
/* The enabling of the double barrier must traverse all threads, but the double barrier gets disabled
880
* on a per thread basis as threads get scanned, so no need to traverse all threads in this method.
881
*/
882
MM_GCExtensions* extensions = MM_GCExtensions::getExtensions(env);
883
MM_RealtimeAccessBarrier* realtimeAccessBarrier = (MM_RealtimeAccessBarrier*)extensions->accessBarrier;
884
realtimeAccessBarrier->setDoubleBarrierInactive();
885
}
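/*
 * Double barrier lifecycle (summary of the three methods above, added for clarity):
 *
 *   enableDoubleBarrier(env);                      // global flag + every live thread
 *   ...
 *   disableDoubleBarrierOnThread(env, vmThread);   // per thread, as each thread is scanned
 *   ...
 *   disableDoubleBarrier(env);                     // clear the global flag once all threads are scanned
 */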
886
887
888
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
889
/**
890
* Walk all class loaders marking their classes if the classLoader object has been
891
* marked.
892
*
893
* @return true if any classloaders/classes are marked, false otherwise
894
*/
895
bool
896
MM_MetronomeDelegate::doClassTracing(MM_EnvironmentRealtime *env)
897
{
898
J9ClassLoader *classLoader;
899
bool didWork = false;
900
901
MM_GCExtensions* extensions = MM_GCExtensions::getExtensions(env);
902
GC_ClassLoaderLinkedListIterator classLoaderIterator(env, extensions->classLoaderManager);
903
904
while((classLoader = (J9ClassLoader *)classLoaderIterator.nextSlot()) != NULL) {
905
if (0 == (classLoader->gcFlags & J9_GC_CLASS_LOADER_DEAD)) {
906
if(J9CLASSLOADER_ANON_CLASS_LOADER == (classLoader->flags & J9CLASSLOADER_ANON_CLASS_LOADER)) {
907
/* The anonymous class loader should be scanned at the level of individual classes every time */
908
GC_ClassLoaderSegmentIterator segmentIterator(classLoader, MEMORY_TYPE_RAM_CLASS);
909
J9MemorySegment *segment = NULL;
910
while(NULL != (segment = segmentIterator.nextSegment())) {
911
GC_ClassHeapIterator classHeapIterator(_javaVM, segment);
912
J9Class *clazz = NULL;
913
while(NULL != (clazz = classHeapIterator.nextClass())) {
914
if((0 == (J9CLASS_EXTENDED_FLAGS(clazz) & J9ClassGCScanned)) && _markingScheme->isMarked(clazz->classObject)) {
915
J9CLASS_EXTENDED_FLAGS_SET(clazz, J9ClassGCScanned);
916
917
/* Scan class */
918
GC_ClassIterator objectSlotIterator(env, clazz);
919
volatile j9object_t *objectSlotPtr = NULL;
920
while((objectSlotPtr = objectSlotIterator.nextSlot()) != NULL) {
921
didWork |= _markingScheme->markObject(env, *objectSlotPtr);
922
}
923
924
GC_ClassIteratorClassSlots classSlotIterator(_javaVM, clazz);
925
J9Class *classPtr;
926
while (NULL != (classPtr = classSlotIterator.nextSlot())) {
927
didWork |= markClass(env, classPtr);
928
}
929
}
930
}
931
_realtimeGC->condYield(env, 0);
932
}
933
} else {
934
/* Check if the class loader has not been scanned but the class loader is live */
935
if( !(classLoader->gcFlags & J9_GC_CLASS_LOADER_SCANNED) && _markingScheme->isMarked((J9Object *)classLoader->classLoaderObject)) {
936
/* Flag the class loader as being scanned */
937
classLoader->gcFlags |= J9_GC_CLASS_LOADER_SCANNED;
938
939
GC_ClassLoaderSegmentIterator segmentIterator(classLoader, MEMORY_TYPE_RAM_CLASS);
940
J9MemorySegment *segment = NULL;
941
J9Class *clazz;
942
943
while(NULL != (segment = segmentIterator.nextSegment())) {
944
GC_ClassHeapIterator classHeapIterator(_javaVM, segment);
945
while(NULL != (clazz = classHeapIterator.nextClass())) {
946
/* Scan class */
947
GC_ClassIterator objectSlotIterator(env, clazz);
948
volatile j9object_t *objectSlotPtr = NULL;
949
while((objectSlotPtr = objectSlotIterator.nextSlot()) != NULL) {
950
didWork |= _markingScheme->markObject(env, *objectSlotPtr);
951
}
952
953
GC_ClassIteratorClassSlots classSlotIterator(_javaVM, clazz);
954
J9Class *classPtr;
955
while (NULL != (classPtr = classSlotIterator.nextSlot())) {
956
didWork |= markClass(env, classPtr);
957
}
958
}
959
_realtimeGC->condYield(env, 0);
960
}
961
962
/* CMVC 131487 */
963
J9HashTableState walkState;
964
/*
 * We believe that (NULL == classLoader->classHashTable) holds ONLY for a DEAD class loader,
 * so if this pointer happens to be NULL at this point, let it crash here.
 */
968
Assert_MM_true(NULL != classLoader->classHashTable);
969
/*
970
* CMVC 178060 : disable hash table growth to prevent hash table entries from being rehashed during GC yield
971
* while GC was in the middle of iterating the hash table.
972
*/
973
hashTableSetFlag(classLoader->classHashTable, J9HASH_TABLE_DO_NOT_REHASH);
974
clazz = _javaVM->internalVMFunctions->hashClassTableStartDo(classLoader, &walkState, 0);
975
while (NULL != clazz) {
976
didWork |= markClass(env, clazz);
977
clazz = _javaVM->internalVMFunctions->hashClassTableNextDo(&walkState);
978
979
/**
980
* Jazz103 55784: We cannot rehash the table in the middle of iteration and the Space-opt hashtable cannot grow if
981
* J9HASH_TABLE_DO_NOT_REHASH is enabled. Don't yield if the hashtable is space-optimized because we run the
982
* risk of the mutator not being able to grow to accommodate new elements.
983
*/
984
if (!hashTableIsSpaceOptimized(classLoader->classHashTable)) {
985
_realtimeGC->condYield(env, 0);
986
}
987
}
988
/*
 * CMVC 178060 : re-enable hash table growth, which was disabled above to prevent hash table entries
 * from being rehashed during a GC yield while the GC was in the middle of iterating the hash table.
 */
992
hashTableResetFlag(classLoader->classHashTable, J9HASH_TABLE_DO_NOT_REHASH);
993
994
if (NULL != classLoader->moduleHashTable) {
995
J9Module **modulePtr = (J9Module **)hashTableStartDo(classLoader->moduleHashTable, &walkState);
996
while (NULL != modulePtr) {
997
J9Module * const module = *modulePtr;
998
999
didWork |= _markingScheme->markObject(env, module->moduleObject);
1000
didWork |= _markingScheme->markObject(env, module->moduleName);
1001
didWork |= _markingScheme->markObject(env, module->version);
1002
modulePtr = (J9Module**)hashTableNextDo(&walkState);
1003
}
1004
1005
if (classLoader == _javaVM->systemClassLoader) {
1006
didWork |= _markingScheme->markObject(env, _javaVM->unamedModuleForSystemLoader->moduleObject);
1007
}
1008
}
1009
}
1010
}
1011
}
1012
/* This yield point is for the case when there are lots of classloaders that will be unloaded */
1013
_realtimeGC->condYield(env, 0);
1014
}
1015
return didWork;
1016
}
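/*
 * Tracing summary (descriptive, added for clarity): live anonymous class loaders are traced
 * class-by-class (each marked, unscanned class is flagged J9ClassGCScanned and its object and
 * class slots are marked); other live, unscanned class loaders are flagged
 * J9_GC_CLASS_LOADER_SCANNED and have their RAM class segments, class hash table (with
 * rehashing disabled) and module hash table traced, yielding periodically throughout.
 */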
1017
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
1018
1019
bool
1020
MM_MetronomeDelegate::doTracing(MM_EnvironmentRealtime* env)
1021
{
1022
/* TODO CRGTMP make class tracing concurrent */
1023
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
1024
if(isDynamicClassUnloadingEnabled()) {
1025
return doClassTracing(env);
1026
}
1027
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
1028
return false;
1029
}
1030
1031
void
1032
MM_MetronomeDelegate::defaultMemorySpaceAllocated(MM_GCExtensionsBase *extensions, void* defaultMemorySpace)
1033
{
1034
J9JavaVM* vm = (J9JavaVM *)extensions->getOmrVM()->_language_vm;
1035
1036
vm->heapBase = extensions->heap->getHeapBase();
1037
vm->heapTop = extensions->heap->getHeapTop();
1038
}
1039
1040
/**
1041
* This function has to be called at the beginning of continueGC because requestExclusiveVMAccess
1042
* assumes the current J9VMThread does not have VM Access. All java threads that cause a GC (either
1043
* System.gc or allocation failure) will have VM access when entering the GC so we have to give it up.
1044
*
1045
* @param threadRequestingExclusive the J9VMThread for the MetronomeGCThread that will
1046
* be requesting exclusive vm access.
1047
*/
1048
void
1049
MM_MetronomeDelegate::preRequestExclusiveVMAccess(OMR_VMThread *threadRequestingExclusive)
1050
{
1051
if (threadRequestingExclusive == NULL) {
1052
return;
1053
}
1054
J9VMThread *vmThread = (J9VMThread *)threadRequestingExclusive->_language_vmthread;
1055
vmThread->javaVM->internalVMFunctions->internalReleaseVMAccess(vmThread);
1056
}
1057
1058
/**
1059
* This function is called when leaving continueGC so the J9VMThread associated with current
1060
* MetronomeGCThread will get its VM Access back before returning to run Java code.
1061
*
1062
* @param threadRequestingExclusive the J9VMThread for the MetronomeGCThread that requested
1063
* exclusive vm access.
1064
*/
1065
void
1066
MM_MetronomeDelegate::postRequestExclusiveVMAccess(OMR_VMThread *threadRequestingExclusive)
1067
{
1068
if (NULL == threadRequestingExclusive) {
1069
return;
1070
}
1071
J9VMThread *vmThread = (J9VMThread *)threadRequestingExclusive->_language_vmthread;
1072
vmThread->javaVM->internalVMFunctions->internalAcquireVMAccess(vmThread);
1073
}
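/*
 * Illustrative bracketing (sketch, not from the original source): continueGC is expected to
 * wrap its exclusive-access request roughly as follows, where "delegate" and "requestorThread"
 * are hypothetical names for this MM_MetronomeDelegate and the OMR_VMThread supplied by the caller:
 *
 *   delegate->preRequestExclusiveVMAccess(requestorThread);    // give up VM access
 *   ... request and wait for exclusive VM access, run the GC increment ...
 *   delegate->postRequestExclusiveVMAccess(requestorThread);   // reacquire VM access
 */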
1074
1075
1076
1077
/**
1078
* A call to requestExclusiveVMAccess must be followed by a call to waitForExclusiveVMAccess,
1079
* but not necessarily by the same thread.
1080
*
1081
* @param env the requesting thread.
1082
 * @param block boolean input parameter specifying whether we should block and wait if another party is requesting at the same time, or return immediately
 * @return boolean indicating whether the request was successful (only meaningful if block is set to FALSE)
1084
*/
1085
uintptr_t
1086
MM_MetronomeDelegate::requestExclusiveVMAccess(MM_EnvironmentBase *env, uintptr_t block, uintptr_t *gcPriority)
1087
{
1088
return _javaVM->internalVMFunctions->requestExclusiveVMAccessMetronomeTemp(_javaVM, block, &_vmResponsesRequiredForExclusiveVMAccess, &_jniResponsesRequiredForExclusiveVMAccess, gcPriority);
1089
}
1090
1091
/**
1092
* Block until the earlier request for exclusive VM access completes.
1093
* @note This can only be called by the MainGC thread.
1094
* @param The requesting thread.
1095
*/
1096
void
1097
MM_MetronomeDelegate::waitForExclusiveVMAccess(MM_EnvironmentBase *env, bool waitRequired)
1098
{
1099
J9VMThread *mainGCThread = (J9VMThread *)env->getLanguageVMThread();
1100
1101
if (waitRequired) {
1102
_javaVM->internalVMFunctions->waitForExclusiveVMAccessMetronomeTemp((J9VMThread *)env->getLanguageVMThread(), _vmResponsesRequiredForExclusiveVMAccess, _jniResponsesRequiredForExclusiveVMAccess);
1103
}
1104
++(mainGCThread->omrVMThread->exclusiveCount);
1105
}
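/*
 * Illustrative pairing (sketch, not from the original source): as described above,
 * requestExclusiveVMAccess() and waitForExclusiveVMAccess() form a request/wait pair that may
 * be split across threads. A single-threaded caller might use them roughly as:
 *
 *   uintptr_t gcPriority = 0;
 *   if (0 != delegate->requestExclusiveVMAccess(env, FALSE, &gcPriority)) {   // non-blocking request
 *       delegate->waitForExclusiveVMAccess(env, true);                        // block until granted
 *       ... perform the increment under exclusive VM access ...
 *       delegate->releaseExclusiveVMAccess(env, true);
 *   }
 *
 * "delegate" is a hypothetical name for this MM_MetronomeDelegate instance.
 */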
1106
1107
/**
1108
* Acquire (request and block until success) exclusive VM access.
1109
* @note This can only be called by the MainGC thread.
1110
* @param The requesting thread.
1111
*/
1112
void
1113
MM_MetronomeDelegate::acquireExclusiveVMAccess(MM_EnvironmentBase *env, bool waitRequired)
1114
{
1115
J9VMThread *mainGCThread = (J9VMThread *)env->getLanguageVMThread();
1116
1117
if (waitRequired) {
1118
_javaVM->internalVMFunctions->acquireExclusiveVMAccessFromExternalThread(_javaVM);
1119
}
1120
++(mainGCThread->omrVMThread->exclusiveCount);
1121
1122
}
1123
1124
/**
1125
* Release the held exclusive VM access.
1126
* @note This can only be called by the MainGC thread.
1127
* @param The requesting thread.
1128
*/
1129
void
1130
MM_MetronomeDelegate::releaseExclusiveVMAccess(MM_EnvironmentBase *env, bool releaseRequired)
1131
{
1132
J9VMThread *mainGCThread = (J9VMThread *)env->getLanguageVMThread();
1133
1134
--(mainGCThread->omrVMThread->exclusiveCount);
1135
if (releaseRequired) {
1136
_javaVM->internalVMFunctions->releaseExclusiveVMAccessMetronome(mainGCThread);
1137
/* Set the exclusive access response counts to an unusual value,
 * just for debug purposes, so we can detect scenarios where the main
 * thread is waiting for exclusive access with no one having requested it before.
 */
1141
_vmResponsesRequiredForExclusiveVMAccess = 0x7fffffff;
1142
_jniResponsesRequiredForExclusiveVMAccess = 0x7fffffff;
1143
}
1144
}
1145
1146
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
1147
/**
1148
* Mark this class
1149
*/
1150
bool
1151
MM_MetronomeDelegate::markClass(MM_EnvironmentRealtime *env, J9Class *clazz)
1152
{
1153
bool result = false;
1154
if (clazz != NULL) {
1155
result = markClassNoCheck(env, clazz);
1156
}
1157
return result;
1158
}
1159
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
1160
1161
#if defined(J9VM_GC_FINALIZATION)
1162
void
1163
MM_MetronomeDelegate::scanUnfinalizedObjects(MM_EnvironmentRealtime *env)
1164
{
1165
const UDATA maxIndex = getUnfinalizedObjectListCount(env);
1166
/* First we need to move the current list to the prior list and process the prior list,
 * because if an object has not yet become finalizable, we have to re-insert it back into the current list.
 */
1169
if (env->_currentTask->synchronizeGCThreadsAndReleaseMain(env, UNIQUE_ID)) {
1170
GC_OMRVMInterface::flushNonAllocationCaches(env);
1171
UDATA listIndex;
1172
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1173
MM_UnfinalizedObjectList *unfinalizedObjectList = &_extensions->unfinalizedObjectLists[listIndex];
1174
unfinalizedObjectList->startUnfinalizedProcessing();
1175
}
1176
env->_currentTask->releaseSynchronizedGCThreads(env);
1177
}
1178
1179
GC_Environment *gcEnv = env->getGCEnvironment();
1180
GC_FinalizableObjectBuffer buffer(_extensions);
1181
UDATA listIndex;
1182
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1183
if(J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
1184
MM_UnfinalizedObjectList *unfinalizedObjectList = &_extensions->unfinalizedObjectLists[listIndex];
1185
J9Object *object = unfinalizedObjectList->getPriorList();
1186
UDATA objectsVisited = 0;
1187
1188
while (NULL != object) {
1189
objectsVisited += 1;
1190
gcEnv->_markJavaStats._unfinalizedCandidates += 1;
1191
J9Object* next = _extensions->accessBarrier->getFinalizeLink(object);
1192
if (_markingScheme->markObject(env, object)) {
1193
/* object was not previously marked -- it is now finalizable so push it to the local buffer */
1194
buffer.add(env, object);
1195
gcEnv->_markJavaStats._unfinalizedEnqueued += 1;
1196
_finalizationRequired = true;
1197
} else {
1198
/* object was already marked. It is still unfinalized */
1199
gcEnv->_unfinalizedObjectBuffer->add(env, object);
1200
}
1201
object = next;
1202
if (UNFINALIZED_OBJECT_YIELD_CHECK_INTERVAL == objectsVisited ) {
1203
_scheduler->condYieldFromGC(env);
1204
objectsVisited = 0;
1205
}
1206
}
1207
_scheduler->condYieldFromGC(env);
1208
}
1209
}
1210
1211
/* Flush the local buffer of finalizable objects to the global list */
1212
buffer.flush(env);
1213
1214
/* restore everything to a flushed state before exiting */
1215
gcEnv->_unfinalizedObjectBuffer->flush(env);
1216
}
1217
#endif /* J9VM_GC_FINALIZATION */
1218
1219
void
1220
MM_MetronomeDelegate::scanOwnableSynchronizerObjects(MM_EnvironmentRealtime *env)
1221
{
1222
const UDATA maxIndex = getOwnableSynchronizerObjectListCount(env);
1223
1224
/* First we need to move the current list to the prior list and process the prior list,
 * because if an object has been marked, we have to re-insert it back into the current list.
 */
1227
if (env->_currentTask->synchronizeGCThreadsAndReleaseMain(env, UNIQUE_ID)) {
1228
GC_OMRVMInterface::flushNonAllocationCaches(env);
1229
UDATA listIndex;
1230
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1231
MM_OwnableSynchronizerObjectList *ownableSynchronizerObjectList = &_extensions->getOwnableSynchronizerObjectLists()[listIndex];
1232
ownableSynchronizerObjectList->startOwnableSynchronizerProcessing();
1233
}
1234
env->_currentTask->releaseSynchronizedGCThreads(env);
1235
}
1236
1237
GC_Environment *gcEnv = env->getGCEnvironment();
1238
MM_OwnableSynchronizerObjectBuffer *buffer = gcEnv->_ownableSynchronizerObjectBuffer;
1239
UDATA listIndex;
1240
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1241
MM_OwnableSynchronizerObjectList *list = &_extensions->getOwnableSynchronizerObjectLists()[listIndex];
1242
if (!list->wasEmpty()) {
1243
if (J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
1244
J9Object *object = list->getPriorList();
1245
UDATA objectsVisited = 0;
1246
while (NULL != object) {
1247
objectsVisited += 1;
1248
gcEnv->_markJavaStats._ownableSynchronizerCandidates += 1;
1249
1250
/* Get next before adding it to the buffer, as buffer modifies OwnableSynchronizerLink */
1251
J9Object* next = _extensions->accessBarrier->getOwnableSynchronizerLink(object);
1252
if (_markingScheme->isMarked(object)) {
1253
buffer->add(env, object);
1254
} else {
1255
gcEnv->_markJavaStats._ownableSynchronizerCleared += 1;
1256
}
1257
object = next;
1258
1259
if (OWNABLE_SYNCHRONIZER_OBJECT_YIELD_CHECK_INTERVAL == objectsVisited ) {
1260
_scheduler->condYieldFromGC(env);
1261
objectsVisited = 0;
1262
}
1263
}
1264
_scheduler->condYieldFromGC(env);
1265
}
1266
}
1267
}
1268
/* restore everything to a flushed state before exiting */
1269
buffer->flush(env);
1270
}
1271
1272
void
1273
MM_MetronomeDelegate::scanWeakReferenceObjects(MM_EnvironmentRealtime *env)
1274
{
1275
GC_Environment *gcEnv = env->getGCEnvironment();
1276
Assert_MM_true(gcEnv->_referenceObjectBuffer->isEmpty());
1277
const UDATA maxIndex = getReferenceObjectListCount(env);
1278
UDATA listIndex;
1279
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1280
if(J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
1281
MM_ReferenceObjectList *referenceObjectList = &_extensions->referenceObjectLists[listIndex];
1282
referenceObjectList->startWeakReferenceProcessing();
1283
processReferenceList(env, NULL, referenceObjectList->getPriorWeakList(), &gcEnv->_markJavaStats._weakReferenceStats);
1284
_scheduler->condYieldFromGC(env);
1285
}
1286
}
1287
Assert_MM_true(gcEnv->_referenceObjectBuffer->isEmpty());
1288
}
1289
1290
void
1291
MM_MetronomeDelegate::scanSoftReferenceObjects(MM_EnvironmentRealtime *env)
1292
{
1293
GC_Environment *gcEnv = env->getGCEnvironment();
1294
Assert_MM_true(gcEnv->_referenceObjectBuffer->isEmpty());
1295
const UDATA maxIndex = getReferenceObjectListCount(env);
1296
UDATA listIndex;
1297
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1298
if(J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
1299
MM_ReferenceObjectList *referenceObjectList = &_extensions->referenceObjectLists[listIndex];
1300
referenceObjectList->startSoftReferenceProcessing();
1301
processReferenceList(env, NULL, referenceObjectList->getPriorSoftList(), &gcEnv->_markJavaStats._softReferenceStats);
1302
_scheduler->condYieldFromGC(env);
1303
}
1304
}
1305
Assert_MM_true(gcEnv->_referenceObjectBuffer->isEmpty());
1306
}
1307
1308
void
1309
MM_MetronomeDelegate::scanPhantomReferenceObjects(MM_EnvironmentRealtime *env)
1310
{
1311
GC_Environment *gcEnv = env->getGCEnvironment();
1312
/* unfinalized processing may discover more phantom reference objects */
1313
gcEnv->_referenceObjectBuffer->flush(env);
1314
const UDATA maxIndex = getReferenceObjectListCount(env);
1315
UDATA listIndex;
1316
for (listIndex = 0; listIndex < maxIndex; ++listIndex) {
1317
if(J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
1318
MM_ReferenceObjectList *referenceObjectList = &_extensions->referenceObjectLists[listIndex];
1319
referenceObjectList->startPhantomReferenceProcessing();
1320
processReferenceList(env, NULL, referenceObjectList->getPriorPhantomList(), &gcEnv->_markJavaStats._phantomReferenceStats);
1321
_scheduler->condYieldFromGC(env);
1322
}
1323
}
1324
Assert_MM_true(gcEnv->_referenceObjectBuffer->isEmpty());
1325
}
1326
1327
void
MM_MetronomeDelegate::processReferenceList(MM_EnvironmentRealtime *env, MM_HeapRegionDescriptorRealtime *region, J9Object* headOfList, MM_ReferenceStats *referenceStats)
{
	UDATA objectsVisited = 0;
#if defined(J9VM_GC_FINALIZATION)
	GC_FinalizableReferenceBuffer buffer(_extensions);
#endif /* J9VM_GC_FINALIZATION */
	J9Object* referenceObj = headOfList;

	while (NULL != referenceObj) {
		objectsVisited += 1;
		referenceStats->_candidates += 1;

		Assert_MM_true(_markingScheme->isMarked(referenceObj));

		J9Object* nextReferenceObj = _extensions->accessBarrier->getReferenceLink(referenceObj);

		GC_SlotObject referentSlotObject(_extensions->getOmrVM(), J9GC_J9VMJAVALANGREFERENCE_REFERENT_ADDRESS(env, referenceObj));
		J9Object *referent = referentSlotObject.readReferenceFromSlot();
		if (NULL != referent) {
			UDATA referenceObjectType = J9CLASS_FLAGS(J9GC_J9OBJECT_CLAZZ(referenceObj, env)) & J9AccClassReferenceMask;
			if (_markingScheme->isMarked(referent)) {
				if (J9AccClassReferenceSoft == referenceObjectType) {
					U_32 age = J9GC_J9VMJAVALANGSOFTREFERENCE_AGE(env, referenceObj);
					if (age < _extensions->getMaxSoftReferenceAge()) {
						/* Soft reference hasn't aged sufficiently yet - increment the age */
						J9GC_J9VMJAVALANGSOFTREFERENCE_AGE(env, referenceObj) = age + 1;
					}
				}
			} else {
				/* transition the state to cleared */
				Assert_MM_true(GC_ObjectModel::REF_STATE_INITIAL == J9GC_J9VMJAVALANGREFERENCE_STATE(env, referenceObj));
				J9GC_J9VMJAVALANGREFERENCE_STATE(env, referenceObj) = GC_ObjectModel::REF_STATE_CLEARED;

				referenceStats->_cleared += 1;

				/* A phantom reference keeps its referent alive on Java 8 and earlier, but not on Java 9 and later */
				if ((J9AccClassReferencePhantom == referenceObjectType) && ((J2SE_VERSION(_javaVM) & J2SE_VERSION_MASK) <= J2SE_18)) {
					/* Scanning will be done after the enqueuing */
					_markingScheme->markObject(env, referent);
				} else {
					referentSlotObject.writeReferenceToSlot(NULL);
				}
#if defined(J9VM_GC_FINALIZATION)
				/* Check if the reference has a queue */
				if (0 != J9GC_J9VMJAVALANGREFERENCE_QUEUE(env, referenceObj)) {
					/* Reference object can be enqueued onto the finalizable list */
					buffer.add(env, referenceObj);
					referenceStats->_enqueued += 1;
					/* Flag for the finalizer */
					_finalizationRequired = true;
				}
#endif /* J9VM_GC_FINALIZATION */
			}
		}
		referenceObj = nextReferenceObj;
		if (REFERENCE_OBJECT_YIELD_CHECK_INTERVAL == objectsVisited) {
			_scheduler->condYieldFromGC(env);
			objectsVisited = 0;
		}
	}
#if defined(J9VM_GC_FINALIZATION)
	buffer.flush(env);
#endif /* J9VM_GC_FINALIZATION */
}
/**
 * Mark all of the roots. The system and application classloaders need to be set
 * to marked/scanned before root marking begins.
 *
 * @note Once the root lists all have barriers this code may change to call rootScanner.scanRoots();
 *
 */
void
MM_MetronomeDelegate::markLiveObjectsRoots(MM_EnvironmentRealtime *env)
{
	MM_RealtimeMarkingSchemeRootMarker rootMarker(env, _realtimeGC);
	env->setRootScanner(&rootMarker);
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	rootMarker.setClassDataAsRoots(!isDynamicClassUnloadingEnabled());
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

	/* Mark root set classes */
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	if(env->isMainThread()) {
		/* TODO: This code belongs somewhere else? */
		/* Setting the permanent class loaders to scanned without a locked operation is safe:
		 * class loaders will not be rescanned until a thread synchronize is executed.
		 */
		if(isDynamicClassUnloadingEnabled()) {
			((J9ClassLoader *)_javaVM->systemClassLoader)->gcFlags |= J9_GC_CLASS_LOADER_SCANNED;
			_markingScheme->markObject(env, (J9Object *)((J9ClassLoader *)_javaVM->systemClassLoader)->classLoaderObject);
			if(_javaVM->applicationClassLoader) {
				((J9ClassLoader *)_javaVM->applicationClassLoader)->gcFlags |= J9_GC_CLASS_LOADER_SCANNED;
				_markingScheme->markObject(env, (J9Object *)((J9ClassLoader *)_javaVM->applicationClassLoader)->classLoaderObject);
			}
		}
	}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

	/* Note: it's important to scan the finalizable objects queue (atomically) before the
	 * threads, because the finalizer threads are among the threads and, once any one of
	 * them is scanned and then allowed to execute, any object it takes off the finalizer
	 * queue had better also be scanned. An alternative would be to put a special read
	 * barrier in the queue-removal action, but controlling the order is an easy solution.
	 *
	 * It is also important to scan JNI global references after scanning threads, because
	 * the JNI global reference barrier is executed at deletion time, not creation time.
	 * We could have barriers in both, but controlling the order is an easy solution.
	 *
	 * The Metronome write barrier ensures that no unscanned thread can expose an object
	 * to other threads without it becoming a root, and no scanned thread can make an
	 * object that was once reachable unreachable until it has been traced ("snapshot at
	 * the beginning with a fuzzy snapshot"). This eliminates other order dependencies
	 * between portions of the scan, or requirements that multiple phases be done as an
	 * atomic unit. However, some phases are still done atomically because we have not
	 * yet determined whether the iterators that they use are safe and complete, and have
	 * not even analyzed in all cases whether correctness depends on completeness.
	 */
	if (env->_currentTask->synchronizeGCThreadsAndReleaseMain(env, UNIQUE_ID)) {
#if defined(J9VM_GC_FINALIZATION)
		/* Note: if iterators are safe in scanFinalizableObjects, disableYield() could be
		 * removed.
		 */
		env->disableYield();
		rootMarker.scanFinalizableObjects(env);
		env->enableYield();
		_scheduler->condYieldFromGC(env);
#endif /* J9VM_GC_FINALIZATION */
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
		if (!isDynamicClassUnloadingEnabled()) {
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
			/* We are scanning all classes, no need to include stack frame references */
			rootMarker.setIncludeStackFrameClassReferences(false);
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
		} else {
			rootMarker.setIncludeStackFrameClassReferences(true);
		}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
		env->_currentTask->releaseSynchronizedGCThreads(env);
	}

	rootMarker.scanThreads(env);

	if (env->_currentTask->synchronizeGCThreadsAndReleaseMain(env, UNIQUE_ID)) {
		_extensions->newThreadAllocationColor = GC_MARK;
		_realtimeGC->disableDoubleBarrier(env);
		if (_realtimeGC->verbose(env) >= 3) {
			rootMarker.reportThreadCount(env);
		}

		/* Note: if iterators are safe for some or all remaining atomic root categories,
		 * disableYield() could be removed or moved inside scanAtomicRoots.
		 */
		env->disableYield();
		rootMarker.scanAtomicRoots(env);
		env->enableYield();
		rootMarker.scanIncrementalRoots(env);

		env->_currentTask->releaseSynchronizedGCThreads(env);
	}

	env->setRootScanner(NULL);
}
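/**
 * Flush the calling thread's reference object buffer, presumably so that reference objects
 * discovered during marking are visible to the clearable processing that follows.
 */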
void
MM_MetronomeDelegate::markLiveObjectsScan(MM_EnvironmentRealtime *env)
{
	env->getGCEnvironment()->_referenceObjectBuffer->flush(env);
}
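/**
 * Complete marking by running the clearable root scanner, which processes reference objects
 * and finalizable objects. The reference object buffer is expected to be empty afterwards.
 */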
void
MM_MetronomeDelegate::markLiveObjectsComplete(MM_EnvironmentRealtime *env)
{
	/* Process reference objects and finalizable objects. */
	MM_RealtimeMarkingSchemeRootClearer rootScanner(env, _realtimeGC);
	env->setRootScanner(&rootScanner);
	rootScanner.scanClearable(env);
	env->setRootScanner(NULL);
	Assert_MM_true(env->getGCEnvironment()->_referenceObjectBuffer->isEmpty());
}
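/**
 * Sanity check: assert that the calling thread's reference object buffer is empty.
 */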
void
MM_MetronomeDelegate::checkReferenceBuffer(MM_EnvironmentRealtime *env)
{
	Assert_MM_true(env->getGCEnvironment()->_referenceObjectBuffer->isEmpty());
}
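/**
 * Raise the _unmarkedImpliesCleared flag. As described in unsetUnmarkedImpliesCleared() below,
 * the flag is set during soft reference scanning, just before unmarked references are cleared,
 * to prevent objects that are about to be cleared from escaping.
 */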
void
MM_MetronomeDelegate::setUnmarkedImpliesCleared()
{
	_unmarkedImpliesCleared = true;
}
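/**
 * Clear the unmarked-implies-cleared flags once reference clearing is done and, when dynamic
 * class unloading is enabled, allow mark information to be used to detect dead classes.
 */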
void
MM_MetronomeDelegate::unsetUnmarkedImpliesCleared()
{
	/* These flags are set during soft reference scanning, just before unmarked references are to
	 * be cleared. They are used to prevent objects that are going to be cleared (e.g. a referent
	 * that is not marked, or an unmarked string constant) from escaping.
	 */
	_unmarkedImpliesCleared = false;
	_unmarkedImpliesStringsCleared = false;

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	/* enable use of mark information to detect whether a class is dead */
	_unmarkedImpliesClasses = true;
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
}

#endif /* defined(J9VM_GC_REALTIME) */