GitHub Repository: PojavLauncherTeam/openj9
Path: blob/master/runtime/gc_glue_java/MarkingDelegate.cpp
/*******************************************************************************
 * Copyright (c) 2017, 2022 IBM Corp. and others
 *
 * This program and the accompanying materials are made available under
 * the terms of the Eclipse Public License 2.0 which accompanies this
 * distribution and is available at https://www.eclipse.org/legal/epl-2.0/
 * or the Apache License, Version 2.0 which accompanies this distribution and
 * is available at https://www.apache.org/licenses/LICENSE-2.0.
 *
 * This Source Code may also be made available under the following
 * Secondary Licenses when the conditions for such availability set
 * forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
 * General Public License, version 2 with the GNU Classpath
 * Exception [1] and GNU General Public License, version 2 with the
 * OpenJDK Assembly Exception [2].
 *
 * [1] https://www.gnu.org/software/classpath/license.html
 * [2] http://openjdk.java.net/legal/assembly-exception.html
 *
 * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
 *******************************************************************************/

#include "j9.h"
#include "j9cfg.h"
#include "j9class.h"
#include "j9consts.h"
#include "j9cp.h"
#include "j9modron.h"
#include "j9nongenerated.h"
#include "j2sever.h"
#include "omrcomp.h"
#include "omrsrp.h"

#include "ClassHeapIterator.hpp"
#include "ClassLoaderIterator.hpp"
#include "ClassLoaderSegmentIterator.hpp"
#include "ClassModel.hpp"
#if defined(J9VM_GC_FINALIZATION)
#include "CollectorLanguageInterfaceImpl.hpp"
#endif /* defined(J9VM_GC_FINALIZATION) */
#include "ConfigurationDelegate.hpp"
#include "EnvironmentDelegate.hpp"
#include "FinalizableReferenceBuffer.hpp"
#include "GlobalCollector.hpp"
#include "Heap.hpp"
#include "HeapRegionDescriptorStandard.hpp"
#include "HeapRegionIteratorStandard.hpp"
#include "MarkingScheme.hpp"
#include "MarkingSchemeRootMarker.hpp"
#include "MarkingSchemeRootClearer.hpp"
#include "OwnableSynchronizerObjectList.hpp"
#include "ParallelDispatcher.hpp"
#include "ReferenceObjectBuffer.hpp"
#include "RootScanner.hpp"
#include "StackSlotValidator.hpp"
#include "Task.hpp"
#include "UnfinalizedObjectList.hpp"
#include "WorkPackets.hpp"

#include "MarkingDelegate.hpp"

/* Verify that leaf bit optimization build flags are defined identically for j9 and omr */
#if defined(J9VM_GC_LEAF_BITS) != defined(OMR_GC_LEAF_BITS)
#error "Build flags J9VM_GC_LEAF_BITS and OMR_GC_LEAF_BITS must be enabled/disabled identically"
#endif /* defined(J9VM_GC_LEAF_BITS) != defined(OMR_GC_LEAF_BITS) */
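
/*
 * Overview: MM_MarkingDelegate supplies the Java (J9) specific glue for the OMR marking scheme --
 * root scanning, class and class loader marking, java.lang.ref.Reference processing, unfinalized
 * object and ownable synchronizer list handling, and pointer array splitting. The mark map
 * reference is cached only when dynamic class unloading may run (see initialize(),
 * mainSetupForGC() and mainCleanupAfterGC() below).
 */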
bool
MM_MarkingDelegate::initialize(MM_EnvironmentBase *env, MM_MarkingScheme *markingScheme)
{
	_omrVM = env->getOmrVM();
	_extensions = MM_GCExtensions::getExtensions(env);
	_markingScheme = markingScheme;
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	_markMap = (_extensions->dynamicClassUnloading != MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_NEVER) ? markingScheme->getMarkMap() : NULL;
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
	return true;
}

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
void
MM_MarkingDelegate::clearClassLoadersScannedFlag(MM_EnvironmentBase *env)
{
	J9JavaVM *javaVM = (J9JavaVM*)env->getLanguageVM();

	/*
	 * Class loaders might already have been scanned during the concurrent stage.
	 * Clear the "scanned" flag of all class loaders so that they are scanned again.
	 */
	GC_ClassLoaderIterator classLoaderIterator(javaVM->classLoaderBlocks);
	J9ClassLoader *classLoader;
	while ((classLoader = classLoaderIterator.nextSlot()) != NULL) {
		classLoader->gcFlags &= ~J9_GC_CLASS_LOADER_SCANNED;
	}

	/*
	 * Clear the "scanned" flag for all classes in the anonymous class loader.
	 */
	classLoader = javaVM->anonClassLoader;
	if (NULL != classLoader) {
		GC_ClassLoaderSegmentIterator segmentIterator(classLoader, MEMORY_TYPE_RAM_CLASS);
		J9MemorySegment *segment = NULL;
		while (NULL != (segment = segmentIterator.nextSegment())) {
			GC_ClassHeapIterator classHeapIterator(javaVM, segment);
			J9Class *clazz = NULL;
			while (NULL != (clazz = classHeapIterator.nextClass())) {
				J9CLASS_EXTENDED_FLAGS_CLEAR(clazz, J9ClassGCScanned);
			}
		}
	}
}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

void
MM_MarkingDelegate::mainSetupForWalk(MM_EnvironmentBase *env)
{
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	_markMap = NULL;
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

	/* treat all interned strings as roots for the purposes of a heap walk */
	_collectStringConstantsEnabled = false;
}

void
MM_MarkingDelegate::workerSetupForGC(MM_EnvironmentBase *env)
{
	GC_Environment *gcEnv = env->getGCEnvironment();
	gcEnv->_markJavaStats.clear();
#if defined(J9VM_GC_MODRON_SCAVENGER)
	if (_extensions->scavengerEnabled) {
		/* clear scavenger stats for correcting the ownableSynchronizerObjects stats, only in generational gc */
		gcEnv->_scavengerJavaStats.clearOwnableSynchronizerCounts();
	}
#endif /* defined(J9VM_GC_MODRON_SCAVENGER) */
#if defined(OMR_GC_MODRON_STANDARD) || defined(OMR_GC_REALTIME)
	/* record that this thread is participating in this cycle */
	env->_markStats._gcCount = env->_workPacketStats._gcCount = _extensions->globalGCStats.gcCount;
#endif /* defined(OMR_GC_MODRON_STANDARD) || defined(OMR_GC_REALTIME) */
}

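/*
 * Summary: workerCompleteGC() flushes this thread's reference object buffer, lets a single
 * thread turn on soft and weak reference clearing in the cycle state, and then runs
 * MM_MarkingSchemeRootClearer over the clearable roots.
 */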
void
MM_MarkingDelegate::workerCompleteGC(MM_EnvironmentBase *env)
{
	/* ensure that all buffers have been flushed before we start reference processing */
	GC_Environment *gcEnv = env->getGCEnvironment();
	gcEnv->_referenceObjectBuffer->flush(env);

	if (env->_currentTask->synchronizeGCThreadsAndReleaseSingleThread(env, UNIQUE_ID)) {
		env->_cycleState->_referenceObjectOptions |= MM_CycleState::references_clear_soft;
		env->_cycleState->_referenceObjectOptions |= MM_CycleState::references_clear_weak;
		env->_currentTask->releaseSynchronizedGCThreads(env);
	}
	MM_MarkingSchemeRootClearer rootClearer(env, _markingScheme, this);
	rootClearer.setStringTableAsRoot(!_collectStringConstantsEnabled);
	rootClearer.scanClearable(env);
}

void
MM_MarkingDelegate::workerCleanupAfterGC(MM_EnvironmentBase *env)
{
	GC_Environment *gcEnv = env->getGCEnvironment();
	Assert_MM_true(gcEnv->_referenceObjectBuffer->isEmpty());

	_extensions->markJavaStats.merge(&gcEnv->_markJavaStats);
#if defined(J9VM_GC_MODRON_SCAVENGER)
	if (_extensions->scavengerEnabled) {
		/* merge scavenger ownableSynchronizerObjects stats, only in generational gc */
		_extensions->scavengerJavaStats.mergeOwnableSynchronizerCounts(&gcEnv->_scavengerJavaStats);
	}
#endif /* defined(J9VM_GC_MODRON_SCAVENGER) */
}

void
MM_MarkingDelegate::mainSetupForGC(MM_EnvironmentBase *env)
{
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	clearClassLoadersScannedFlag(env);
	_markMap = (0 != _extensions->runtimeCheckDynamicClassUnloading) ? _markingScheme->getMarkMap() : NULL;
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

	_collectStringConstantsEnabled = _extensions->collectStringConstants;
}

void
MM_MarkingDelegate::mainCleanupAfterGC(MM_EnvironmentBase *env)
{
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	_markMap = (_extensions->dynamicClassUnloading != MM_GCExtensions::DYNAMIC_CLASS_UNLOADING_NEVER) ? _markingScheme->getMarkMap() : NULL;
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
}

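/*
 * Summary: a single thread resets the per-region unfinalized object and ownable synchronizer
 * lists and records whether any list held entries, presumably so that the corresponding
 * clearable scans can be skipped when every list was empty.
 */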
void
MM_MarkingDelegate::startRootListProcessing(MM_EnvironmentBase *env)
{
	/* Start unfinalized object and ownable synchronizer processing */
	if (J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
		_shouldScanUnfinalizedObjects = false;
		_shouldScanOwnableSynchronizerObjects = false;
		MM_HeapRegionDescriptorStandard *region = NULL;
		GC_HeapRegionIteratorStandard regionIterator(_extensions->heap->getHeapRegionManager());
		while (NULL != (region = regionIterator.nextRegion())) {
			MM_HeapRegionDescriptorStandardExtension *regionExtension = MM_ConfigurationDelegate::getHeapRegionDescriptorStandardExtension(env, region);
			for (UDATA i = 0; i < regionExtension->_maxListIndex; i++) {
				/* Start unfinalized object processing for region */
				MM_UnfinalizedObjectList *unfinalizedObjectList = &(regionExtension->_unfinalizedObjectLists[i]);
				unfinalizedObjectList->startUnfinalizedProcessing();
				if (!unfinalizedObjectList->wasEmpty()) {
					_shouldScanUnfinalizedObjects = true;
				}
				/* Start ownable synchronizer processing for region */
				MM_OwnableSynchronizerObjectList *ownableSynchronizerObjectList = &(regionExtension->_ownableSynchronizerObjectLists[i]);
				ownableSynchronizerObjectList->startOwnableSynchronizerProcessing();
				if (!ownableSynchronizerObjectList->wasEmpty()) {
					_shouldScanOwnableSynchronizerObjects = true;
				}
			}
		}
	}
}

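/*
 * Summary: scanRoots() optionally kicks off root list processing, then configures the root
 * marker: the string table is treated as a root unless string constants are collected, and
 * class data is treated as roots unless dynamic class unloading is enabled. With class
 * unloading enabled, the main thread eagerly marks the system and application class loaders
 * and flags them as already scanned.
 */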
void
MM_MarkingDelegate::scanRoots(MM_EnvironmentBase *env, bool processLists)
{
	if (processLists) {
		startRootListProcessing(env);
	}

	/* Reset MM_RootScanner base class for scanning */
	MM_MarkingSchemeRootMarker rootMarker(env, _markingScheme, this);
	rootMarker.setStringTableAsRoot(!_collectStringConstantsEnabled);

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	/* Mark root set classes */
	rootMarker.setClassDataAsRoots(!isDynamicClassUnloadingEnabled());
	if (isDynamicClassUnloadingEnabled()) {
		/* Setting the permanent class loaders to scanned without a locked operation is safe.
		 * Class loaders will not be rescanned until a thread synchronization point is executed.
		 */
		if (env->isMainThread()) {
			J9JavaVM *javaVM = (J9JavaVM*)env->getLanguageVM();
			((J9ClassLoader *)javaVM->systemClassLoader)->gcFlags |= J9_GC_CLASS_LOADER_SCANNED;
			_markingScheme->markObject(env, (omrobjectptr_t)((J9ClassLoader *)javaVM->systemClassLoader)->classLoaderObject);
			if (javaVM->applicationClassLoader) {
				((J9ClassLoader *)javaVM->applicationClassLoader)->gcFlags |= J9_GC_CLASS_LOADER_SCANNED;
				_markingScheme->markObject(env, (omrobjectptr_t)((J9ClassLoader *)javaVM->applicationClassLoader)->classLoaderObject);
			}
		}
	}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */

	/* Scan roots */
	rootMarker.scanRoots(env);
}

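/*
 * Summary: completeMarking() drives the class marking loop to a fixed point. Scanning a class
 * (or class loader) can mark objects that belong to other class loaders, so any thread that
 * scans something sets _anotherClassMarkPass, and all threads repeat the loop until a pass
 * finishes with nothing newly scanned.
 */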
void
MM_MarkingDelegate::completeMarking(MM_EnvironmentBase *env)
{
#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	if (isDynamicClassUnloadingEnabled()) {
		J9ClassLoader *classLoader;
		J9JavaVM *javaVM = (J9JavaVM*)env->getLanguageVM();

		if (env->_currentTask->synchronizeGCThreadsAndReleaseSingleThread(env, UNIQUE_ID)) {
			_anotherClassMarkPass = false;
			_anotherClassMarkLoopIteration = true;
			env->_currentTask->releaseSynchronizedGCThreads(env);
		}

		while (_anotherClassMarkLoopIteration) {
			GC_ClassLoaderIterator classLoaderIterator(javaVM->classLoaderBlocks);
			while ((classLoader = classLoaderIterator.nextSlot()) != NULL) {
				/* We cannot go more granular (for example per class segment) since the class loader flag is changed
				 * and several threads might contend to set the value of the flags.
				 */
				if (J9MODRON_HANDLE_NEXT_WORK_UNIT(env)) {
					if (0 == (classLoader->gcFlags & J9_GC_CLASS_LOADER_DEAD)) {
						if (J9CLASSLOADER_ANON_CLASS_LOADER == (classLoader->flags & J9CLASSLOADER_ANON_CLASS_LOADER)) {
							/* The anonymous class loader must always be scanned at the level of its individual classes */
							GC_ClassLoaderSegmentIterator segmentIterator(classLoader, MEMORY_TYPE_RAM_CLASS);
							J9MemorySegment *segment = NULL;
							while (NULL != (segment = segmentIterator.nextSegment())) {
								GC_ClassHeapIterator classHeapIterator(javaVM, segment);
								J9Class *clazz = NULL;
								while (NULL != (clazz = classHeapIterator.nextClass())) {
									Assert_MM_true(!J9_ARE_ANY_BITS_SET(clazz->classDepthAndFlags, J9AccClassDying));
									if ((0 == (J9CLASS_EXTENDED_FLAGS(clazz) & J9ClassGCScanned)) && _markingScheme->isMarked(clazz->classObject)) {
										J9CLASS_EXTENDED_FLAGS_SET(clazz, J9ClassGCScanned);

										scanClass(env, clazz);
										/* This may result in other class loaders being marked,
										 * so we have to do another pass
										 */
										_anotherClassMarkPass = true;
									}
								}
							}
						} else {
							/* Check if the class loader has not been scanned but is live */
							if ((0 == (classLoader->gcFlags & J9_GC_CLASS_LOADER_SCANNED)) && _markingScheme->isMarked(classLoader->classLoaderObject)) {
								/* Flag the class loader as being scanned */
								classLoader->gcFlags |= J9_GC_CLASS_LOADER_SCANNED;

								GC_ClassLoaderSegmentIterator segmentIterator(classLoader, MEMORY_TYPE_RAM_CLASS);
								J9MemorySegment *segment = NULL;
								J9Class *clazz = NULL;
								while (NULL != (segment = segmentIterator.nextSegment())) {
									GC_ClassHeapIterator classHeapIterator(javaVM, segment);
									while (NULL != (clazz = classHeapIterator.nextClass())) {
										scanClass(env, clazz);
										/* This may result in other class loaders being marked,
										 * so we have to do another pass
										 */
										_anotherClassMarkPass = true;
									}
								}

								/* CMVC 131487 */
								J9HashTableState walkState;
								/*
								 * We believe classLoader->classHashTable is NULL only for a dead class loader,
								 * so if the pointer happens to be NULL at this point, let it crash here.
								 */
								Assert_MM_true(NULL != classLoader->classHashTable);
								clazz = javaVM->internalVMFunctions->hashClassTableStartDo(classLoader, &walkState, 0);
								while (NULL != clazz) {
									_markingScheme->markObjectNoCheck(env, (omrobjectptr_t)clazz->classObject);
									_anotherClassMarkPass = true;
									clazz = javaVM->internalVMFunctions->hashClassTableNextDo(&walkState);
								}

								if (NULL != classLoader->moduleHashTable) {
									J9HashTableState moduleWalkState;
									J9Module **modulePtr = (J9Module**)hashTableStartDo(classLoader->moduleHashTable, &moduleWalkState);
									while (NULL != modulePtr) {
										J9Module * const module = *modulePtr;

										_markingScheme->markObjectNoCheck(env, (omrobjectptr_t)module->moduleObject);
										if (NULL != module->moduleName) {
											_markingScheme->markObjectNoCheck(env, (omrobjectptr_t)module->moduleName);
										}
										if (NULL != module->version) {
											_markingScheme->markObjectNoCheck(env, (omrobjectptr_t)module->version);
										}
										modulePtr = (J9Module**)hashTableNextDo(&moduleWalkState);
									}

									if (classLoader == javaVM->systemClassLoader) {
										_markingScheme->markObjectNoCheck(env, (omrobjectptr_t)javaVM->unamedModuleForSystemLoader->moduleObject);
									}
								}
							}
						}
					}
				}
			}

			/* In case some GC threads don't find a classLoader to work with (or are quick to finish with it),
			 * let them help empty out the generic work stack.
			 */
			_markingScheme->completeScan(env);

			/* We have to stop the threads while resetting the flag, to prevent them from rushing through another
			 * pass and losing an early "set flag".
			 */
			if (env->_currentTask->synchronizeGCThreadsAndReleaseSingleThread(env, UNIQUE_ID)) {
				/* if work is complete, end loop */
				_anotherClassMarkLoopIteration = _anotherClassMarkPass;
				_anotherClassMarkPass = false;
				env->_currentTask->releaseSynchronizedGCThreads(env);
			}
		}
	}
#endif /* defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING) */
}

void
MM_MarkingDelegate::scanClass(MM_EnvironmentBase *env, J9Class *clazz)
{
	/* Note: Class loader objects are handled separately */
	/*
	 * Scan and mark using GC_ClassIterator:
	 *  - class object
	 *  - class constant pool
	 *  - class statics
	 *  - class method types
	 *  - class call sites
	 *  - class varhandle method types
	 *
	 * As this function can be invoked during concurrent mark the slot is
	 * volatile so we must ensure that the compiler generates the correct
	 * code if markObject() is inlined.
	 */
	GC_ClassIterator classIterator(env, clazz, true);
	while (volatile omrobjectptr_t *slotPtr = classIterator.nextSlot()) {
		_markingScheme->markObject(env, *slotPtr);
	}

#if defined(J9VM_GC_DYNAMIC_CLASS_UNLOADING)
	if (isDynamicClassUnloadingEnabled()) {
		GC_ClassIteratorClassSlots classSlotIterator((J9JavaVM*)env->getLanguageVM(), clazz);
		J9Class *classPtr;
		while (NULL != (classPtr = classSlotIterator.nextSlot())) {
			_markingScheme->markObject(env, classPtr->classObject);
		}
	}
#endif /* J9VM_GC_DYNAMIC_CLASS_UNLOADING */
}

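/*
 * Summary: processReferenceList() walks one region's list of reference objects. A reference
 * whose referent is still marked is left alone (soft references merely age by one, up to the
 * maximum age). A reference whose referent is unmarked is moved from REF_STATE_INITIAL to
 * REF_STATE_CLEARED, its referent slot is NULLed (except for phantom references on Java 8,
 * whose referent is marked instead), and it is buffered for enqueuing when it has a queue.
 */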
void
MM_MarkingDelegate::processReferenceList(MM_EnvironmentBase *env, MM_HeapRegionDescriptorStandard *region, omrobjectptr_t headOfList, MM_ReferenceStats *referenceStats)
{
	/* no list can possibly contain more reference objects than there are bytes in a region */
	const UDATA maxObjects = region->getSize();
	UDATA objectsVisited = 0;
	GC_FinalizableReferenceBuffer buffer(_extensions);
#if defined(J9VM_GC_FINALIZATION)
	bool finalizationRequired = false;
#endif /* defined(J9VM_GC_FINALIZATION) */

	omrobjectptr_t referenceObj = headOfList;
	while (NULL != referenceObj) {
		objectsVisited += 1;
		referenceStats->_candidates += 1;

		Assert_MM_true(_markingScheme->isMarked(referenceObj));
		Assert_MM_true(objectsVisited < maxObjects);

		omrobjectptr_t nextReferenceObj = _extensions->accessBarrier->getReferenceLink(referenceObj);

		GC_SlotObject referentSlotObject(_omrVM, J9GC_J9VMJAVALANGREFERENCE_REFERENT_ADDRESS(env, referenceObj));

		if (NULL != referentSlotObject.readReferenceFromSlot()) {
			_markingScheme->fixupForwardedSlot(&referentSlotObject);
			omrobjectptr_t referent = referentSlotObject.readReferenceFromSlot();

			UDATA referenceObjectType = J9CLASS_FLAGS(J9GC_J9OBJECT_CLAZZ(referenceObj, env)) & J9AccClassReferenceMask;
			if (_markingScheme->isMarked(referent)) {
				if (J9AccClassReferenceSoft == referenceObjectType) {
					U_32 age = J9GC_J9VMJAVALANGSOFTREFERENCE_AGE(env, referenceObj);
					if (age < _extensions->getMaxSoftReferenceAge()) {
						/* Soft reference hasn't aged sufficiently yet - increment the age */
						J9GC_J9VMJAVALANGSOFTREFERENCE_AGE(env, referenceObj) = age + 1;
					}
				}
			} else {
				/* transition the state to cleared */
				Assert_MM_true(GC_ObjectModel::REF_STATE_INITIAL == J9GC_J9VMJAVALANGREFERENCE_STATE(env, referenceObj));
				J9GC_J9VMJAVALANGREFERENCE_STATE(env, referenceObj) = GC_ObjectModel::REF_STATE_CLEARED;

				referenceStats->_cleared += 1;

				/* Phantom references keep their referent alive in Java 8, but not in Java 9 and later */
				J9JavaVM *javaVM = (J9JavaVM*)env->getLanguageVM();
				if ((J9AccClassReferencePhantom == referenceObjectType) && ((J2SE_VERSION(javaVM) & J2SE_VERSION_MASK) <= J2SE_18)) {
					/* Phantom objects keep their referent - scanning will be done after the enqueuing */
					_markingScheme->inlineMarkObject(env, referent);
				} else {
					referentSlotObject.writeReferenceToSlot(NULL);
				}

				/* Check if the reference has a queue */
				if (0 != J9GC_J9VMJAVALANGREFERENCE_QUEUE(env, referenceObj)) {
					/* Reference object can be enqueued onto the finalizable list */
					buffer.add(env, referenceObj);
					referenceStats->_enqueued += 1;
#if defined(J9VM_GC_FINALIZATION)
					/* inform global GC if finalization is required */
					if (!finalizationRequired) {
						MM_GlobalCollector *globalCollector = (MM_GlobalCollector *)_extensions->getGlobalCollector();
						globalCollector->getGlobalCollectorDelegate()->setFinalizationRequired();
						finalizationRequired = true;
					}
#endif /* defined(J9VM_GC_FINALIZATION) */
				}
			}
		}

		referenceObj = nextReferenceObj;
	}

	buffer.flush(env);
}

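/*
 * Summary: getReferenceStatus() makes two decisions for a reference object from its type and
 * the cycle's _referenceObjectOptions:
 *  - referentMustBeCleared: the referent slot must be NULLed now (references_clear_weak /
 *    references_clear_soft / references_clear_phantom for the matching reference type);
 *  - referentMustBeMarked: the referent is kept alive as if strongly reachable; this covers
 *    references already cleared or enqueued, and soft references below the dynamic maximum
 *    age when soft references are not being treated as weak.
 * processReference() applies clearReference() with that status and reports whether the
 * referent must be marked.
 */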
bool
MM_MarkingDelegate::processReference(MM_EnvironmentBase *env, omrobjectptr_t objectPtr)
{
	bool isReferenceCleared = false;
	bool referentMustBeMarked = false;
	bool referentMustBeCleared = getReferenceStatus(env, objectPtr, &referentMustBeMarked, &isReferenceCleared);

	clearReference(env, objectPtr, isReferenceCleared, referentMustBeCleared);

	return referentMustBeMarked;
}

bool
MM_MarkingDelegate::getReferenceStatus(MM_EnvironmentBase *env, omrobjectptr_t objectPtr, bool *referentMustBeMarked, bool *isReferenceCleared)
{
	/*
	 * getReferenceStatus() is shared between stop-the-world and concurrent GC. During
	 * concurrent GC the cycle state of the mutator thread might not be set; if it is not
	 * set, we know we are in the concurrent phase (not the clearable phase).
	 */
	UDATA referenceObjectOptions = MM_CycleState::references_default;
	if (NULL != env->_cycleState) {
		referenceObjectOptions = env->_cycleState->_referenceObjectOptions;
	}

	I_32 referenceState = J9GC_J9VMJAVALANGREFERENCE_STATE(env, objectPtr);
	*isReferenceCleared = (GC_ObjectModel::REF_STATE_CLEARED == referenceState) || (GC_ObjectModel::REF_STATE_ENQUEUED == referenceState);
	*referentMustBeMarked = *isReferenceCleared;
	bool referentMustBeCleared = false;

	UDATA referenceObjectType = J9CLASS_FLAGS(J9GC_J9OBJECT_CLAZZ(objectPtr, env)) & J9AccClassReferenceMask;
	switch (referenceObjectType) {
	case J9AccClassReferenceWeak:
		referentMustBeCleared = (0 != (referenceObjectOptions & MM_CycleState::references_clear_weak));
		break;
	case J9AccClassReferenceSoft:
		referentMustBeCleared = (0 != (referenceObjectOptions & MM_CycleState::references_clear_soft));
		*referentMustBeMarked = *referentMustBeMarked || (
			((0 == (referenceObjectOptions & MM_CycleState::references_soft_as_weak))
			&& ((UDATA)J9GC_J9VMJAVALANGSOFTREFERENCE_AGE(env, objectPtr) < _extensions->getDynamicMaxSoftReferenceAge())));
		break;
	case J9AccClassReferencePhantom:
		referentMustBeCleared = (0 != (referenceObjectOptions & MM_CycleState::references_clear_phantom));
		break;
	default:
		Assert_MM_unreachable();
	}

	return referentMustBeCleared;
}

void
MM_MarkingDelegate::clearReference(MM_EnvironmentBase *env, omrobjectptr_t objectPtr, bool isReferenceCleared, bool referentMustBeCleared)
{
	if (referentMustBeCleared) {
		/* Discovering this object at this stage in the GC indicates that it is being resurrected. Clear its referent slot. */
		GC_SlotObject referentPtr(_omrVM, J9GC_J9VMJAVALANGREFERENCE_REFERENT_ADDRESS(env, objectPtr));
		referentPtr.writeReferenceToSlot(NULL);
		/* record that the reference has been cleared if it's not already in the cleared or enqueued state */
		if (!isReferenceCleared) {
			J9GC_J9VMJAVALANGREFERENCE_STATE(env, objectPtr) = GC_ObjectModel::REF_STATE_CLEARED;
		}
	} else {
		/* we don't need to process cleared or enqueued references */
		if (!isReferenceCleared) {
			/* For the overflow case we assume only three active reference states (REF_STATE_INITIAL,
			 * REF_STATE_CLEARED, REF_STATE_ENQUEUED); REF_STATE_REMEMBERED is used only by the balanced
			 * collector. "Reference cleared" means REF_STATE_CLEARED or REF_STATE_ENQUEUED, "not cleared"
			 * means REF_STATE_INITIAL. If any new state is added, this logic should be reconsidered.
			 */
			env->getGCEnvironment()->_referenceObjectBuffer->add(env, objectPtr);
		}
	}
}

fomrobject_t *
MM_MarkingDelegate::setupReferenceObjectScanner(MM_EnvironmentBase *env, omrobjectptr_t objectPtr, MM_MarkingSchemeScanReason reason)
{
	bool isReferenceCleared = false;
	bool referentMustBeMarked = false;
	bool referentMustBeCleared = getReferenceStatus(env, objectPtr, &referentMustBeMarked, &isReferenceCleared);

	GC_SlotObject referentSlotObject(_omrVM, J9GC_J9VMJAVALANGREFERENCE_REFERENT_ADDRESS(env, objectPtr));
	if (SCAN_REASON_PACKET == reason) {
		clearReference(env, objectPtr, isReferenceCleared, referentMustBeCleared);
	}

	fomrobject_t *referentSlotPtr = NULL;
	if (!referentMustBeMarked) {
		referentSlotPtr = referentSlotObject.readAddressFromSlot();
	}
	return referentSlotPtr;
}

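/*
 * Summary: setupPointerArrayScanner() splits large pointer arrays into chunks so several GC
 * threads can scan one array in parallel. The continuation is pushed on the work stack as a
 * pair: the array object plus a tagged work item built as
 * ((nextIndex << PACKET_ARRAY_SPLIT_SHIFT) | PACKET_ARRAY_SPLIT_TAG). Illustrative values
 * only (the real shift and tag are defined elsewhere): with a shift of 2 and a tag of 0x1,
 * nextIndex 5 encodes to 0x15 and is recovered as 0x15 >> 2 == 5.
 */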
uintptr_t
MM_MarkingDelegate::setupPointerArrayScanner(MM_EnvironmentBase *env, omrobjectptr_t objectPtr, MM_MarkingSchemeScanReason reason, uintptr_t *sizeToDo, uintptr_t *slotsToDo)
{
	uintptr_t startIndex = 0;
	uintptr_t headerBytesToScan = 0;
	uintptr_t workItem = (uintptr_t)env->_workStack.peek(env);
	if (PACKET_ARRAY_SPLIT_TAG == (workItem & PACKET_ARRAY_SPLIT_TAG)) {
		Assert_MM_true(SCAN_REASON_PACKET == reason);
		env->_workStack.pop(env);
		/* Since we are putting extra tagged objects on the work stack, we are responsible for ensuring that the
		 * object scanned count is correct. The MM_MarkingScheme::scanObject code will increment _objectsScanned
		 * by 1 for EVERY object popped off of the work stack if the scan reason is SCAN_REASON_PACKET. This code
		 * is only executed during regular packet scanning.
		 */
		env->_markStats._objectsScanned -= 1;
		/* only mark the class the first time we scan any array */
		startIndex = workItem >> PACKET_ARRAY_SPLIT_SHIFT;
	} else {
		/* account for the header size on the first scan */
		headerBytesToScan = _extensions->indexableObjectModel.getHeaderSize((J9IndexableObject *)objectPtr);
	}

	uintptr_t slotsToScan = 0;
	uintptr_t const referenceSize = env->compressObjectReferences() ? sizeof(uint32_t) : sizeof(uintptr_t);
	uintptr_t maxSlotsToScan = OMR_MAX(*sizeToDo / referenceSize, 1);
	Assert_MM_true(maxSlotsToScan > 0);
	uintptr_t sizeInElements = _extensions->indexableObjectModel.getSizeInElements((J9IndexableObject *)objectPtr);
	if (sizeInElements > 0) {
		Assert_MM_true(startIndex < sizeInElements);
		slotsToScan = sizeInElements - startIndex;

		/* Pointer arrays are split into segments to improve parallelism. The split amount is proportional to the
		 * array size and inversely proportional to the active thread count. Additionally, the less busy we are,
		 * the smaller the split amount, while obeying the specified minimum and maximum.
		 */

		uintptr_t arraySplitSize = slotsToScan / (_extensions->dispatcher->activeThreadCount() + 2 * _markingScheme->getWorkPackets()->getThreadWaitCount());
		arraySplitSize = OMR_MAX(arraySplitSize, _extensions->markingArraySplitMinimumAmount);
		arraySplitSize = OMR_MIN(arraySplitSize, _extensions->markingArraySplitMaximumAmount);

		if ((slotsToScan > arraySplitSize) || (slotsToScan > maxSlotsToScan)) {
			slotsToScan = OMR_MIN(arraySplitSize, maxSlotsToScan);

			/* immediately make the next chunk available for another thread to start processing */
			uintptr_t nextIndex = startIndex + slotsToScan;
			Assert_MM_true(nextIndex < sizeInElements);
			void *element1 = (void *)objectPtr;
			void *element2 = (void *)((nextIndex << PACKET_ARRAY_SPLIT_SHIFT) | PACKET_ARRAY_SPLIT_TAG);
			Assert_MM_true(nextIndex == (((uintptr_t)element2) >> PACKET_ARRAY_SPLIT_SHIFT));
			env->_workStack.push(env, element1, element2);
			env->_workStack.flushOutputPacket(env);
			MM_MarkJavaStats *markJavaStats = &(env->getGCEnvironment()->_markJavaStats);
			markJavaStats->splitArraysProcessed += 1;
			markJavaStats->splitArraysAmount += slotsToScan;
		}
	}

	*sizeToDo = headerBytesToScan + (slotsToScan * referenceSize);
	*slotsToDo = slotsToScan;
	return startIndex;
}