GitHub Repository: PojavLauncherTeam/jdk17u
Path: blob/master/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.cpp
/*
 * Copyright (c) 2021, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
26
27
#include "gc/shared/collectorCounters.hpp"
28
#include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
29
#include "gc/shenandoah/shenandoahConcurrentMark.hpp"
30
#include "gc/shenandoah/shenandoahDegeneratedGC.hpp"
31
#include "gc/shenandoah/shenandoahFullGC.hpp"
32
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
33
#include "gc/shenandoah/shenandoahMetrics.hpp"
34
#include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
35
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
36
#include "gc/shenandoah/shenandoahRootProcessor.inline.hpp"
37
#include "gc/shenandoah/shenandoahSTWMark.hpp"
38
#include "gc/shenandoah/shenandoahUtils.hpp"
39
#include "gc/shenandoah/shenandoahVerifier.hpp"
40
#include "gc/shenandoah/shenandoahWorkerPolicy.hpp"
41
#include "gc/shenandoah/shenandoahVMOperations.hpp"
42
#include "runtime/vmThread.hpp"
43
#include "utilities/events.hpp"
44
45
ShenandoahDegenGC::ShenandoahDegenGC(ShenandoahDegenPoint degen_point) :
  ShenandoahGC(),
  _degen_point(degen_point) {
}

bool ShenandoahDegenGC::collect(GCCause::Cause cause) {
  vmop_degenerated();
  return true;
}

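// Run the degenerated cycle as a VM operation, so the entire cycle executes
// in a stop-the-world pause on the VM thread.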
void ShenandoahDegenGC::vmop_degenerated() {
  TraceCollectorStats tcs(ShenandoahHeap::heap()->monitoring_support()->full_stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::degen_gc_gross);
  VM_ShenandoahDegeneratedGC degenerated_gc(this);
  VMThread::execute(&degenerated_gc);
}

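// Called by the VM operation at a safepoint: set up timing, logging and the
// worker scope, flag the degenerated GC as in progress, and run the cycle.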
void ShenandoahDegenGC::entry_degenerated() {
  const char* msg = degen_event_message(_degen_point);
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::degen_gc, true /* log_heap_usage */);
  EventMark em("%s", msg);
  ShenandoahHeap* const heap = ShenandoahHeap::heap();

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_stw_degenerated(),
                              "stw degenerated gc");

  heap->set_degenerated_gc_in_progress(true);
  op_degenerated();
  heap->set_degenerated_gc_in_progress(false);
}

void ShenandoahDegenGC::op_degenerated() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  // Degenerated GC is STW, but it can also fail. Current mechanics communicate
  // GC failure via the cancelled_gc() flag. So, if we detect the failure after
  // some phase, we have to upgrade the Degenerated GC to Full GC.
  heap->clear_cancelled_gc();

  ShenandoahMetricsSnapshot metrics;
  metrics.snap_before();

  switch (_degen_point) {
    // The cases below form a Duff's-device-like structure: they describe the actual GC cycle,
    // but enter it at different points, depending on which concurrent phase had
    // degenerated.

    case _degenerated_outside_cycle:
      // We have degenerated from outside the cycle, which means something is bad with
      // the heap, most probably heavy humongous fragmentation, or we are very low on free
      // space. It makes little sense to wait for Full GC to reclaim as much as it can, when
      // we can do the most aggressive degen cycle, which includes processing references and
      // class unloading, unless those features are explicitly disabled.

      // Degenerated from concurrent root mark, reset the flag for STW mark
      if (heap->is_concurrent_mark_in_progress()) {
        ShenandoahConcurrentMark::cancel();
        heap->set_concurrent_mark_in_progress(false);
      }

      // Note that we can only do this for "outside-cycle" degens, otherwise we would risk
      // changing the cycle parameters mid-cycle during concurrent -> degenerated handover.
      heap->set_unload_classes(heap->heuristics()->can_unload_classes());

      op_reset();

      // STW mark
      op_mark();

    case _degenerated_mark:
      // No fallthrough. Continue mark, handed over from concurrent mark if
      // concurrent mark has not yet completed.
      if (_degen_point == ShenandoahDegenPoint::_degenerated_mark &&
          heap->is_concurrent_mark_in_progress()) {
        op_finish_mark();
      }
      assert(!heap->cancelled_gc(), "STW mark can not OOM");

      // Select the collection set and prepare regions for evacuation
      op_prepare_evacuation();

      op_cleanup_early();

    case _degenerated_evac:
      // If heuristics thinks we should do the cycle, this flag would be set,
      // and we can do evacuation. Otherwise, it would be the shortcut cycle.
      if (heap->is_evacuation_in_progress()) {

        // Degeneration under oom-evac protocol might have left some objects in
        // collection set un-evacuated. Restart evacuation from the beginning to
        // capture all objects. For all the objects that are already evacuated,
        // it would be a simple check, which is supposed to be fast. This is also
        // safe to do even without degeneration, as CSet iterator is at beginning
        // in preparation for evacuation anyway.
        //
        // Before doing that, we need to make sure we never had any cset-pinned
        // regions. This may happen if allocation failure happened when evacuating
        // the about-to-be-pinned object, oom-evac protocol left the object in
        // the collection set, and then the pin reached the cset region. If we continue
        // the cycle here, we would trash the cset and alive objects in it. To avoid
        // it, we fail degeneration right away and slide into Full GC to recover.

        {
          heap->sync_pinned_region_status();
          heap->collection_set()->clear_current_index();

          ShenandoahHeapRegion* r;
          while ((r = heap->collection_set()->next()) != NULL) {
            if (r->is_pinned()) {
              heap->cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
              op_degenerated_fail();
              return;
            }
          }

          heap->collection_set()->clear_current_index();
        }
        op_evacuate();
        if (heap->cancelled_gc()) {
          op_degenerated_fail();
          return;
        }
      }

      // If heuristics thinks we should do the cycle, this flag would be set,
      // and we need to do update-refs. Otherwise, it would be the shortcut cycle.
      if (heap->has_forwarded_objects()) {
        op_init_updaterefs();
        assert(!heap->cancelled_gc(), "STW reference update can not OOM");
      }

    case _degenerated_updaterefs:
      if (heap->has_forwarded_objects()) {
        op_updaterefs();
        op_update_roots();
        assert(!heap->cancelled_gc(), "STW reference update can not OOM");
      }

      if (ClassUnloading) {
        // Disarm nmethods that were armed in the concurrent cycle.
        // In the case above, updating the roots should have disarmed them already.
        ShenandoahCodeRoots::disarm_nmethods();
      }

      op_cleanup_complete();
      break;
    default:
      ShouldNotReachHere();
  }

  if (ShenandoahVerify) {
    heap->verifier()->verify_after_degenerated();
  }

  if (VerifyAfterGC) {
    Universe::verify();
  }

  metrics.snap_after();

  // Check for futility and fail. There is no reason to do several back-to-back Degenerated cycles,
  // because that probably means the heap is overloaded and/or fragmented.
  if (!metrics.is_good_progress()) {
    heap->notify_gc_no_progress();
    heap->cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
    op_degenerated_futile();
  } else {
    heap->notify_gc_progress();
  }
}

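// Reset heap state in preparation for the STW mark.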
void ShenandoahDegenGC::op_reset() {
  ShenandoahHeap::heap()->prepare_gc();
}

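// Stop-the-world marking pass over the whole heap, starting from cleared mark state.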
void ShenandoahDegenGC::op_mark() {
  assert(!ShenandoahHeap::heap()->is_concurrent_mark_in_progress(), "Should be reset");
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_stw_mark);
  ShenandoahSTWMark mark(false /*full gc*/);
  mark.clear();
  mark.mark();
}

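// Finish the marking work handed over from the interrupted concurrent mark.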
void ShenandoahDegenGC::op_finish_mark() {
  ShenandoahConcurrentMark mark;
  mark.finish_mark();
}

void ShenandoahDegenGC::op_prepare_evacuation() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  if (ShenandoahVerify) {
    heap->verifier()->verify_roots_no_forwarded();
  }

  // STW cleanup of weak roots and class unloading
  heap->parallel_cleaning(false /*full gc*/);
  // Prepare regions and collection set
  heap->prepare_regions_and_collection_set(false /*concurrent*/);

  // Retire the TLABs, which will force threads to reacquire their TLABs after the pause.
  // This is needed for two reasons. Strong one: new allocations would be with the new freeset,
  // which would be outside the collection set, so no cset writes would happen there.
  // Weaker one: new allocations would happen past the update watermark, and so less work would
  // be needed for reference updates (would update the large filler instead).
  if (UseTLAB) {
    ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_final_manage_labs);
    heap->tlabs_retire(false);
  }

  if (!heap->collection_set()->is_empty()) {
    heap->set_evacuation_in_progress(true);
    heap->set_has_forwarded_objects(true);

    if (ShenandoahVerify) {
      heap->verifier()->verify_during_evacuation();
    }
  } else {
    if (ShenandoahVerify) {
      heap->verifier()->verify_after_concmark();
    }

    if (VerifyAfterGC) {
      Universe::verify();
    }
  }
}

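// Recycle the regions that became trash (immediate garbage) during collection set selection.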
void ShenandoahDegenGC::op_cleanup_early() {
  ShenandoahHeap::heap()->recycle_trash();
}

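// Evacuate live objects out of the collection set within this pause.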
void ShenandoahDegenGC::op_evacuate() {
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_stw_evac);
  ShenandoahHeap::heap()->evacuate_collection_set(false /* concurrent*/);
}

void ShenandoahDegenGC::op_init_updaterefs() {
  // Evacuation has completed
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  heap->set_evacuation_in_progress(false);
  heap->set_concurrent_weak_root_in_progress(false);
  heap->set_concurrent_strong_root_in_progress(false);

  heap->prepare_update_heap_references(false /*concurrent*/);
  heap->set_update_refs_in_progress(true);
}

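// Walk the heap and update all references to point to the evacuated copies.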
void ShenandoahDegenGC::op_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_updaterefs);
  // Handed over from concurrent update references phase
  heap->update_heap_references(false /*concurrent*/);

  heap->set_update_refs_in_progress(false);
  heap->set_has_forwarded_objects(false);
}

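// Update references in GC roots, refresh region states and rebuild the free set.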
void ShenandoahDegenGC::op_update_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();

  update_roots(false /*full_gc*/);

  heap->update_heap_region_states(false /*concurrent*/);

  if (ShenandoahVerify) {
    heap->verifier()->verify_after_updaterefs();
  }

  if (VerifyAfterGC) {
    Universe::verify();
  }

  heap->rebuild_free_set(false /*concurrent*/);
}

void ShenandoahDegenGC::op_cleanup_complete() {
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_cleanup_complete);
  ShenandoahHeap::heap()->recycle_trash();
}

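// The degenerated cycle itself failed (for example, evacuation hit an allocation
// failure or a pinned cset region): upgrade to Full GC within the same pause.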
void ShenandoahDegenGC::op_degenerated_fail() {
  log_info(gc)("Cannot finish degeneration, upgrading to Full GC");
  ShenandoahHeap::heap()->shenandoah_policy()->record_degenerated_upgrade_to_full();

  ShenandoahFullGC full_gc;
  full_gc.op_full(GCCause::_shenandoah_upgrade_to_full_gc);
}

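// The degenerated cycle completed but made no good progress: upgrade to Full GC
// so the next attempt actually reclaims memory.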
void ShenandoahDegenGC::op_degenerated_futile() {
  ShenandoahHeap::heap()->shenandoah_policy()->record_degenerated_upgrade_to_full();
  ShenandoahFullGC full_gc;
  full_gc.op_full(GCCause::_shenandoah_upgrade_to_full_gc);
}

const char* ShenandoahDegenGC::degen_event_message(ShenandoahDegenPoint point) const {
  switch (point) {
    case _degenerated_unset:
      return "Pause Degenerated GC (<UNSET>)";
    case _degenerated_outside_cycle:
      return "Pause Degenerated GC (Outside of Cycle)";
    case _degenerated_mark:
      return "Pause Degenerated GC (Mark)";
    case _degenerated_evac:
      return "Pause Degenerated GC (Evacuation)";
    case _degenerated_updaterefs:
      return "Pause Degenerated GC (Update Refs)";
    default:
      ShouldNotReachHere();
      return "ERROR";
  }
}