Path: src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp
/*
 * Copyright (c) 2016, 2021, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "gc/shenandoah/shenandoahFreeSet.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "logging/logStream.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/orderAccess.hpp"

ShenandoahFreeSet::ShenandoahFreeSet(ShenandoahHeap* heap, size_t max_regions) :
  _heap(heap),
  _mutator_free_bitmap(max_regions, mtGC),
  _collector_free_bitmap(max_regions, mtGC),
  _max(max_regions)
{
  clear_internal();
}

void ShenandoahFreeSet::increase_used(size_t num_bytes) {
  shenandoah_assert_heaplocked();
  _used += num_bytes;

  assert(_used <= _capacity, "must not use more than we have: used: " SIZE_FORMAT
         ", capacity: " SIZE_FORMAT ", num_bytes: " SIZE_FORMAT, _used, _capacity, num_bytes);
}

bool ShenandoahFreeSet::is_mutator_free(size_t idx) const {
  assert (idx < _max, "index is sane: " SIZE_FORMAT " < " SIZE_FORMAT " (left: " SIZE_FORMAT ", right: " SIZE_FORMAT ")",
          idx, _max, _mutator_leftmost, _mutator_rightmost);
  return _mutator_free_bitmap.at(idx);
}

bool ShenandoahFreeSet::is_collector_free(size_t idx) const {
  assert (idx < _max, "index is sane: " SIZE_FORMAT " < " SIZE_FORMAT " (left: " SIZE_FORMAT ", right: " SIZE_FORMAT ")",
          idx, _max, _collector_leftmost, _collector_rightmost);
  return _collector_free_bitmap.at(idx);
}

HeapWord* ShenandoahFreeSet::allocate_single(ShenandoahAllocRequest& req, bool& in_new_region) {
  // Scan the bitmap looking for a first fit.
  //
  // Leftmost and rightmost bounds provide enough caching to walk the bitmap efficiently. Normally,
  // we would find the region to allocate in right away.
  //
  // Allocations are biased: new application allocs go to the beginning of the heap, and GC allocs
  // go to the end. This makes application allocation faster, because we would clear lots
  // of regions from the beginning most of the time.
  //
  // Free set maintains mutator and collector views, and normally they allocate in their views only,
  // unless we special-case stealing and mixed allocations.

  switch (req.type()) {
    case ShenandoahAllocRequest::_alloc_tlab:
    case ShenandoahAllocRequest::_alloc_shared: {

      // Try to allocate in the mutator view
      for (size_t idx = _mutator_leftmost; idx <= _mutator_rightmost; idx++) {
        if (is_mutator_free(idx)) {
          HeapWord* result = try_allocate_in(_heap->get_region(idx), req, in_new_region);
          if (result != NULL) {
            return result;
          }
        }
      }

      // There is no recovery. Mutator does not touch collector view at all.
      break;
    }
    case ShenandoahAllocRequest::_alloc_gclab:
    case ShenandoahAllocRequest::_alloc_shared_gc: {
      // size_t is unsigned, need to dodge underflow when _leftmost = 0

      // Fast-path: try to allocate in the collector view first
      for (size_t c = _collector_rightmost + 1; c > _collector_leftmost; c--) {
        size_t idx = c - 1;
        if (is_collector_free(idx)) {
          HeapWord* result = try_allocate_in(_heap->get_region(idx), req, in_new_region);
          if (result != NULL) {
            return result;
          }
        }
      }

      // No dice. Can we borrow space from mutator view?
      if (!ShenandoahEvacReserveOverflow) {
        return NULL;
      }

      // Try to steal an empty region from the mutator view
      for (size_t c = _mutator_rightmost + 1; c > _mutator_leftmost; c--) {
        size_t idx = c - 1;
        if (is_mutator_free(idx)) {
          ShenandoahHeapRegion* r = _heap->get_region(idx);
          if (can_allocate_from(r)) {
            flip_to_gc(r);
            HeapWord *result = try_allocate_in(r, req, in_new_region);
            if (result != NULL) {
              return result;
            }
          }
        }
      }

      // No dice. Do not try to mix mutator and GC allocations, because
      // URWM moves due to GC allocations would expose unparsable mutator
      // allocations.

      break;
    }
    default:
      ShouldNotReachHere();
  }

  return NULL;
}
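// Editorial sketch (illustrative, not part of the upstream code): with a 16-region
// heap where regions 0..2 are mutator-free and 13..15 are collector-free, the two
// views described in the comment above would look like this:
//
//   region index:    0  1  2  3 ... 12 13 14 15
//   mutator view:    M  M  M  .  .  .   .  .  .   (scanned left-to-right)
//   collector view:  .  .  .  .  .  .   C  C  C   (scanned right-to-left)
//
// A TLAB/shared request walks 0..2 forward; a GCLAB/shared-GC request walks
// 15..13 backward. The views only mix when ShenandoahEvacReserveOverflow allows
// the collector to flip a completely free mutator region via flip_to_gc().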
HeapWord* ShenandoahFreeSet::try_allocate_in(ShenandoahHeapRegion* r, ShenandoahAllocRequest& req, bool& in_new_region) {
  assert (!has_no_alloc_capacity(r), "Performance: should avoid full regions on this path: " SIZE_FORMAT, r->index());

  if (_heap->is_concurrent_weak_root_in_progress() &&
      r->is_trash()) {
    return NULL;
  }

  try_recycle_trashed(r);

  in_new_region = r->is_empty();

  HeapWord* result = NULL;
  size_t size = req.size();

  if (ShenandoahElasticTLAB && req.is_lab_alloc()) {
    size_t free = align_down(r->free() >> LogHeapWordSize, MinObjAlignment);
    if (size > free) {
      size = free;
    }
    if (size >= req.min_size()) {
      result = r->allocate(size, req.type());
      assert (result != NULL, "Allocation must succeed: free " SIZE_FORMAT ", actual " SIZE_FORMAT, free, size);
    }
  } else {
    result = r->allocate(size, req.type());
  }

  if (result != NULL) {
    // Allocation successful, bump stats:
    if (req.is_mutator_alloc()) {
      increase_used(size * HeapWordSize);
    }

    // Record actual allocation size
    req.set_actual_size(size);

    if (req.is_gc_alloc()) {
      r->set_update_watermark(r->top());
    }
  }

  if (result == NULL || has_no_alloc_capacity(r)) {
    // Region cannot afford this or future allocations. Retire it.
    //
    // While this seems a bit harsh, especially when this large allocation does not
    // fit but the next small one would, we risk inflating scan times when lots of
    // almost-full regions precede the fully-empty region where we want to allocate the entire TLAB.
    // TODO: Record first fully-empty region, and use that for large allocations

    // Record the remainder as allocation waste
    if (req.is_mutator_alloc()) {
      size_t waste = r->free();
      if (waste > 0) {
        increase_used(waste);
        _heap->notify_mutator_alloc_words(waste >> LogHeapWordSize, true);
      }
    }

    size_t num = r->index();
    _collector_free_bitmap.clear_bit(num);
    _mutator_free_bitmap.clear_bit(num);
    // Touched the bounds? Need to update:
    if (touches_bounds(num)) {
      adjust_bounds();
    }
    assert_bounds();
  }
  return result;
}
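// Worked example for the elastic TLAB path above (all numbers hypothetical):
// a TLAB request arrives with req.size() = 512 words and req.min_size() = 64 words,
// while the region has only 300 aligned words free. The request is shrunk to
// size = 300; since 300 >= 64, r->allocate(300, ...) must succeed, and
// req.set_actual_size(300) reports the shortened size back to the caller. Without
// ShenandoahElasticTLAB, the same request would fail in this region and the
// search would move on to the next candidate.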
bool ShenandoahFreeSet::touches_bounds(size_t num) const {
  return num == _collector_leftmost || num == _collector_rightmost || num == _mutator_leftmost || num == _mutator_rightmost;
}

void ShenandoahFreeSet::recompute_bounds() {
  // Reset to the most pessimistic case:
  _mutator_rightmost = _max - 1;
  _mutator_leftmost = 0;
  _collector_rightmost = _max - 1;
  _collector_leftmost = 0;

  // ...and adjust from there
  adjust_bounds();
}

void ShenandoahFreeSet::adjust_bounds() {
  // Rewind both mutator bounds until the next bit.
  while (_mutator_leftmost < _max && !is_mutator_free(_mutator_leftmost)) {
    _mutator_leftmost++;
  }
  while (_mutator_rightmost > 0 && !is_mutator_free(_mutator_rightmost)) {
    _mutator_rightmost--;
  }
  // Rewind both collector bounds until the next bit.
  while (_collector_leftmost < _max && !is_collector_free(_collector_leftmost)) {
    _collector_leftmost++;
  }
  while (_collector_rightmost > 0 && !is_collector_free(_collector_rightmost)) {
    _collector_rightmost--;
  }
}
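// Bounds example (illustrative): with _max = 8 and mutator-free bits {2, 4, 5},
// recompute_bounds() first widens to the pessimistic [0; 7], then adjust_bounds()
// walks both ends inward past clear bits, leaving [_mutator_leftmost; _mutator_rightmost]
// = [2; 5]. Scans in allocate_single() then visit at most indices 2..5 instead of 0..7.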
HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req) {
  shenandoah_assert_heaplocked();

  size_t words_size = req.size();
  size_t num = ShenandoahHeapRegion::required_regions(words_size * HeapWordSize);

  // No regions left to satisfy allocation, bye.
  if (num > mutator_count()) {
    return NULL;
  }

  // Find the continuous interval of $num regions, starting from $beg and ending in $end,
  // inclusive. Contiguous allocations are biased to the beginning.

  size_t beg = _mutator_leftmost;
  size_t end = beg;

  while (true) {
    if (end >= _max) {
      // Hit the end, goodbye
      return NULL;
    }

    // If regions are not adjacent, then the current [beg; end] is useless, and we may fast-forward.
    // If the region is not completely free, the current [beg; end] is useless, and we may fast-forward.
    if (!is_mutator_free(end) || !can_allocate_from(_heap->get_region(end))) {
      end++;
      beg = end;
      continue;
    }

    if ((end - beg + 1) == num) {
      // found the match
      break;
    }

    end++;
  }

  size_t remainder = words_size & ShenandoahHeapRegion::region_size_words_mask();

  // Initialize regions:
  for (size_t i = beg; i <= end; i++) {
    ShenandoahHeapRegion* r = _heap->get_region(i);
    try_recycle_trashed(r);

    assert(i == beg || _heap->get_region(i - 1)->index() + 1 == r->index(), "Should be contiguous");
    assert(r->is_empty(), "Should be empty");

    if (i == beg) {
      r->make_humongous_start();
    } else {
      r->make_humongous_cont();
    }

    // Trailing region may be non-full, record the remainder there
    size_t used_words;
    if ((i == end) && (remainder != 0)) {
      used_words = remainder;
    } else {
      used_words = ShenandoahHeapRegion::region_size_words();
    }

    r->set_top(r->bottom() + used_words);

    _mutator_free_bitmap.clear_bit(r->index());
  }

  // While individual regions report their true use, all humongous regions are
  // marked used in the free set.
  increase_used(ShenandoahHeapRegion::region_size_bytes() * num);

  if (remainder != 0) {
    // Record this remainder as allocation waste
    _heap->notify_mutator_alloc_words(ShenandoahHeapRegion::region_size_words() - remainder, true);
  }

  // Allocated at left/rightmost? Move the bounds appropriately.
  if (beg == _mutator_leftmost || end == _mutator_rightmost) {
    adjust_bounds();
  }
  assert_bounds();

  req.set_actual_size(words_size);
  return _heap->get_region(beg)->bottom();
}
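// Worked example for the remainder accounting above (sizes hypothetical, assuming
// region_size_words() = 256K): a humongous request of 600K words needs
// required_regions = 3. The first two regions are filled to the top, and
// remainder = 600K mod 256K = 88K words lands in the trailing region, whose top
// is set to bottom + 88K. The unused 256K - 88K = 168K words are reported to
// notify_mutator_alloc_words() as allocation waste.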
bool ShenandoahFreeSet::can_allocate_from(ShenandoahHeapRegion *r) {
  return r->is_empty() || (r->is_trash() && !_heap->is_concurrent_weak_root_in_progress());
}

size_t ShenandoahFreeSet::alloc_capacity(ShenandoahHeapRegion *r) {
  if (r->is_trash()) {
    // This would be recycled on allocation path
    return ShenandoahHeapRegion::region_size_bytes();
  } else {
    return r->free();
  }
}

bool ShenandoahFreeSet::has_no_alloc_capacity(ShenandoahHeapRegion *r) {
  return alloc_capacity(r) == 0;
}

void ShenandoahFreeSet::try_recycle_trashed(ShenandoahHeapRegion *r) {
  if (r->is_trash()) {
    _heap->decrease_used(r->used());
    r->recycle();
  }
}

void ShenandoahFreeSet::recycle_trash() {
  // lock is not reentrant, check we don't have it
  shenandoah_assert_not_heaplocked();

  for (size_t i = 0; i < _heap->num_regions(); i++) {
    ShenandoahHeapRegion* r = _heap->get_region(i);
    if (r->is_trash()) {
      ShenandoahHeapLocker locker(_heap->lock());
      try_recycle_trashed(r);
    }
    SpinPause(); // allow allocators to take the lock
  }
}

void ShenandoahFreeSet::flip_to_gc(ShenandoahHeapRegion* r) {
  size_t idx = r->index();

  assert(_mutator_free_bitmap.at(idx), "Should be in mutator view");
  assert(can_allocate_from(r), "Should not be allocated");

  _mutator_free_bitmap.clear_bit(idx);
  _collector_free_bitmap.set_bit(idx);
  _collector_leftmost = MIN2(idx, _collector_leftmost);
  _collector_rightmost = MAX2(idx, _collector_rightmost);

  _capacity -= alloc_capacity(r);

  if (touches_bounds(idx)) {
    adjust_bounds();
  }
  assert_bounds();
}

void ShenandoahFreeSet::clear() {
  shenandoah_assert_heaplocked();
  clear_internal();
}

void ShenandoahFreeSet::clear_internal() {
  _mutator_free_bitmap.clear();
  _collector_free_bitmap.clear();
  _mutator_leftmost = _max;
  _mutator_rightmost = 0;
  _collector_leftmost = _max;
  _collector_rightmost = 0;
  _capacity = 0;
  _used = 0;
}

void ShenandoahFreeSet::rebuild() {
  shenandoah_assert_heaplocked();
  clear();

  for (size_t idx = 0; idx < _heap->num_regions(); idx++) {
    ShenandoahHeapRegion* region = _heap->get_region(idx);
    if (region->is_alloc_allowed() || region->is_trash()) {
      assert(!region->is_cset(), "Shouldn't be adding those to the free set");

      // Do not add regions that would surely fail allocation
      if (has_no_alloc_capacity(region)) continue;

      _capacity += alloc_capacity(region);
      assert(_used <= _capacity, "must not use more than we have");

      assert(!is_mutator_free(idx), "We are about to add it, it shouldn't be there already");
      _mutator_free_bitmap.set_bit(idx);
    }
  }

  // Evac reserve: reserve trailing space for evacuations
  size_t to_reserve = _heap->max_capacity() / 100 * ShenandoahEvacReserve;
  size_t reserved = 0;

  for (size_t idx = _heap->num_regions() - 1; idx > 0; idx--) {
    if (reserved >= to_reserve) break;

    ShenandoahHeapRegion* region = _heap->get_region(idx);
    if (_mutator_free_bitmap.at(idx) && can_allocate_from(region)) {
      _mutator_free_bitmap.clear_bit(idx);
      _collector_free_bitmap.set_bit(idx);
      size_t ac = alloc_capacity(region);
      _capacity -= ac;
      reserved += ac;
    }
  }

  recompute_bounds();
  assert_bounds();
}
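// Reserve sizing example (illustrative; assumes the default ShenandoahEvacReserve
// of 5 percent): with max_capacity() = 1024M, to_reserve = 1024M / 100 * 5 = ~51M.
// The loop above then walks from the highest region index downwards, moving regions
// that satisfy can_allocate_from() out of the mutator view until ~51M of capacity
// is parked in the collector view. This keeps GC allocations biased to the end of
// the heap, matching the layout described in allocate_single().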
void ShenandoahFreeSet::log_status() {
  shenandoah_assert_heaplocked();

  LogTarget(Info, gc, ergo) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);

    {
      size_t last_idx = 0;
      size_t max = 0;
      size_t max_contig = 0;
      size_t empty_contig = 0;

      size_t total_used = 0;
      size_t total_free = 0;
      size_t total_free_ext = 0;

      for (size_t idx = _mutator_leftmost; idx <= _mutator_rightmost; idx++) {
        if (is_mutator_free(idx)) {
          ShenandoahHeapRegion *r = _heap->get_region(idx);
          size_t free = alloc_capacity(r);

          max = MAX2(max, free);

          if (r->is_empty()) {
            total_free_ext += free;
            if (last_idx + 1 == idx) {
              empty_contig++;
            } else {
              empty_contig = 1;
            }
          } else {
            empty_contig = 0;
          }

          total_used += r->used();
          total_free += free;

          max_contig = MAX2(max_contig, empty_contig);
          last_idx = idx;
        }
      }

      size_t max_humongous = max_contig * ShenandoahHeapRegion::region_size_bytes();
      size_t free = capacity() - used();

      ls.print("Free: " SIZE_FORMAT "%s, Max: " SIZE_FORMAT "%s regular, " SIZE_FORMAT "%s humongous, ",
               byte_size_in_proper_unit(total_free),    proper_unit_for_byte_size(total_free),
               byte_size_in_proper_unit(max),           proper_unit_for_byte_size(max),
               byte_size_in_proper_unit(max_humongous), proper_unit_for_byte_size(max_humongous)
      );

      ls.print("Frag: ");
      size_t frag_ext;
      if (total_free_ext > 0) {
        frag_ext = 100 - (100 * max_humongous / total_free_ext);
      } else {
        frag_ext = 0;
      }
      ls.print(SIZE_FORMAT "%% external, ", frag_ext);

      size_t frag_int;
      if (mutator_count() > 0) {
        frag_int = (100 * (total_used / mutator_count()) / ShenandoahHeapRegion::region_size_bytes());
      } else {
        frag_int = 0;
      }
      ls.print(SIZE_FORMAT "%% internal; ", frag_int);
    }

    {
      size_t max = 0;
      size_t total_free = 0;

      for (size_t idx = _collector_leftmost; idx <= _collector_rightmost; idx++) {
        if (is_collector_free(idx)) {
          ShenandoahHeapRegion *r = _heap->get_region(idx);
          size_t free = alloc_capacity(r);
          max = MAX2(max, free);
          total_free += free;
        }
      }

      ls.print_cr("Reserve: " SIZE_FORMAT "%s, Max: " SIZE_FORMAT "%s",
                  byte_size_in_proper_unit(total_free), proper_unit_for_byte_size(total_free),
                  byte_size_in_proper_unit(max),        proper_unit_for_byte_size(max));
    }
  }
}
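// An illustrative (made-up) log line produced by the code above would be:
//
//   Free: 412M, Max: 2048K regular, 128M humongous, Frag: 31% external, 4% internal; Reserve: 52M, Max: 2048K
//
// "Max ... regular" bounds the largest single non-humongous allocation that can
// currently succeed; "... humongous" derives from the longest run of contiguous
// empty regions (max_contig).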
HeapWord* ShenandoahFreeSet::allocate(ShenandoahAllocRequest& req, bool& in_new_region) {
  shenandoah_assert_heaplocked();
  assert_bounds();

  if (req.size() > ShenandoahHeapRegion::humongous_threshold_words()) {
    switch (req.type()) {
      case ShenandoahAllocRequest::_alloc_shared:
      case ShenandoahAllocRequest::_alloc_shared_gc:
        in_new_region = true;
        return allocate_contiguous(req);
      case ShenandoahAllocRequest::_alloc_gclab:
      case ShenandoahAllocRequest::_alloc_tlab:
        in_new_region = false;
        assert(false, "Trying to allocate TLAB larger than the humongous threshold: " SIZE_FORMAT " > " SIZE_FORMAT,
               req.size(), ShenandoahHeapRegion::humongous_threshold_words());
        return NULL;
      default:
        ShouldNotReachHere();
        return NULL;
    }
  } else {
    return allocate_single(req, in_new_region);
  }
}

size_t ShenandoahFreeSet::unsafe_peek_free() const {
  // Deliberately not locked, this method is unsafe when free set is modified.

  for (size_t index = _mutator_leftmost; index <= _mutator_rightmost; index++) {
    if (index < _max && is_mutator_free(index)) {
      ShenandoahHeapRegion* r = _heap->get_region(index);
      if (r->free() >= MinTLABSize) {
        return r->free();
      }
    }
  }

  // It appears that no regions are left
  return 0;
}

void ShenandoahFreeSet::print_on(outputStream* out) const {
  out->print_cr("Mutator Free Set: " SIZE_FORMAT "", mutator_count());
  for (size_t index = _mutator_leftmost; index <= _mutator_rightmost; index++) {
    if (is_mutator_free(index)) {
      _heap->get_region(index)->print_on(out);
    }
  }
  out->print_cr("Collector Free Set: " SIZE_FORMAT "", collector_count());
  for (size_t index = _collector_leftmost; index <= _collector_rightmost; index++) {
    if (is_collector_free(index)) {
      _heap->get_region(index)->print_on(out);
    }
  }
}

/*
 * Internal fragmentation metric: describes how fragmented the heap regions are.
 *
 * It is derived as:
 *
 *                    sum(used[i]^2, i=0..k)
 *   IF = 1 - ------------------------------
 *              C * sum(used[i], i=0..k)
 *
 * ...where k is the number of regions in computation, C is the region capacity, and
 * used[i] is the used space in the region.
 *
 * The non-linearity causes IF to be lower for the cases where the same total heap
 * used is densely packed. For example:
 *   a) Heap is completely full => IF = 0
 *   b) Heap is half full, first 50% regions are completely full => IF = 0
 *   c) Heap is half full, each region is 50% full => IF = 1/2
 *   d) Heap is quarter full, first 50% regions are completely full => IF = 0
 *   e) Heap is quarter full, each region is 25% full => IF = 3/4
 *   f) Heap has one small object per each region => IF =~ 1
 */
double ShenandoahFreeSet::internal_fragmentation() {
  double squared = 0;
  double linear = 0;
  int count = 0;

  for (size_t index = _mutator_leftmost; index <= _mutator_rightmost; index++) {
    if (is_mutator_free(index)) {
      ShenandoahHeapRegion* r = _heap->get_region(index);
      size_t used = r->used();
      squared += used * used;
      linear += used;
      count++;
    }
  }

  if (count > 0) {
    double s = squared / (ShenandoahHeapRegion::region_size_bytes() * linear);
    return 1 - s;
  } else {
    return 0;
  }
}

/*
 * External fragmentation metric: describes how fragmented the heap is.
 *
 * It is derived as:
 *
 *   EF = 1 - largest_contiguous_free / total_free
 *
 * For example:
 *   a) Heap is completely empty => EF = 0
 *   b) Heap is completely full => EF = 0
 *   c) Heap is first-half full => EF = 1/2
 *   d) Heap is half full, full and empty regions interleave => EF =~ 1
 */
double ShenandoahFreeSet::external_fragmentation() {
  size_t last_idx = 0;
  size_t max_contig = 0;
  size_t empty_contig = 0;

  size_t free = 0;

  for (size_t index = _mutator_leftmost; index <= _mutator_rightmost; index++) {
    if (is_mutator_free(index)) {
      ShenandoahHeapRegion* r = _heap->get_region(index);
      if (r->is_empty()) {
        free += ShenandoahHeapRegion::region_size_bytes();
        if (last_idx + 1 == index) {
          empty_contig++;
        } else {
          empty_contig = 1;
        }
      } else {
        empty_contig = 0;
      }

      max_contig = MAX2(max_contig, empty_contig);
      last_idx = index;
    }
  }

  if (free > 0) {
    return 1 - (1.0 * max_contig * ShenandoahHeapRegion::region_size_bytes() / free);
  } else {
    return 0;
  }
}
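// Worked instances of the two metrics above (C = region capacity, illustrative):
// - internal_fragmentation(), case (c): k regions, each 50% full. Then
//   sum(used[i]^2) = k*(C/2)^2 = k*C^2/4 and C*sum(used[i]) = k*C^2/2,
//   so IF = 1 - (k*C^2/4) / (k*C^2/2) = 1 - 1/2 = 1/2.
// - external_fragmentation(), case (d): n regions, full and empty interleaved.
//   Total free is (n/2)*C, but the largest contiguous run is one region, so
//   EF = 1 - C / ((n/2)*C) = 1 - 2/n, which approaches 1 as n grows.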
#ifdef ASSERT
void ShenandoahFreeSet::assert_bounds() const {
  // Performance invariants. Failing these would not break the free set, but performance
  // would suffer.
  assert (_mutator_leftmost <= _max, "leftmost in bounds: "  SIZE_FORMAT " < " SIZE_FORMAT, _mutator_leftmost,  _max);
  assert (_mutator_rightmost < _max, "rightmost in bounds: " SIZE_FORMAT " < " SIZE_FORMAT, _mutator_rightmost, _max);

  assert (_mutator_leftmost == _max || is_mutator_free(_mutator_leftmost),  "leftmost region should be free: " SIZE_FORMAT,  _mutator_leftmost);
  assert (_mutator_rightmost == 0   || is_mutator_free(_mutator_rightmost), "rightmost region should be free: " SIZE_FORMAT, _mutator_rightmost);

  size_t beg_off = _mutator_free_bitmap.get_next_one_offset(0);
  size_t end_off = _mutator_free_bitmap.get_next_one_offset(_mutator_rightmost + 1);
  assert (beg_off >= _mutator_leftmost, "free regions before the leftmost: " SIZE_FORMAT ", bound " SIZE_FORMAT, beg_off, _mutator_leftmost);
  assert (end_off == _max, "free regions past the rightmost: " SIZE_FORMAT ", bound " SIZE_FORMAT, end_off, _mutator_rightmost);

  assert (_collector_leftmost <= _max, "leftmost in bounds: "  SIZE_FORMAT " < " SIZE_FORMAT, _collector_leftmost,  _max);
  assert (_collector_rightmost < _max, "rightmost in bounds: " SIZE_FORMAT " < " SIZE_FORMAT, _collector_rightmost, _max);

  assert (_collector_leftmost == _max || is_collector_free(_collector_leftmost),  "leftmost region should be free: " SIZE_FORMAT,  _collector_leftmost);
  assert (_collector_rightmost == 0   || is_collector_free(_collector_rightmost), "rightmost region should be free: " SIZE_FORMAT, _collector_rightmost);

  beg_off = _collector_free_bitmap.get_next_one_offset(0);
  end_off = _collector_free_bitmap.get_next_one_offset(_collector_rightmost + 1);
  assert (beg_off >= _collector_leftmost, "free regions before the leftmost: " SIZE_FORMAT ", bound " SIZE_FORMAT, beg_off, _collector_leftmost);
  assert (end_off == _max, "free regions past the rightmost: " SIZE_FORMAT ", bound " SIZE_FORMAT, end_off, _collector_rightmost);
}
#endif