Path: blob/aarch64-shenandoah-jdk8u272-b10/hotspot/src/share/vm/memory/allocation.hpp
32285 views
/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_ALLOCATION_HPP
#define SHARE_VM_MEMORY_ALLOCATION_HPP

#include "runtime/globals.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#ifdef COMPILER1
#include "c1/c1_globals.hpp"
#endif
#ifdef COMPILER2
#include "opto/c2_globals.hpp"
#endif

#include <new>

// Round a size up to the arena allocation alignment.
// ARENA_AMALLOC_ALIGNMENT is assumed to be a power of two (asserted in
// Arena::Amalloc below), so the mask-and-add idiom is exact.
#define ARENA_ALIGN_M1 (((size_t)(ARENA_AMALLOC_ALIGNMENT)) - 1)
#define ARENA_ALIGN_MASK (~((size_t)ARENA_ALIGN_M1))
#define ARENA_ALIGN(x) ((((size_t)(x)) + ARENA_ALIGN_M1) & ARENA_ALIGN_MASK)


// noinline attribute
#ifdef _WINDOWS
#define _NOINLINE_  __declspec(noinline)
#else
#if __GNUC__ < 3    // gcc 2.x does not support noinline attribute
#define _NOINLINE_
#else
#define _NOINLINE_ __attribute__ ((noinline))
#endif
#endif

// Policy selector passed to allocation routines: on failure either abort
// the VM with an out-of-memory error, or return NULL to the caller.
class AllocFailStrategy {
public:
  enum AllocFailEnum { EXIT_OOM, RETURN_NULL };
};
typedef AllocFailStrategy::AllocFailEnum AllocFailType;

// All classes in the virtual machine must be subclassed
// by one of the following allocation classes:
//
// For objects allocated in the resource area (see resourceArea.hpp).
// - ResourceObj
//
// For objects allocated in the C-heap (managed by: free & malloc).
// - CHeapObj
//
// For objects allocated on the stack.
// - StackObj
//
// For embedded objects.
// - ValueObj
//
// For classes used as name spaces.
// - AllStatic
//
// For classes in Metaspace (class data)
// - MetaspaceObj
//
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that avoid allocating the
// vtbl entries in the objects should therefore not be the printable
// subclasses.
//
// The following macros and function should be used to allocate memory
// directly in the resource area or in the C-heap. The _OBJ variants
// of the NEW/FREE_C_HEAP macros are used for alloc/dealloc simple
// objects which are not inherited from CHeapObj, note constructor and
// destructor are not called. The preferable way to allocate objects
// is using the new operator.
//
// WARNING: The array variant must only be used for a homogenous array
// where all objects are of the exact type specified. If subtypes are
// stored in the array then must pay attention to calling destructors
// as needed.
//
//   NEW_RESOURCE_ARRAY(type, size)
//   NEW_RESOURCE_OBJ(type)
//   NEW_C_HEAP_ARRAY(type, size)
//   NEW_C_HEAP_OBJ(type, memflags)
//   FREE_C_HEAP_ARRAY(type, old, memflags)
//   FREE_C_HEAP_OBJ(objname, type, memflags)
//   char* AllocateHeap(size_t size, const char* name);
//   void  FreeHeap(void* p);
//
// C-heap allocation can be traced using +PrintHeapAllocation.
// malloc and free should therefore never be called directly.

// Base class for objects allocated in the C-heap.

// In non product mode we introduce a super class for all allocation classes
// that supports printing.
// We avoid the superclass in product mode since some C++ compilers add
// a word overhead for empty super classes.

#ifdef PRODUCT
#define ALLOCATION_SUPER_CLASS_SPEC
#else
#define ALLOCATION_SUPER_CLASS_SPEC : public AllocatedObj
class AllocatedObj {
 public:
  // Printing support
  void print() const;
  void print_value() const;

  virtual void print_on(outputStream* st) const;
  virtual void print_value_on(outputStream* st) const;
};
#endif


/*
 * Memory types
 */
enum MemoryType {
  // Memory type by sub systems. It occupies lower byte.
  mtJavaHeap          = 0x00,  // Java heap
  mtClass             = 0x01,  // memory class for Java classes
  mtThread            = 0x02,  // memory for thread objects
  mtThreadStack       = 0x03,
  mtCode              = 0x04,  // memory for generated code
  mtGC                = 0x05,  // memory for GC
  mtCompiler          = 0x06,  // memory for compiler
  mtInternal          = 0x07,  // memory used by VM, but does not belong to
                               // any of above categories, and not used for
                               // native memory tracking
  mtOther             = 0x08,  // memory not used by VM
  mtSymbol            = 0x09,  // symbol
  mtNMT               = 0x0A,  // memory used by native memory tracking
  mtClassShared       = 0x0B,  // class data sharing
  mtChunk             = 0x0C,  // chunk that holds content of arenas
  mtTest              = 0x0D,  // Test type for verifying NMT
  mtTracing           = 0x0E,  // memory used for Tracing
  mtNone              = 0x0F,  // undefined
  mt_number_of_types  = 0x10   // number of memory types (mtDontTrack
                               // is not included as validate type)
};

typedef MemoryType MEMFLAGS;


#if INCLUDE_NMT

extern bool NMT_track_callsite;

#else

const bool NMT_track_callsite = false;

#endif // INCLUDE_NMT

class NativeCallStack;


// C-heap allocation base class, templatized on the NMT memory type so that
// every allocation is attributed to a subsystem. The NativeCallStack
// overloads allow NMT to record the allocation site.
template <MEMFLAGS F> class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  _NOINLINE_ void* operator new(size_t size, const NativeCallStack& stack) throw();
  _NOINLINE_ void* operator new(size_t size) throw();
  _NOINLINE_ void* operator new (size_t size, const std::nothrow_t& nothrow_constant,
                                 const NativeCallStack& stack) throw();
  _NOINLINE_ void* operator new (size_t size, const std::nothrow_t& nothrow_constant)
                                 throw();
  _NOINLINE_ void* operator new [](size_t size, const NativeCallStack& stack) throw();
  _NOINLINE_ void* operator new [](size_t size) throw();
  _NOINLINE_ void* operator new [](size_t size, const std::nothrow_t& nothrow_constant,
                                 const NativeCallStack& stack) throw();
  _NOINLINE_ void* operator new [](size_t size, const std::nothrow_t& nothrow_constant)
                                 throw();
  void  operator delete(void* p);
  void  operator delete [] (void* p);
};

// Base class for objects allocated on the stack only.
// Calling new or delete will result in fatal error.

class StackObj ALLOCATION_SUPER_CLASS_SPEC {
 private:
  // Private and unimplemented: any attempt to heap-allocate a StackObj
  // fails to compile (or link).
  void* operator new(size_t size) throw();
  void* operator new [](size_t size) throw();
#ifdef __IBMCPP__
 public:
#endif
  void  operator delete(void* p);
  void  operator delete [](void* p);
};

// Base class for objects used as value objects.
// Calling new or delete will result in fatal error.
//
// Portability note: Certain compilers (e.g. gcc) will
// always make classes bigger if it has a superclass, even
// if the superclass does not have any virtual methods or
// instance fields. The HotSpot implementation relies on this
// not to happen. So never make a ValueObj class a direct subclass
// of this object, but use the VALUE_OBJ_CLASS_SPEC class instead, e.g.,
// like this:
//
//   class A VALUE_OBJ_CLASS_SPEC {
//     ...
//   }
//
// With gcc and possible other compilers the VALUE_OBJ_CLASS_SPEC can
// be defined as a an empty string "".
//
class _ValueObj {
 private:
  // Private and unimplemented: value objects must be embedded or
  // stack-allocated, never heap-allocated.
  void* operator new(size_t size) throw();
  void  operator delete(void* p);
  void* operator new [](size_t size) throw();
  void  operator delete [](void* p);
};


// Base class for objects stored in Metaspace.
// Calling delete will result in fatal error.
//
// Do not inherit from something with a vptr because this class does
// not introduce one. This class is used to allocate both shared read-only
// and shared read-write classes.
//

class ClassLoaderData;

class MetaspaceObj {
 public:
  bool is_metaspace_object() const;
  bool is_shared() const;
  void print_address_on(outputStream* st) const;  // nonvirtual address printing

  // X-macro list of all metaspace object kinds; expanded below to build
  // both the Type enum and the type_name() switch, keeping them in sync.
#define METASPACE_OBJ_TYPES_DO(f) \
  f(Unknown) \
  f(Class) \
  f(Symbol) \
  f(TypeArrayU1) \
  f(TypeArrayU2) \
  f(TypeArrayU4) \
  f(TypeArrayU8) \
  f(TypeArrayOther) \
  f(Method) \
  f(ConstMethod) \
  f(MethodData) \
  f(ConstantPool) \
  f(ConstantPoolCache) \
  f(Annotation) \
  f(MethodCounters) \
  f(Deallocated)

#define METASPACE_OBJ_TYPE_DECLARE(name) name ## Type,
#define METASPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name;

  enum Type {
    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
    _number_of_types
  };

  // Human-readable name for a Type value (for printing/debugging).
  static const char * type_name(Type type) {
    switch(type) {
    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }

  // Map a primitive element size (in bytes) to the matching typed-array kind.
  static MetaspaceObj::Type array_type(size_t elem_size) {
    switch (elem_size) {
    case 1: return TypeArrayU1Type;
    case 2: return TypeArrayU2Type;
    case 4: return TypeArrayU4Type;
    case 8: return TypeArrayU8Type;
    default:
      return TypeArrayOtherType;
    }
  }

  void* operator new(size_t size, ClassLoaderData* loader_data,
                     size_t word_size, bool read_only,
                     Type type, Thread* thread) throw();
                     // can't use TRAPS from this header file.
  void operator delete(void* p) { ShouldNotCallThis(); }
};

// Base class for classes that constitute name spaces.

class AllStatic {
 public:
  AllStatic()  { ShouldNotCallThis(); }
  ~AllStatic() { ShouldNotCallThis(); }
};


//------------------------------Chunk------------------------------------------
// Linked list of raw memory chunks
class Chunk: CHeapObj<mtChunk> {
  friend class VMStructs;

 protected:
  Chunk*       _next;     // Next Chunk in list
  const size_t _len;      // Size of this Chunk
 public:
  void* operator new(size_t size, AllocFailType alloc_failmode, size_t length) throw();
  void  operator delete(void* p);
  Chunk(size_t length);

  enum {
    // default sizes; make them slightly smaller than 2**k to guard against
    // buddy-system style malloc implementations
#ifdef _LP64
    slack      = 40,            // [RGV] Not sure if this is right, but make it
                                //       a multiple of 8.
#else
    slack      = 20,            // suspected sizeof(Chunk) + internal malloc headers
#endif

    tiny_size  =  256  - slack, // Size of first chunk (tiny)
    init_size  =  1*K  - slack, // Size of first chunk (normal aka small)
    medium_size= 10*K  - slack, // Size of medium-sized chunk
    size       = 32*K  - slack, // Default size of an Arena chunk (following the first)
    non_pool_size = init_size + 32 // An initial size which is not one of above
  };

  void chop();                  // Chop this chunk
  void next_chop();             // Chop next chunk
  static size_t aligned_overhead_size(void) { return ARENA_ALIGN(sizeof(Chunk)); }
  static size_t aligned_overhead_size(size_t byte_size) { return ARENA_ALIGN(byte_size); }

  size_t length() const         { return _len;  }
  Chunk* next() const           { return _next;  }
  void set_next(Chunk* n)       { _next = n;  }
  // Boundaries of data area (possibly unused)
  char* bottom() const          { return ((char*) this) + aligned_overhead_size();  }
  char* top()    const          { return bottom() + _len; }
  bool contains(char* p) const  { return bottom() <= p && p <= top(); }

  // Start the chunk_pool cleaner task
  static void start_chunk_pool_cleaner_task();

  static void clean_chunk_pool();
};

//------------------------------Arena------------------------------------------
// Fast allocation of memory
class Arena : public CHeapObj<mtNone> {
protected:
  friend class ResourceMark;
  friend class HandleMark;
  friend class NoHandleMark;
  friend class VMStructs;

  MEMFLAGS    _flags;           // Memory tracking flags

  Chunk *_first;                // First chunk
  Chunk *_chunk;                // current chunk
  char *_hwm, *_max;            // High water mark and max in current chunk
  // Get a new Chunk of at least size x
  void* grow(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
  size_t _size_in_bytes;        // Size of arena (used for native memory tracking)

  NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
  friend class AllocStats;
  debug_only(void* malloc(size_t size);)
  debug_only(void* internal_malloc_4(size_t x);)
  NOT_PRODUCT(void inc_bytes_allocated(size_t x);)

  void signal_out_of_memory(size_t request, const char* whence) const;

  // Guard against _hwm + request wrapping around the address space.
  // Returns false only in RETURN_NULL mode; in EXIT_OOM mode
  // signal_out_of_memory is invoked instead (presumably it does not
  // return -- confirm in allocation.cpp).
  bool check_for_overflow(size_t request, const char* whence,
      AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) const {
    if (UINTPTR_MAX - request < (uintptr_t)_hwm) {
      if (alloc_failmode == AllocFailStrategy::RETURN_NULL) {
        return false;
      }
      signal_out_of_memory(request, whence);
    }
    return true;
  }

 public:
  Arena(MEMFLAGS memflag);
  Arena(MEMFLAGS memflag, size_t init_size);
  ~Arena();
  void  destruct_contents();
  char* hwm() const             { return _hwm; }

  // new operators
  void* operator new (size_t size) throw();
  void* operator new (size_t size, const std::nothrow_t& nothrow_constant) throw();

  // dynamic memory type tagging
  void* operator new(size_t size, MEMFLAGS flags) throw();
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags) throw();
  void  operator delete(void* p);

  // Fast allocate in the arena. Common case is: pointer test + increment.
  void* Amalloc(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
    assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
    x = ARENA_ALIGN(x);
    // In debug builds with UseMallocOnly every allocation goes to the
    // C heap so tools like valgrind can see individual allocations.
    debug_only(if (UseMallocOnly) return malloc(x);)
    if (!check_for_overflow(x, "Arena::Amalloc", alloc_failmode))
      return NULL;
    NOT_PRODUCT(inc_bytes_allocated(x);)
    if (_hwm + x > _max) {
      return grow(x, alloc_failmode);
    } else {
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }
  // Further assume size is padded out to words
  void *Amalloc_4(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    debug_only(if (UseMallocOnly) return malloc(x);)
    if (!check_for_overflow(x, "Arena::Amalloc_4", alloc_failmode))
      return NULL;
    NOT_PRODUCT(inc_bytes_allocated(x);)
    if (_hwm + x > _max) {
      return grow(x, alloc_failmode);
    } else {
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }

  // Allocate with 'double' alignment. It is 8 bytes on sparc.
  // In other cases Amalloc_D() should be the same as Amalloc_4().
  void* Amalloc_D(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    debug_only(if (UseMallocOnly) return malloc(x);)
#if defined(SPARC) && !defined(_LP64)
#define DALIGN_M1 7
    // Pad the request so the returned pointer can be bumped up to the
    // next 8-byte boundary (32-bit SPARC requires 8-byte-aligned doubles).
    size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
    x += delta;
#endif
    if (!check_for_overflow(x, "Arena::Amalloc_D", alloc_failmode))
      return NULL;
    NOT_PRODUCT(inc_bytes_allocated(x);)
    if (_hwm + x > _max) {
      return grow(x, alloc_failmode); // grow() returns a result aligned >= 8 bytes.
    } else {
      char *old = _hwm;
      _hwm += x;
#if defined(SPARC) && !defined(_LP64)
      old += delta; // align to 8-bytes
#endif
      return old;
    }
  }

  // Fast delete in area. Common case is: NOP (except for storage reclaimed)
  // Only the most recent allocation (the block ending exactly at _hwm)
  // can actually be reclaimed; anything else is simply abandoned until
  // the arena is reset or destroyed.
  void Afree(void *ptr, size_t size) {
#ifdef ASSERT
    if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
    if (UseMallocOnly) return;
#endif
    if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
  }

  void *Arealloc( void *old_ptr, size_t old_size, size_t new_size,
      AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);

  // Move contents of this arena into an empty arena
  Arena *move_contents(Arena *empty_arena);

  // Determine if pointer belongs to this Arena or not.
  bool contains( const void *ptr ) const;

  // Total of all chunks in use (not thread-safe)
  size_t used() const;

  // Total # of bytes used
  size_t size_in_bytes() const         {  return _size_in_bytes; };
  void set_size_in_bytes(size_t size);

  static void free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2)  PRODUCT_RETURN;
  static void free_all(char** start, char** end)                                     PRODUCT_RETURN;

private:
  // Reset this Arena to empty, access will trigger grow if necessary
  void reset(void) {
    _first = _chunk = NULL;
    _hwm = _max = NULL;
    set_size_in_bytes(0);
  }
};

// One of the following macros must be used when allocating
// an array or object from an arena
#define NEW_ARENA_ARRAY(arena, type, size) \
  (type*) (arena)->Amalloc((size) * sizeof(type))

#define REALLOC_ARENA_ARRAY(arena, type, old, old_size, new_size)    \
  (type*) (arena)->Arealloc((char*)(old), (old_size) * sizeof(type), \
                            (new_size) * sizeof(type) )

#define FREE_ARENA_ARRAY(arena, type, old, size) \
  (arena)->Afree((char*)(old), (size) * sizeof(type))

#define NEW_ARENA_OBJ(arena, type) \
  NEW_ARENA_ARRAY(arena, type, 1)


//%note allocation_1
extern char* resource_allocate_bytes(size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_allocate_bytes(Thread* thread, size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern void resource_free_bytes( char *old, size_t size );

//----------------------------------------------------------------------
// Base class for objects allocated in the resource area per default.
// Optionally, objects may be allocated on the C heap with
// new(ResourceObj::C_HEAP) Foo(...) or in an Arena with new (&arena)
// ResourceObj's can be allocated within other objects, but don't use
// new or delete (allocation_type is unknown). If new is used to allocate,
// use delete to deallocate.
class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 };
  static void set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN;
#ifdef ASSERT
 private:
  // When this object is allocated on stack the new() operator is not
  // called but garbage on stack may look like a valid allocation_type.
  // Store negated 'this' pointer when new() is called to distinguish cases.
  // Use second array's element for verification value to distinguish garbage.
  uintptr_t _allocation_t[2];
  bool is_type_set() const;
 public:
  allocation_type get_allocation_type() const;
  bool allocated_on_stack()    const { return get_allocation_type() == STACK_OR_EMBEDDED; }
  bool allocated_on_res_area() const { return get_allocation_type() == RESOURCE_AREA; }
  bool allocated_on_C_heap()   const { return get_allocation_type() == C_HEAP; }
  bool allocated_on_arena()    const { return get_allocation_type() == ARENA; }
  ResourceObj(); // default constructor
  ResourceObj(const ResourceObj& r); // default copy constructor
  ResourceObj& operator=(const ResourceObj& r); // default copy assignment
  ~ResourceObj();
#endif // ASSERT

 public:
  void* operator new(size_t size, allocation_type type, MEMFLAGS flags) throw();
  void* operator new [](size_t size, allocation_type type, MEMFLAGS flags) throw();
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant,
      allocation_type type, MEMFLAGS flags) throw();
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant,
      allocation_type type, MEMFLAGS flags) throw();

  void* operator new(size_t size, Arena *arena) throw() {
      address res = (address)arena->Amalloc(size);
      DEBUG_ONLY(set_allocation_type(res, ARENA);)
      return res;
  }

  void* operator new [](size_t size, Arena *arena) throw() {
      address res = (address)arena->Amalloc(size);
      DEBUG_ONLY(set_allocation_type(res, ARENA);)
      return res;
  }

  void* operator new(size_t size) throw() {
      address res = (address)resource_allocate_bytes(size);
      DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() {
      address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
      DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void* operator new [](size_t size) throw() {
      address res = (address)resource_allocate_bytes(size);
      DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() {
      address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
      DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void  operator delete(void* p);
  void  operator delete [](void* p);
};

// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap on in
// the resource area.

#define NEW_RESOURCE_ARRAY(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_RETURN_NULL(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_IN_THREAD_RETURN_NULL(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type))

#define REALLOC_RESOURCE_ARRAY_RETURN_NULL(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type),\
                                    (new_size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define FREE_RESOURCE_ARRAY(type, old, size)\
  resource_free_bytes((char*)(old), (size) * sizeof(type))

#define FREE_FAST(old)\
    /* nop */

#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

#define NEW_RESOURCE_OBJ_RETURN_NULL(type)\
  NEW_RESOURCE_ARRAY_RETURN_NULL(type, 1)

#define NEW_C_HEAP_ARRAY3(type, size, memflags, pc, allocfail)\
  (type*) AllocateHeap((size) * sizeof(type), memflags, pc, allocfail)

#define NEW_C_HEAP_ARRAY2(type, size, memflags, pc)\
  (type*) (AllocateHeap((size) * sizeof(type), memflags, pc))

#define NEW_C_HEAP_ARRAY(type, size, memflags)\
  (type*) (AllocateHeap((size) * sizeof(type), memflags))

#define NEW_C_HEAP_ARRAY2_RETURN_NULL(type, size, memflags, pc)\
  NEW_C_HEAP_ARRAY3(type, (size), memflags, pc, AllocFailStrategy::RETURN_NULL)

#define NEW_C_HEAP_ARRAY_RETURN_NULL(type, size, memflags)\
  NEW_C_HEAP_ARRAY3(type, (size), memflags, CURRENT_PC, AllocFailStrategy::RETURN_NULL)

#define REALLOC_C_HEAP_ARRAY(type, old, size, memflags)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), memflags))

#define REALLOC_C_HEAP_ARRAY_RETURN_NULL(type, old, size, memflags)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), memflags, AllocFailStrategy::RETURN_NULL))

#define FREE_C_HEAP_ARRAY(type, old, memflags) \
  FreeHeap((char*)(old), memflags)

// allocate type in heap without calling ctor
#define NEW_C_HEAP_OBJ(type, memflags)\
  NEW_C_HEAP_ARRAY(type, 1, memflags)

#define NEW_C_HEAP_OBJ_RETURN_NULL(type, memflags)\
  NEW_C_HEAP_ARRAY_RETURN_NULL(type, 1, memflags)

// deallocate obj of type in heap without calling dtor
// NOTE(review): the trailing semicolon inside this macro means callers
// must NOT add their own, unlike the other FREE_* macros above.
#define FREE_C_HEAP_OBJ(objname, memflags)\
  FreeHeap((char*)objname, memflags);

// for statistics
#ifndef PRODUCT
class AllocStats : StackObj {
  // Snapshot of the global allocation counters taken at construction;
  // the accessors below report deltas since then.
  julong start_mallocs, start_frees;
  julong start_malloc_bytes, start_mfree_bytes, start_res_bytes;
 public:
  AllocStats();

  julong num_mallocs();  // since creation of receiver
  julong alloc_bytes();
  julong num_frees();
  julong free_bytes();
  julong resource_bytes();
  void   print();
};
#endif


//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer.  Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)

public:
  ReallocMark()   PRODUCT_RETURN;
  void check()    PRODUCT_RETURN;
};

// Helper class to allocate arrays that may become large.
// Uses the OS malloc for allocations smaller than ArrayAllocatorMallocLimit
// and uses mapped memory for larger allocations.
// Most OS mallocs do something similar but Solaris malloc does not revert
// to mapped memory for large allocations. By default ArrayAllocatorMallocLimit
// is set so that we always use malloc except for Solaris where we set the
// limit to get mapped memory.
template <class E, MEMFLAGS F>
class ArrayAllocator VALUE_OBJ_CLASS_SPEC {
  char* _addr;                // start of the allocation (malloc'ed or mapped)
  bool _use_malloc;           // true if _addr came from malloc, false if mapped
  size_t _size;
  bool _free_in_destructor;   // if true, the destructor releases the storage
 public:
  ArrayAllocator(bool free_in_destructor = true) :
    _addr(NULL), _use_malloc(false), _size(0), _free_in_destructor(free_in_destructor) { }

  ~ArrayAllocator() {
    if (_free_in_destructor) {
      free();
    }
  }

  E* allocate(size_t length);
  void free();
};

#endif // SHARE_VM_MEMORY_ALLOCATION_HPP