Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mobile
Path: blob/master/src/hotspot/share/memory/allocation.inline.hpp
40949 views
1
/*
 * Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
#ifndef SHARE_MEMORY_ALLOCATION_INLINE_HPP
#define SHARE_MEMORY_ALLOCATION_INLINE_HPP

#include "memory/allocation.hpp"

#include "runtime/atomic.hpp"
#include "runtime/globals.hpp"
#include "runtime/os.hpp"
#include "utilities/align.hpp"
#include "utilities/globalDefinitions.hpp"

// Explicit C-heap memory management

#ifndef PRODUCT
// Increments unsigned long value for statistics (not atomic on MP, but avoids word-tearing on 32 bit).
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#ifdef _LP64
  // 64-bit platform: a single aligned 64-bit read-modify-write cannot tear.
  *dest += add_value;
#else
  // 32-bit platform: a plain julong update is two word-sized writes, so a
  // reader could observe half of the new value. Going through Atomic::load /
  // Atomic::store keeps each access whole. Note this is still a non-atomic
  // read-modify-write: concurrent incrementers can lose updates (acceptable
  // for statistics, per the comment above).
  julong value = Atomic::load(dest);
  Atomic::store(dest, value + add_value);
#endif
}
#endif
template <class E>
51
size_t MmapArrayAllocator<E>::size_for(size_t length) {
52
size_t size = length * sizeof(E);
53
int alignment = os::vm_allocation_granularity();
54
return align_up(size, alignment);
55
}
56
57
template <class E>
58
E* MmapArrayAllocator<E>::allocate_or_null(size_t length, MEMFLAGS flags) {
59
size_t size = size_for(length);
60
61
char* addr = os::reserve_memory(size, !ExecMem, flags);
62
if (addr == NULL) {
63
return NULL;
64
}
65
66
if (os::commit_memory(addr, size, !ExecMem)) {
67
return (E*)addr;
68
} else {
69
os::release_memory(addr, size);
70
return NULL;
71
}
72
}
73
74
template <class E>
75
E* MmapArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
76
size_t size = size_for(length);
77
78
char* addr = os::reserve_memory(size, !ExecMem, flags);
79
if (addr == NULL) {
80
vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
81
}
82
83
os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
84
85
return (E*)addr;
86
}
87
88
template <class E>
89
void MmapArrayAllocator<E>::free(E* addr, size_t length) {
90
bool result = os::release_memory((char*)addr, size_for(length));
91
assert(result, "Failed to release memory");
92
}
93
94
template <class E>
95
size_t MallocArrayAllocator<E>::size_for(size_t length) {
96
return length * sizeof(E);
97
}
98
99
template <class E>
100
E* MallocArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
101
return (E*)AllocateHeap(size_for(length), flags);
102
}
103
104
template<class E>
105
void MallocArrayAllocator<E>::free(E* addr) {
106
FreeHeap(addr);
107
}
108
109
template <class E>
110
bool ArrayAllocator<E>::should_use_malloc(size_t length) {
111
return MallocArrayAllocator<E>::size_for(length) < ArrayAllocatorMallocLimit;
112
}
113
114
template <class E>
115
E* ArrayAllocator<E>::allocate_malloc(size_t length, MEMFLAGS flags) {
116
return MallocArrayAllocator<E>::allocate(length, flags);
117
}
118
119
template <class E>
120
E* ArrayAllocator<E>::allocate_mmap(size_t length, MEMFLAGS flags) {
121
return MmapArrayAllocator<E>::allocate(length, flags);
122
}
123
124
template <class E>
125
E* ArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
126
if (should_use_malloc(length)) {
127
return allocate_malloc(length, flags);
128
}
129
130
return allocate_mmap(length, flags);
131
}
132
133
template <class E>
134
E* ArrayAllocator<E>::reallocate(E* old_addr, size_t old_length, size_t new_length, MEMFLAGS flags) {
135
E* new_addr = (new_length > 0)
136
? allocate(new_length, flags)
137
: NULL;
138
139
if (new_addr != NULL && old_addr != NULL) {
140
memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
141
}
142
143
if (old_addr != NULL) {
144
free(old_addr, old_length);
145
}
146
147
return new_addr;
148
}
149
150
template<class E>
151
void ArrayAllocator<E>::free_malloc(E* addr, size_t length) {
152
MallocArrayAllocator<E>::free(addr);
153
}
154
155
template<class E>
156
void ArrayAllocator<E>::free_mmap(E* addr, size_t length) {
157
MmapArrayAllocator<E>::free(addr, length);
158
}
159
160
template<class E>
161
void ArrayAllocator<E>::free(E* addr, size_t length) {
162
if (addr != NULL) {
163
if (should_use_malloc(length)) {
164
free_malloc(addr, length);
165
} else {
166
free_mmap(addr, length);
167
}
168
}
169
}
#endif // SHARE_MEMORY_ALLOCATION_INLINE_HPP