/* Source: Eclipse OpenJ9 — runtime/gc_include/ObjectAllocationAPI.hpp */
/*******************************************************************************
 * Copyright (c) 1991, 2021 IBM Corp. and others
 *
 * This program and the accompanying materials are made available under
 * the terms of the Eclipse Public License 2.0 which accompanies this
 * distribution and is available at https://www.eclipse.org/legal/epl-2.0/
 * or the Apache License, Version 2.0 which accompanies this distribution and
 * is available at https://www.apache.org/licenses/LICENSE-2.0.
 *
 * This Source Code may also be made available under the following
 * Secondary Licenses when the conditions for such availability set
 * forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
 * General Public License, version 2 with the GNU Classpath
 * Exception [1] and GNU General Public License, version 2 with the
 * OpenJDK Assembly Exception [2].
 *
 * [1] https://www.gnu.org/software/classpath/license.html
 * [2] http://openjdk.java.net/legal/assembly-exception.html
 *
 * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
 *******************************************************************************/
#if !defined(OBJECTALLOCATIONAPI_HPP_)
#define OBJECTALLOCATIONAPI_HPP_

#include "j9cfg.h"

/* When a build forces a single object-reference mode, alias the class name to
 * the matching specialization so both variants can coexist in one VM build.
 */
#if defined(OMR_OVERRIDE_COMPRESS_OBJECT_REFERENCES)
#if OMR_OVERRIDE_COMPRESS_OBJECT_REFERENCES
#define MM_ObjectAllocationAPI MM_ObjectAllocationAPICompressed
#else /* OMR_OVERRIDE_COMPRESS_OBJECT_REFERENCES */
#define MM_ObjectAllocationAPI MM_ObjectAllocationAPIFull
#endif /* OMR_OVERRIDE_COMPRESS_OBJECT_REFERENCES */
#endif /* OMR_OVERRIDE_COMPRESS_OBJECT_REFERENCES */

#include "j9.h"
#include "j9consts.h"
#include "j9generated.h"
#include "j9protos.h"
#include "omrgcconsts.h"

#include "AtomicSupport.hpp"
#include "ObjectMonitor.hpp"
class MM_ObjectAllocationAPI
46
{
47
/*
48
* Data members
49
*/
50
private:
51
const uintptr_t _gcAllocationType;
52
#if defined(J9VM_GC_BATCH_CLEAR_TLH)
53
const uintptr_t _initializeSlotsOnTLHAllocate;
54
#endif /* J9VM_GC_BATCH_CLEAR_TLH */
55
const uintptr_t _objectAlignmentInBytes;
56
57
#if defined (J9VM_GC_SEGREGATED_HEAP)
58
const J9VMGCSizeClasses *_sizeClasses;
59
#endif /* J9VM_GC_SEGREGATED_HEAP */
60
61
VMINLINE void
62
initializeIndexableSlots(bool initializeSlots, uintptr_t dataSize, void *dataAddr)
63
{
64
if (initializeSlots) {
65
memset(dataAddr, 0, dataSize);
66
}
67
}
68
69
VMINLINE void
70
initializeContiguousIndexableObject(J9VMThread *currentThread, bool initializeSlots, J9Class *arrayClass, uint32_t size, uintptr_t dataSize, j9object_t *objectHeader)
71
{
72
bool isCompressedReferences = J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(currentThread);
73
74
if (isCompressedReferences) {
75
uintptr_t headerSize = sizeof(J9IndexableObjectContiguousCompressed);
76
J9IndexableObjectContiguousCompressed *header = (J9IndexableObjectContiguousCompressed*)*objectHeader;
77
header->clazz = (uint32_t)(uintptr_t)arrayClass;
78
header->size = size;
79
void *dataAddr = (void *)((uintptr_t)header + headerSize);
80
#if defined(J9VM_ENV_DATA64)
81
header->dataAddr = dataAddr;
82
#endif /* J9VM_ENV_DATA64 */
83
initializeIndexableSlots(initializeSlots, dataSize, dataAddr);
84
} else {
85
uintptr_t headerSize = sizeof(J9IndexableObjectContiguousFull);
86
J9IndexableObjectContiguousFull *header = (J9IndexableObjectContiguousFull*)*objectHeader;
87
header->clazz = (uintptr_t)arrayClass;
88
header->size = size;
89
void *dataAddr = (void *)((uintptr_t)header + headerSize);
90
#if defined(J9VM_ENV_DATA64)
91
header->dataAddr = dataAddr;
92
#endif /* J9VM_ENV_DATA64 */
93
initializeIndexableSlots(initializeSlots, dataSize, dataAddr);
94
}
95
}
96
97
VMINLINE void
98
initializeDiscontiguousIndexableObject(J9VMThread *currentThread, J9Class *arrayClass, j9object_t *objectHeader)
99
{
100
bool isCompressedReferences = J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(currentThread);
101
102
if (isCompressedReferences) {
103
J9IndexableObjectDiscontiguousCompressed *header = (J9IndexableObjectDiscontiguousCompressed*)*objectHeader;
104
header->clazz = (uint32_t)(uintptr_t)arrayClass;
105
header->mustBeZero = 0;
106
header->size = 0;
107
#if defined(J9VM_ENV_DATA64)
108
uintptr_t headerSize = sizeof(J9IndexableObjectDiscontiguousCompressed);
109
header->dataAddr = (void *)((uintptr_t)header + headerSize);
110
#endif /* J9VM_ENV_DATA64 */
111
} else {
112
J9IndexableObjectDiscontiguousFull *header = (J9IndexableObjectDiscontiguousFull*)*objectHeader;
113
header->clazz = (uintptr_t)arrayClass;
114
header->mustBeZero = 0;
115
header->size = 0;
116
#if defined(J9VM_ENV_DATA64)
117
uintptr_t headerSize = sizeof(J9IndexableObjectDiscontiguousFull);
118
header->dataAddr = (void *)((uintptr_t)header + headerSize);
119
#endif /* J9VM_ENV_DATA64 */
120
}
121
}
122
123
protected:
124
public:
125
126
/*
127
* Function members
128
*/
129
private:
130
131
VMINLINE j9object_t
132
inlineAllocateIndexableObjectImpl(J9VMThread *currentThread, J9Class *arrayClass, uint32_t size, uintptr_t dataSize, bool validSize, bool initializeSlots = true, bool memoryBarrier = true)
133
{
134
j9object_t instance = NULL;
135
136
#if defined(J9VM_GC_THREAD_LOCAL_HEAP) || defined(J9VM_GC_SEGREGATED_HEAP)
137
if (0 != size) {
138
/* Contiguous Array */
139
140
#if !defined(J9VM_ENV_DATA64)
141
if (validSize)
142
#endif /* J9VM_ENV_DATA64 */
143
{
144
/* Calculate the size of the object */
145
uintptr_t const headerSize = J9VMTHREAD_CONTIGUOUS_HEADER_SIZE(currentThread);
146
uintptr_t const dataSize = ((uintptr_t)size) * J9ARRAYCLASS_GET_STRIDE(arrayClass);
147
uintptr_t allocateSize = ROUND_UP_TO_POWEROF2(dataSize + headerSize, _objectAlignmentInBytes);
148
#if defined(J9VM_ENV_DATA64)
149
if (allocateSize < J9_GC_MINIMUM_INDEXABLE_OBJECT_SIZE) {
150
allocateSize = J9_GC_MINIMUM_INDEXABLE_OBJECT_SIZE;
151
}
152
#else /* !J9VM_ENV_DATA64 */
153
if (allocateSize < J9_GC_MINIMUM_OBJECT_SIZE) {
154
allocateSize = J9_GC_MINIMUM_OBJECT_SIZE;
155
}
156
#endif /* J9VM_ENV_DATA64 */
157
/* Allocate the memory */
158
j9object_t objectHeader = NULL;
159
160
switch(_gcAllocationType) {
161
162
#if defined(J9VM_GC_THREAD_LOCAL_HEAP)
163
case OMR_GC_ALLOCATION_TYPE_TLH:
164
if (allocateSize <= ((uintptr_t) currentThread->heapTop - (uintptr_t) currentThread->heapAlloc)) {
165
uint8_t *heapAlloc = currentThread->heapAlloc;
166
uint8_t *afterAlloc = heapAlloc + allocateSize;
167
objectHeader = (j9object_t)heapAlloc;
168
currentThread->heapAlloc = afterAlloc;
169
#if defined(J9VM_GC_TLH_PREFETCH_FTA)
170
currentThread->tlhPrefetchFTA -= allocateSize;
171
#endif /* J9VM_GC_TLH_PREFETCH_FTA */
172
#if defined(J9VM_GC_BATCH_CLEAR_TLH)
173
/* Do not zero the TLH if it is already zero's */
174
initializeSlots = initializeSlots && _initializeSlotsOnTLHAllocate;
175
#endif /* J9VM_GC_BATCH_CLEAR_TLH */
176
} else {
177
return NULL;
178
}
179
break;
180
#endif /* J9VM_GC_THREAD_LOCAL_HEAP */
181
182
#if defined(J9VM_GC_SEGREGATED_HEAP)
183
184
case OMR_GC_ALLOCATION_TYPE_SEGREGATED:
185
/* Metronome requires that slots are always initialized */
186
187
/* ensure the allocation will fit in a small size */
188
if (allocateSize <= J9VMGC_SIZECLASSES_MAX_SMALL_SIZE_BYTES) {
189
190
/* fetch the size class based on the allocation size */
191
uintptr_t slotsRequested = allocateSize / sizeof(uintptr_t);
192
uintptr_t sizeClassIndex = _sizeClasses->sizeClassIndex[slotsRequested];
193
194
/* Ensure the cache for the current size class is not empty. */
195
J9VMGCSegregatedAllocationCacheEntry *cacheEntry =
196
(J9VMGCSegregatedAllocationCacheEntry *)((uintptr_t)currentThread + J9_VMTHREAD_SEGREGATED_ALLOCATION_CACHE_OFFSET
197
+ (sizeClassIndex * sizeof(J9VMGCSegregatedAllocationCacheEntry)));
198
uintptr_t cellSize = _sizeClasses->smallCellSizes[sizeClassIndex];
199
200
if (cellSize <= ((uintptr_t) cacheEntry->top - (uintptr_t) cacheEntry->current)) {
201
objectHeader = (j9object_t)cacheEntry->current;
202
cacheEntry->current = (uintptr_t *) ((uintptr_t) cacheEntry->current + cellSize);
203
/* The metronome pre write barrier might scan this object - always zero it */
204
initializeSlots = true;
205
} else {
206
return NULL;
207
}
208
} else {
209
return NULL;
210
}
211
break;
212
#endif /* J9VM_GC_SEGREGATED_HEAP */
213
214
default:
215
/* Inline allocation not supported */
216
return NULL;
217
}
218
219
/* Initialize the object */
220
initializeContiguousIndexableObject(currentThread, initializeSlots, arrayClass, size, dataSize, &objectHeader);
221
222
if (memoryBarrier) {
223
VM_AtomicSupport::writeBarrier();
224
}
225
instance = objectHeader;
226
}
227
} else {
228
#if defined(J9VM_ENV_DATA64)
229
/* Calculate size of indexable object */
230
uintptr_t const headerSize = J9VMTHREAD_DISCONTIGUOUS_HEADER_SIZE(currentThread);
231
uintptr_t allocateSize = ROUND_UP_TO_POWEROF2(headerSize, _objectAlignmentInBytes);
232
/* Discontiguous header size is always equal or greater than J9_GC_MINIMUM_INDEXABLE_OBJECT_SIZE; therefore,
233
* there's no need to check if allocateSize is less than J9_GC_MINIMUM_INDEXABLE_OBJECT_SIZE
234
*/
235
#else
236
/* Zero-length array is discontiguous - assume minimum object size */
237
uintptr_t allocateSize = J9_GC_MINIMUM_OBJECT_SIZE;
238
#endif /* J9VM_ENV_DATA64 */
239
240
/* Allocate the memory */
241
j9object_t objectHeader = NULL;
242
switch(_gcAllocationType) {
243
244
#if defined(J9VM_GC_THREAD_LOCAL_HEAP)
245
case OMR_GC_ALLOCATION_TYPE_TLH:
246
247
if (allocateSize <= ((uintptr_t) currentThread->heapTop - (uintptr_t) currentThread->heapAlloc)) {
248
uint8_t *heapAlloc = currentThread->heapAlloc;
249
uint8_t *afterAlloc = heapAlloc + allocateSize;
250
objectHeader = (j9object_t) heapAlloc;
251
currentThread->heapAlloc = afterAlloc;
252
#if defined(J9VM_GC_TLH_PREFETCH_FTA)
253
currentThread->tlhPrefetchFTA -= allocateSize;
254
#endif /* J9VM_GC_TLH_PREFETCH_FTA */
255
} else {
256
return NULL;
257
}
258
break;
259
#endif /* J9VM_GC_THREAD_LOCAL_HEAP */
260
261
#if defined(J9VM_GC_SEGREGATED_HEAP)
262
case OMR_GC_ALLOCATION_TYPE_SEGREGATED:
263
/* ensure the allocation will fit in a small size */
264
if (allocateSize <= J9VMGC_SIZECLASSES_MAX_SMALL_SIZE_BYTES) {
265
266
/* fetch the size class based on the allocation size */
267
uintptr_t slotsRequested = allocateSize / sizeof(uintptr_t);
268
uintptr_t sizeClassIndex = _sizeClasses->sizeClassIndex[slotsRequested];
269
270
/* Ensure the cache for the current size class is not empty. */
271
J9VMGCSegregatedAllocationCacheEntry *cacheEntry =
272
(J9VMGCSegregatedAllocationCacheEntry *)((uintptr_t)currentThread + J9_VMTHREAD_SEGREGATED_ALLOCATION_CACHE_OFFSET
273
+ (sizeClassIndex * sizeof(J9VMGCSegregatedAllocationCacheEntry)));
274
uintptr_t cellSize = _sizeClasses->smallCellSizes[sizeClassIndex];
275
276
if (cellSize <= ((uintptr_t) cacheEntry->top - (uintptr_t) cacheEntry->current)) {
277
objectHeader = (j9object_t) cacheEntry->current;
278
cacheEntry->current = (uintptr_t *) ((uintptr_t) cacheEntry->current + cellSize);
279
} else {
280
return NULL;
281
}
282
} else {
283
return NULL;
284
}
285
break;
286
#endif /* J9VM_GC_SEGREGATED_HEAP */
287
288
default:
289
return NULL;
290
break;
291
}
292
293
/* Initialize the object */
294
initializeDiscontiguousIndexableObject(currentThread, arrayClass, &objectHeader);
295
296
if (memoryBarrier) {
297
VM_AtomicSupport::writeBarrier();
298
}
299
instance = objectHeader;
300
301
#endif /* defined(J9VM_GC_THREAD_LOCAL_HEAP) || defined(J9VM_GC_SEGREGATED_HEAP) */
302
303
}
304
305
return instance;
306
}
307
308
protected:
309
public:
310
311
/**
312
* Create an instance.
313
*/
314
MM_ObjectAllocationAPI(J9VMThread *currentThread)
315
: _gcAllocationType(currentThread->javaVM->gcAllocationType)
316
#if defined(J9VM_GC_BATCH_CLEAR_TLH)
317
, _initializeSlotsOnTLHAllocate(currentThread->javaVM->initializeSlotsOnTLHAllocate)
318
#endif /* J9VM_GC_BATCH_CLEAR_TLH */
319
, _objectAlignmentInBytes(currentThread->omrVMThread->_vm->_objectAlignmentInBytes)
320
#if defined (J9VM_GC_SEGREGATED_HEAP)
321
, _sizeClasses(currentThread->javaVM->realtimeSizeClasses)
322
#endif /* J9VM_GC_SEGREGATED_HEAP */
323
{}
324
325
VMINLINE j9object_t
326
inlineAllocateObject(J9VMThread *currentThread, J9Class *clazz, bool initializeSlots = true, bool memoryBarrier = true)
327
{
328
j9object_t instance = NULL;
329
#if defined(J9VM_GC_THREAD_LOCAL_HEAP) || defined(J9VM_GC_SEGREGATED_HEAP)
330
/* Calculate the size of the object */
331
uintptr_t const headerSize = J9VMTHREAD_OBJECT_HEADER_SIZE(currentThread);
332
uintptr_t dataSize = clazz->totalInstanceSize;
333
uintptr_t allocateSize = ROUND_UP_TO_POWEROF2(dataSize + headerSize, _objectAlignmentInBytes);
334
if (allocateSize < J9_GC_MINIMUM_OBJECT_SIZE) {
335
allocateSize = J9_GC_MINIMUM_OBJECT_SIZE;
336
}
337
338
/* Allocate the object */
339
switch(_gcAllocationType) {
340
#if defined(J9VM_GC_THREAD_LOCAL_HEAP)
341
case OMR_GC_ALLOCATION_TYPE_TLH:
342
if (allocateSize <= ((uintptr_t) currentThread->heapTop - (uintptr_t) currentThread->heapAlloc)) {
343
uint8_t *heapAlloc = currentThread->heapAlloc;
344
uint8_t *afterAlloc = heapAlloc + allocateSize;
345
currentThread->heapAlloc = afterAlloc;
346
#if defined(J9VM_GC_TLH_PREFETCH_FTA)
347
currentThread->tlhPrefetchFTA -= allocateSize;
348
#endif /* J9VM_GC_TLH_PREFETCH_FTA */
349
instance = (j9object_t) heapAlloc;
350
#if defined(J9VM_GC_BATCH_CLEAR_TLH)
351
/* Do not zero the TLH if it is already zero'd */
352
initializeSlots = initializeSlots && _initializeSlotsOnTLHAllocate;
353
#endif /* J9VM_GC_BATCH_CLEAR_TLH */
354
} else {
355
return NULL;
356
}
357
break;
358
#endif /* J9VM_GC_THREAD_LOCAL_HEAP */
359
360
#if defined(J9VM_GC_SEGREGATED_HEAP)
361
case OMR_GC_ALLOCATION_TYPE_SEGREGATED:
362
/* ensure the allocation will fit in a small size */
363
if (allocateSize <= J9VMGC_SIZECLASSES_MAX_SMALL_SIZE_BYTES) {
364
365
/* fetch the size class based on the allocation size */
366
uintptr_t slotsRequested = allocateSize / sizeof(uintptr_t);
367
uintptr_t sizeClassIndex = _sizeClasses->sizeClassIndex[slotsRequested];
368
369
/* Ensure the cache for the current size class is not empty. */
370
J9VMGCSegregatedAllocationCacheEntry *cacheEntry =
371
(J9VMGCSegregatedAllocationCacheEntry *)((uintptr_t)currentThread + J9_VMTHREAD_SEGREGATED_ALLOCATION_CACHE_OFFSET
372
+ (sizeClassIndex * sizeof(J9VMGCSegregatedAllocationCacheEntry)));
373
uintptr_t cellSize = _sizeClasses->smallCellSizes[sizeClassIndex];
374
375
if (cellSize <= ((uintptr_t) cacheEntry->top - (uintptr_t) cacheEntry->current)) {
376
instance = (j9object_t) cacheEntry->current;
377
cacheEntry->current = (uintptr_t *) ((uintptr_t) cacheEntry->current + cellSize);
378
/* The metronome pre write barrier might scan this object - always zero it */
379
initializeSlots = true;
380
} else {
381
return NULL;
382
}
383
} else {
384
return NULL;
385
}
386
break;
387
#endif /* J9VM_GC_SEGREGATED_HEAP */
388
389
default:
390
return NULL;
391
}
392
393
/* Initialize the object */
394
if (J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(currentThread)) {
395
J9ObjectCompressed *objectHeader = (J9ObjectCompressed*) instance;
396
if (J9CLASS_IS_ENSUREHASHED(clazz)) {
397
objectHeader->clazz = (uint32_t)(uintptr_t)clazz | (uint32_t)OBJECT_HEADER_HAS_BEEN_HASHED_IN_CLASS;
398
} else {
399
objectHeader->clazz = (uint32_t)(uintptr_t)clazz;
400
}
401
if (initializeSlots) {
402
memset(objectHeader + 1, 0, dataSize);
403
}
404
} else {
405
J9ObjectFull *objectHeader = (J9ObjectFull*) instance;
406
if (J9CLASS_IS_ENSUREHASHED(clazz)) {
407
objectHeader->clazz = (uintptr_t)clazz | (uintptr_t)OBJECT_HEADER_HAS_BEEN_HASHED_IN_CLASS;
408
} else {
409
objectHeader->clazz = (uintptr_t)clazz;
410
}
411
if (initializeSlots) {
412
memset(objectHeader + 1, 0, dataSize);
413
}
414
}
415
416
if (initializeSlots) {
417
if (LN_HAS_LOCKWORD(currentThread, instance)) {
418
j9objectmonitor_t initialLockword = VM_ObjectMonitor::getInitialLockword(currentThread->javaVM, clazz);
419
if (0 != initialLockword) {
420
j9objectmonitor_t *lockEA = J9OBJECT_MONITOR_EA(currentThread, instance);
421
J9_STORE_LOCKWORD(currentThread, lockEA, initialLockword);
422
}
423
}
424
}
425
426
if (memoryBarrier) {
427
VM_AtomicSupport::writeBarrier();
428
}
429
#endif /* J9VM_GC_THREAD_LOCAL_HEAP || J9VM_GC_SEGREGATED_HEAP */
430
return instance;
431
}
432
433
VMINLINE j9object_t
434
inlineAllocateIndexableValueTypeObject(J9VMThread *currentThread, J9Class *arrayClass, uint32_t size, bool initializeSlots = true, bool memoryBarrier = true, bool sizeCheck = true)
435
{
436
uintptr_t dataSize = ((uintptr_t)size) * J9ARRAYCLASS_GET_STRIDE(arrayClass);
437
bool validSize = true;
438
#if !defined(J9VM_ENV_DATA64)
439
validSize = !sizeCheck || (size < ((uint32_t)J9_MAXIMUM_INDEXABLE_DATA_SIZE / J9ARRAYCLASS_GET_STRIDE(arrayClass)));
440
#endif /* J9VM_ENV_DATA64 */
441
return inlineAllocateIndexableObjectImpl(currentThread, arrayClass, size, dataSize, validSize, initializeSlots, memoryBarrier);
442
}
443
444
VMINLINE j9object_t
445
inlineAllocateIndexableObject(J9VMThread *currentThread, J9Class *arrayClass, uint32_t size, bool initializeSlots = true, bool memoryBarrier = true, bool sizeCheck = true)
446
{
447
uintptr_t scale = ((J9ROMArrayClass*)(arrayClass->romClass))->arrayShape;
448
uintptr_t dataSize = ((uintptr_t)size) << scale;
449
bool validSize = true;
450
#if !defined(J9VM_ENV_DATA64)
451
validSize = !sizeCheck || (size < ((uint32_t)J9_MAXIMUM_INDEXABLE_DATA_SIZE >> scale));
452
#endif /* J9VM_ENV_DATA64 */
453
return inlineAllocateIndexableObjectImpl(currentThread, arrayClass, size, dataSize, validSize, initializeSlots, memoryBarrier);
454
}
455
456
};
#endif /* OBJECTALLOCATIONAPI_HPP_ */