GitHub Repository: PojavLauncherTeam/openj9
Path: blob/master/runtime/gc_base/ObjectAccessBarrier.cpp
/*******************************************************************************
 * Copyright (c) 1991, 2021 IBM Corp. and others
 *
 * This program and the accompanying materials are made available under
 * the terms of the Eclipse Public License 2.0 which accompanies this
 * distribution and is available at https://www.eclipse.org/legal/epl-2.0/
 * or the Apache License, Version 2.0 which accompanies this distribution and
 * is available at https://www.apache.org/licenses/LICENSE-2.0.
 *
 * This Source Code may also be made available under the following
 * Secondary Licenses when the conditions for such availability set
 * forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
 * General Public License, version 2 with the GNU Classpath
 * Exception [1] and GNU General Public License, version 2 with the
 * OpenJDK Assembly Exception [2].
 *
 * [1] https://www.gnu.org/software/classpath/license.html
 * [2] http://openjdk.java.net/legal/assembly-exception.html
 *
 * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
 *******************************************************************************/

/**
 * @file
 * @ingroup GC_Base
 */

#include "ObjectAccessBarrier.hpp"

#include "j9protos.h"
#include "ModronAssertions.h"
#include "rommeth.h"

#include "ArrayletObjectModel.hpp"
#include "AtomicOperations.hpp"
#include "EnvironmentBase.hpp"
#include "HeapRegionManager.hpp"
#include "MemorySpace.hpp"
#include "ObjectAccessBarrierAPI.hpp"
#include "ObjectMonitor.hpp"
#include "VMHelpers.hpp"
#include "VMThreadListIterator.hpp"

bool
MM_ObjectAccessBarrier::initialize(MM_EnvironmentBase *env)
{
	_extensions = MM_GCExtensions::getExtensions(env);
	_heap = _extensions->heap;
	J9JavaVM *vm = (J9JavaVM*)env->getOmrVM()->_language_vm;
	OMR_VM *omrVM = env->getOmrVM();
	char *refSignature = (char*) "I";

	if (sizeof(U_64) == J9JAVAVM_REFERENCE_SIZE(vm)) {
		refSignature = (char *) "J";
	}

#if defined(OMR_GC_COMPRESSED_POINTERS)
	if (env->compressObjectReferences()) {

#if defined(J9VM_GC_REALTIME)
		/*
		 * Do not allow a 4-bit shift for Metronome:
		 * the cell sizes table for the segregated heap would have to be modified to hold values aligned to 16.
		 */
		if (_extensions->isMetronomeGC()) {
			if (DEFAULT_LOW_MEMORY_HEAP_CEILING_SHIFT < omrVM->_compressedPointersShift) {
				/* Non-standard NLS message required */
				_extensions->heapInitializationFailureReason = MM_GCExtensionsBase::HEAP_INITIALIZATION_FAILURE_REASON_METRONOME_DOES_NOT_SUPPORT_4BIT_SHIFT;
				return false;
			}
		}
#endif /* J9VM_GC_REALTIME */

#if defined(OMR_GC_FULL_POINTERS)
		_compressObjectReferences = true;
#endif /* defined(OMR_GC_FULL_POINTERS) */
		_compressedPointersShift = omrVM->_compressedPointersShift;
		vm->compressedPointersShift = omrVM->_compressedPointersShift;
		Trc_MM_CompressedAccessBarrierInitialized(env->getLanguageVMThread(), 0, _compressedPointersShift);
	}
#endif /* defined(OMR_GC_COMPRESSED_POINTERS) */

	vm->objectAlignmentInBytes = omrVM->_objectAlignmentInBytes;
	vm->objectAlignmentShift = omrVM->_objectAlignmentShift;

	/* request an extra slot in java/lang/ref/Reference which we will use to maintain linked lists of reference objects */
	if (0 != vm->internalVMFunctions->addHiddenInstanceField(vm, "java/lang/ref/Reference", "gcLink", refSignature, &_referenceLinkOffset)) {
		return false;
	}
	/* request an extra slot in java/util/concurrent/locks/AbstractOwnableSynchronizer which we will use to maintain linked lists of ownable synchronizer objects */
	if (0 != vm->internalVMFunctions->addHiddenInstanceField(vm, "java/util/concurrent/locks/AbstractOwnableSynchronizer", "ownableSynchronizerLink", refSignature, &_ownableSynchronizerLinkOffset)) {
		return false;
	}

	return true;
}
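
/*
 * The two hidden slots requested above let the GC thread Reference and
 * AbstractOwnableSynchronizer instances into intrusive singly linked lists
 * with no Java-visible field. A minimal walking sketch, assuming a
 * hypothetical loadGcLink() helper that reads the hidden slot at
 * _referenceLinkOffset (loadGcLink() and processReference() are illustrative
 * names, not functions defined in this file):
 *
 *   for (j9object_t ref = listHead; NULL != ref; ref = loadGcLink(vmThread, ref)) {
 *       processReference(vmThread, ref);
 *   }
 */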

void
MM_ObjectAccessBarrier::kill(MM_EnvironmentBase *env)
{
	tearDown(env);
	env->getForge()->free(this);
}

void
MM_ObjectAccessBarrier::tearDown(MM_EnvironmentBase *env)
{
}

/**
 * Read an object pointer from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readObject() for higher-level actions.
 * By default, forwards to readU32Impl() on 32-bit platforms, and readU64Impl() on 64-bit.
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
mm_j9object_t
MM_ObjectAccessBarrier::readObjectImpl(J9VMThread *vmThread, mm_j9object_t srcObject, fj9object_t *srcAddress, bool isVolatile)
{
	mm_j9object_t result = NULL;
	if (compressObjectReferences()) {
		result = convertPointerFromToken(*(uint32_t*)srcAddress);
	} else {
		result = (mm_j9object_t)*(uintptr_t*)srcAddress;
	}
	return result;
}
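
/*
 * Decode sketch for the compressed branch above, assuming
 * convertPointerFromToken() widens the 32-bit token by the shift captured in
 * initialize() (the usual OpenJ9 compressed-reference encoding; the helper's
 * actual body lives elsewhere):
 *
 *   uint32_t token = *(uint32_t *)srcAddress;
 *   mm_j9object_t ref = (mm_j9object_t)((uintptr_t)token << _compressedPointersShift);
 */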

/**
 * Read a static object field.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See staticReadObject() for higher-level actions.
 * By default, forwards to readU32Impl() on 32-bit platforms, and readU64Impl() on 64-bit.
 * @param clazz the class which contains the static field
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
mm_j9object_t
MM_ObjectAccessBarrier::staticReadObjectImpl(J9VMThread *vmThread, J9Class *clazz, j9object_t *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Read an object from an internal VM slot (J9VMThread, J9JavaVM, named field of J9Class).
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readObjectFromInternalVMSlot() for higher-level actions.
 * By default, forwards to readU32Impl() on 32-bit platforms, and readU64Impl() on 64-bit.
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
mm_j9object_t
MM_ObjectAccessBarrier::readObjectFromInternalVMSlotImpl(J9VMThread *vmThread, j9object_t *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store an object pointer into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeObject() for higher-level actions.
 * By default, forwards to storeU32Impl() on 32-bit platforms, and storeU64Impl() on 64-bit.
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeObjectImpl(J9VMThread *vmThread, mm_j9object_t destObject, fj9object_t *destAddress, mm_j9object_t value, bool isVolatile)
{
	if (compressObjectReferences()) {
		*(uint32_t*)destAddress = (uint32_t)convertTokenFromPointer(value);
	} else {
		*(uintptr_t*)destAddress = (uintptr_t)value;
	}
}

/**
 * Store a static field.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See staticStoreObject() for higher-level actions.
 * By default, forwards to storeU32Impl() on 32-bit platforms, and storeU64Impl() on 64-bit.
 * @param clazz the class the field belongs to
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::staticStoreObjectImpl(J9VMThread *vmThread, J9Class* clazz, j9object_t *destAddress, mm_j9object_t value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Write an object to an internal VM slot (J9VMThread, J9JavaVM, named field of J9Class).
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeObjectToInternalVMSlot() for higher-level actions.
 * By default, forwards to storeU32Impl() on 32-bit platforms, and storeU64Impl() on 64-bit.
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeObjectToInternalVMSlotImpl(J9VMThread *vmThread, j9object_t *destAddress, mm_j9object_t value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read a U_8 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
U_8
MM_ObjectAccessBarrier::readU8Impl(J9VMThread *vmThread, mm_j9object_t srcObject, U_8 *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store a U_8 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeU8Impl(J9VMThread *vmThread, mm_j9object_t destObject, U_8 *destAddress, U_8 value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read an I_8 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
I_8
MM_ObjectAccessBarrier::readI8Impl(J9VMThread *vmThread, mm_j9object_t srcObject, I_8 *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store an I_8 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeI8Impl(J9VMThread *vmThread, mm_j9object_t destObject, I_8 *destAddress, I_8 value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read a U_16 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
U_16
MM_ObjectAccessBarrier::readU16Impl(J9VMThread *vmThread, mm_j9object_t srcObject, U_16 *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store a U_16 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeU16Impl(J9VMThread *vmThread, mm_j9object_t destObject, U_16 *destAddress, U_16 value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read an I_16 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
I_16
MM_ObjectAccessBarrier::readI16Impl(J9VMThread *vmThread, mm_j9object_t srcObject, I_16 *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store an I_16 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 *
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeI16Impl(J9VMThread *vmThread, mm_j9object_t destObject, I_16 *destAddress, I_16 value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read a U_32 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readU32() for higher-level actions.
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
U_32
MM_ObjectAccessBarrier::readU32Impl(J9VMThread *vmThread, mm_j9object_t srcObject, U_32 *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store a U_32 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeU32() for higher-level actions.
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeU32Impl(J9VMThread *vmThread, mm_j9object_t destObject, U_32 *destAddress, U_32 value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read an I_32 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readI32() for higher-level actions.
 * By default, forwards to readU32Impl() and casts the return value.
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
I_32
MM_ObjectAccessBarrier::readI32Impl(J9VMThread *vmThread, mm_j9object_t srcObject, I_32 *srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store an I_32 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeI32() for higher-level actions.
 * By default, casts the value to unsigned and forwards to storeU32().
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeI32Impl(J9VMThread *vmThread, mm_j9object_t destObject, I_32 *destAddress, I_32 value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Read a U_64 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readU64() for higher-level actions.
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
U_64
MM_ObjectAccessBarrier::readU64Impl(J9VMThread *vmThread, mm_j9object_t srcObject, U_64 *srcAddress, bool isVolatile)
{
#if !defined(J9VM_ENV_DATA64)
	if (isVolatile) {
		return longVolatileRead(vmThread, srcAddress);
	}
#endif /* !J9VM_ENV_DATA64 */
	return *srcAddress;
}

/**
 * Store a U_64 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeU64() for higher-level actions.
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeU64Impl(J9VMThread *vmThread, mm_j9object_t destObject, U_64 *destAddress, U_64 value, bool isVolatile)
{
#if !defined(J9VM_ENV_DATA64)
	if (isVolatile) {
		longVolatileWrite(vmThread, destAddress, &value);
		return;
	}
#endif /* !J9VM_ENV_DATA64 */
	*destAddress = value;
}

/**
 * Read an I_64 from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readI64() for higher-level actions.
 * By default, forwards to readU64Impl() and casts the return value.
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
I_64
MM_ObjectAccessBarrier::readI64Impl(J9VMThread *vmThread, mm_j9object_t srcObject, I_64 *srcAddress, bool isVolatile)
{
#if !defined(J9VM_ENV_DATA64)
	if (isVolatile) {
		return longVolatileRead(vmThread, (U_64 *)srcAddress);
	}
#endif /* !J9VM_ENV_DATA64 */
	return *srcAddress;
}

/**
 * Store an I_64 into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeI64() for higher-level actions.
 * By default, casts the value to unsigned and forwards to storeU64Impl().
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeI64Impl(J9VMThread *vmThread, mm_j9object_t destObject, I_64 *destAddress, I_64 value, bool isVolatile)
{
#if !defined(J9VM_ENV_DATA64)
	if (isVolatile) {
		longVolatileWrite(vmThread, (U_64 *)destAddress, (U_64 *)&value);
		return;
	}
#endif /* !J9VM_ENV_DATA64 */
	*destAddress = value;
}
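
/*
 * On 32-bit builds a volatile 64-bit access cannot be a plain load or store
 * (it would tear into two 32-bit halves), so the four Impl routines above
 * route through longVolatileRead()/longVolatileWrite(). A minimal sketch of
 * the shared pattern, assuming those helpers supply the required single-copy
 * atomicity:
 *
 *   #if !defined(J9VM_ENV_DATA64)
 *       if (isVolatile) { return longVolatileRead(vmThread, addr); }
 *   #endif
 *       return *addr; // on 64-bit platforms the plain access is already atomic
 */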

/**
 * Read a non-object address (pointer to internal VM data) from an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See readAddress() for higher-level actions.
 * By default, forwards to readU32Impl() on 32-bit platforms, and readU64Impl() on 64-bit.
 * @param srcObject the object being read from
 * @param srcAddress the address of the field to be read
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void *
MM_ObjectAccessBarrier::readAddressImpl(J9VMThread *vmThread, mm_j9object_t srcObject, void **srcAddress, bool isVolatile)
{
	return *srcAddress;
}

/**
 * Store a non-object address into an object.
 * This function is only concerned with moving the actual data. Do not re-implement
 * unless the value is stored in a non-native format (e.g. compressed object pointers).
 * See storeAddress() for higher-level actions.
 * By default, forwards to storeU32Impl() on 32-bit platforms, and storeU64Impl() on 64-bit.
 * @param destObject the object being stored into
 * @param destAddress the address of the field to be written
 * @param value the value to be stored
 * @param isVolatile non-zero if the field is volatile, zero otherwise
 */
void
MM_ObjectAccessBarrier::storeAddressImpl(J9VMThread *vmThread, mm_j9object_t destObject, void **destAddress, void *value, bool isVolatile)
{
	*destAddress = value;
}

/**
 * Call before a read or write of a possibly volatile field.
 * @note This must be used in tandem with protectIfVolatileAfter()
 * @param isVolatile true if the field is volatile, false otherwise
 * @param isRead true if the field is being read, false if it is being written
 * @param isWide true if the field is wide (64-bit), false otherwise
 */
void
MM_ObjectAccessBarrier::protectIfVolatileBefore(J9VMThread *vmThread, bool isVolatile, bool isRead, bool isWide)
{
	if (isVolatile) {
		/* We need to insert a sync instruction here.
		 * Since this call is inlined, the compiler should optimize it away when it is not necessary.
		 */
		if (!isRead) {
			/* atomic writeBarrier */
			MM_AtomicOperations::storeSync();
		}
	}
}

/**
 * Call after a read or write of a possibly volatile field.
 * @note This must be used in tandem with protectIfVolatileBefore()
 * @param isVolatile true if the field is volatile, false otherwise
 * @param isRead true if the field is being read, false if it is being written
 * @param isWide true if the field is wide (64-bit), false otherwise
 */
void
MM_ObjectAccessBarrier::protectIfVolatileAfter(J9VMThread *vmThread, bool isVolatile, bool isRead, bool isWide)
{
	if (isVolatile) {
		/* We need to insert a sync instruction here.
		 * Since this call is inlined, the compiler should optimize it away when it is not necessary.
		 */
		if (isRead) {
			/* atomic readBarrier */
			MM_AtomicOperations::loadSync();
		} else {
			/* atomic readWriteBarrier */
			MM_AtomicOperations::sync();
		}
	}
}
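
/*
 * Net fence placement produced by the pair above (roughly acquire/release
 * semantics, using the MM_AtomicOperations primitives):
 *
 *   volatile read:   <load>; loadSync();           -- acquire after the load
 *   volatile write:  storeSync(); <store>; sync(); -- release before, full barrier after
 *
 * A minimal call-shape sketch matching the accessors below:
 *
 *   protectIfVolatileBefore(vmThread, isVolatile, true, false);
 *   U_32 v = readU32Impl(vmThread, obj, addr, isVolatile);
 *   protectIfVolatileAfter(vmThread, isVolatile, true, false);
 */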

/**
 * Read an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The object being used.
 * @param srcOffset The offset of the field.
 * @param isVolatile non-zero if the field is volatile.
 */
J9Object *
MM_ObjectAccessBarrier::mixedObjectReadObject(J9VMThread *vmThread, J9Object *srcObject, UDATA srcOffset, bool isVolatile)
{
	fj9object_t *actualAddress = J9OAB_MIXEDOBJECT_EA(srcObject, srcOffset, fj9object_t);
	J9Object *result = NULL;

	if (preObjectRead(vmThread, srcObject, actualAddress)) {
		protectIfVolatileBefore(vmThread, isVolatile, true, false);
		result = readObjectImpl(vmThread, srcObject, actualAddress, isVolatile);
		protectIfVolatileAfter(vmThread, isVolatile, true, false);

		if (!postObjectRead(vmThread, srcObject, actualAddress)) {
			result = NULL;
		}
	}

	/* This must always be called to massage the return value */
	return result;
}
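
/*
 * Caller-side sketch for the read barrier above: preObjectRead() may decline
 * (e.g. a concurrent collector could not fix up the slot), and
 * postObjectRead() gets a second veto after the raw read; either way the
 * caller sees NULL. Hypothetical use, assuming fieldOffset already accounts
 * for the object header:
 *
 *   J9Object *value = barrier->mixedObjectReadObject(vmThread, obj, fieldOffset, false);
 *   // NULL here means either a genuinely NULL field or a vetoed read
 */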

/**
 * Read an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The object being used.
 * @param srcOffset The offset of the field.
 * @param isVolatile non-zero if the field is volatile.
 */
void *
MM_ObjectAccessBarrier::mixedObjectReadAddress(J9VMThread *vmThread, J9Object *srcObject, UDATA srcOffset, bool isVolatile)
{
	void **actualAddress = J9OAB_MIXEDOBJECT_EA(srcObject, srcOffset, void *);
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	void *result = readAddressImpl(vmThread, srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The object being used.
 * @param srcOffset The offset of the field.
 * @param isVolatile non-zero if the field is volatile.
 */
U_32
MM_ObjectAccessBarrier::mixedObjectReadU32(J9VMThread *vmThread, J9Object *srcObject, UDATA srcOffset, bool isVolatile)
{
	U_32 *actualAddress = J9OAB_MIXEDOBJECT_EA(srcObject, srcOffset, U_32);
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	U_32 result = readU32Impl(vmThread, srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The object being used.
 * @param srcOffset The offset of the field.
 * @param isVolatile non-zero if the field is volatile.
 */
I_32
MM_ObjectAccessBarrier::mixedObjectReadI32(J9VMThread *vmThread, J9Object *srcObject, UDATA srcOffset, bool isVolatile)
{
	I_32 *actualAddress = J9OAB_MIXEDOBJECT_EA(srcObject, srcOffset, I_32);
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	I_32 result = readI32Impl(vmThread, srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The object being used.
 * @param srcOffset The offset of the field.
 * @param isVolatile non-zero if the field is volatile.
 */
U_64
MM_ObjectAccessBarrier::mixedObjectReadU64(J9VMThread *vmThread, J9Object *srcObject, UDATA srcOffset, bool isVolatile)
{
	U_64 *actualAddress = J9OAB_MIXEDOBJECT_EA(srcObject, srcOffset, U_64);
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	U_64 result = readU64Impl(vmThread, srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Read an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The object being used.
 * @param srcOffset The offset of the field.
 * @param isVolatile non-zero if the field is volatile.
 */
I_64
MM_ObjectAccessBarrier::mixedObjectReadI64(J9VMThread *vmThread, J9Object *srcObject, UDATA srcOffset, bool isVolatile)
{
	I_64 *actualAddress = J9OAB_MIXEDOBJECT_EA(srcObject, srcOffset, I_64);
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	I_64 result = readI64Impl(vmThread, srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Store an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The object being used.
 * @param destOffset The offset of the field.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::mixedObjectStoreObject(J9VMThread *vmThread, J9Object *destObject, UDATA destOffset, J9Object *value, bool isVolatile)
{
	fj9object_t *actualAddress = J9OAB_MIXEDOBJECT_EA(destObject, destOffset, fj9object_t);

	if (preObjectStore(vmThread, destObject, actualAddress, value, isVolatile)) {
		protectIfVolatileBefore(vmThread, isVolatile, false, false);
		storeObjectImpl(vmThread, destObject, actualAddress, value, isVolatile);
		protectIfVolatileAfter(vmThread, isVolatile, false, false);

		postObjectStore(vmThread, destObject, actualAddress, value, isVolatile);
	}
}

/**
 * Store an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The object being used.
 * @param destOffset The offset of the field.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::mixedObjectStoreAddress(J9VMThread *vmThread, J9Object *destObject, UDATA destOffset, void *value, bool isVolatile)
{
	void **actualAddress = J9OAB_MIXEDOBJECT_EA(destObject, destOffset, void *);
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeAddressImpl(vmThread, destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The object being used.
 * @param destOffset The offset of the field.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::mixedObjectStoreU32(J9VMThread *vmThread, J9Object *destObject, UDATA destOffset, U_32 value, bool isVolatile)
{
	U_32 *actualAddress = J9OAB_MIXEDOBJECT_EA(destObject, destOffset, U_32);
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeU32Impl(vmThread, destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The object being used.
 * @param destOffset The offset of the field.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::mixedObjectStoreI32(J9VMThread *vmThread, J9Object *destObject, UDATA destOffset, I_32 value, bool isVolatile)
{
	I_32 *actualAddress = J9OAB_MIXEDOBJECT_EA(destObject, destOffset, I_32);
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeI32Impl(vmThread, destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The object being used.
 * @param destOffset The offset of the field.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::mixedObjectStoreU64(J9VMThread *vmThread, J9Object *destObject, UDATA destOffset, U_64 value, bool isVolatile)
{
	U_64 *actualAddress = J9OAB_MIXEDOBJECT_EA(destObject, destOffset, U_64);
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeU64Impl(vmThread, destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Store an object field: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The object being used.
 * @param destOffset The offset of the field.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::mixedObjectStoreI64(J9VMThread *vmThread, J9Object *destObject, UDATA destOffset, I_64 value, bool isVolatile)
{
	I_64 *actualAddress = J9OAB_MIXEDOBJECT_EA(destObject, destOffset, I_64);
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeI64Impl(vmThread, destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
J9Object *
MM_ObjectAccessBarrier::indexableReadObject(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	UDATA const referenceSize = J9VMTHREAD_REFERENCE_SIZE(vmThread);
	fj9object_t *actualAddress = (fj9object_t *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, referenceSize);
	J9Object *result = NULL;

	if (preObjectRead(vmThread, (J9Object *)srcObject, actualAddress)) {
		protectIfVolatileBefore(vmThread, isVolatile, true, false);
		result = readObjectImpl(vmThread, (J9Object*)srcObject, actualAddress);
		protectIfVolatileAfter(vmThread, isVolatile, true, false);

		if (!postObjectRead(vmThread, (J9Object *)srcObject, actualAddress)) {
			result = NULL;
		}
	}

	/* This must always be called to massage the return value */
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
void *
MM_ObjectAccessBarrier::indexableReadAddress(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	void **actualAddress = (void **)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(void *));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	void *result = readAddressImpl(vmThread, (J9Object*)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
U_8
MM_ObjectAccessBarrier::indexableReadU8(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	U_8 *actualAddress = (U_8 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(U_8));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	U_8 result = readU8Impl(vmThread, (mm_j9object_t)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
I_8
MM_ObjectAccessBarrier::indexableReadI8(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	I_8 *actualAddress = (I_8 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(I_8));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	I_8 result = readI8Impl(vmThread, (mm_j9object_t)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
U_16
MM_ObjectAccessBarrier::indexableReadU16(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	U_16 *actualAddress = (U_16 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(U_16));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	U_16 result = readU16Impl(vmThread, (mm_j9object_t)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
I_16
MM_ObjectAccessBarrier::indexableReadI16(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	I_16 *actualAddress = (I_16 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(I_16));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	I_16 result = readI16Impl(vmThread, (mm_j9object_t)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
U_32
MM_ObjectAccessBarrier::indexableReadU32(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	U_32 *actualAddress = (U_32 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(U_32));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	U_32 result = readU32Impl(vmThread, (mm_j9object_t)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
I_32
MM_ObjectAccessBarrier::indexableReadI32(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	I_32 *actualAddress = (I_32 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(I_32));
	protectIfVolatileBefore(vmThread, isVolatile, true, false);
	I_32 result = readI32Impl(vmThread, (mm_j9object_t)srcObject, actualAddress);
	protectIfVolatileAfter(vmThread, isVolatile, true, false);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
U_64
MM_ObjectAccessBarrier::indexableReadU64(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	U_64 *actualAddress = (U_64 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(U_64));
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	U_64 result = readU64Impl(vmThread, (mm_j9object_t)srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Read an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param srcObject The array being used.
 * @param srcIndex The element index
 */
I_64
MM_ObjectAccessBarrier::indexableReadI64(J9VMThread *vmThread, J9IndexableObject *srcObject, I_32 srcIndex, bool isVolatile)
{
	I_64 *actualAddress = (I_64 *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(I_64));
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	I_64 result = readI64Impl(vmThread, (mm_j9object_t)srcObject, actualAddress, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}
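
/*
 * Element-addressing sketch for the indexable reads above, assuming the
 * common case of an inline contiguous array (discontiguous arraylets take a
 * different path inside indexableEffectiveAddress()):
 *
 *   U_8 *data = (U_8 *)_extensions->indexableObjectModel.getDataPointerForContiguous(srcObject);
 *   void *element = data + ((UDATA)srcIndex * elementSize);
 */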

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreObject(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, J9Object *value, bool isVolatile)
{
	UDATA const referenceSize = J9VMTHREAD_REFERENCE_SIZE(vmThread);
	fj9object_t *actualAddress = (fj9object_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, referenceSize);

	if (preObjectStore(vmThread, (J9Object *)destObject, actualAddress, value)) {
		protectIfVolatileBefore(vmThread, isVolatile, false, false);
		storeObjectImpl(vmThread, (J9Object*)destObject, actualAddress, value);
		protectIfVolatileAfter(vmThread, isVolatile, false, false);
		postObjectStore(vmThread, (J9Object *)destObject, actualAddress, value);
	}
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreAddress(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, void *value, bool isVolatile)
{
	void **actualAddress = (void **)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(void *));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeAddressImpl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreU8(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, U_8 value, bool isVolatile)
{
	U_8 *actualAddress = (U_8 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(U_8));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeU8Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreI8(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, I_8 value, bool isVolatile)
{
	I_8 *actualAddress = (I_8 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(I_8));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeI8Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreU16(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, U_16 value, bool isVolatile)
{
	U_16 *actualAddress = (U_16 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(U_16));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeU16Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreI16(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, I_16 value, bool isVolatile)
{
	I_16 *actualAddress = (I_16 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(I_16));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeI16Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreU32(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, U_32 value, bool isVolatile)
{
	U_32 *actualAddress = (U_32 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(U_32));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeU32Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreI32(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, I_32 value, bool isVolatile)
{
	I_32 *actualAddress = (I_32 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(I_32));
	protectIfVolatileBefore(vmThread, isVolatile, false, false);
	storeI32Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value);
	protectIfVolatileAfter(vmThread, isVolatile, false, false);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreU64(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, U_64 value, bool isVolatile)
{
	U_64 *actualAddress = (U_64 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(U_64));
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeU64Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Store an array element: perform any pre-use barriers, calculate an effective address
 * and perform the work.
 * @param destObject The array being used.
 * @param destIndex The element index
 * @param value The value to store.
 */
void
MM_ObjectAccessBarrier::indexableStoreI64(J9VMThread *vmThread, J9IndexableObject *destObject, I_32 destIndex, I_64 value, bool isVolatile)
{
	I_64 *actualAddress = (I_64 *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(I_64));
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeI64Impl(vmThread, (mm_j9object_t)destObject, actualAddress, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Copy object fields into a flattened array element.
 *
 * @param vmThread thread token
 * @param arrayClazz array J9Class
 * @param srcObject object whose fields will be copied
 * @param arrayRef array object
 * @param index index of the array element where fields are copied to
 */
void
MM_ObjectAccessBarrier::copyObjectFieldsToFlattenedArrayElement(J9VMThread *vmThread, J9ArrayClass *arrayClazz, j9object_t srcObject, J9IndexableObject *arrayRef, I_32 index)
{
	UDATA elementStartOffset = J9VMTHREAD_OBJECT_HEADER_SIZE(vmThread);
	U_8 *elementAddress = (U_8*)indexableEffectiveAddress(vmThread, arrayRef, index, J9ARRAYCLASS_GET_STRIDE((J9Class *) arrayClazz));
	IDATA elementOffset = (elementAddress - (U_8*)arrayRef);
	J9Class *elementClazz = J9GC_J9OBJECT_CLAZZ_THREAD(srcObject, vmThread);
	Assert_MM_true(J9_IS_J9CLASS_VALUETYPE(elementClazz));
	Assert_MM_true(elementClazz == arrayClazz->leafComponentType);

	elementStartOffset += J9CLASS_PREPADDING_SIZE(elementClazz);

	copyObjectFields(vmThread, elementClazz, srcObject, elementStartOffset, (j9object_t) arrayRef, elementOffset);
}

/**
 * Copy flattened array element fields into an object.
 *
 * @param vmThread thread token
 * @param arrayClazz array J9Class
 * @param destObject object where the array element fields will be copied to
 * @param arrayRef array object
 * @param index index of the array element where fields are copied from
 */
void
MM_ObjectAccessBarrier::copyObjectFieldsFromFlattenedArrayElement(J9VMThread *vmThread, J9ArrayClass *arrayClazz, j9object_t destObject, J9IndexableObject *arrayRef, I_32 index)
{
	UDATA elementStartOffset = J9VMTHREAD_OBJECT_HEADER_SIZE(vmThread);
	U_8 *elementAddress = (U_8*)indexableEffectiveAddress(vmThread, arrayRef, index, J9ARRAYCLASS_GET_STRIDE((J9Class *) arrayClazz));
	IDATA elementOffset = (elementAddress - (U_8*)arrayRef);
	J9Class *elementClazz = J9GC_J9OBJECT_CLAZZ_THREAD(destObject, vmThread);
	Assert_MM_true(J9_IS_J9CLASS_VALUETYPE(elementClazz));
	Assert_MM_true(elementClazz == arrayClazz->leafComponentType);

	elementStartOffset += J9CLASS_PREPADDING_SIZE(elementClazz);

	copyObjectFields(vmThread, elementClazz, (j9object_t) arrayRef, elementOffset, destObject, elementStartOffset);
}
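
/*
 * Offset arithmetic used by both copies above: the object-side offset skips
 * the header plus any value-type pre-padding, while the array-side offset is
 * the element's displacement from the array base; copyObjectFields() then
 * moves the element's instance fields between the two:
 *
 *   UDATA fieldsStart = J9VMTHREAD_OBJECT_HEADER_SIZE(vmThread) + J9CLASS_PREPADDING_SIZE(elementClazz);
 *   IDATA elementOffset = (U_8 *)indexableEffectiveAddress(vmThread, arrayRef, index, stride) - (U_8 *)arrayRef;
 */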

/**
 * Read a static field.
 * @param srcSlot The static field slot.
 * @param isVolatile non-zero if the field is volatile.
 */
J9Object *
MM_ObjectAccessBarrier::staticReadObject(J9VMThread *vmThread, J9Class *clazz, J9Object **srcSlot, bool isVolatile)
{
	J9Object *result = NULL;

	if (preObjectRead(vmThread, clazz, srcSlot)) {
		protectIfVolatileBefore(vmThread, isVolatile, true, true);
		result = staticReadObjectImpl(vmThread, clazz, srcSlot, isVolatile);
		protectIfVolatileAfter(vmThread, isVolatile, true, true);

		if (!postObjectRead(vmThread, clazz, srcSlot)) {
			result = NULL;
		}
	}

	/* This must always be called to massage the return value */
	return result;
}

/**
 * Read a static field.
 * @param srcSlot The static field slot.
 * @param isVolatile non-zero if the field is volatile.
 */
void *
MM_ObjectAccessBarrier::staticReadAddress(J9VMThread *vmThread, J9Class *clazz, void **srcSlot, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	void *result = readAddressImpl(vmThread, NULL, srcSlot, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Read a static field.
 * @param srcSlot The static field slot.
 * @param isVolatile non-zero if the field is volatile.
 */
U_32
MM_ObjectAccessBarrier::staticReadU32(J9VMThread *vmThread, J9Class *clazz, U_32 *srcSlot, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	U_32 result = readU32Impl(vmThread, NULL, srcSlot, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Read a static field.
 * @param srcSlot The static field slot.
 * @param isVolatile non-zero if the field is volatile.
 */
I_32
MM_ObjectAccessBarrier::staticReadI32(J9VMThread *vmThread, J9Class *clazz, I_32 *srcSlot, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	I_32 result = readI32Impl(vmThread, NULL, srcSlot, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Read a static field.
 * @param srcSlot The static field slot.
 * @param isVolatile non-zero if the field is volatile.
 */
U_64
MM_ObjectAccessBarrier::staticReadU64(J9VMThread *vmThread, J9Class *clazz, U_64 *srcSlot, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	U_64 result = readU64Impl(vmThread, NULL, srcSlot, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Read a static field.
 * @param srcSlot The static field slot.
 * @param isVolatile non-zero if the field is volatile.
 */
I_64
MM_ObjectAccessBarrier::staticReadI64(J9VMThread *vmThread, J9Class *clazz, I_64 *srcSlot, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, true, true);
	I_64 result = readI64Impl(vmThread, NULL, srcSlot, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, true, true);
	return result;
}

/**
 * Store a static field.
 * @param destSlot The static slot being used.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::staticStoreObject(J9VMThread *vmThread, J9Class *clazz, J9Object **destSlot, J9Object *value, bool isVolatile)
{
	j9object_t destObject = J9VM_J9CLASS_TO_HEAPCLASS(clazz);
	if (preObjectStore(vmThread, destObject, destSlot, value, isVolatile)) {
		protectIfVolatileBefore(vmThread, isVolatile, false, true);
		staticStoreObjectImpl(vmThread, clazz, destSlot, value, isVolatile);
		protectIfVolatileAfter(vmThread, isVolatile, false, true);

		postObjectStore(vmThread, clazz, destSlot, value, isVolatile);
	}
}

/**
 * Store a static field.
 * @param destSlot The static slot being used.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::staticStoreAddress(J9VMThread *vmThread, J9Class *clazz, void **destSlot, void *value, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeAddressImpl(vmThread, NULL, destSlot, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Store a static field.
 * @param destSlot The static slot being used.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::staticStoreU32(J9VMThread *vmThread, J9Class *clazz, U_32 *destSlot, U_32 value, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeU32Impl(vmThread, NULL, destSlot, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Store a static field.
 * @param destSlot The static slot being used.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::staticStoreI32(J9VMThread *vmThread, J9Class *clazz, I_32 *destSlot, I_32 value, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeI32Impl(vmThread, NULL, destSlot, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Store a static field.
 * @param destSlot The static slot being used.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::staticStoreU64(J9VMThread *vmThread, J9Class *clazz, U_64 *destSlot, U_64 value, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeU64Impl(vmThread, NULL, destSlot, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Store a static field.
 * @param destSlot The static slot being used.
 * @param value The value to be stored
 * @param isVolatile non-zero if the field is volatile.
 */
void
MM_ObjectAccessBarrier::staticStoreI64(J9VMThread *vmThread, J9Class *clazz, I_64 *destSlot, I_64 value, bool isVolatile)
{
	protectIfVolatileBefore(vmThread, isVolatile, false, true);
	storeI64Impl(vmThread, NULL, destSlot, value, isVolatile);
	protectIfVolatileAfter(vmThread, isVolatile, false, true);
}

/**
 * Return a pointer to the first byte of an array's object data
 * @param arrayObject the base pointer of the array object
 */
U_8 *
MM_ObjectAccessBarrier::getArrayObjectDataAddress(J9VMThread *vmThread, J9IndexableObject *arrayObject)
{
	if (_extensions->indexableObjectModel.isInlineContiguousArraylet(arrayObject)) {
		return (U_8 *)_extensions->indexableObjectModel.getDataPointerForContiguous(arrayObject);
	} else {
		return (U_8 *)_extensions->indexableObjectModel.getArrayoidPointer(arrayObject);
	}
}
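
/* Illustrative sketch (not part of the original source): indexing a primitive
 * element once the data base address is known. This is only valid for an
 * inline-contiguous array; a discontiguous arraylet must be walked leaf by leaf.
 * The helper name is hypothetical.
 */
#if 0
static I_32
exampleLoadIntElement(J9VMThread *vmThread, MM_ObjectAccessBarrier *barrier, J9IndexableObject *intArray, UDATA index)
{
	U_8 *base = barrier->getArrayObjectDataAddress(vmThread, intArray);
	return ((I_32 *)base)[index];
}
#endif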

/**
 * Return the address of the lockword for the given object, or NULL if it
 * does not have an inline lockword.
 */
j9objectmonitor_t *
MM_ObjectAccessBarrier::getLockwordAddress(J9VMThread *vmThread, J9Object *object)
{
	j9objectmonitor_t *lockwordAddress = NULL;
	J9Class *clazz = J9OBJECT_CLAZZ(vmThread, object);
	if (!J9_IS_J9CLASS_VALUETYPE(clazz)) {
		UDATA lockOffset = clazz->lockOffset;
		if ((IDATA)lockOffset >= 0) {
			lockwordAddress = (j9objectmonitor_t *)(((U_8 *)object) + lockOffset);
		}
	}
	return lockwordAddress;
}
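
/* Illustrative sketch (not part of the original source): the common way the result
 * is consumed. A NULL answer means the object has no inline lockword (it is a value
 * type, or its class has lockOffset < 0), so monitor state lives elsewhere.
 * `barrier` is a hypothetical name.
 */
#if 0
j9objectmonitor_t *lockEA = barrier->getLockwordAddress(vmThread, object);
if (NULL != lockEA) {
	j9objectmonitor_t lockword = J9_LOAD_LOCKWORD(vmThread, lockEA);
	/* ... inspect or update the flat lock ... */
}
#endif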

/**
 * Copy all of the fields of an object into another object.
 * The new object was just allocated inside the VM, so all fields are NULL.
 * @TODO This does not currently check if the fields that it is reading are volatile.
 */
void
MM_ObjectAccessBarrier::cloneObject(J9VMThread *vmThread, J9Object *srcObject, J9Object *destObject)
{
	UDATA elementStartOffset = J9VMTHREAD_OBJECT_HEADER_SIZE(vmThread);
	J9Class *cloneClazz = J9GC_J9OBJECT_CLAZZ_THREAD(srcObject, vmThread);

	elementStartOffset += J9CLASS_PREPADDING_SIZE(cloneClazz);

	copyObjectFields(vmThread, cloneClazz, srcObject, elementStartOffset, destObject, elementStartOffset);
}

BOOLEAN
MM_ObjectAccessBarrier::structuralCompareFlattenedObjects(J9VMThread *vmThread, J9Class *valueClass, j9object_t lhsObject, j9object_t rhsObject, UDATA startOffset)
{
	bool result = true;
	bool const compressed = J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(vmThread);
	UDATA const referenceSize = J9VMTHREAD_REFERENCE_SIZE(vmThread);
	bool hasReferences = J9CLASS_HAS_REFERENCES(valueClass);
	/* for non value-types this is just the instance size */
	UDATA limit = J9CLASS_UNPADDED_INSTANCE_SIZE(valueClass);
	UDATA offset = 0;

	Assert_MM_true(J9_IS_J9CLASS_VALUETYPE(valueClass));

	if (hasReferences) {
		UDATA descriptionIndex = J9_OBJECT_DESCRIPTION_SIZE - 1;
		const UDATA *descriptionPtr = (UDATA *)valueClass->instanceDescription;
		UDATA descriptionBits = 0;

		if (((UDATA)descriptionPtr) & 1) {
			descriptionBits = ((UDATA)descriptionPtr) >> 1;
		} else {
			descriptionBits = *descriptionPtr++;
		}

		while (offset < limit) {
			/* Determine if the slot contains an object pointer or not */
			if (descriptionBits & 1) {
				if (mixedObjectReadObject(vmThread, lhsObject, startOffset + offset, false) != mixedObjectReadObject(vmThread, rhsObject, startOffset + offset, false)) {
					result = false;
					break;
				}
			} else {
				fomrobject_t lhsValue = GC_SlotObject::readSlot((fomrobject_t *)((UDATA)lhsObject + startOffset + offset), compressed);
				fomrobject_t rhsValue = GC_SlotObject::readSlot((fomrobject_t *)((UDATA)rhsObject + startOffset + offset), compressed);
				if (lhsValue != rhsValue) {
					result = false;
					break;
				}
			}
			descriptionBits >>= 1;
			if (descriptionIndex-- == 0) {
				descriptionBits = *descriptionPtr++;
				descriptionIndex = J9_OBJECT_DESCRIPTION_SIZE - 1;
			}
			offset += referenceSize;
		}
	} else {
		/* no instanceDescription bits needed on this path */
		while (offset < limit) {
			fomrobject_t lhsValue = GC_SlotObject::readSlot((fomrobject_t *)((UDATA)lhsObject + startOffset + offset), compressed);
			fomrobject_t rhsValue = GC_SlotObject::readSlot((fomrobject_t *)((UDATA)rhsObject + startOffset + offset), compressed);
			if (lhsValue != rhsValue) {
				result = false;
				break;
			}
			offset += referenceSize;
		}
	}

	return result;
}
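
/* Illustrative sketch (not part of the original source): how the instanceDescription
 * encoding walked above is laid out. A tagged pointer (low bit set) holds the slot
 * bitmap immediately in its remaining bits; otherwise it points to an array of UDATA
 * bitmap words, J9_OBJECT_DESCRIPTION_SIZE bits per word. Bit i answers "is instance
 * slot i an object reference?". The helper name is hypothetical.
 */
#if 0
static bool
exampleSlotIsReference(J9Class *clazz, UDATA slotIndex)
{
	const UDATA *descriptionPtr = (const UDATA *)clazz->instanceDescription;
	if (((UDATA)descriptionPtr) & 1) {
		/* immediate form: strip the tag bit, then test (valid for small instances only) */
		return 0 != ((((UDATA)descriptionPtr) >> 1) & ((UDATA)1 << slotIndex));
	}
	UDATA word = descriptionPtr[slotIndex / J9_OBJECT_DESCRIPTION_SIZE];
	return 0 != (word & ((UDATA)1 << (slotIndex % J9_OBJECT_DESCRIPTION_SIZE)));
}
#endif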

/**
 * Copy all of the fields of a value class instance to another value class instance.
 * The source or destination may be a flattened value within another object, meaning
 * srcOffset and destOffset need not be equal. This is based on cloneObject(...). If
 * the type has pre-padding, the size of the object will be adjusted to remove the
 * padding bytes. The caller of this API must ensure that the starting offset provided
 * does not include pre-padding.
 * @TODO This does not currently check if the fields that it is reading are volatile.
 *
 * @param objectClass The j9class.
 * @param srcObject The object containing the value class instance fields being copied.
 * @param srcOffset The offset of the value class instance fields in srcObject.
 * @param destObject The object containing the value class instance fields being copied to.
 * @param destOffset The offset of the value class instance fields in destObject.
 */
void
MM_ObjectAccessBarrier::copyObjectFields(J9VMThread *vmThread, J9Class *objectClass, J9Object *srcObject, UDATA srcOffset, J9Object *destObject, UDATA destOffset)
{
	/* For valueTypes we currently do not make a distinction between values that only contain
	 * primitives and values that may contain a reference (ie. value vs mixed-value
	 * in packedObject terminology). As a result, we will treat all values as if
	 * they may contain references. In the future this may change.
	 *
	 * Value types have no need for lockwords; however, they are still present in the
	 * current implementation. For now we will just skip over them by specifying
	 * appropriate offsets. We will also skip over the bit in the instance description.
	 */
	bool isValueType = J9_IS_J9CLASS_VALUETYPE(objectClass);

	j9objectmonitor_t *lockwordAddress = NULL;
	I_32 hashCode = 0;
	bool isDestObjectPreHashed = false;

	if (!isValueType) {
		isDestObjectPreHashed = _extensions->objectModel.hasBeenHashed(destObject);
		if (isDestObjectPreHashed) {
			hashCode = _extensions->objectModel.getObjectHashCode(vmThread->javaVM, destObject);
		}
	}

	UDATA offset = 0;
	/* for non value-types this is just the instance size */
	UDATA limit = J9CLASS_UNPADDED_INSTANCE_SIZE(objectClass);
	UDATA const referenceSize = J9VMTHREAD_REFERENCE_SIZE(vmThread);
	bool hasReferences = J9CLASS_HAS_REFERENCES(objectClass);

	if (hasReferences) {
		const UDATA *descriptionPtr = (UDATA *)objectClass->instanceDescription;
		UDATA descriptionBits = 0;
		if (((UDATA)descriptionPtr) & 1) {
			descriptionBits = ((UDATA)descriptionPtr) >> 1;
		} else {
			descriptionBits = *descriptionPtr++;
		}

		UDATA descriptionIndex = J9_OBJECT_DESCRIPTION_SIZE - 1;

		while (offset < limit) {
			/* Determine if the slot contains an object pointer or not */
			if (descriptionBits & 1) {
				J9Object *objectPtr = mixedObjectReadObject(vmThread, srcObject, srcOffset + offset, false);
				mixedObjectStoreObject(vmThread, destObject, destOffset + offset, objectPtr, false);
			} else {
				UDATA srcAddress = (UDATA)srcObject + srcOffset + offset;
				UDATA destAddress = (UDATA)destObject + destOffset + offset;
				bool copy64Bits = false;
				UDATA descriptionBitsNext = descriptionBits;
				const UDATA *descriptionPtrNext = descriptionPtr;
				UDATA descriptionIndexNext = descriptionIndex;
				if (isValueType
					&& (sizeof(uint32_t) == referenceSize)
					&& (0 == (srcAddress & 7))
					&& ((offset + referenceSize) < limit)
				) {
					descriptionBitsNext >>= 1;
					if (descriptionIndexNext-- == 0) {
						descriptionBitsNext = *(descriptionPtrNext++);
						descriptionIndexNext = J9_OBJECT_DESCRIPTION_SIZE - 1;
					}
					if (0 == (descriptionBitsNext & 1)) {
						copy64Bits = true;
					}
				}
				if (copy64Bits) {
					*(uint64_t *)destAddress = *(uint64_t *)srcAddress;
					descriptionBits = descriptionBitsNext;
					descriptionPtr = descriptionPtrNext;
					descriptionIndex = descriptionIndexNext;
					/* When doing a 64-bit copy, offset needs to be advanced 8 bytes; referenceSize is 4 bytes here.
					 * Advance offset 4 bytes here; the other 4 bytes are advanced at the end of the while loop below.
					 */
					offset += referenceSize;
				} else if (sizeof(uint32_t) == referenceSize) {
					*(uint32_t *)destAddress = *(uint32_t *)srcAddress;
				} else {
					*(uintptr_t *)destAddress = *(uintptr_t *)srcAddress;
				}
			}
			descriptionBits >>= 1;
			if (descriptionIndex-- == 0) {
				descriptionBits = *descriptionPtr++;
				descriptionIndex = J9_OBJECT_DESCRIPTION_SIZE - 1;
			}
			offset += referenceSize;
		}
	} else {
		/* no instanceDescription bits needed on this path */
		while (offset < limit) {
			UDATA srcAddress = (UDATA)srcObject + srcOffset + offset;
			UDATA destAddress = (UDATA)destObject + destOffset + offset;
			/* prefer to copy 64 bits at a time if possible */
			if ((sizeof(uint64_t) == referenceSize)
				|| (isValueType && (0 == (srcAddress & 7)) && ((offset + referenceSize) < limit))
			) {
				*(uint64_t *)destAddress = *(uint64_t *)srcAddress;
				offset += sizeof(uint64_t);
			} else {
				*(uint32_t *)destAddress = *(uint32_t *)srcAddress;
				offset += sizeof(uint32_t);
			}
		}
	}

	if (!isValueType) {
		/* If an object was pre-hashed and a hash was stored within the fields of the object, restore it. */
		if (isDestObjectPreHashed) {
			UDATA hashcodeOffset = _extensions->mixedObjectModel.getHashcodeOffset(destObject);
			if (hashcodeOffset <= limit) {
				I_32 *hashcodePointer = (I_32 *)((U_8 *)destObject + hashcodeOffset);
				*hashcodePointer = hashCode;
			}
		}

		/* initialize lockword, if present */
		lockwordAddress = getLockwordAddress(vmThread, destObject);
		if (NULL != lockwordAddress) {
			j9objectmonitor_t lwValue = VM_ObjectMonitor::getInitialLockword(vmThread->javaVM, objectClass);
			J9_STORE_LOCKWORD(vmThread, lockwordAddress, lwValue);
		}
	}
}
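
/* Illustrative sketch (not part of the original source): the coalescing rule used in
 * the reference-bearing loop above, restated. With 4-byte compressed references, two
 * adjacent primitive slots of a value type are copied as one aligned 64-bit move when
 * all of the following hold (`nextSlotIsPrimitive` is a hypothetical name for the
 * look-ahead description-bit test):
 */
#if 0
bool canCopy64 = isValueType
	&& (sizeof(uint32_t) == referenceSize) /* slots are 4 bytes wide */
	&& (0 == (srcAddress & 7))             /* source is 8-byte aligned */
	&& ((offset + referenceSize) < limit)  /* a next slot exists */
	&& nextSlotIsPrimitive;                /* its description bit is also 0 */
#endif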

/**
 * Copy all of the fields of an indexable object into another indexable object.
 * The new object was just allocated inside the VM, so all fields are NULL.
 * @TODO This does not currently check if the fields that it is reading are volatile.
 */
void
MM_ObjectAccessBarrier::cloneIndexableObject(J9VMThread *vmThread, J9IndexableObject *srcObject, J9IndexableObject *destObject)
{
	j9objectmonitor_t *lockwordAddress = NULL;
	bool isObjectArray = _extensions->objectModel.isObjectArray(srcObject);

	if (_extensions->objectModel.hasBeenHashed((J9Object *)destObject)) {
		/* this assertion should never be triggered because we never pre-hash arrays */
		Assert_MM_unreachable();
	}

	if (isObjectArray) {
		I_32 size = (I_32)_extensions->indexableObjectModel.getSizeInElements(srcObject);
		for (I_32 i = 0; i < size; i++) {
			J9Object *objectPtr = J9JAVAARRAYOFOBJECT_LOAD(vmThread, srcObject, i);
			J9JAVAARRAYOFOBJECT_STORE(vmThread, destObject, i, objectPtr);
		}
	} else {
		_extensions->indexableObjectModel.memcpyArray(destObject, srcObject);
	}

	/* initialize lockword, if present */
	J9Class *objectClass = J9GC_J9OBJECT_CLAZZ_THREAD(destObject, vmThread);
	lockwordAddress = getLockwordAddress(vmThread, (J9Object *)destObject);
	if (NULL != lockwordAddress) {
		j9objectmonitor_t lwValue = VM_ObjectMonitor::getInitialLockword(vmThread->javaVM, objectClass);
		J9_STORE_LOCKWORD(vmThread, lockwordAddress, lwValue);
	}

	return;
}


/* Return a j9object_t that can be stored in the constantpool.
 *
 * Not all collectors scan the constantpool on every GC, and therefore for
 * these collectors the objects must be in tenure space.
 *
 * Note, the stack must be walkable as a GC may occur during this function.
 *
 * Note, this doesn't handle arrays.
 *
 * @param vmThread The current vmThread
 * @param toConvert The object to convert to a constantpool allowed form.
 *
 * @return an object that can be put in the constantpool, or NULL if OOM.
 */
J9Object*
MM_ObjectAccessBarrier::asConstantPoolObject(J9VMThread *vmThread, J9Object* toConvert, UDATA allocationFlags)
{
	Assert_MM_true(allocationFlags & (J9_GC_ALLOCATE_OBJECT_TENURED | J9_GC_ALLOCATE_OBJECT_NON_INSTRUMENTABLE));
	return toConvert;
}

/**
 * Write an object to an internal VM slot (J9VMThread, J9JavaVM, named field of J9Class).
 * @param destSlot the slot to be used
 * @param value the value to be stored
 */
void
MM_ObjectAccessBarrier::storeObjectToInternalVMSlot(J9VMThread *vmThread, J9Object **destSlot, J9Object *value)
{
	if (preObjectStore(vmThread, destSlot, value, false)) {
		storeObjectToInternalVMSlotImpl(vmThread, destSlot, value, false);
		postObjectStore(vmThread, destSlot, value, false);
	}
}

/**
 * Read an object from an internal VM slot (J9VMThread, J9JavaVM, named field of J9Class).
 * @param srcSlot the slot to be used
 */
J9Object *
MM_ObjectAccessBarrier::readObjectFromInternalVMSlot(J9VMThread *vmThread, J9Object **srcSlot)
{
	return readObjectFromInternalVMSlotImpl(vmThread, srcSlot, false);
}

/**
 * compareAndSwapObject performs an atomic compare-and-swap on an object field or array element.
 * @param destObject the object containing the field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareObject the object to be compared with contents of destSlot
 * @param swapObject the object to be stored in the destSlot if compareObject is there now
 * @todo This should be converted to take the offset, not the address
 **/
bool
MM_ObjectAccessBarrier::compareAndSwapObject(J9VMThread *vmThread, J9Object *destObject, fj9object_t *destAddress, J9Object *compareObject, J9Object *swapObject)
{
	fj9object_t *actualDestAddress;
	fj9object_t compareValue = convertTokenFromPointer(compareObject);
	fj9object_t swapValue = convertTokenFromPointer(swapObject);
	bool result = false;

	/* TODO: To make this API more consistent, it should probably be split into separate
	 * indexable and non-indexable versions. Currently, when called on an indexable object,
	 * the REAL address is passed. For non-indexable objects, the address is the shadow
	 * address
	 */
	if (_extensions->objectModel.isIndexable(destObject)) {
		actualDestAddress = destAddress;
	} else {
		actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, ((UDATA)destAddress - (UDATA)destObject), fj9object_t);
	}

	if (preObjectRead(vmThread, destObject, actualDestAddress)) {
		/* Note: This is a bit of a special case -- we call preObjectStore even though the store
		 * may not actually occur. This is safe and correct for Metronome.
		 */
		preObjectStore(vmThread, destObject, actualDestAddress, swapObject, true);
		protectIfVolatileBefore(vmThread, true, false, false);

		if (J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(vmThread)) {
			result = ((U_32)(UDATA)compareValue == MM_AtomicOperations::lockCompareExchangeU32((U_32 *)actualDestAddress, (U_32)(UDATA)compareValue, (U_32)(UDATA)swapValue));
		} else {
			result = ((UDATA)compareValue == MM_AtomicOperations::lockCompareExchange((UDATA *)actualDestAddress, (UDATA)compareValue, (UDATA)swapValue));
		}
		protectIfVolatileAfter(vmThread, true, false, false);
		if (result) {
			postObjectStore(vmThread, destObject, actualDestAddress, swapObject, true);
		}
	}

	return result;
}
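
/* Illustrative usage sketch (not part of the original source): a single-shot CAS on
 * an object field, in the style of the JDK's Unsafe/VarHandle compare-and-set. Per
 * the TODO above, `fieldAddress` is the real address for an array element and the
 * shadow address for a mixed-object field. All names here are hypothetical.
 */
#if 0
static bool
exampleCasObjectField(J9VMThread *vmThread, MM_ObjectAccessBarrier *barrier, J9Object *owner, fj9object_t *fieldAddress, J9Object *expected, J9Object *replacement)
{
	/* true only if the slot still held `expected`; pre/post store barriers run inside */
	return barrier->compareAndSwapObject(vmThread, owner, fieldAddress, expected, replacement);
}
#endif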

/**
 * staticCompareAndSwapObject performs an atomic compare-and-swap on a static object field.
 * @param destClass the class containing the statics field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareObject the object to be compared with contents of destSlot
 * @param swapObject the object to be stored in the destSlot if compareObject is there now
 * @todo This should be converted to take the offset, not the address
 **/
bool
MM_ObjectAccessBarrier::staticCompareAndSwapObject(J9VMThread *vmThread, J9Class *destClass, j9object_t *destAddress, J9Object *compareObject, J9Object *swapObject)
{
	bool result = false;

	if (preObjectRead(vmThread, destClass, destAddress)) {
		/* Note: This is a bit of a special case -- we call preObjectStore even though the store
		 * may not actually occur. This is safe and correct for Metronome.
		 */
		preObjectStore(vmThread, (J9Object *)J9VM_J9CLASS_TO_HEAPCLASS(destClass), destAddress, swapObject, true);
		protectIfVolatileBefore(vmThread, true, false, false);

		result = ((UDATA)compareObject == MM_AtomicOperations::lockCompareExchange((UDATA *)destAddress, (UDATA)compareObject, (UDATA)swapObject));

		protectIfVolatileAfter(vmThread, true, false, false);
		if (result) {
			postObjectStore(vmThread, destClass, destAddress, swapObject, true);
		}
	}

	return result;
}

/**
 * Performs an atomic compare-and-swap on an int field of a mixed object
 * @param destObject the object containing the field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 **/
bool
MM_ObjectAccessBarrier::mixedObjectCompareAndSwapInt(J9VMThread *vmThread, J9Object *destObject, UDATA offset, U_32 compareValue, U_32 swapValue)
{
	U_32 *actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, offset, U_32);

	protectIfVolatileBefore(vmThread, true, false, false);
	bool result = (compareValue == MM_AtomicOperations::lockCompareExchangeU32(actualDestAddress, compareValue, swapValue));
	protectIfVolatileAfter(vmThread, true, false, false);
	return result;
}

/**
 * Performs an atomic compare-and-swap on a static int field.
 * @param destClass the class containing the statics field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 **/
bool
MM_ObjectAccessBarrier::staticCompareAndSwapInt(J9VMThread *vmThread, J9Class *destClass, U_32 *destAddress, U_32 compareValue, U_32 swapValue)
{
	protectIfVolatileBefore(vmThread, true, false, false);
	bool result = (compareValue == MM_AtomicOperations::lockCompareExchangeU32(destAddress, compareValue, swapValue));
	protectIfVolatileAfter(vmThread, true, false, false);
	return result;
}

/**
 * Performs an atomic compare-and-swap on a long field of a mixed object
 * @param destObject the object containing the field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 **/
bool
MM_ObjectAccessBarrier::mixedObjectCompareAndSwapLong(J9VMThread *vmThread, J9Object *destObject, UDATA offset, U_64 compareValue, U_64 swapValue)
{
	U_64 *actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, offset, U_64);

	protectIfVolatileBefore(vmThread, true, false, true);
	bool result = (compareValue == MM_AtomicOperations::lockCompareExchangeU64(actualDestAddress, compareValue, swapValue));
	protectIfVolatileAfter(vmThread, true, false, true);
	return result;
}

/**
 * Performs an atomic compare-and-swap on a static long field.
 * @param destClass the class containing the statics field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 **/
bool
MM_ObjectAccessBarrier::staticCompareAndSwapLong(J9VMThread *vmThread, J9Class *destClass, U_64 *destAddress, U_64 compareValue, U_64 swapValue)
{
	protectIfVolatileBefore(vmThread, true, false, true);
	bool result = (compareValue == MM_AtomicOperations::lockCompareExchangeU64(destAddress, compareValue, swapValue));
	protectIfVolatileAfter(vmThread, true, false, true);
	return result;
}

/**
 * Performs an atomic compare-and-exchange on an object field or array element.
 * @param destObject the object containing the field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareObject the object to be compared with contents of destSlot
 * @param swapObject the object to be stored in the destSlot if compareObject is there now
 * @return the object stored in the object field before the update
 * @todo This should be converted to take the offset, not the address
 **/
J9Object *
MM_ObjectAccessBarrier::compareAndExchangeObject(J9VMThread *vmThread, J9Object *destObject, fj9object_t *destAddress, J9Object *compareObject, J9Object *swapObject)
{
	fj9object_t *actualDestAddress;
	fj9object_t compareValue = convertTokenFromPointer(compareObject);
	fj9object_t swapValue = convertTokenFromPointer(swapObject);
	J9Object *result = NULL;

	/* TODO: To make this API more consistent, it should probably be split into separate
	 * indexable and non-indexable versions. Currently, when called on an indexable object,
	 * the REAL address is passed. For non-indexable objects, the address is the shadow
	 * address
	 */
	if (_extensions->objectModel.isIndexable(destObject)) {
		actualDestAddress = destAddress;
	} else {
		actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, ((UDATA)destAddress - (UDATA)destObject), fj9object_t);
	}

	if (preObjectRead(vmThread, destObject, actualDestAddress)) {
		/* Note: This is a bit of a special case -- we call preObjectStore even though the store
		 * may not actually occur. This is safe and correct for Metronome.
		 */
		preObjectStore(vmThread, destObject, actualDestAddress, swapObject, true);
		protectIfVolatileBefore(vmThread, true, false, false);

		if (J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(vmThread)) {
			result = (J9Object *)(UDATA)MM_AtomicOperations::lockCompareExchangeU32((U_32 *)actualDestAddress, (U_32)(UDATA)compareValue, (U_32)(UDATA)swapValue);
		} else {
			result = (J9Object *)MM_AtomicOperations::lockCompareExchange((UDATA *)actualDestAddress, (UDATA)compareValue, (UDATA)swapValue);
		}

		protectIfVolatileAfter(vmThread, true, false, false);
		if (result) {
			postObjectStore(vmThread, destObject, actualDestAddress, swapObject, true);
		}
	}

	return result;
}
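
/* Illustrative sketch (not part of the original source): unlike compareAndSwapObject,
 * the exchange form returns the previous value, so a caller detects success by
 * comparing the returned object against the expected one. `barrier` and the other
 * names are hypothetical.
 */
#if 0
J9Object *previous = barrier->compareAndExchangeObject(vmThread, owner, fieldAddress, expected, replacement);
bool succeeded = (previous == expected);
#endif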

/**
 * Performs an atomic compare-and-exchange on a static object field.
 * @param destClass the class containing the statics field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareObject the object to be compared with contents of destSlot
 * @param swapObject the object to be stored in the destSlot if compareObject is there now
 * @return the object stored in the object field before the update
 * @todo This should be converted to take the offset, not the address
 **/
J9Object *
MM_ObjectAccessBarrier::staticCompareAndExchangeObject(J9VMThread *vmThread, J9Class *destClass, j9object_t *destAddress, J9Object *compareObject, J9Object *swapObject)
{
	J9Object *result = NULL;

	if (preObjectRead(vmThread, destClass, destAddress)) {
		/* Note: This is a bit of a special case -- we call preObjectStore even though the store
		 * may not actually occur. This is safe and correct for Metronome.
		 */
		preObjectStore(vmThread, (J9Object *)J9VM_J9CLASS_TO_HEAPCLASS(destClass), destAddress, swapObject, true);
		protectIfVolatileBefore(vmThread, true, false, false);

		result = (J9Object *)MM_AtomicOperations::lockCompareExchange((UDATA *)destAddress, (UDATA)compareObject, (UDATA)swapObject);

		protectIfVolatileAfter(vmThread, true, false, false);
		if (result) {
			postObjectStore(vmThread, destClass, destAddress, swapObject, true);
		}
	}

	return result;
}

/**
 * Performs an atomic compare-and-exchange on an int field of a mixed object
 * @param destObject the object containing the field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 * @return the int stored in the object field before the update
 **/
U_32
MM_ObjectAccessBarrier::mixedObjectCompareAndExchangeInt(J9VMThread *vmThread, J9Object *destObject, UDATA offset, U_32 compareValue, U_32 swapValue)
{
	U_32 *actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, offset, U_32);

	protectIfVolatileBefore(vmThread, true, false, false);
	U_32 result = MM_AtomicOperations::lockCompareExchangeU32(actualDestAddress, compareValue, swapValue);
	protectIfVolatileAfter(vmThread, true, false, false);
	return result;
}

/**
 * Performs an atomic compare-and-exchange on a static int field.
 * @param destClass the class containing the statics field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 * @return the int stored in the object field before the update
 **/
U_32
MM_ObjectAccessBarrier::staticCompareAndExchangeInt(J9VMThread *vmThread, J9Class *destClass, U_32 *destAddress, U_32 compareValue, U_32 swapValue)
{
	protectIfVolatileBefore(vmThread, true, false, false);
	U_32 result = MM_AtomicOperations::lockCompareExchangeU32(destAddress, compareValue, swapValue);
	protectIfVolatileAfter(vmThread, true, false, false);
	return result;
}

/**
 * Performs an atomic compare-and-exchange on a long field of a mixed object
 * @param destObject the object containing the field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 * @return the long stored in the object field before the update
 **/
U_64
MM_ObjectAccessBarrier::mixedObjectCompareAndExchangeLong(J9VMThread *vmThread, J9Object *destObject, UDATA offset, U_64 compareValue, U_64 swapValue)
{
	U_64 *actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, offset, U_64);

	protectIfVolatileBefore(vmThread, true, false, true);
	U_64 result = MM_AtomicOperations::lockCompareExchangeU64(actualDestAddress, compareValue, swapValue);
	protectIfVolatileAfter(vmThread, true, false, true);
	return result;
}

/**
 * Performs an atomic compare-and-exchange on a static long field.
 * @param destClass the class containing the statics field being swapped into
 * @param destAddress the address of the destination field of the operation
 * @param compareValue the value to be compared with contents of destSlot
 * @param swapValue the value to be stored in the destSlot if compareValue is there now
 * @return the long stored in the object field before the update
 **/
U_64
MM_ObjectAccessBarrier::staticCompareAndExchangeLong(J9VMThread *vmThread, J9Class *destClass, U_64 *destAddress, U_64 compareValue, U_64 swapValue)
{
	protectIfVolatileBefore(vmThread, true, false, true);
	U_64 result = MM_AtomicOperations::lockCompareExchangeU64(destAddress, compareValue, swapValue);
	protectIfVolatileAfter(vmThread, true, false, true);
	return result;
}

/**
 * Called before an object is stored into another object.
 * @return true if the store should proceed, false otherwise
 */
bool
MM_ObjectAccessBarrier::preObjectStore(J9VMThread *vmThread, J9Object *destObject, fj9object_t *destAddress, J9Object *value, bool isVolatile)
{
	return true;
}

/**
 * Called before an object is stored into a class.
 * @return true if the store should proceed, false otherwise
 */
bool
MM_ObjectAccessBarrier::preObjectStore(J9VMThread *vmThread, J9Object *destObject, J9Object **destAddress, J9Object *value, bool isVolatile)
{
	return true;
}

/**
 * Called before an object is stored into an internal VM structure.
 * @return true if the store should proceed, false otherwise
 */
bool
MM_ObjectAccessBarrier::preObjectStore(J9VMThread *vmThread, J9Object **destAddress, J9Object *value, bool isVolatile)
{
	return true;
}

/**
 * Called after an object is stored into another object.
 */
void
MM_ObjectAccessBarrier::postObjectStore(J9VMThread *vmThread, J9Object *destObject, fj9object_t *destAddress, J9Object *value, bool isVolatile)
{
}

/**
 * Called after an object is stored into a class.
 */
void
MM_ObjectAccessBarrier::postObjectStore(J9VMThread *vmThread, J9Class *destObject, J9Object **destAddress, J9Object *value, bool isVolatile)
{
}

/**
 * Called after an object is stored into an internal VM structure.
 */
void
MM_ObjectAccessBarrier::postObjectStore(J9VMThread *vmThread, J9Object **destAddress, J9Object *value, bool isVolatile)
{
}

/**
 * TODO: This should probably be postBatchObjectStore, not pre-.
 */
bool
MM_ObjectAccessBarrier::preBatchObjectStore(J9VMThread *vmThread, J9Object *destObject, bool isVolatile)
{
#if defined(J9VM_GC_COMBINATION_SPEC)
	/* (assert here to verify that we aren't defaulting to this implementation through some unknown path - delete once combination is stable) */
	Assert_MM_unreachable();
#endif /* defined(J9VM_GC_COMBINATION_SPEC) */
	return true;
}

bool
MM_ObjectAccessBarrier::preBatchObjectStore(J9VMThread *vmThread, J9Class *destClass, bool isVolatile)
{
#if defined(J9VM_GC_COMBINATION_SPEC)
	/* (assert here to verify that we aren't defaulting to this implementation through some unknown path - delete once combination is stable) */
	Assert_MM_unreachable();
#endif /* defined(J9VM_GC_COMBINATION_SPEC) */
	return true;
}

bool
MM_ObjectAccessBarrier::preObjectRead(J9VMThread *vmThread, J9Object *srcObject, fj9object_t *srcAddress)
{
	return true;
}

bool
MM_ObjectAccessBarrier::preWeakRootSlotRead(J9VMThread *vmThread, j9object_t *srcAddress)
{
	return true;
}

bool
MM_ObjectAccessBarrier::preWeakRootSlotRead(J9JavaVM *vm, j9object_t *srcAddress)
{
	return true;
}


bool
MM_ObjectAccessBarrier::preObjectRead(J9VMThread *vmThread, J9Class *srcClass, J9Object **srcAddress)
{
	return true;
}

bool
MM_ObjectAccessBarrier::postObjectRead(J9VMThread *vmThread, J9Object *srcObject, fj9object_t *srcAddress)
{
	return true;
}

bool
MM_ObjectAccessBarrier::postObjectRead(J9VMThread *vmThread, J9Class *srcClass, J9Object **srcAddress)
{
	return true;
}

/**
 * Fills an array (or part of an array) with a specific object value.
 * For example, the compressed pointers access barrier will mangle the value
 * pointer before filling the array with it.
 */
void
MM_ObjectAccessBarrier::fillArrayOfObjects(J9VMThread *vmThread, j9array_t destObject, I_32 destIndex, I_32 count, j9object_t value)
{
#if defined(J9VM_GC_COMBINATION_SPEC)
	/* (assert here to verify that we aren't defaulting to this implementation through some unknown path - delete once combination is stable) */
	Assert_MM_unreachable();
#endif /* defined(J9VM_GC_COMBINATION_SPEC) */
	if (J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(vmThread)) {
		uint32_t *destPtr = (uint32_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(uint32_t));
		uint32_t actualValue = (uint32_t)convertTokenFromPointer(value);
		uint32_t *endPtr = destPtr + count;

		while (destPtr < endPtr) {
			*destPtr++ = actualValue;
		}
	} else {
		uintptr_t *destPtr = (uintptr_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(uintptr_t));
		uintptr_t actualValue = (uintptr_t)convertTokenFromPointer(value);
		uintptr_t *endPtr = destPtr + count;

		while (destPtr < endPtr) {
			*destPtr++ = actualValue;
		}
	}
}
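
/* Illustrative sketch (not part of the original source): the value is converted to
 * its in-heap token once, outside the loop, so the fill itself is a tight store of
 * raw 32-bit (compressed) or pointer-width words. A hypothetical caller:
 */
#if 0
/* fill elements [0, length) of an Object[] with fillValue */
barrier->fillArrayOfObjects(vmThread, (j9array_t)arrayObject, 0, (I_32)length, fillValue);
#endif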

/**
 * Returns the shadow heap base for compressed pointers.
 * Used by the JIT in the interim solution until we store J9 class objects
 * on the heap.
 * @return shadow heap base.
 */
UDATA
MM_ObjectAccessBarrier::compressedPointersShadowHeapBase(J9VMThread *vmThread)
{
	assume0(false);
	return 0;
}

/**
 * Returns the shadow heap top for compressed pointers.
 * Used by the JIT in the interim solution until we store J9 class objects
 * on the heap.
 * @return shadow heap top.
 */
UDATA
MM_ObjectAccessBarrier::compressedPointersShadowHeapTop(J9VMThread *vmThread)
{
	assume0(false);
	return 0;
}

/**
 * @return ARRAY_COPY_SUCCESSFUL (this version cannot fail; overloaded versions can return ARRAY_COPY_NOT_DONE)
 */
I_32
MM_ObjectAccessBarrier::doCopyContiguousBackward(J9VMThread *vmThread, J9IndexableObject *srcObject, J9IndexableObject *destObject, I_32 srcIndex, I_32 destIndex, I_32 lengthInSlots)
{
	srcIndex += lengthInSlots;
	destIndex += lengthInSlots;

	if (J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(vmThread)) {
		uint32_t *srcSlot = (uint32_t *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(uint32_t));
		uint32_t *destSlot = (uint32_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(uint32_t));
		uint32_t *srcEndSlot = srcSlot - lengthInSlots;

		while (srcSlot > srcEndSlot) {
			*--destSlot = *--srcSlot;
		}
	} else {
		uintptr_t *srcSlot = (uintptr_t *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(uintptr_t));
		uintptr_t *destSlot = (uintptr_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(uintptr_t));
		uintptr_t *srcEndSlot = srcSlot - lengthInSlots;

		while (srcSlot > srcEndSlot) {
			*--destSlot = *--srcSlot;
		}
	}

	return ARRAY_COPY_SUCCESSFUL;
}

/**
 * @return ARRAY_COPY_SUCCESSFUL (this version cannot fail; overloaded versions can return ARRAY_COPY_NOT_DONE)
 */
I_32
MM_ObjectAccessBarrier::doCopyContiguousForward(J9VMThread *vmThread, J9IndexableObject *srcObject, J9IndexableObject *destObject, I_32 srcIndex, I_32 destIndex, I_32 lengthInSlots)
{
	if (J9VMTHREAD_COMPRESS_OBJECT_REFERENCES(vmThread)) {
		uint32_t *srcSlot = (uint32_t *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(uint32_t));
		uint32_t *destSlot = (uint32_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(uint32_t));
		uint32_t *srcEndSlot = srcSlot + lengthInSlots;

		while (srcSlot < srcEndSlot) {
			*destSlot++ = *srcSlot++;
		}
	} else {
		uintptr_t *srcSlot = (uintptr_t *)indexableEffectiveAddress(vmThread, srcObject, srcIndex, sizeof(uintptr_t));
		uintptr_t *destSlot = (uintptr_t *)indexableEffectiveAddress(vmThread, destObject, destIndex, sizeof(uintptr_t));
		uintptr_t *srcEndSlot = srcSlot + lengthInSlots;

		while (srcSlot < srcEndSlot) {
			*destSlot++ = *srcSlot++;
		}
	}

	return ARRAY_COPY_SUCCESSFUL;
}
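
/* Illustrative sketch (not part of the original source): why both directions exist.
 * For an overlapping copy within the same array, the direction must be chosen so
 * that source slots are read before they are overwritten, as with memmove. A
 * dispatcher inside the barrier hierarchy might look like this:
 */
#if 0
if ((srcObject == destObject) && (destIndex > srcIndex) && (destIndex < (srcIndex + lengthInSlots))) {
	doCopyContiguousBackward(vmThread, srcObject, destObject, srcIndex, destIndex, lengthInSlots);
} else {
	doCopyContiguousForward(vmThread, srcObject, destObject, srcIndex, destIndex, lengthInSlots);
}
#endif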

I_32
MM_ObjectAccessBarrier::getObjectHashCode(J9JavaVM *vm, J9Object *object)
{
	return _extensions->objectModel.getObjectHashCode(vm, object);
}

void
MM_ObjectAccessBarrier::setFinalizeLink(j9object_t object, j9object_t value)
{
	fj9object_t *finalizeLink = getFinalizeLinkAddress(object);
	GC_SlotObject slot(_extensions->getOmrVM(), finalizeLink);
	slot.writeReferenceToSlot(value);
}

void
MM_ObjectAccessBarrier::setReferenceLink(j9object_t object, j9object_t value)
{
	Assert_MM_true(NULL != object);
	UDATA linkOffset = _referenceLinkOffset;
	/* offset will be UDATA_MAX until java/lang/ref/Reference is loaded */
	Assert_MM_true(UDATA_MAX != linkOffset);
	fj9object_t *referenceLink = (fj9object_t *)((UDATA)object + linkOffset);
	GC_SlotObject slot(_extensions->getOmrVM(), referenceLink);
	slot.writeReferenceToSlot(value);
}

void
MM_ObjectAccessBarrier::setOwnableSynchronizerLink(j9object_t object, j9object_t value)
{
	Assert_MM_true(NULL != object);
	UDATA linkOffset = _ownableSynchronizerLinkOffset;
	/* offset will be UDATA_MAX until java/util/concurrent/locks/AbstractOwnableSynchronizer is loaded */
	Assert_MM_true(UDATA_MAX != linkOffset);
	if (NULL == value) {
		/* set the last object in the list pointing to itself */
		value = object;
	}
	fj9object_t *ownableSynchronizerLink = (fj9object_t *)((UDATA)object + linkOffset);
	GC_SlotObject slot(_extensions->getOmrVM(), ownableSynchronizerLink);
	slot.writeReferenceToSlot(value);
}
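
/* Illustrative sketch (not part of the original source): the list is terminated by a
 * self-pointer rather than NULL, which lets a reader distinguish "last element" from
 * "not on any list". getOwnableSynchronizerLink() is assumed here to be the matching
 * read accessor; `barrier` and `current` are hypothetical names.
 */
#if 0
j9object_t next = barrier->getOwnableSynchronizerLink(current);
if (next == current) {
	/* `current` is the last object on the ownable synchronizer list */
}
#endif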

void
MM_ObjectAccessBarrier::printNativeMethod(J9VMThread* vmThread)
{
	J9SFJNINativeMethodFrame *nativeMethodFrame = VM_VMHelpers::findNativeMethodFrame(vmThread);
	J9Method *method = nativeMethodFrame->method;
	J9JavaVM *javaVM = vmThread->javaVM;
	PORT_ACCESS_FROM_JAVAVM(javaVM);

	if (NULL != method) {
		J9UTF8 *className = J9ROMCLASS_CLASSNAME(J9_CP_FROM_METHOD(method)->ramClass->romClass);
		J9ROMMethod *romMethod = J9_ROM_METHOD_FROM_RAM_METHOD(method);
		J9UTF8 *name = J9ROMMETHOD_NAME(romMethod);
		J9UTF8 *sig = J9ROMMETHOD_SIGNATURE(romMethod);

		j9tty_printf(PORTLIB, "%p: Native Method %p (%.*s.%.*s%.*s)\n",
			vmThread, method,
			(U_32)J9UTF8_LENGTH(className), J9UTF8_DATA(className), (U_32)J9UTF8_LENGTH(name), J9UTF8_DATA(name), (U_32)J9UTF8_LENGTH(sig), J9UTF8_DATA(sig));

		Trc_MM_ObjectAccessBarrier_printNativeMethod(vmThread, method,
			(U_32)J9UTF8_LENGTH(className), J9UTF8_DATA(className), (U_32)J9UTF8_LENGTH(name), J9UTF8_DATA(name), (U_32)J9UTF8_LENGTH(sig), J9UTF8_DATA(sig));
	} else {
		j9tty_printf(PORTLIB, "%p: Native Method Unknown\n", vmThread);
		Trc_MM_ObjectAccessBarrier_printNativeMethodUnknown(vmThread);
	}
}