/*******************************************************************************
 * Copyright (c) 2000, 2020 IBM Corp. and others
 *
 * This program and the accompanying materials are made available under
 * the terms of the Eclipse Public License 2.0 which accompanies this
 * distribution and is available at https://www.eclipse.org/legal/epl-2.0/
 * or the Apache License, Version 2.0 which accompanies this distribution and
 * is available at https://www.apache.org/licenses/LICENSE-2.0.
 *
 * This Source Code may also be made available under the following
 * Secondary Licenses when the conditions for such availability set
 * forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
 * General Public License, version 2 with the GNU Classpath
 * Exception [1] and GNU General Public License, version 2 with the
 * OpenJDK Assembly Exception [2].
 *
 * [1] https://www.gnu.org/software/classpath/license.html
 * [2] http://openjdk.java.net/legal/assembly-exception.html
 *
 * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
 *******************************************************************************/

#pragma csect(CODE,"TRJ9MRBase#C")
#pragma csect(STATIC,"TRJ9MRBase#S")
#pragma csect(TEST,"TRJ9MRBase#T")

#include "codegen/MemoryReference.hpp"
#include "codegen/InstOpCode.hpp"
#include "codegen/Machine.hpp"
#include "codegen/CodeGenerator.hpp"
#include "codegen/Instruction.hpp"
#include "codegen/Relocation.hpp"
#include "codegen/S390Register.hpp"
#include "il/AutomaticSymbol.hpp"
#include "il/Node.hpp"
#include "il/Node_inlines.hpp"

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include "codegen/CodeGenerator.hpp"
#include "codegen/ConstantDataSnippet.hpp"
#include "env/FrontEnd.hpp"
#include "codegen/InstOpCode.hpp"
#include "codegen/Instruction.hpp"
#include "codegen/Linkage.hpp"
#include "codegen/Linkage_inlines.hpp"
#include "codegen/LiveRegister.hpp"
#include "codegen/Machine.hpp"
#include "codegen/MemoryReference.hpp"
#include "codegen/RealRegister.hpp"
#include "codegen/Register.hpp"
#include "codegen/RegisterConstants.hpp"
#include "codegen/RegisterPair.hpp"
#include "codegen/Relocation.hpp"
#include "codegen/Snippet.hpp"
#include "codegen/TreeEvaluator.hpp"
#include "codegen/UnresolvedDataSnippet.hpp"
#include "compile/Compilation.hpp"
#include "compile/ResolvedMethod.hpp"
#include "compile/SymbolReferenceTable.hpp"
#include "control/Options.hpp"
#include "control/Options_inlines.hpp"
#include "cs2/sparsrbit.h"
#include "env/CompilerEnv.hpp"
#include "env/ObjectModel.hpp"
#include "env/StackMemoryRegion.hpp"
#include "env/TRMemory.hpp"
#include "env/defines.h"
#include "env/jittypes.h"
#include "il/Block.hpp"
#include "il/DataTypes.hpp"
#include "il/ILOpCodes.hpp"
#include "il/ILOps.hpp"
#include "il/RegisterMappedSymbol.hpp"
#include "il/ResolvedMethodSymbol.hpp"
#include "il/StaticSymbol.hpp"
#include "il/Symbol.hpp"
#include "il/SymbolReference.hpp"
#include "il/TreeTop.hpp"
#include "il/TreeTop_inlines.hpp"
#include "infra/Array.hpp"
#include "infra/Assert.hpp"
#include "infra/Bit.hpp"
#include "infra/Flags.hpp"
#include "infra/List.hpp"
#include "ras/Debug.hpp"
#include "z/codegen/EndianConversion.hpp"
#include "z/codegen/S390Evaluator.hpp"
#include "z/codegen/S390GenerateInstructions.hpp"
#include "z/codegen/S390Instruction.hpp"

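/**
 * Walk the subtree rooted at \p node and bump each visited node's reference count by \p increment,
 * recording the adjusted nodes in \p incrementedNodesList. Nodes that already have a register are
 * added to \p nodesAlreadyEvaluatedBeforeFoldingList and their children are not recursed into.
 * A node's temporary-based storage reference (or variable-size temporary symbol) also has its own
 * reference count incremented to reflect the implied use.
 */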
void recursivelyIncrementReferenceCount(TR::Node *node, rcount_t increment, List<TR::Node> & incrementedNodesList, List<TR::Node> &nodesAlreadyEvaluatedBeforeFoldingList,
                                        TR::CodeGenerator *cg)
   {
   TR::Compilation *comp = cg->comp();
   TR_ASSERT( increment > 0,"recursivelyIncrementReferenceCount only valid for positive increments\n");
   if (cg->traceBCDCodeGen())
      traceMsg(comp,"\t\t\trecAdjust node - %s (%p) and add to list, increment %d: refCount %d->%d\n",
         node->getOpCode().getName(),node,increment,node->getReferenceCount(),node->getReferenceCount()+increment);
   incrementedNodesList.add(node);
   node->setReferenceCount(node->getReferenceCount()+increment);

   if (node->getRegister() && node->getRegister()->getOpaquePseudoRegister())
      {
      TR_OpaquePseudoRegister *pseudoReg = node->getRegister()->getOpaquePseudoRegister();
      TR_StorageReference *storageReference = pseudoReg->getStorageReference();
      TR_ASSERT( storageReference,"the pseudoReg should have a non-null storage reference\n");
      if (storageReference->isTemporaryBased())
         storageReference->incrementTemporaryReferenceCount();
      }
   else if (node->getOpCode().hasSymbolReference() && node->getSymbolReference() &&
            node->getSymbolReference()->isTempVariableSizeSymRef() && node->getSymbolReference()->getSymbol() &&
            node->getSymbolReference()->getSymbol()->isVariableSizeSymbol())
      {
      TR_ASSERT( node->getOpCodeValue()==TR::loadaddr,"temporary symbol references should only be attached to loadaddr nodes\n");
      TR::AutomaticSymbol *sym = node->getSymbolReference()->getSymbol()->castToVariableSizeSymbol();
      if (comp->cg()->traceBCDCodeGen())
         traceMsg(comp,"\tincrement temporary #%d (sym %p -- from loadaddr node %p) reference count %d->%d\n",
            node->getSymbolReference()->getReferenceNumber(),sym,node,sym->getReferenceCount(),sym->getReferenceCount()+1);
      sym->setReferenceCount(sym->getReferenceCount()+1);
      }
   if (node->getRegister() == NULL)
      {
      if (cg->traceBCDCodeGen())
         traceMsg(comp,"\t\t\tnode has no register so do recurse\n");
      for (int32_t childCount = node->getNumChildren()-1; childCount >= 0; childCount--)
         recursivelyIncrementReferenceCount(node->getChild(childCount), increment, incrementedNodesList, nodesAlreadyEvaluatedBeforeFoldingList, cg);
      }
   else
      {
      nodesAlreadyEvaluatedBeforeFoldingList.add(node);
      if (cg->traceBCDCodeGen())
         traceMsg(comp,"\t\t\tnode has a register so do not recurse\n");
      }
   return;
   }

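/**
 * No instruction-specific relocation is emitted for J9 on Z; this override is a no-op.
 */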
void J9::Z::MemoryReference::addInstrSpecificRelocation(TR::CodeGenerator* cg, TR::Instruction* instr, int32_t disp, uint8_t * cursor)
   {
   }

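/**
 * BCD-typed nodes always require alignment handling; all other types defer to
 * OMR::Z::MemoryReference::typeNeedsAlignment.
 */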
bool
J9::Z::MemoryReference::typeNeedsAlignment(TR::Node *node)
   {
   if (node && node->getType().isBCD())
      return true;
   else
      return OMR::Z::MemoryReference::typeNeedsAlignment(node);
   }

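/**
 * When a storage reference is supplied, redirect this memory reference to the storage reference's node
 * and symbol reference (updating rootLoadOrStore, symRef and symbol for the caller). For indirect,
 * node-based storage references with more than one outstanding node use (or node-based hints), the
 * address subtree's reference counts are bumped via recursivelyIncrementReferenceCount so that folding
 * can be forced where it looks advantageous; otherwise the address child's reference count is bumped
 * and the memory reference is marked to force evaluation.
 */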
void
J9::Z::MemoryReference::tryForceFolding(TR::Node *& rootLoadOrStore, TR::CodeGenerator *& cg, TR_StorageReference *& storageReference, TR::SymbolReference *& symRef, TR::Symbol *& symbol,
                                        List<TR::Node>& nodesAlreadyEvaluatedBeforeFoldingList)
   {
   if (storageReference)
      {
      bool isImpliedMemoryReference = false;

      TR::Compilation *comp = cg->comp();
      TR::Node *storageRefNode = storageReference->getNode();
      bool isIndirect = storageRefNode->getOpCode().isIndirect();

      TR_ASSERT(!storageRefNode->getOpCode().isLoadConst(), "storageRefNode %s (%p) const should be BCD or Aggr type\n", storageRefNode->getOpCode().getName(),storageRefNode);

      TR_ASSERT(storageRefNode->getOpCode().isLoadVar() || storageRefNode->getOpCode().isStore(), "expecting storageRef node %p to be a loadVar or store\n",storageRefNode);

      _symbolReference = storageReference->getSymbolReference();

      symRef = _symbolReference;
      _originalSymbolReference = _symbolReference;

      symbol = _symbolReference->getSymbol();

      if (cg->traceBCDCodeGen())
         traceMsg(comp,"\t\tmr storageRef case: setting rootLoadOrStore from %s (%p) to storageRef->node %s (%p) (ref->nodeRefCount %d, symRef #%d (sym=%p), isIndirect %s, isConst %s)\n",
            rootLoadOrStore?rootLoadOrStore->getOpCode().getName():"NULL",
            rootLoadOrStore,
            storageRefNode?storageRefNode->getOpCode().getName():"NULL",
            storageRefNode,
            storageReference->getNodeReferenceCount(),
            _symbolReference->getReferenceNumber(),
            symbol,
            isIndirect?"yes":"no",
            "no");

      rootLoadOrStore = storageRefNode;
      if (isIndirect)
         {
         if (storageReference->isNodeBasedHint())
            {
            isImpliedMemoryReference = true;
            if (cg->traceBCDCodeGen())
               traceMsg(comp,"\t\tset isImpliedMemoryReference=true as ref isNodeBasedHint=true\n");
            }
         else
            {
            TR_ASSERT(storageReference->getNodeReferenceCount() >= 1,"storageReference->getNodeReferenceCount() should be >=1 and not %d storageRefNode:[%p]\n",storageReference->getNodeReferenceCount(), storageRefNode);
            isImpliedMemoryReference = (storageReference->getNodeReferenceCount() > 1);
            if (cg->traceBCDCodeGen())
               traceMsg(comp,"\t\tset isImpliedMemoryReference=%s as ref->getNodeReferenceCount() %s 1\n",
                  isImpliedMemoryReference?"true":"false",isImpliedMemoryReference?">":"==");
            }
         }
      if (!storageReference->isNodeBasedHint())
         {
         if (cg->traceBCDCodeGen())
            traceMsg(comp,"\t\tdec nodeRefCount %d->%d on storageRef #%d (storageRefNode %s (%p))\n",
               storageReference->getNodeReferenceCount(),storageReference->getNodeReferenceCount()-1,
               storageReference->getReferenceNumber(),
               storageReference->getNode()->getOpCode().getName(),storageReference->getNode());
         storageReference->decrementNodeReferenceCount();
         }
      TR_ASSERT(rootLoadOrStore->getOpCode().isLoad() || rootLoadOrStore->getOpCode().isStore(),"rootLoadOrStore should be a load or store opcode\n");

      if (isImpliedMemoryReference)
         {
         // An addressHint is an implied reference to an address tree. The nodes in this tree must have their reference counts incremented to
         // account for this implied use. Failing to reflect these implied uses could allow an evaluator to incorrectly clobber a result register that is needed in
         // a subsequent implied or explicit use (i.e. an evaluator may perform a refCount==1 codegen optimization when it really should not, given the implied use).
         // A drawback of incrementing the reference counts is that populateMemoryReference will never try to fold an address tree into a 390 memory reference when
         // the refCount of the subTree is > 1.
         // Depending on the simplicity of the tree (for example, if it is just a simple add of a base+const) it is probably still worthwhile attempting to fold
         // the tree.
         // The forceFolding flag is set in these cases to force populateMemoryReference to attempt folding even when the (now) higher refCounts would usually disallow it.
         TR::Node *addressChild = rootLoadOrStore->getFirstChild();

         self()->setForceFoldingIfAdvantageous(cg, addressChild);

         if (self()->forceFolding() || self()->forceFirstTimeFolding() || addressChild->getOpCodeValue() == TR::loadaddr)
            {
            if (cg->traceBCDCodeGen())
               traceMsg(comp,"\t\tisImpliedMemoryReference=true and %s so recInc refCounts for rootLoadOrStore %s (%p) and addressChild %s (%p)\n",
                  self()->forceFirstTimeFolding()?
                     "forceFirstTimeFolding":
                     (self()->forceFolding()?"forceFolding=true":"addressChild is a loadaddr"),
                  rootLoadOrStore->getOpCode().getName(),rootLoadOrStore,
                  addressChild->getOpCode().getName(),addressChild);
            _incrementedNodesList.init();
            nodesAlreadyEvaluatedBeforeFoldingList.init();
            // Increment the reference counts for this tree to reflect this implicit use so that:
            // 1. Any evaluators called for this tree will see accurate reference counts that include this implicit use, so no
            //    incorrect codegen optimizations are performed.
            // 2. Any folding of the addressChild's children can be performed for this and future implicit uses and for the original use without
            //    underflowing the addressChild's children's referenceCounts (as each implied and actual use would reference the children each time when
            //    folding).
            recursivelyIncrementReferenceCount(addressChild, 1, _incrementedNodesList, nodesAlreadyEvaluatedBeforeFoldingList, cg);
            }
         else
            {
            self()->setForceEvaluation();
            if (cg->traceBCDCodeGen())
               traceMsg(comp,"\t\tisImpliedMemoryReference=true and forceFolding=false so increment addressChild %s (%p) refCount %d->%d and setForceEvaluation to true\n",
                  addressChild->getOpCode().getName(),addressChild,addressChild->getReferenceCount(),addressChild->getReferenceCount()+1);
            addressChild->incReferenceCount();
            }
         }
      }
   }

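/**
 * Create an unresolved data snippet for \p symRef, register it with the code generator, and generate
 * the branch to the snippet; the branch instruction is recorded on the snippet.
 */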
TR::UnresolvedDataSnippet *
J9::Z::MemoryReference::createUnresolvedDataSnippet(TR::Node * node, TR::CodeGenerator * cg, TR::SymbolReference * symRef, TR::Register * tempReg, bool isStore)
   {
   TR::UnresolvedDataSnippet * uds;
   TR::Instruction * cursor;

   self()->setUnresolvedSnippet(uds = new (cg->trHeapMemory()) TR::UnresolvedDataSnippet(cg, node, symRef, isStore, false));
   cg->addSnippet(self()->getUnresolvedSnippet());

   // generate branch to the unresolved data snippet
   cursor = generateRegUnresolvedSym(cg, TR::InstOpCode::getLoadOpCode(), node, tempReg, symRef, uds);
   uds->setBranchInstruction(cursor);

   return uds;
   }

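/**
 * Variant of createUnresolvedDataSnippet used for an unresolved iaload: as described below, a
 * putstatic-like pattern is detected and flagged as a store before the snippet and its unresolved
 * data are created.
 */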
TR::UnresolvedDataSnippet *
J9::Z::MemoryReference::createUnresolvedDataSnippetForiaload(TR::Node * node, TR::CodeGenerator * cg, TR::SymbolReference * symRef, TR::Register * tempReg, bool & isStore)
   {
   // Have to catch the case where, on first glance, a putstatic looks
   // like a 'read' since the unresolved ref is on the iaload, not the
   // iistore. The 'right' fix is to set a bit on the sym instead.
   //
   TR::Node * rootNode = cg->getCurrentEvaluationTreeTop()->getNode();
   if (rootNode->getOpCode().isResolveCheck() &&
       rootNode->getFirstChild()->getOpCode().isStoreIndirect() &&
       rootNode->getFirstChild()->getFirstChild() == node &&
       !rootNode->getFirstChild()->getSymbolReference()->isUnresolved())
      {
      isStore = true;
      }

   TR::UnresolvedDataSnippet * uds = self()->createUnresolvedDataSnippet(node, cg, symRef, tempReg, isStore);
   self()->getUnresolvedSnippet()->createUnresolvedData(cg, _baseNode);
   self()->getUnresolvedSnippet()->getUnresolvedData()->setUnresolvedDataSnippet(self()->getUnresolvedSnippet());
   return uds;
   }

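/**
 * Create an unresolved data snippet keyed off the node's register (allocating a fresh GPR, FPR or VRF
 * temp as needed) and make that register the base register of this memory reference. For loadaddr
 * nodes a writable literal pool register is also provided, either loaded on demand or taken from the
 * dedicated literal pool real register.
 */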
void
J9::Z::MemoryReference::createUnresolvedSnippetWithNodeRegister(TR::Node * node, TR::CodeGenerator * cg, TR::SymbolReference * symRef, TR::Register *& writableLiteralPoolRegister)
   {
   TR::Register * tempReg = node->getRegister();
   if (tempReg == NULL)
      {
      tempReg = node->setRegister(cg->allocateRegister());
      }
   else if (tempReg->getKind() == TR_FPR)
      {
      tempReg = cg->allocateRegister(TR_FPR);
      }
   else if (tempReg->getKind() == TR_VRF)
      {
      tempReg = cg->allocateRegister(TR_VRF);
      }

   self()->createUnresolvedDataSnippet(node, cg, symRef, tempReg, false);

   if (node->getOpCodeValue() == TR::loadaddr)
      {
      if (cg->isLiteralPoolOnDemandOn())
         {
         writableLiteralPoolRegister = cg->allocateRegister();
         generateLoadLiteralPoolAddress(cg, node, writableLiteralPoolRegister);
         cg->stopUsingRegister(writableLiteralPoolRegister);
         }
      else
         {
         writableLiteralPoolRegister = cg->getLitPoolRealRegister();
         }
      }

   self()->setBaseRegister(tempReg, cg);
   _baseNode = node;
   }

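/**
 * Create the unresolved data for _baseNode on the existing unresolved snippet and use
 * \p writableLiteralPoolRegister as the base register of this memory reference.
 */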
void
J9::Z::MemoryReference::createUnresolvedDataSnippetForBaseNode(TR::CodeGenerator * cg, TR::Register * writableLiteralPoolRegister)
   {
   self()->getUnresolvedSnippet()->createUnresolvedData(cg, _baseNode);
   self()->getUnresolvedSnippet()->getUnresolvedData()->setUnresolvedDataSnippet(self()->getUnresolvedSnippet());
   self()->setBaseRegister(writableLiteralPoolRegister, cg);
   }

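/**
 * Create a writable (patchable) literal pool entry tied to the unresolved data snippet and load it via
 * a load-relative-long instruction; the 0xBABE target is a placeholder address, presumably patched
 * once the reference is resolved.
 */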
void
J9::Z::MemoryReference::createPatchableDataInLitpool(TR::Node * node, TR::CodeGenerator * cg, TR::Register * tempReg, TR::UnresolvedDataSnippet * uds)
   {
   // create a patchable data entry in the literal pool
   TR::S390WritableDataSnippet * litpool = cg->CreateWritableConstant(node);
   litpool->setUnresolvedDataSnippet(uds);
   litpool->resetNeedLitPoolBasePtr();
   TR::S390RILInstruction * LRLinst;
   LRLinst = (TR::S390RILInstruction *) generateRILInstruction(cg, TR::InstOpCode::getLoadRelativeLongOpCode(), node, tempReg, reinterpret_cast<uintptr_t*>(0xBABE), 0);
   uds->setDataReferenceInstruction(LRLinst);
   LRLinst->setSymbolReference(uds->getDataSymbolReference());
   LRLinst->setTargetSnippet(litpool);
   LRLinst->setTargetSymbol(uds->getDataSymbol());
   TR_Debug * debugObj = cg->getDebug();

   if (debugObj)
      {
      debugObj->addInstructionComment(LRLinst, "LoadLitPoolEntry");
      }

   self()->setBaseRegister(tempReg, cg);
   }

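/**
 * Returns true when this memory reference has a storage reference whose symbol reference carries a
 * temporary negative offset.
 */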
bool
J9::Z::MemoryReference::symRefHasTemporaryNegativeOffset()
   {
   return self()->getStorageReference() && self()->getStorageReference()->getSymbolReference() && self()->getStorageReference()->getSymbolReference()->hasTemporaryNegativeOffset();
   }

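/**
 * Point the unresolved snippet back at this memory reference and return its unresolved data snippet
 * through \p snippet.
 */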
void
J9::Z::MemoryReference::setMemRefAndGetUnresolvedData(TR::Snippet *& snippet)
   {
   self()->getUnresolvedSnippet()->setMemoryReference(self());
   snippet = self()->getUnresolvedSnippet()->getUnresolvedData();
   }

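/**
 * Generate a fresh memory reference from \p storageReference (applying \p offset) when \p baseMR is
 * NULL, otherwise reuse the existing \p baseMR at the given offset.
 */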
TR::MemoryReference *
reuseS390MemRefFromStorageRef(TR::MemoryReference *baseMR, int32_t offset, TR::Node *node, TR_StorageReference *storageReference, TR::CodeGenerator *cg, bool enforceSSLimits)
   {
   if (baseMR == NULL)
      {
      baseMR = generateS390MemRefFromStorageRef(node, storageReference, cg, enforceSSLimits);
      baseMR->addToOffset(offset);
      }
   else
      {
      baseMR = reuseS390MemoryReference(baseMR, offset, node, cg, enforceSSLimits);
      }
   return baseMR;
   }

/**
 * When isNewTemp=true do not transfer any deadBytes from the node/reg as this is a memref for a brand new tempStorageRef -- the node is still needed
 * in this case to attach to any instructions for line number lookup.
 */
TR::MemoryReference *
generateS390MemRefFromStorageRef(TR::Node *node, TR_StorageReference *storageReference, TR::CodeGenerator * cg, bool enforceSSLimits, bool isNewTemp)
   {
   TR::Compilation *comp = cg->comp();
   TR_ASSERT( storageReference,"must specify a storageReference when creating an aligned memory reference\n");
   TR::MemoryReference *memRef = NULL;
   // A memRef created for an indirect load may have its symRef replaced with its child's symRef (when the child is a loadaddr, for example).
   // When it comes to right aligning the memRef, the symbol size of the indirect load itself is required and *not* the symbol size of the child.
   // So in these cases cache the indirect load symbol size on the memRef for later use when right aligning.
   // One example of this is an indirect load off the loadaddr of comp()->getWCodeMainLitSymRef(). In this case the symbol size reflects all the constants present.
   // Another example is an indirect load off of a loadaddr auto. The auto symbol size may be larger than the indirect load size being performed.
   if (storageReference->isTemporaryBased())
      {
      memRef = new (cg->trHeapMemory()) TR::MemoryReference(node, storageReference->getTemporarySymbolReference(), cg, storageReference);
      }
   else
      {
      memRef = new (cg->trHeapMemory()) TR::MemoryReference(node, cg, false, storageReference);
      memRef->setFixedSizeForAlignment(storageReference->getSymbolSize());
      }

   if (!isNewTemp &&
       !storageReference->isSingleUseTemporary() &&
       node &&
       node->getOpaquePseudoRegister() &&
       node->getOpaquePseudoRegister()->getRightAlignedDeadAndIgnoredBytes() > 0)
      {
      int32_t regDeadAndIgnoredBytes = node->getOpaquePseudoRegister()->getRightAlignedDeadAndIgnoredBytes();
      if (cg->traceBCDCodeGen())
         traceMsg(comp,"\tgenerateS390AlignedMemoryReference: adjust memRef->_offset for regDeadAndIgnoredBytes (%d->%d) from node %s (%p) and reg %s\n",
            memRef->getOffset(),memRef->getOffset()-regDeadAndIgnoredBytes,node->getOpCode().getName(),node,cg->getDebug()->getName(node->getOpaquePseudoRegister()));
      memRef->addToTemporaryNegativeOffset(node, -regDeadAndIgnoredBytes, cg);
      }

   if (storageReference->isTemporaryBased() && storageReference->getSymbolReference()->hasTemporaryNegativeOffset())
      {
      if (cg->traceBCDCodeGen())
         traceMsg(comp,"\tgenerateS390AlignedMemoryReference mr %p: call addToTemporaryNegativeOffset flag for storageRef->symRef #%d (offset = %d) and node %s (%p) to memRef\n",
            memRef,storageReference->getReferenceNumber(),storageReference->getSymbolReference()->getOffset(),
            node->getOpCode().getName(),node);
      memRef->enforceSSFormatLimits(node, cg, NULL);
      memRef->setHasTemporaryNegativeOffset();
      }

   if (cg->traceBCDCodeGen() && storageReference->isTemporaryBased() && !storageReference->isSingleUseTemporary())
      {
      if (storageReference->getTemporaryReferenceCount() == 0)
         traceMsg(comp,"**ERROR**: using an already freed temp #%d sym %p (node %p storageRef %p)\n",
            storageReference->getReferenceNumber(),storageReference->getTemporarySymbol(),node,storageReference);
      else if (!storageReference->getTemporarySymbol()->isReferenced())
         traceMsg(comp,"**ERROR**: using an unreferenced temp #%d sym %p (node %p storageRef %p)\n",
            storageReference->getReferenceNumber(),storageReference->getTemporarySymbol(),node,storageReference);
      }

   TR_ASSERT(!storageReference->isTemporaryBased() ||
             storageReference->isSingleUseTemporary() || // refCount is always zero for these
             storageReference->getTemporaryReferenceCount() > 0,
             "using an already freed temporary symbol reference\n");

   TR_ASSERT(!storageReference->isTemporaryBased() ||
             !storageReference->isSingleUseTemporary() || // refCount is always zero for these
             storageReference->getTemporaryReferenceCount() == 0,
             "single use temps must have a refCount of 0 and not %d\n",storageReference->getTemporaryReferenceCount());

   TR_ASSERT(!storageReference->isTemporaryBased() ||
             storageReference->getTemporarySymbol()->isReferenced(),
             "using a temporary symbol that is marked as unreferenced\n");

   // enforcing the ss limits early means that each consuming instruction does not have to consolidate an index register or a large offset to a new base register
   if (enforceSSLimits)
      memRef->enforceSSFormatLimits(node, cg, NULL);

   // Make sure that the listing symref doesn't stay on the memref if the storage ref has a different symbol
   if (memRef->getListingSymbolReference())
      {
      if (storageReference->isTemporaryBased())
         {
         memRef->setListingSymbolReference(NULL);
         }
      else if (storageReference->isConstantNodeBased())
         {
         memRef->setListingSymbolReference(NULL);
         }
      else if (storageReference->isNonConstantNodeBased())
         {
         if (memRef->getListingSymbolReference() != storageReference->getNode()->getSymbolReference())
            {
            memRef->setListingSymbolReference(NULL);
            }
         }
      }

   return memRef;
   }

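/**
 * The aligned variants below wrap generateS390MemRefFromStorageRef and additionally mark the memory
 * reference as right or left aligned; they must not be used for aggregate-typed nodes.
 */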
TR::MemoryReference *
generateS390RightAlignedMemoryReference(TR::Node *node, TR_StorageReference *storageReference, TR::CodeGenerator * cg, bool enforceSSLimits, bool isNewTemp)
   {
   TR_ASSERT(!node->getType().isAggregate(),"do not use aligned memrefs for aggrs on node %p\n",node);
   TR::MemoryReference *mr = generateS390MemRefFromStorageRef(node, storageReference, cg, enforceSSLimits, isNewTemp);
   mr->setRightAlignMemRef();
   return mr;
   }

TR::MemoryReference *
generateS390LeftAlignedMemoryReference(TR::Node *node, TR_StorageReference *storageReference, TR::CodeGenerator * cg, int32_t leftMostByte, bool enforceSSLimits, bool isNewTemp)
   {
   TR_ASSERT(!node->getType().isAggregate(),"do not use aligned memrefs for aggrs on node %p\n",node);
   TR::MemoryReference *mr = generateS390MemRefFromStorageRef(node, storageReference, cg, enforceSSLimits, isNewTemp);
   mr->setLeftAlignMemRef(leftMostByte);
   return mr;
   }

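/**
 * The reuse variants return \p baseMR itself when it has not yet been used in an instruction,
 * otherwise they create a fresh aligned memory reference (from \p baseMR when it exists, or from the
 * storage reference when \p baseMR is NULL).
 */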
TR::MemoryReference *
reuseS390LeftAlignedMemoryReference(TR::MemoryReference *baseMR, TR::Node *node, TR_StorageReference *storageReference, TR::CodeGenerator *cg, int32_t leftMostByte, bool enforceSSLimits)
   {
   TR_ASSERT(!node->getType().isAggregate(),"do not use aligned memrefs for aggrs on node %p\n",node);
   if (baseMR == NULL)
      baseMR = generateS390LeftAlignedMemoryReference(node, storageReference, cg, leftMostByte, enforceSSLimits);
   else if (baseMR->getMemRefUsedBefore())
      baseMR = generateS390LeftAlignedMemoryReference(*baseMR, node, 0, cg, leftMostByte, enforceSSLimits);
   else
      baseMR->setLeftAlignMemRef(leftMostByte);
   return baseMR;
   }

TR::MemoryReference *
reuseS390RightAlignedMemoryReference(TR::MemoryReference *baseMR, TR::Node *node, TR_StorageReference *storageReference, TR::CodeGenerator *cg, bool enforceSSLimits)
   {
   TR_ASSERT(!node->getType().isAggregate(),"do not use aligned memrefs for aggrs on node %p\n",node);
   if (baseMR == NULL)
      baseMR = generateS390RightAlignedMemoryReference(node, storageReference, cg, enforceSSLimits);
   else if (baseMR->getMemRefUsedBefore())
      baseMR = generateS390RightAlignedMemoryReference(*baseMR, node, 0, cg, enforceSSLimits);
   else
      baseMR->setRightAlignMemRef();
   return baseMR;
   }