Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/openj9
Path: blob/master/runtime/compiler/control/RecompilationInfo.hpp
6000 views
1
/*******************************************************************************
2
* Copyright (c) 2000, 2021 IBM Corp. and others
3
*
4
* This program and the accompanying materials are made available under
5
* the terms of the Eclipse Public License 2.0 which accompanies this
6
* distribution and is available at https://www.eclipse.org/legal/epl-2.0/
7
* or the Apache License, Version 2.0 which accompanies this distribution and
8
* is available at https://www.apache.org/licenses/LICENSE-2.0.
9
*
10
* This Source Code may also be made available under the following
11
* Secondary Licenses when the conditions for such availability set
12
* forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
13
* General Public License, version 2 with the GNU Classpath
14
* Exception [1] and GNU General Public License, version 2 with the
15
* OpenJDK Assembly Exception [2].
16
*
17
* [1] https://www.gnu.org/software/classpath/license.html
18
* [2] http://openjdk.java.net/legal/assembly-exception.html
19
*
20
* SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
21
*******************************************************************************/
22
23
#ifndef TR_RECOMPILATION_INFO_INCL
24
#define TR_RECOMPILATION_INFO_INCL
25
26
27
#include <stddef.h>
28
#include <stdint.h>
29
#include "compile/Compilation.hpp"
30
#include "compile/CompilationTypes.hpp"
31
#include "control/Options.hpp"
32
#include "control/Options_inlines.hpp"
33
#include "env/CHTable.hpp"
34
#include "env/TRMemory.hpp"
35
#include "env/defines.h"
36
#include "env/jittypes.h"
37
#include "infra/Assert.hpp"
38
#include "infra/Flags.hpp"
39
#include "infra/Link.hpp"
40
#include "infra/Timer.hpp"
41
#include "runtime/J9Profiler.hpp"
42
43
class TR_FrontEnd;
44
class TR_OpaqueMethodBlock;
45
class TR_OptimizationPlan;
46
class TR_ResolvedMethod;
47
namespace TR { class Instruction; }
48
namespace TR { class SymbolReference; }
49
50
// Bits to represent sampling mechanism in method return info field.
// Bits 3, 4, 5, 6, 7 are reserved for this purpose (could use fewer)
// This has to be in sync with LinkageInfo (in Runtime.hpp).
//
#define METHOD_SAMPLING_RECOMPILATION 0x00000010
#define METHOD_COUNTING_RECOMPILATION 0x00000020
#define METHOD_HAS_BEEN_RECOMPILED 0x00000040

// Adaptive Profiling Parameters: chose smaller counts for methods
// with fewer back-edges.
// Both tables are indexed by the method's number of async checks,
// clamped to MAX_BACKEDGES (i.e. index min(numAsyncChecks, MAX_BACKEDGES)).
// 0 1 2 3 4 >= 5 <-- number of async checks
static int32_t profilingCountsTable[] = { 100, 625, 1250, 2500, 5000, 10000 }; // <-- profiling count
static int32_t profilingFreqTable [] = { 19, 29, 47, 47, 47, 53 }; // <-- profiling frequency
// (frequencies should be prime so samples don't resonate with loop periods)
#define MAX_BACKEDGES (5) // max index in the above array

// Defaults are the most conservative (largest-method) entries of the tables.
#define DEFAULT_PROFILING_FREQUENCY (profilingFreqTable [MAX_BACKEDGES])
#define DEFAULT_PROFILING_COUNT (profilingCountsTable[MAX_BACKEDGES])
68
69
namespace TR { class DefaultCompilationStrategy; }
70
namespace TR { class ThresholdCompilationStrategy; }
71
namespace OMR { class Recompilation; }
72
namespace J9 { class Recompilation; }
73
74
// Persistent information associated with a method for recompilation
//
// One instance exists per recompilable method and outlives individual
// jitted bodies; it records the state the recompilation machinery needs
// across compilations (flags, next compile level, profiling info, plan).
//
class TR_PersistentMethodInfo
   {
   friend class OMR::Recompilation;
   friend class J9::Recompilation;
   friend class TR::CompilationInfo;
   friend class TR_S390Recompilation; // FIXME: ugly
   friend class ::OMR::Options;
   friend class TR::DefaultCompilationStrategy;
   friend class TR::ThresholdCompilationStrategy;

   public:
   TR_PERSISTENT_ALLOC(TR_Memory::PersistentMethodInfo);

   TR_PersistentMethodInfo() {}
   TR_PersistentMethodInfo(TR::Compilation *);
   TR_PersistentMethodInfo(TR_OpaqueMethodBlock *);

   // Retrieve the persistent info for a compilation / resolved method.
   static TR_PersistentMethodInfo * get(TR::Compilation *);
   static TR_PersistentMethodInfo * get(TR_ResolvedMethod * method);

   TR_OpaqueMethodBlock * getMethodInfo() {return _methodInfo;}
   void * getAddressOfMethodInfo() { return &_methodInfo; }

   void setMethodInfo(void *mi) { _methodInfo = (TR_OpaqueMethodBlock *)mi; }

   // Permanently forbids future profiling compilations of this method;
   // there is deliberately no way to clear this flag.
   void setDisableProfiling() { _flags.set(ProfilingDisabled); }
   bool profilingDisabled() { return _flags.testAny(ProfilingDisabled); }

   void setDisableMiscSamplingCounterDecrementation() { _flags.set(DisableMiscSamplingCounterDecrementation); }
   bool disableMiscSamplingCounterDecrementation() { return _flags.testAny(DisableMiscSamplingCounterDecrementation); }

   void setOptLevelDowngraded(bool b) { _flags.set(OptLevelDowngraded, b); }
   bool isOptLevelDowngraded() { return _flags.testAny(OptLevelDowngraded); }

   // 'reason' is one of the RecompDueTo* values below, stored in the
   // CompilationReasonMask bit-field of _flags.
   void setReasonForRecompilation(int32_t reason) { _flags.setValue(CompilationReasonMask, reason); }
   int32_t getReasonForRecompilation() { return _flags.getValue(CompilationReasonMask); }

   bool hasBeenReplaced() { return _flags.testAny(HasBeenReplaced); }
   void setHasBeenReplaced(bool b=true) { _flags.set(HasBeenReplaced, b); }

   bool wasNeverInterpreted() { return _flags.testAny(WasNeverInterpreted); }
   void setWasNeverInterpreted(bool b) { _flags.set(WasNeverInterpreted, b); }

   bool wasScannedForInlining() { return _flags.testAny(WasScannedForInlining); }
   void setWasScannedForInlining(bool b) { _flags.set(WasScannedForInlining, b); }

   bool isInDataCache() { return _flags.testAny(IsInDataCache); }
   void setIsInDataCache(bool b) { _flags.set(IsInDataCache, b); }

   bool hasFailedDLTCompRetrials() { return _flags.testAny(HasFailedDLTCompRetrials); }
   void setHasFailedDLTCompRetrials(bool b) { _flags.set(HasFailedDLTCompRetrials, b); }

   // Refined alias information: one "doesn't kill X" bit per alias
   // category (see RefinedAliasesMask below).
   bool hasRefinedAliasSets() { return _flags.testAny(RefinedAliasesMask); }

   bool doesntKillAddressStatics() { return _flags.testAny(DoesntKillAddressStatics); }
   void setDoesntKillAddressStatics(bool b) { _flags.set(DoesntKillAddressStatics, b); }

   bool doesntKillIntStatics() { return _flags.testAny(DoesntKillIntStatics); }
   void setDoesntKillIntStatics(bool b) { _flags.set(DoesntKillIntStatics, b); }

   bool doesntKillNonIntPrimitiveStatics() { return _flags.testAny(DoesntKillNonIntPrimitiveStatics); }
   void setDoesntKillNonIntPrimitiveStatics(bool b) { _flags.set(DoesntKillNonIntPrimitiveStatics, b); }

   bool doesntKillAddressFields() { return _flags.testAny(DoesntKillAddressFields); }
   void setDoesntKillAddressFields(bool b) { _flags.set(DoesntKillAddressFields, b); }

   bool doesntKillIntFields() { return _flags.testAny(DoesntKillIntFields); }
   void setDoesntKillIntFields(bool b) { _flags.set(DoesntKillIntFields, b); }

   bool doesntKillNonIntPrimitiveFields() { return _flags.testAny(DoesntKillNonIntPrimitiveFields); }
   void setDoesntKillNonIntPrimitiveFields(bool b) { _flags.set(DoesntKillNonIntPrimitiveFields, b); }

   bool doesntKillAddressArrayShadows() { return _flags.testAny(DoesntKillAddressArrayShadows); }
   void setDoesntKillAddressArrayShadows(bool b) { _flags.set(DoesntKillAddressArrayShadows, b); }

   bool doesntKillIntArrayShadows() { return _flags.testAny(DoesntKillIntArrayShadows); }
   void setDoesntKillIntArrayShadows(bool b) { _flags.set(DoesntKillIntArrayShadows, b); }

   bool doesntKillNonIntPrimitiveArrayShadows() { return _flags.testAny(DoesntKillNonIntPrimitiveArrayShadows); }
   void setDoesntKillNonIntPrimitiveArrayShadows(bool b) { _flags.set(DoesntKillNonIntPrimitiveArrayShadows, b); }

   bool doesntKillEverything() { return _flags.testAny(DoesntKillEverything); }
   void setDoesntKillEverything(bool b) { _flags.set(DoesntKillEverything, b); }

   // True only when every refined-alias bit is set (testAll, not testAny).
   bool doesntKillAnything() { return _flags.testAll(RefinedAliasesMask); }

   // Accessor methods for the "cpoCounter". This does not really
   // need to be its own counter, as it is conceptually the same as
   // "_counter". However, the original _counter is still used during
   // instrumentation, so it was simplest to keep them separate.
   //
   int32_t cpoGetCounter() {return _cpoSampleCounter;}
   int32_t cpoIncCounter() {return ++_cpoSampleCounter;}
   int32_t cpoSetCounter(int newCount) {return _cpoSampleCounter = newCount;}

   uint16_t getTimeStamp() { return _timeStamp; }

   TR_OptimizationPlan * getOptimizationPlan() {return _optimizationPlan;}
   void setOptimizationPlan(TR_OptimizationPlan *optPlan) { _optimizationPlan = optPlan; }
   uint8_t getNumberOfInvalidations() {return _numberOfInvalidations;}
   void incrementNumberOfInvalidations() {_numberOfInvalidations++;}
   uint8_t getNumberOfInlinedMethodRedefinition() {return _numberOfInlinedMethodRedefinition;}
   void incrementNumberOfInlinedMethodRedefinition() {_numberOfInlinedMethodRedefinition++;}
   int16_t getNumPrexAssumptions() {return _numPrexAssumptions;}
   void incNumPrexAssumptions() {_numPrexAssumptions++;}

   enum InfoBits
      {
      // Normally set by the previous compilation to indicate that the next
      // compilation should use profiling. Sometimes we can start out without
      // profiling and then set it during compilation if we change our minds.
      // At the end of compilation we set it to the value we want for the
      // next compilation.
      // If the flag ProfilingDisabled (below) is set we should never set this flag.
      UseProfiling = 0x00000001,

      CanBeCalledInSinglePrecisionMode = 0x00000002,

      // This flag disables any future profiling of this method.
      // Normally we set this when we know that profiling is going to have
      // a large overhead.
      ProfilingDisabled = 0x00000004,

      // This flag is used to disable the decrementation of the sampling
      // counter for reasons other than sampling or EDO. Normally it is set in
      // sampleMethod when a recompilation is triggered by PIC misses decrementing
      // the sampling counter or by profiling of PIC addresses at warm.
      DisableMiscSamplingCounterDecrementation=0x00000008,

      // This flag is set when a method is silently downgraded from warm to cold
      OptLevelDowngraded = 0x00000010,

      HasFailedDLTCompRetrials = 0x00000020,

      // Mask covering all of the "doesn't kill" refined-alias bits below.
      RefinedAliasesMask = 0x0000FFC0,

      DoesntKillAddressStatics = 0x00000040,
      DoesntKillIntStatics = 0x00000080,
      DoesntKillNonIntPrimitiveStatics = 0x00000100,
      DoesntKillAddressFields = 0x00000200,
      DoesntKillIntFields = 0x00000400,
      DoesntKillNonIntPrimitiveFields = 0x00000800,
      DoesntKillAddressArrayShadows = 0x00001000,
      DoesntKillIntArrayShadows = 0x00002000,
      DoesntKillNonIntPrimitiveArrayShadows= 0x00004000,
      DoesntKillEverything = 0x00008000,

      // Define 4 bits to record the reason for recompilation (RAS feature; will be printed in VLOG)
      CompilationReasonMask = 0x000F0000,
      RecompDueToThreshold = 0x00010000,
      RecompDueToCounterZero = 0x00020000,
      RecompDueToMegamorphicCallProfile = 0x00030000, // also PIC miss (because we cannot distinguish between the two)
      RecompDueToEdo = 0x00040000,
      RecompDueToOptLevelUpgrade = 0x00050000,
      RecompDueToSecondaryQueue = 0x00060000,
      RecompDueToRecompilationPushing = 0x00070000,
      RecompDueToGCR = 0x00080000,
      RecompDueToForcedAOTUpgrade = 0x00090000,
      RecompDueToRI = 0x000A0000,
      RecompDueToJProfiling = 0x000B0000,
      RecompDueToInlinedMethodRedefinition = 0x000C0000,
      // NOTE: recompilations due to EDO decrementation cannot be tracked
      // because they are triggered from a snippet (must change the code for snippet)
      // Also, the recompilations after a profiling step cannot be marked as such.
      // NOTE: recompilations can be triggered by invalidations too, but this
      // information is already available in the linkage info for the body

      HasBeenReplaced = 0x00100000, // HCR: this struct is for the old version of a replaced method
                                    // Note: _methodInfo points to the methodInfo for the new version
                                    // Note: this flag is accessed from recomp asm code, so be careful about changing it
      WasNeverInterpreted = 0x00200000, // for methods that were compiled at count=0
                                        // Attention: this is not always accurate
      WasScannedForInlining = 0x00400000, // New scanning for warm method inlining
      IsInDataCache = 0x00800000, // This TR_PersistentMethodInfo is stored in the datacache for AOT
      lastFlag = 0x80000000
      };

   // Record the level (and whether profiling is used) for the next
   // compilation. Asserts if profiling is requested after it has been
   // permanently disabled via setDisableProfiling().
   void setNextCompileLevel(TR_Hotness level, bool profile)
      {
      _nextHotness = level; if (profile) TR_ASSERT(!profilingDisabled(), "assertion failure");
      _flags.set(UseProfiling, profile);
      }

   TR_Hotness getNextCompileLevel() { return _nextHotness; }
   bool getNextCompileProfiling() { return _flags.testAny(UseProfiling); }

   /**
    * Methods to update and access profile information. These will modify reference counts.
    * Most accesses to profiling data should go through TR_AccessesProfileInfo on TR::Compilation,
    * as it will manage reference counts for a compilation.
    *
    * Several threads may attempt to manipulate reference counts on these at once, potentially
    * resulting in a deallocation before it was intended. The low bit of the relevant pointer
    * is reused to avoid these situations. All accesses to _bestProfileInfo and _recentProfileInfo
    * should consider this.
    */
   TR_PersistentProfileInfo *getBestProfileInfo() { return getForSharedInfo(&_bestProfileInfo); }
   TR_PersistentProfileInfo *getRecentProfileInfo() { return getForSharedInfo(&_recentProfileInfo); }
   void setBestProfileInfo(TR_PersistentProfileInfo * ppi) { setForSharedInfo(&_bestProfileInfo, ppi); }
   void setRecentProfileInfo(TR_PersistentProfileInfo * ppi) { setForSharedInfo(&_recentProfileInfo, ppi); }

   // ### IMPORTANT ###
   // Method info must always be the first field in this structure
   // Flags must always be second
   // (external code relies on these offsets; do not reorder)
   private:
   TR_OpaqueMethodBlock *_methodInfo;
   flags32_t _flags;
   // ### IMPORTANT ###

   // During compilation _nextHotness is really the present hotness
   // at which compilation is taking place. This is setup at the end
   // of compilation to correct hotness level the next compilation should
   // be at. This may get tweaked by the sampling thread at runtime.
   //
   TR_Hotness _nextHotness;

   TR_OptimizationPlan *_optimizationPlan;

   int32_t _cpoSampleCounter; // TODO remove this field
   uint16_t _timeStamp;
   uint8_t _numberOfInvalidations; // how many times this method has been invalidated
   uint8_t _numberOfInlinedMethodRedefinition; // how many times this method triggers recompilation because of its inlined callees being redefined
   int16_t _numPrexAssumptions;

   // Low bit of these pointers is reused for synchronization; access only
   // through getForSharedInfo/setForSharedInfo (see comment above).
   TR_PersistentProfileInfo *_bestProfileInfo;
   TR_PersistentProfileInfo *_recentProfileInfo;

   TR_PersistentProfileInfo * getForSharedInfo(TR_PersistentProfileInfo** ptr);
   void setForSharedInfo(TR_PersistentProfileInfo** ptr, TR_PersistentProfileInfo *newInfo);
   };
307
308
309
// This information is kept for every jitted method that can be recompiled
// It may be garbage collected along with the jitted method
// The only way to get the following information is via a pointer that is kept
// in the prologue of the jitted method.
//
class TR_PersistentJittedBodyInfo
   {
   friend class OMR::Recompilation;
   friend class J9::Recompilation;
   friend class TR::CompilationInfo;
   friend class TR_S390Recompilation; // FIXME: ugly
   friend class TR::DefaultCompilationStrategy;
   friend class TR_EmilyPersistentJittedBodyInfo;
   friend class ::OMR::Options;
   friend class J9::Options;

#if defined(TR_HOST_X86) || defined(TR_HOST_POWER) || defined(TR_HOST_S390) || defined(TR_HOST_ARM) || defined(TR_HOST_ARM64)
   friend void fixPersistentMethodInfo(void *table, bool isJITClientAOTLoad);
#endif

   public:
   TR_PERSISTENT_ALLOC(TR_Memory::PersistentJittedBodyInfo);

   // Recover the body info from a method's entry point (startPC).
   static TR_PersistentJittedBodyInfo *get(void *startPC);

   bool getHasLoops() { return _flags.testAny(HasLoops); }
   bool getUsesPreexistence() { return _flags.testAny(UsesPreexistence); }
   bool getDisableSampling() { return _flags.testAny(DisableSampling); }
   void setDisableSampling(bool b) { _flags.set(DisableSampling, b); }
   bool getIsProfilingBody() { return _flags.testAny(IsProfilingBody); }
   // IsAotedBody and IsRemoteCompileBody are mutually exclusive (see enum).
   bool getIsAotedBody() { return _flags.testAny(IsAotedBody); }
   void setIsAotedBody(bool b) { _flags.set(IsAotedBody, b); }
   bool getIsRemoteCompileBody() { return _flags.testAny(IsRemoteCompileBody); }
   void setIsRemoteCompileBody(bool b){ _flags.set(IsRemoteCompileBody, b); }
   // SamplingRecomp is set-only: once a sampling-based recompilation
   // decision is taken, further sampling is suppressed.
   bool getSamplingRecomp() { return _flags.testAny(SamplingRecomp); }
   void setSamplingRecomp() { _flags.set(SamplingRecomp, true); }
   bool getIsPushedForRecompilation(){ return _flags.testAny(IsPushedForRecompilation); }
   void setIsPushedForRecompilation(){ _flags.set(IsPushedForRecompilation, true); }
   // Invalidation state is a plain bool (not in _flags); set-only.
   bool getIsInvalidated() { return _isInvalidated; }
   void setIsInvalidated() { _isInvalidated = true; }

   // RAS flags; FastRecompilationMask covers both bits below.
   bool getFastHotRecompilation() { return _flags.testAny(FastHotRecompilation); }
   void setFastHotRecompilation(bool b){ _flags.set(FastHotRecompilation, b); }
   bool getFastScorchingRecompilation(){ return _flags.testAny(FastScorchingRecompilation); }
   void setFastScorchingRecompilation(bool b){ _flags.set(FastScorchingRecompilation, b); }
   bool getFastRecompilation() { return _flags.testAny(FastRecompilationMask); }

   bool getUsesGCR() { return _flags.testAny(UsesGCR); }
   void setUsesGCR() { _flags.set(UsesGCR, true); }

   bool getReducedWarm() { return _flags.testAny(ReducedWarm); }
   void setReducedWarm() { _flags.set(ReducedWarm, true); }

   bool getUsesSamplingJProfiling() { return _flags.testAny(UsesSamplingJProfiling); }
   void setUsesSamplingJProfiling() { _flags.set(UsesSamplingJProfiling, true); }

   bool getUsesJProfiling() { return _flags.testAny(UsesJProfiling); }
   void setUsesJProfiling() { _flags.set(UsesJProfiling, true); }

   // used in dump recompilations
   void *getStartPCAfterPreviousCompile() { return _startPCAfterPreviousCompile; }
   void setStartPCAfterPreviousCompile(void *oldStartPC) { _startPCAfterPreviousCompile = oldStartPC; }

   TR_PersistentMethodInfo *getMethodInfo() { return _methodInfo; }
   int32_t getCounter() const { return _counter; }
   int32_t getStartCount() const { return _startCount; }
   TR_Hotness getHotness() const { return _hotness; }
   void setHotness(TR_Hotness h) { _hotness = h; }
   int32_t getOldStartCount() const { return _startCount - _oldStartCountDelta; } // FIXME: what if this is negative?
   uint16_t getOldStartCountDelta() const { return _oldStartCountDelta; }
   uint16_t getHotStartCountDelta() const { return _hotStartCountDelta; }
   void setHotStartCountDelta(uint16_t v) { _hotStartCountDelta = v; }

   // TODO: can we eliminate this mechanism?
   // FIXME: this should be unsigned
   uint8_t getAggressiveRecompilationChances() const { return _aggressiveRecompilationChances; }
   // Decrements toward zero; never wraps below zero.
   uint8_t decAggressiveRecompilationChances() { return _aggressiveRecompilationChances > 0 ? --_aggressiveRecompilationChances : 0; }

   uint8_t getNumScorchingIntervals() const { return _numScorchingIntervals; }
   // Saturates at 255 to avoid uint8_t wrap-around.
   void incNumScorchingIntervals() { if (_numScorchingIntervals < 255) ++_numScorchingIntervals; }
   void setMethodInfo(TR_PersistentMethodInfo *mi) { _methodInfo = mi; }
   void setStartCount(int32_t count) { _startCount = count; }
   void setCounter(int32_t counter) { _counter = counter; }
   void setOldStartCountDelta(uint16_t count) { _oldStartCountDelta = count; }

   void *getMapTable() const { return _mapTable; }
   void setMapTable(void* p) { _mapTable = p; }

   bool isLongRunningInterpreted() const { return _longRunningInterpreted; }

   /**
    * Access and modify the persistent profile info for this body.
    *
    * Uses of these methods should only occur while the body info is guaranteed
    * to not be cleaned up, such as during its compilation. This is because
    * these calls do not manage reference counts or synchronization, in
    * an attempt to reduce the overhead on accesses that are known to be safe.
    */
   void setProfileInfo(TR_PersistentProfileInfo * ppi) { _profileInfo = ppi; }
   TR_PersistentProfileInfo *getProfileInfo() { return _profileInfo; }

   enum
      {
      HasLoops = 0x0001,
      //HasManyIterationsLoops = 0x0002, // Available
      UsesPreexistence = 0x0004,
      DisableSampling = 0x0008, // This flag disables sampling of this method even though its recompilable
      IsProfilingBody = 0x0010,
      IsAotedBody = 0x0020, // } Mutually
      IsRemoteCompileBody = 0x0040, // } exclusive
      SamplingRecomp = 0x0080, // Set when recomp decision is taken due to sampling; used to
                               // prevent further sampling once a decision is taken
      IsPushedForRecompilation= 0x0100, // Set when the counter of this method is abruptly decremented to 1
                                        // by the recompilation pushing mechanism
      FastRecompilationMask = 0x0600, // RAS
      FastHotRecompilation = 0x0200, // RAS flag
      FastScorchingRecompilation=0x0400,// RAS flag
      UsesGCR = 0x0800,
      ReducedWarm = 0x1000, // Warm body was optimized to a lesser extent (NoServer) to reduce compilation time
      UsesSamplingJProfiling = 0x2000, // Body has samplingJProfiling code
      UsesJProfiling = 0x4000 // Body has jProfiling code
      };

   // ### IMPORTANT ###
   // These following four fields must always be the first four elements of this structure
   // (code elsewhere relies on their exact offsets; do not reorder or insert before them)
   private:
   int32_t _counter; // must be at offset 0
   TR_PersistentMethodInfo *_methodInfo; // must be at offset 4 (8 for 64bit)
   void *_startPCAfterPreviousCompile;
   void *_mapTable; // must be at offset 12 (24 for 64bit)

   // ### IMPORTANT ###

   static TR_PersistentJittedBodyInfo *allocate(TR_PersistentMethodInfo *methodInfo, TR_Hotness hotness, bool profiling, TR::Compilation * comp = 0);
   TR_PersistentJittedBodyInfo(TR_PersistentMethodInfo *methodInfo, TR_Hotness hotness, bool profile, TR::Compilation * comp = 0);

   int32_t *getCounterAddress() {return &_counter; }
   int32_t decCounter() { return --_counter; } //FIXME verify implementation

   uint8_t getSampleIntervalCount() { return _sampleIntervalCount; }
   void setSampleIntervalCount(uint8_t val) { _sampleIntervalCount = val; }
   // Advances the interval counter, wrapping at maxValue; each wrap counts
   // as one completed scorching interval.
   uint8_t incSampleIntervalCount(uint8_t maxValue)
      {
      if (++_sampleIntervalCount >= maxValue)
         {
         _sampleIntervalCount = 0; // wrap around
         incNumScorchingIntervals();
         }
      return _sampleIntervalCount;
      }

   void setHasLoops(bool b) { _flags.set(HasLoops, b); }
   void setUsesPreexistence(bool b) { _flags.set(UsesPreexistence, b); }
   void setIsProfilingBody(bool b) { _flags.set(IsProfilingBody, b); }

   int32_t _startCount; // number of global samples at the beginning of the sampling window
   uint16_t _hotStartCountDelta; // delta from the startCount (for the begin of a hot sampling window)
   uint16_t _oldStartCountDelta; // delta from the current start count (in the past);
   flags16_t _flags;
   uint8_t _sampleIntervalCount; // increases from 0 to 29. Defines a hot sampling window (30 samples)
   uint8_t _aggressiveRecompilationChances;
   TR_Hotness _hotness;
   uint8_t _numScorchingIntervals; // How many times we reached scorching recompilation decision points
   bool _isInvalidated;
   bool _longRunningInterpreted; // This cannot be moved into _flags due to synchronization issues
   TR_PersistentProfileInfo * _profileInfo;

   public:
   // Used for HWP-based recompilation
   bool _hwpInducedRecompilation;
   bool _hwpReducedWarmCompileRequested;
   bool _hwpReducedWarmCompileInQueue;
   uint64_t _hwpInstructionStartCount;
   uint32_t _hwpInstructionCount;
   };
484
485
#endif
486
487