GitHub Repository: PojavLauncherTeam/angle
Path: blob/main_old/src/tests/test_utils/runner/TestSuite.cpp
//
// Copyright 2019 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// TestSuite:
//   Basic implementation of a test harness in ANGLE.

#include "TestSuite.h"

#include "common/debug.h"
#include "common/platform.h"
#include "common/string_utils.h"
#include "common/system_utils.h"
#include "util/Timer.h"

#include <stdlib.h>
#include <time.h>

#include <fstream>
#include <unordered_map>

#include <gtest/gtest.h>
#include <rapidjson/document.h>
#include <rapidjson/filewritestream.h>
#include <rapidjson/istreamwrapper.h>
#include <rapidjson/prettywriter.h>
// We directly call into a function to register the parameterized tests. This saves spinning up
// a subprocess with a new gtest filter.
#include <gtest/../../src/gtest-internal-inl.h>

namespace js = rapidjson;

namespace angle
{
namespace
{
constexpr char kBatchId[]              = "--batch-id=";
constexpr char kFilterFileArg[]        = "--filter-file=";
constexpr char kFlakyRetries[]         = "--flaky-retries=";
constexpr char kGTestListTests[]       = "--gtest_list_tests";
constexpr char kHistogramJsonFileArg[] = "--histogram-json-file=";
constexpr char kListTests[]            = "--list-tests";
constexpr char kPrintTestStdout[]      = "--print-test-stdout";
constexpr char kResultFileArg[]        = "--results-file=";
constexpr char kTestTimeoutArg[]       = "--test-timeout=";
constexpr char kDisableCrashHandler[]  = "--disable-crash-handler";
constexpr char kIsolatedOutDir[]       = "--isolated-outdir=";
constexpr char kMaxFailures[]          = "--max-failures=";

constexpr char kStartedTestString[] = "[ RUN      ] ";
constexpr char kPassedTestString[]  = "[       OK ] ";
constexpr char kFailedTestString[]  = "[  FAILED  ] ";
constexpr char kSkippedTestString[] = "[  SKIPPED ] ";

constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";

// Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
// Ideally we could use a separate timeout for the slow first test.
#if defined(NDEBUG)
constexpr int kDefaultTestTimeout = 60;
#else
constexpr int kDefaultTestTimeout = 120;
#endif
constexpr int kSlowTestTimeoutScale = 3;
#if defined(NDEBUG)
constexpr int kDefaultBatchTimeout = 300;
#else
constexpr int kDefaultBatchTimeout = 600;
#endif
constexpr int kDefaultBatchSize      = 256;
constexpr double kIdleMessageTimeout = 15.0;
constexpr int kDefaultMaxProcesses   = 16;
constexpr int kDefaultMaxFailures    = 100;

const char *ParseFlagValue(const char *flag, const char *argument)
{
    if (strstr(argument, flag) == argument)
    {
        return argument + strlen(flag);
    }

    return nullptr;
}

bool ParseIntArg(const char *flag, const char *argument, int *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    char *end            = nullptr;
    const long longValue = strtol(value, &end, 10);

    if (*end != '\0')
    {
        printf("Error parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    if (longValue == LONG_MAX || longValue == LONG_MIN || static_cast<int>(longValue) != longValue)
    {
        printf("Overflow when parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    *valueOut = static_cast<int>(longValue);
    return true;
}
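// Parses like ParseIntArg but always reports the argument as not consumed, so it stays in
// argv for later readers (used below for --batch-id, which other test functions also need).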
bool ParseIntArgNoDelete(const char *flag, const char *argument, int *valueOut)
{
    ParseIntArg(flag, argument, valueOut);
    return false;
}

bool ParseFlag(const char *expected, const char *actual, bool *flagOut)
{
    if (strcmp(expected, actual) == 0)
    {
        *flagOut = true;
        return true;
    }
    return false;
}

bool ParseStringArg(const char *flag, const char *argument, std::string *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    *valueOut = value;
    return true;
}

void DeleteArg(int *argc, char **argv, int argIndex)
{
    // Shift the remainder of the argv list left by one. Note that argv has (*argc + 1) elements,
    // the last one always being NULL. The following loop moves the trailing NULL element as well.
    for (int index = argIndex; index < *argc; ++index)
    {
        argv[index] = argv[index + 1];
    }
    (*argc)--;
}

void AddArg(int *argc, char **argv, const char *arg)
{
    // This unsafe const_cast is necessary to work around gtest limitations.
    argv[*argc]     = const_cast<char *>(arg);
    argv[*argc + 1] = nullptr;
    (*argc)++;
}
const char *ResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "CRASH";
        case TestResultType::Fail:
            return "FAIL";
        case TestResultType::NoResult:
            return "NOTRUN";
        case TestResultType::Pass:
            return "PASS";
        case TestResultType::Skip:
            return "SKIP";
        case TestResultType::Timeout:
            return "TIMEOUT";
        case TestResultType::Unknown:
            return "UNKNOWN";
    }
}

TestResultType GetResultTypeFromString(const std::string &str)
{
    if (str == "CRASH")
        return TestResultType::Crash;
    if (str == "FAIL")
        return TestResultType::Fail;
    if (str == "PASS")
        return TestResultType::Pass;
    if (str == "NOTRUN")
        return TestResultType::NoResult;
    if (str == "SKIP")
        return TestResultType::Skip;
    if (str == "TIMEOUT")
        return TestResultType::Timeout;
    return TestResultType::Unknown;
}

bool IsFailedResult(TestResultType resultType)
{
    return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
}

js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
{
    js::Value jsName;
    jsName.SetString(ResultTypeToString(type), *allocator);
    return jsName;
}
bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
{
    FILE *fp = fopen(outputFile.c_str(), "w");
    if (!fp)
    {
        return false;
    }

    constexpr size_t kBufferSize = 0xFFFF;
    std::vector<char> writeBuffer(kBufferSize);
    js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
    js::PrettyWriter<js::FileWriteStream> writer(os);
    if (!doc->Accept(writer))
    {
        fclose(fp);
        return false;
    }
    fclose(fp);
    return true;
}

// Writes out a TestResults to the Chromium JSON Test Results format.
// https://chromium.googlesource.com/chromium/src.git/+/master/docs/testing/json_test_results_format.md
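// For illustration, the emitted document looks roughly like this (hypothetical test name,
// heavily abbreviated):
//
//   {
//       "interrupted": false,
//       "path_delimiter": ".",
//       "version": 3,
//       "seconds_since_epoch": 1612345678,
//       "num_failures_by_type": {"PASS": 1},
//       "tests": {
//           "BasicTest.Example/ES2_Vulkan": {"actual": "PASS", "expected": "PASS", "times": [0.5]}
//       }
//   }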
void WriteResultsFile(bool interrupted,
                      const TestResults &testResults,
                      const std::string &outputFile,
                      const char *testSuiteName)
{
    time_t ltime;
    time(&ltime);
    struct tm *timeinfo = gmtime(&ltime);
    ltime               = mktime(timeinfo);

    uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);

    js::Document doc;
    doc.SetObject();

    js::Document::AllocatorType &allocator = doc.GetAllocator();

    doc.AddMember("interrupted", interrupted, allocator);
    doc.AddMember("path_delimiter", ".", allocator);
    doc.AddMember("version", 3, allocator);
    doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);

    js::Value tests;
    tests.SetObject();

    // If we have any test artifacts, make a fake test to house them.
    if (!testResults.testArtifactPaths.empty())
    {
        js::Value artifactsTest;
        artifactsTest.SetObject();

        artifactsTest.AddMember("actual", "PASS", allocator);
        artifactsTest.AddMember("expected", "PASS", allocator);

        js::Value artifacts;
        artifacts.SetObject();

        for (const std::string &testArtifactPath : testResults.testArtifactPaths)
        {
            std::vector<std::string> pieces =
                SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
                            SplitResult::SPLIT_WANT_NONEMPTY);
            ASSERT(!pieces.empty());

            js::Value basename;
            basename.SetString(pieces.back(), allocator);

            js::Value artifactPath;
            artifactPath.SetString(testArtifactPath, allocator);

            js::Value artifactArray;
            artifactArray.SetArray();
            artifactArray.PushBack(artifactPath, allocator);

            artifacts.AddMember(basename, artifactArray, allocator);
        }

        artifactsTest.AddMember("artifacts", artifacts, allocator);

        js::Value fakeTestName;
        fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
        tests.AddMember(fakeTestName, artifactsTest, allocator);
    }

    std::map<TestResultType, uint32_t> counts;

    for (const auto &resultIter : testResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        js::Value jsResult;
        jsResult.SetObject();

        counts[result.type]++;

        std::string actualResult;
        for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
        {
            actualResult += "FAIL ";
        }

        actualResult += ResultTypeToString(result.type);

        std::string expectedResult = "PASS";
        if (result.type == TestResultType::Skip)
        {
            expectedResult = "SKIP";
        }

        // Handle flaky passing tests.
        if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
        {
            expectedResult = "FAIL PASS";
            jsResult.AddMember("is_flaky", true, allocator);
        }

        jsResult.AddMember("actual", actualResult, allocator);
        jsResult.AddMember("expected", expectedResult, allocator);

        if (IsFailedResult(result.type))
        {
            jsResult.AddMember("is_unexpected", true, allocator);
        }

        js::Value times;
        times.SetArray();
        times.PushBack(result.elapsedTimeSeconds, allocator);

        jsResult.AddMember("times", times, allocator);

        char testName[500];
        id.sprintfName(testName);
        js::Value jsName;
        jsName.SetString(testName, allocator);

        tests.AddMember(jsName, jsResult, allocator);
    }

    js::Value numFailuresByType;
    numFailuresByType.SetObject();

    for (const auto &countIter : counts)
    {
        TestResultType type = countIter.first;
        uint32_t count      = countIter.second;

        js::Value jsCount(count);
        numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
    }

    doc.AddMember("num_failures_by_type", numFailuresByType, allocator);

    doc.AddMember("tests", tests, allocator);

    printf("Writing test results to %s\n", outputFile.c_str());

    if (!WriteJsonFile(outputFile, &doc))
    {
        printf("Error writing test results file.\n");
    }
}
void WriteHistogramJson(const HistogramWriter &histogramWriter,
                        const std::string &outputFile,
                        const char *testSuiteName)
{
    js::Document doc;
    doc.SetArray();

    histogramWriter.getAsJSON(&doc);

    printf("Writing histogram json to %s\n", outputFile.c_str());

    if (!WriteJsonFile(outputFile, &doc))
    {
        printf("Error writing histogram json file.\n");
    }
}

void WriteOutputFiles(bool interrupted,
                      const TestResults &testResults,
                      const std::string &resultsFile,
                      const HistogramWriter &histogramWriter,
                      const std::string &histogramJsonOutputFile,
                      const char *testSuiteName)
{
    if (!resultsFile.empty())
    {
        WriteResultsFile(interrupted, testResults, resultsFile, testSuiteName);
    }

    if (!histogramJsonOutputFile.empty())
    {
        WriteHistogramJson(histogramWriter, histogramJsonOutputFile, testSuiteName);
    }
}
void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
{
    TestResult &resultOut = resultsOut->results[resultsOut->currentTest];

    // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
    if (resultIn.Skipped())
    {
        resultOut.type = TestResultType::Skip;
    }
    else if (resultIn.Failed())
    {
        resultOut.type = TestResultType::Fail;
    }
    else
    {
        resultOut.type = TestResultType::Pass;
    }

    resultOut.elapsedTimeSeconds = resultsOut->currentTestTimer.getElapsedTime();
}

TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
{
    return {testInfo.test_suite_name(), testInfo.name()};
}
class TestEventListener : public testing::EmptyTestEventListener
{
  public:
    // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
    TestEventListener(const std::string &resultsFile,
                      const std::string &histogramJsonFile,
                      const char *testSuiteName,
                      TestResults *testResults,
                      HistogramWriter *histogramWriter)
        : mResultsFile(resultsFile),
          mHistogramJsonFile(histogramJsonFile),
          mTestSuiteName(testSuiteName),
          mTestResults(testResults),
          mHistogramWriter(histogramWriter)
    {}

    void OnTestStart(const testing::TestInfo &testInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->currentTest = GetTestIdentifier(testInfo);
        mTestResults->currentTestTimer.start();
    }

    void OnTestEnd(const testing::TestInfo &testInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->currentTestTimer.stop();
        const testing::TestResult &resultIn = *testInfo.result();
        UpdateCurrentTestResult(resultIn, mTestResults);
        mTestResults->currentTest = TestIdentifier();
    }

    void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->allDone = true;
        WriteOutputFiles(false, *mTestResults, mResultsFile, *mHistogramWriter, mHistogramJsonFile,
                         mTestSuiteName);
    }

  private:
    std::string mResultsFile;
    std::string mHistogramJsonFile;
    const char *mTestSuiteName;
    TestResults *mTestResults;
    HistogramWriter *mHistogramWriter;
};

bool IsTestDisabled(const testing::TestInfo &testInfo)
{
    return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
}

using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;
std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
                                        TestIdentifierFilter filter,
                                        bool alsoRunDisabledTests)
{
    std::vector<TestIdentifier> tests;

    const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
    for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
    {
        const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
        for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
        {
            const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
            TestIdentifier id                 = GetTestIdentifier(testInfo);
            if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
            {
                tests.emplace_back(id);

                if (fileLinesOut)
                {
                    (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
                }
            }
        }
    }

    return tests;
}

std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
                                             bool alsoRunDisabledTests)
{
    TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
        return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
    };

    return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
}
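// Shards are striped rather than chunked: shard N takes tests N, N + shardCount,
// N + 2 * shardCount, and so on. For example, 8 tests split across 3 shards gives shard 0
// tests {0, 3, 6}, shard 1 tests {1, 4, 7}, and shard 2 tests {2, 5}.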
std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
                                          int shardIndex,
                                          int shardCount,
                                          std::map<TestIdentifier, FileLine> *fileLinesOut,
                                          bool alsoRunDisabledTests)
{
    std::vector<TestIdentifier> shardTests;

    for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
         testIndex += shardCount)
    {
        shardTests.emplace_back(allTests[testIndex]);
    }

    return shardTests;
}
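// Builds a single gtest filter argument from a test list, e.g.
// "--gtest_filter=SuiteA.Test1:SuiteA.Test2:SuiteB.Test1".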
std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
{
    std::stringstream filterStream;

    filterStream << "--gtest_filter=";

    for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
    {
        if (testIndex != 0)
        {
            filterStream << ":";
        }

        filterStream << tests[testIndex];
    }

    return filterStream.str();
}
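// Derives the suite name from the executable path by stripping any leading directory and any
// executable extension, e.g. "angle_unittests.exe" -> "angle_unittests" (hypothetical name).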
std::string ParseTestSuiteName(const char *executable)
{
    const char *baseNameStart = strrchr(executable, GetPathSeparator());
    if (!baseNameStart)
    {
        baseNameStart = executable;
    }
    else
    {
        baseNameStart++;
    }

    const char *suffix = GetExecutableExtension();
    size_t suffixLen   = strlen(suffix);
    if (suffixLen == 0)
    {
        return baseNameStart;
    }

    if (!EndsWith(baseNameStart, suffix))
    {
        return baseNameStart;
    }

    return std::string(baseNameStart, baseNameStart + strlen(baseNameStart) - suffixLen);
}
bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
                              std::vector<std::string> *testArtifactPathsOut)
{
    if (!obj.HasMember("artifacts"))
    {
        printf("No artifacts member.\n");
        return false;
    }

    const js::Value &jsArtifacts = obj["artifacts"];
    if (!jsArtifacts.IsObject())
    {
        printf("Artifacts are not an object.\n");
        return false;
    }

    const js::Value::ConstObject &artifacts = jsArtifacts.GetObject();
    for (const auto &artifactMember : artifacts)
    {
        const js::Value &artifact = artifactMember.value;
        if (!artifact.IsArray())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value::ConstArray &artifactArray = artifact.GetArray();
        if (artifactArray.Size() != 1)
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value &artifactName = artifactArray[0];
        if (!artifactName.IsString())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        testArtifactPathsOut->push_back(artifactName.GetString());
    }

    return true;
}
bool GetSingleTestResultFromJSON(const js::Value &name,
                                 const js::Value::ConstObject &obj,
                                 TestResults *resultsOut)
{
    TestIdentifier id;
    if (!TestIdentifier::ParseFromString(name.GetString(), &id))
    {
        printf("Could not parse test identifier.\n");
        return false;
    }

    if (!obj.HasMember("expected") || !obj.HasMember("actual"))
    {
        printf("No expected or actual member.\n");
        return false;
    }

    const js::Value &expected = obj["expected"];
    const js::Value &actual   = obj["actual"];

    if (!expected.IsString() || !actual.IsString())
    {
        printf("Expected or actual member is not a string.\n");
        return false;
    }

    const std::string actualStr = actual.GetString();

    TestResultType resultType = TestResultType::Unknown;
    int flakyFailures         = 0;
    if (actualStr.find(' ') != std::string::npos)
    {
        std::istringstream strstr(actualStr);
        std::string token;
        while (std::getline(strstr, token, ' '))
        {
            resultType = GetResultTypeFromString(token);
            if (resultType == TestResultType::Unknown)
            {
                printf("Failed to parse result type.\n");
                return false;
            }
            if (IsFailedResult(resultType))
            {
                flakyFailures++;
            }
        }
    }
    else
    {
        resultType = GetResultTypeFromString(actualStr);
        if (resultType == TestResultType::Unknown)
        {
            printf("Failed to parse result type.\n");
            return false;
        }
    }

    double elapsedTimeSeconds = 0.0;
    if (obj.HasMember("times"))
    {
        const js::Value &times = obj["times"];
        if (!times.IsArray())
        {
            return false;
        }

        const js::Value::ConstArray &timesArray = times.GetArray();
        if (timesArray.Size() != 1 || !timesArray[0].IsDouble())
        {
            return false;
        }

        elapsedTimeSeconds = timesArray[0].GetDouble();
    }

    TestResult &result        = resultsOut->results[id];
    result.elapsedTimeSeconds = elapsedTimeSeconds;
    result.type               = resultType;
    result.flakyFailures      = flakyFailures;
    return true;
}
bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
{
    if (!document.HasMember("tests") || !document["tests"].IsObject())
    {
        printf("JSON document has no tests member.\n");
        return false;
    }

    const js::Value::ConstObject &tests = document["tests"].GetObject();
    for (const auto &testMember : tests)
    {
        // Get test identifier.
        const js::Value &name = testMember.name;
        if (!name.IsString())
        {
            printf("Name is not a string.\n");
            return false;
        }

        // Get test result.
        const js::Value &value = testMember.value;
        if (!value.IsObject())
        {
            printf("Test result is not an object.\n");
            return false;
        }

        const js::Value::ConstObject &obj = value.GetObject();

        if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
        {
            if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
            {
                return false;
            }
        }
        else
        {
            if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
            {
                return false;
            }
        }
    }

    return true;
}
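// Folds one batch's results into the cumulative results. A failed test that still has flaky
// retries remaining is reset to NoResult in 'input' so the caller re-queues it; all other
// results are recorded as final.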
bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
{
    for (auto &resultsIter : input->results)
    {
        const TestIdentifier &id = resultsIter.first;
        TestResult &inputResult  = resultsIter.second;
        TestResult &outputResult = output->results[id];

        if (inputResult.type != TestResultType::NoResult)
        {
            if (outputResult.type != TestResultType::NoResult)
            {
                printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
                       id.testName.c_str());
                return false;
            }

            // Reset the tests that haven't exhausted their retries to 'NoResult'. This makes
            // ANGLE attempt the test again.
            uint32_t runCount = outputResult.flakyFailures + 1;
            if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
            {
                printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
                       id.testName.c_str());
                inputResult.type = TestResultType::NoResult;
                outputResult.flakyFailures++;
            }
            else
            {
                outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
                outputResult.type               = inputResult.type;
            }
        }
    }

    output->testArtifactPaths.insert(output->testArtifactPaths.end(),
                                     input->testArtifactPaths.begin(),
                                     input->testArtifactPaths.end());

    return true;
}
void PrintTestOutputSnippet(const TestIdentifier &id,
                            const TestResult &result,
                            const std::string &fullOutput)
{
    std::stringstream nameStream;
    nameStream << id;
    std::string fullName = nameStream.str();

    size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
    if (runPos == std::string::npos)
    {
        printf("Cannot locate test output snippet.\n");
        return;
    }

    size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
    // Only clip the snippet to the "OK" message if the test really
    // succeeded. It still might have e.g. crashed after printing it.
    if (endPos == std::string::npos && result.type == TestResultType::Pass)
    {
        endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
    }
    if (endPos != std::string::npos)
    {
        size_t newline_pos = fullOutput.find("\n", endPos);
        if (newline_pos != std::string::npos)
            endPos = newline_pos + 1;
    }

    std::cout << "\n";
    if (endPos != std::string::npos)
    {
        std::cout << fullOutput.substr(runPos, endPos - runPos);
    }
    else
    {
        std::cout << fullOutput.substr(runPos);
    }
}
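// Extracts the config portion of a parameterized test name, e.g. (hypothetical names)
// "RunTest/ES2_Vulkan" -> "ES2_Vulkan" and "RunTest/ES3_OpenGL__Variant" -> "ES3_OpenGL".
// Names with no '/' (or without an "ES" prefix after it) map to "default".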
std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
{
    size_t slashPos = id.testName.find('/');
    if (slashPos == std::string::npos)
    {
        return "default";
    }

    size_t doubleUnderscorePos = id.testName.find("__");
    if (doubleUnderscorePos == std::string::npos)
    {
        std::string configName = id.testName.substr(slashPos + 1);

        if (!BeginsWith(configName, "ES"))
        {
            return "default";
        }

        return configName;
    }
    else
    {
        return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
    }
}
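// Example of the striping below: 5 tests in one config with a batch size of 2 need
// ceil(5 / 2) = 3 batches. Batch 0 gets tests {0, 3}, batch 1 gets {1, 4}, and batch 2 gets
// {2}, so neighboring (often similarly slow) tests land in different batches.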
TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
{
    // First sort tests by configuration.
    angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
    for (const TestIdentifier &id : tests)
    {
        std::string config = GetConfigNameFromTestIdentifier(id);
        testsSortedByConfig[config].push_back(id);
    }

    // Then group into batches by 'batchSize'.
    TestQueue testQueue;
    for (const auto &configAndIds : testsSortedByConfig)
    {
        const std::vector<TestIdentifier> &configTests = configAndIds.second;

        // Count the number of batches needed for this config.
        int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;

        // Create batches with striping to split up slow tests.
        for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
        {
            std::vector<TestIdentifier> batchTests;
            for (size_t testIndex = batchIndex; testIndex < configTests.size();
                 testIndex += batchesForConfig)
            {
                batchTests.push_back(configTests[testIndex]);
            }
            testQueue.emplace(std::move(batchTests));
            ASSERT(batchTests.empty());
        }
    }

    return testQueue;
}
void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
{
    std::cout << "Tests list:\n";

    for (const auto &resultIt : resultsMap)
    {
        const TestIdentifier &id = resultIt.first;
        std::cout << id << "\n";
    }

    std::cout << "End tests list.\n";
}

// Prints the names of the tests matching the user-specified filter flag.
// This matches the output from googletest/src/gtest.cc but is much much faster for large filters.
// See http://anglebug.com/5164
void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
{
    std::map<std::string, std::vector<std::string>> suites;

    for (const auto &resultIt : resultsMap)
    {
        const TestIdentifier &id = resultIt.first;
        suites[id.testSuiteName].push_back(id.testName);
    }

    for (const auto &testSuiteIt : suites)
    {
        bool printedTestSuiteName = false;

        const std::string &suiteName              = testSuiteIt.first;
        const std::vector<std::string> &testNames = testSuiteIt.second;

        for (const std::string &testName : testNames)
        {
            if (!printedTestSuiteName)
            {
                printedTestSuiteName = true;
                printf("%s.\n", suiteName.c_str());
            }
            printf("  %s\n", testName.c_str());
        }
    }
}
}  // namespace
// static
TestSuite *TestSuite::mInstance = nullptr;

TestIdentifier::TestIdentifier() = default;

TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
    : testSuiteName(suiteNameIn), testName(nameIn)
{}

TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;

TestIdentifier::~TestIdentifier() = default;

TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;

void TestIdentifier::sprintfName(char *outBuffer) const
{
    sprintf(outBuffer, "%s.%s", testSuiteName.c_str(), testName.c_str());
}

// static
bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
{
    size_t separator = str.find(".");
    if (separator == std::string::npos)
    {
        return false;
    }

    idOut->testSuiteName = str.substr(0, separator);
    idOut->testName      = str.substr(separator + 1, str.length() - separator - 1);
    return true;
}

TestResults::TestResults() = default;

TestResults::~TestResults() = default;

ProcessInfo::ProcessInfo() = default;

ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
{
    process         = std::move(rhs.process);
    testsInBatch    = std::move(rhs.testsInBatch);
    resultsFileName = std::move(rhs.resultsFileName);
    filterFileName  = std::move(rhs.filterFileName);
    commandLine     = std::move(rhs.commandLine);
    filterString    = std::move(rhs.filterString);
    return *this;
}

ProcessInfo::~ProcessInfo() = default;

ProcessInfo::ProcessInfo(ProcessInfo &&other)
{
    *this = std::move(other);
}
TestSuite::TestSuite(int *argc, char **argv)
    : mShardCount(-1),
      mShardIndex(-1),
      mBotMode(false),
      mDebugTestGroups(false),
      mGTestListTests(false),
      mListTests(false),
      mPrintTestStdout(false),
      mDisableCrashHandler(false),
      mBatchSize(kDefaultBatchSize),
      mCurrentResultCount(0),
      mTotalResultCount(0),
      mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
      mTestTimeout(kDefaultTestTimeout),
      mBatchTimeout(kDefaultBatchTimeout),
      mBatchId(-1),
      mFlakyRetries(0),
      mMaxFailures(kDefaultMaxFailures),
      mFailureCount(0)
{
    ASSERT(mInstance == nullptr);
    mInstance = this;

    Optional<int> filterArgIndex;
    bool alsoRunDisabledTests = false;

#if defined(ANGLE_PLATFORM_MACOS)
    // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
    // file access.
    angle::InitMetalFileAPIHooking(*argc, argv);
#endif

#if defined(ANGLE_PLATFORM_WINDOWS)
    testing::GTEST_FLAG(catch_exceptions) = false;
#endif

    if (*argc <= 0)
    {
        printf("Missing test arguments.\n");
        exit(EXIT_FAILURE);
    }

    mTestExecutableName = argv[0];
    mTestSuiteName      = ParseTestSuiteName(mTestExecutableName.c_str());

    for (int argIndex = 1; argIndex < *argc;)
    {
        if (parseSingleArg(argv[argIndex]))
        {
            DeleteArg(argc, argv, argIndex);
            continue;
        }

        if (ParseFlagValue("--gtest_filter=", argv[argIndex]))
        {
            filterArgIndex = argIndex;
        }
        else
        {
            // Don't include disabled tests in test lists unless the user asks for them.
            if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
            {
                alsoRunDisabledTests = true;
            }

            mChildProcessArgs.push_back(argv[argIndex]);
        }
        ++argIndex;
    }

    mTestResults.currentTestTimeout = mTestTimeout;

#if defined(ANGLE_PLATFORM_ANDROID)
    // Workaround for the Android test runner requiring a GTest test list.
    if (mListTests && filterArgIndex.valid())
    {
        DeleteArg(argc, argv, filterArgIndex.value());
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if (!mDisableCrashHandler)
    {
        // Note that the crash callback must be owned and not use global constructors.
        mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
        InitCrashHandler(&mCrashCallback);
    }

    std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
    if (!envShardIndex.empty())
    {
        angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
        if (mShardIndex == -1)
        {
            std::stringstream shardIndexStream(envShardIndex);
            shardIndexStream >> mShardIndex;
        }
    }

    std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
    if (!envTotalShards.empty())
    {
        angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
        if (mShardCount == -1)
        {
            std::stringstream shardCountStream(envTotalShards);
            shardCountStream >> mShardCount;
        }
    }

    if ((mShardIndex == -1) != (mShardCount == -1))
    {
        printf("Shard index and shard count must be specified together.\n");
        exit(EXIT_FAILURE);
    }

    if (!mFilterFile.empty())
    {
        if (filterArgIndex.valid())
        {
            printf("Cannot use gtest_filter in conjunction with a filter file.\n");
            exit(EXIT_FAILURE);
        }

        uint32_t fileSize = 0;
        if (!GetFileSize(mFilterFile.c_str(), &fileSize))
        {
            printf("Error getting filter file size: %s\n", mFilterFile.c_str());
            exit(EXIT_FAILURE);
        }

        std::vector<char> fileContents(fileSize + 1, 0);
        if (!ReadEntireFileToString(mFilterFile.c_str(), fileContents.data(), fileSize))
        {
            printf("Error loading filter file: %s\n", mFilterFile.c_str());
            exit(EXIT_FAILURE);
        }
        mFilterString.assign(fileContents.data());

        if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
        {
            printf("Filter file must start with \"--gtest_filter=\".\n");
            exit(EXIT_FAILURE);
        }

        // Note that we only add a filter string if we previously deleted a filter file
        // argument. So we will have space for the new filter string in argv.
        AddArg(argc, argv, mFilterString.c_str());
    }

    // Call into gtest internals to force parameterized test name registration.
    testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
    impl->RegisterParameterizedTests();

    // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
    testing::internal::ParseGoogleTestFlagsOnly(argc, argv);

    std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);

    if (mShardCount == 0)
    {
        printf("Shard count must be > 0.\n");
        exit(EXIT_FAILURE);
    }
    else if (mShardCount > 0)
    {
        if (mShardIndex >= mShardCount)
        {
            printf("Shard index must be less than shard count.\n");
            exit(EXIT_FAILURE);
        }

        // If there's only one shard, we can use the testSet as defined above.
        if (mShardCount > 1)
        {
            testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
                                    alsoRunDisabledTests);

            if (!mBotMode)
            {
                mFilterString = GetTestFilter(testSet);

                if (filterArgIndex.valid())
                {
                    argv[filterArgIndex.value()] = const_cast<char *>(mFilterString.c_str());
                }
                else
                {
                    // Note that we only add a filter string if we previously deleted a shard
                    // index/count argument. So we will have space for the new filter string in
                    // argv.
                    AddArg(argc, argv, mFilterString.c_str());
                }

                // Force-re-initialize GoogleTest flags to load the shard filter.
                testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
            }
        }
    }

    {
        std::stringstream fakeTestName;
        fakeTestName << kArtifactsFakeTestName;
        if (mShardIndex != -1)
        {
            fakeTestName << "-Shard" << std::setfill('0') << std::setw(2) << mShardIndex;
        }
        mTestResults.testArtifactsFakeTestName = fakeTestName.str();
    }

    if (mBotMode)
    {
        // Split up test batches.
        mTestQueue = BatchTests(testSet, mBatchSize);

        if (mDebugTestGroups)
        {
            std::cout << "Test Groups:\n";

            while (!mTestQueue.empty())
            {
                const std::vector<TestIdentifier> &tests = mTestQueue.front();
                std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
                          << static_cast<int>(tests.size()) << ")\n";
                mTestQueue.pop();
            }

            exit(EXIT_SUCCESS);
        }
    }

    testing::InitGoogleTest(argc, argv);

    mTotalResultCount = testSet.size();

    if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
    {
        // Create a default output file in bot mode.
        mResultsFile = "output.json";
    }

    if (!mResultsDirectory.empty())
    {
        std::stringstream resultFileName;
        resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
        mResultsFile = resultFileName.str();
    }

    if (!mBotMode)
    {
        testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
        listeners.Append(new TestEventListener(mResultsFile, mHistogramJsonFile,
                                               mTestSuiteName.c_str(), &mTestResults,
                                               &mHistogramWriter));

        for (const TestIdentifier &id : testSet)
        {
            mTestResults.results[id].type = TestResultType::NoResult;
        }
    }
}
TestSuite::~TestSuite()
{
    if (mWatchdogThread.joinable())
    {
        mWatchdogThread.detach();
    }
    TerminateCrashHandler();
}

bool TestSuite::parseSingleArg(const char *argument)
{
    // Note: Flags should be documented in README.md.
    return (ParseIntArg("--shard-count=", argument, &mShardCount) ||
            ParseIntArg("--shard-index=", argument, &mShardIndex) ||
            ParseIntArg("--batch-size=", argument, &mBatchSize) ||
            ParseIntArg("--max-processes=", argument, &mMaxProcesses) ||
            ParseIntArg(kTestTimeoutArg, argument, &mTestTimeout) ||
            ParseIntArg("--batch-timeout=", argument, &mBatchTimeout) ||
            ParseIntArg(kFlakyRetries, argument, &mFlakyRetries) ||
            ParseIntArg(kMaxFailures, argument, &mMaxFailures) ||
            // Other test functions consume the batch ID, so keep it in the list.
            ParseIntArgNoDelete(kBatchId, argument, &mBatchId) ||
            ParseStringArg("--results-directory=", argument, &mResultsDirectory) ||
            ParseStringArg(kResultFileArg, argument, &mResultsFile) ||
            ParseStringArg("--isolated-script-test-output=", argument, &mResultsFile) ||
            ParseStringArg(kFilterFileArg, argument, &mFilterFile) ||
            ParseStringArg(kHistogramJsonFileArg, argument, &mHistogramJsonFile) ||
            // We need these overloads to work around technical debt in the Android test runner.
            ParseStringArg("--isolated-script-test-perf-output=", argument, &mHistogramJsonFile) ||
            ParseStringArg("--isolated_script_test_perf_output=", argument, &mHistogramJsonFile) ||
            ParseStringArg(kIsolatedOutDir, argument, &mTestArtifactDirectory) ||
            ParseFlag("--bot-mode", argument, &mBotMode) ||
            ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) ||
            ParseFlag(kGTestListTests, argument, &mGTestListTests) ||
            ParseFlag(kListTests, argument, &mListTests) ||
            ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout) ||
            ParseFlag(kDisableCrashHandler, argument, &mDisableCrashHandler));
}
void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
{
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    if (mTestResults.currentTest.valid())
    {
        TestResult &result        = mTestResults.results[mTestResults.currentTest];
        result.type               = crashOrTimeout;
        result.elapsedTimeSeconds = mTestResults.currentTestTimer.getElapsedTime();
    }

    if (mResultsFile.empty())
    {
        printf("No results file specified.\n");
        return;
    }

    WriteOutputFiles(true, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                     mTestSuiteName.c_str());
}
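// Spawns one child batch. The child re-runs this same executable with a command line along
// the lines of (illustrative):
//   <test_executable> --filter-file=<temp file> --results-file=<temp file> --batch-id=<N>
// plus any pass-through arguments; the parent harvests the results file in finishProcess().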
bool TestSuite::launchChildTestProcess(uint32_t batchId,
                                       const std::vector<TestIdentifier> &testsInBatch)
{
    constexpr uint32_t kMaxPath = 1000;

    // Create a temporary file to store the test list.
    ProcessInfo processInfo;

    char filterBuffer[kMaxPath] = {};
    if (!CreateTemporaryFile(filterBuffer, kMaxPath))
    {
        std::cerr << "Error creating temporary file for test list.\n";
        return false;
    }
    processInfo.filterFileName.assign(filterBuffer);

    std::string filterString = GetTestFilter(testsInBatch);

    FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
    if (!fp)
    {
        std::cerr << "Error opening temporary file for test list.\n";
        return false;
    }
    fprintf(fp, "%s", filterString.c_str());
    fclose(fp);

    processInfo.filterString = filterString;

    std::string filterFileArg = kFilterFileArg + processInfo.filterFileName;

    // Create a temporary file to store the test output.
    char resultsBuffer[kMaxPath] = {};
    if (!CreateTemporaryFile(resultsBuffer, kMaxPath))
    {
        std::cerr << "Error creating temporary file for test results.\n";
        return false;
    }
    processInfo.resultsFileName.assign(resultsBuffer);

    std::string resultsFileArg = kResultFileArg + processInfo.resultsFileName;

    // Construct command line for child process.
    std::vector<const char *> args;

    args.push_back(mTestExecutableName.c_str());
    args.push_back(filterFileArg.c_str());
    args.push_back(resultsFileArg.c_str());

    std::stringstream batchIdStream;
    batchIdStream << kBatchId << batchId;
    std::string batchIdString = batchIdStream.str();
    args.push_back(batchIdString.c_str());

    for (const std::string &arg : mChildProcessArgs)
    {
        args.push_back(arg.c_str());
    }

    if (mDisableCrashHandler)
    {
        args.push_back(kDisableCrashHandler);
    }

    std::string timeoutStr;
    if (mTestTimeout != kDefaultTestTimeout)
    {
        std::stringstream timeoutStream;
        timeoutStream << kTestTimeoutArg << mTestTimeout;
        timeoutStr = timeoutStream.str();
        args.push_back(timeoutStr.c_str());
    }

    std::string artifactsDir;
    if (!mTestArtifactDirectory.empty())
    {
        std::stringstream artifactsDirStream;
        artifactsDirStream << kIsolatedOutDir << mTestArtifactDirectory;
        artifactsDir = artifactsDirStream.str();
        args.push_back(artifactsDir.c_str());
    }

    // Launch the child process; completion is detected later by polling in run().
    processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);

    if (!processInfo.process->started())
    {
        std::cerr << "Error launching child process.\n";
        return false;
    }

    std::stringstream commandLineStr;
    for (const char *arg : args)
    {
        commandLineStr << arg << " ";
    }

    processInfo.commandLine  = commandLineStr.str();
    processInfo.testsInBatch = testsInBatch;
    mCurrentProcesses.emplace_back(std::move(processInfo));
    return true;
}
void ParseTestIdentifierAndSetResult(const std::string &testName,
                                     TestResultType result,
                                     TestResults *results)
{
    // Trim off any whitespace + extra stuff at the end of the string.
    std::string modifiedTestName = testName.substr(0, testName.find(' '));
    modifiedTestName             = modifiedTestName.substr(0, modifiedTestName.find('\r'));
    TestIdentifier id;
    bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
    ASSERT(ok);
    results->results[id] = {result};
}
bool TestSuite::finishProcess(ProcessInfo *processInfo)
{
    // Get test results and merge into master list.
    TestResults batchResults;

    if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
    {
        std::cerr << "Warning: could not find test results file from child process.\n";

        // First assume all tests get skipped.
        for (const TestIdentifier &id : processInfo->testsInBatch)
        {
            batchResults.results[id] = {TestResultType::NoResult};
        }

        // Attempt to reconstruct passing list from stdout snippets.
        const std::string &batchStdout = processInfo->process->getStdout();
        std::istringstream linesStream(batchStdout);

        std::string line;
        while (std::getline(linesStream, line))
        {
            size_t startPos   = line.find(kStartedTestString);
            size_t failPos    = line.find(kFailedTestString);
            size_t passPos    = line.find(kPassedTestString);
            size_t skippedPos = line.find(kSkippedTestString);

            if (startPos != std::string::npos)
            {
                // Assume a test that's started crashed until we see it completed.
                std::string testName = line.substr(strlen(kStartedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
            }
            else if (failPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kFailedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
            }
            else if (passPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kPassedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
            }
            else if (skippedPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kSkippedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
            }
        }
    }

    if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
    {
        std::cerr << "Error merging batch test results.\n";
        return false;
    }

    if (!batchResults.results.empty())
    {
        const TestIdentifier &id = batchResults.results.begin()->first;
        std::string config       = GetConfigNameFromTestIdentifier(id);
        printf("Completed batch with config: %s\n", config.c_str());

        for (const auto &resultIter : batchResults.results)
        {
            const TestResult &result = resultIter.second;
            if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
            {
                printf("To reproduce the batch, use filter:\n%s\n",
                       processInfo->filterString.c_str());
                break;
            }
        }
    }

    // Process results and print unexpected errors.
    for (const auto &resultIter : batchResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        // NoResult entries aren't processed here since they're added back to the test queue
        // below.
        if (result.type == TestResultType::NoResult)
        {
            continue;
        }

        mCurrentResultCount++;

        printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
               id.testName.c_str());

        if (mPrintTestStdout)
        {
            const std::string &batchStdout = processInfo->process->getStdout();
            PrintTestOutputSnippet(id, result, batchStdout);
        }
        else if (result.type == TestResultType::Pass)
        {
            printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds * 1000.0);
        }
        else if (result.type == TestResultType::Skip)
        {
            printf(" (skipped)\n");
        }
        else if (result.type == TestResultType::Timeout)
        {
            printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds);
            mFailureCount++;
        }
        else
        {
            printf(" (%s)\n", ResultTypeToString(result.type));
            mFailureCount++;

            const std::string &batchStdout = processInfo->process->getStdout();
            PrintTestOutputSnippet(id, result, batchStdout);
        }
    }

    // On unexpected exit, re-queue any unfinished tests.
    std::vector<TestIdentifier> unfinishedTests;
    for (const auto &resultIter : batchResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::NoResult)
        {
            unfinishedTests.push_back(id);
        }
    }

    if (!unfinishedTests.empty())
    {
        mTestQueue.emplace(std::move(unfinishedTests));
    }

    // Clean up any dirty temporary files.
    for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
    {
        // Note: we should be aware that this cleanup won't happen if the harness itself
        // crashes. If this situation comes up in the future we should add crash cleanup to the
        // harness.
        if (!angle::DeleteFile(tempFile.c_str()))
        {
            std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
        }
    }

    processInfo->process.reset();
    return true;
}
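// In bot mode, run() is the parent scheduling loop: it keeps up to mMaxProcesses child
// batches in flight, polls them for completion, kills any batch exceeding mBatchTimeout
// (recording a Timeout for each of its tests), and re-queues unfinished tests until the
// queue drains. Outside bot mode the tests run in-process via RUN_ALL_TESTS().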
int TestSuite::run()
{
#if defined(ANGLE_PLATFORM_ANDROID)
    if (mListTests && mGTestListTests)
    {
        // Workaround for the Android test runner requiring a GTest test list.
        printf("PlaceholderTest.\n  Placeholder\n");
        return EXIT_SUCCESS;
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if (mListTests)
    {
        ListTests(mTestResults.results);

#if defined(ANGLE_PLATFORM_ANDROID)
        // Because of quirks with the Chromium-provided Android test runner, we need to use a few
        // tricks to get the test list output. We add placeholder output for a single test to trick
        // the test runner into thinking it ran the tests successfully. We also add an end marker
        // for the tests list so we can parse the list from the more spammy Android stdout log.
        static constexpr char kPlaceholderTestTest[] = R"(
[==========] Running 1 test from 1 test suite.
[----------] Global test environment set-up.
[----------] 1 test from PlaceholderTest
[ RUN      ] PlaceholderTest.Placeholder
[       OK ] PlaceholderTest.Placeholder (0 ms)
[----------] 1 test from APITest (0 ms total)

[----------] Global test environment tear-down
[==========] 1 test from 1 test suite ran. (24 ms total)
[  PASSED  ] 1 test.
)";
        printf(kPlaceholderTestTest);
#endif  // defined(ANGLE_PLATFORM_ANDROID)

        return EXIT_SUCCESS;
    }

    if (mGTestListTests)
    {
        GTestListTests(mTestResults.results);
        return EXIT_SUCCESS;
    }

    // Run tests serially.
    if (!mBotMode)
    {
        // Only start the watchdog if the debugger is not attached and we're a child process.
        if (!angle::IsDebuggerAttached() && mBatchId != -1)
        {
            startWatchdog();
        }

        int retVal = RUN_ALL_TESTS();
        {
            std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
            mTestResults.allDone = true;
        }

        if (mWatchdogThread.joinable())
        {
            mWatchdogThread.join();
        }
        return retVal;
    }

    Timer totalRunTime;
    totalRunTime.start();

    Timer messageTimer;
    messageTimer.start();

    uint32_t batchId = 0;

    while (!mTestQueue.empty() || !mCurrentProcesses.empty())
    {
        bool progress = false;

        // Spawn a process if needed and possible.
        if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
        {
            std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
            mTestQueue.pop();

            if (!launchChildTestProcess(++batchId, testsInBatch))
            {
                return 1;
            }

            progress = true;
        }

        // Check for process completion.
        uint32_t totalTestCount = 0;
        for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
        {
            ProcessInfo &processInfo = *processIter;
            if (processInfo.process->finished())
            {
                if (!finishProcess(&processInfo))
                {
                    return 1;
                }
                processIter = mCurrentProcesses.erase(processIter);
                progress    = true;
            }
            else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
            {
                // Terminate the process and record timeouts for the batch.
                // Because we can't determine which sub-test caused a timeout, record the whole
                // batch as a timeout failure. Can be improved by using socket message passing.
                if (!processInfo.process->kill())
                {
                    return 1;
                }
                for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
                {
                    // Because the whole batch failed we can't know how long each test took.
                    mTestResults.results[testIdentifier].type = TestResultType::Timeout;
                    mFailureCount++;
                }

                processIter = mCurrentProcesses.erase(processIter);
                progress    = true;
            }
            else
            {
                totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
                processIter++;
            }
        }

        if (progress)
        {
            messageTimer.start();
        }
        else if (messageTimer.getElapsedTime() > kIdleMessageTimeout)
        {
            const ProcessInfo &processInfo = mCurrentProcesses[0];
            double processTime             = processInfo.process->getElapsedTimeSeconds();
            printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
                   static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
            messageTimer.start();
        }

        // Early exit if we passed the maximum failure threshold. Still wait for current tests.
        if (mFailureCount > mMaxFailures && !mTestQueue.empty())
        {
            printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
            TestQueue emptyTestQueue;
            std::swap(mTestQueue, emptyTestQueue);
        }

        // Sleep briefly and continue.
        angle::Sleep(100);
    }

    // Dump combined results.
    if (mFailureCount > mMaxFailures)
    {
        printf(
            "Omitted results files because the failure count (%d) exceeded the maximum number of "
            "failures (%d).\n",
            mFailureCount, mMaxFailures);
    }
    else
    {
        WriteOutputFiles(false, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                         mTestSuiteName.c_str());
    }

    totalRunTime.stop();
    printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedTime());

    return printFailuresAndReturnCount() == 0 ? 0 : 1;
}
int TestSuite::printFailuresAndReturnCount() const
{
    std::vector<std::string> failures;
    uint32_t skipCount = 0;

    for (const auto &resultIter : mTestResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::Skip)
        {
            skipCount++;
        }
        else if (result.type != TestResultType::Pass)
        {
            const FileLine &fileLine = mTestFileLines.find(id)->second;

            std::stringstream failureMessage;
            failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
                           << ResultTypeToString(result.type) << ")";
            failures.emplace_back(failureMessage.str());
        }
    }

    if (failures.empty())
        return 0;

    printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
    for (const std::string &failure : failures)
    {
        printf("    %s\n", failure.c_str());
    }
    if (skipCount > 0)
    {
        printf("%u tests skipped.\n", skipCount);
    }

    return static_cast<int>(failures.size());
}
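// The watchdog polls the current test's elapsed time every 500 ms on a separate thread.
// If the per-test timeout is exceeded it records a Timeout result, writes the output files
// via onCrashOrTimeout(), and force-exits the process.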
void TestSuite::startWatchdog()
{
    auto watchdogMain = [this]() {
        do
        {
            {
                std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
                if (mTestResults.currentTestTimer.getElapsedTime() >
                    mTestResults.currentTestTimeout)
                {
                    break;
                }

                if (mTestResults.allDone)
                    return;
            }

            angle::Sleep(500);
        } while (true);
        onCrashOrTimeout(TestResultType::Timeout);
        ::_Exit(EXIT_FAILURE);
    };
    mWatchdogThread = std::thread(watchdogMain);
}
void TestSuite::addHistogramSample(const std::string &measurement,
                                   const std::string &story,
                                   double value,
                                   const std::string &units)
{
    mHistogramWriter.addSample(measurement, story, value, units);
}

std::string TestSuite::addTestArtifact(const std::string &artifactName)
{
    mTestResults.testArtifactPaths.push_back(artifactName);

    if (mTestArtifactDirectory.empty())
    {
        return artifactName;
    }

    std::stringstream pathStream;
    pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
    return pathStream.str();
}
bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
{
    std::ifstream ifs(fileName);
    if (!ifs.is_open())
    {
        std::cerr << "Error opening " << fileName << "\n";
        return false;
    }

    js::IStreamWrapper ifsWrapper(ifs);
    js::Document document;
    document.ParseStream(ifsWrapper);

    if (document.HasParseError())
    {
        std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
        return false;
    }

    if (!GetTestResultsFromJSON(document, resultsOut))
    {
        std::cerr << "Error getting test results from JSON.\n";
        return false;
    }

    return true;
}
void TestSuite::dumpTestExpectationsErrorMessages()
{
    std::stringstream errorMsgStream;
    for (const auto &message : mTestExpectationsParser.getErrorMessages())
    {
        errorMsgStream << std::endl << " " << message;
    }

    std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
}

bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
                                                       const std::string &fileName)
{
    if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
{
    if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::logAnyUnusedTestExpectations()
{
    std::stringstream unusedMsgStream;
    bool anyUnused = false;
    for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
    {
        anyUnused = true;
        unusedMsgStream << std::endl << " " << message;
    }
    if (anyUnused)
    {
        std::cerr << "Failed to validate test expectations." << unusedMsgStream.str() << std::endl;
        return true;
    }
    return false;
}

int32_t TestSuite::getTestExpectation(const std::string &testName)
{
    return mTestExpectationsParser.getTestExpectation(testName);
}

void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
{
    double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
                             ? getSlowTestTimeout()
                             : mTestTimeout;
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    mTestResults.currentTestTimeout = testTimeout;
}

int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
                                                                const std::string &testName)
{
    uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
    maybeUpdateTestTimeout(expectation);
    return expectation;
}

int TestSuite::getSlowTestTimeout() const
{
    return mTestTimeout * kSlowTestTimeoutScale;
}
const char *TestResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "Crash";
        case TestResultType::Fail:
            return "Fail";
        case TestResultType::NoResult:
            return "NoResult";
        case TestResultType::Pass:
            return "Pass";
        case TestResultType::Skip:
            return "Skip";
        case TestResultType::Timeout:
            return "Timeout";
        case TestResultType::Unknown:
            return "Unknown";
    }
}
}  // namespace angle