Path: blob/main_old/src/tests/test_utils/runner/TestSuite.cpp
//
// Copyright 2019 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// TestSuite:
//   Basic implementation of a test harness in ANGLE.

#include "TestSuite.h"

#include "common/debug.h"
#include "common/platform.h"
#include "common/string_utils.h"
#include "common/system_utils.h"
#include "util/Timer.h"

#include <stdlib.h>
#include <time.h>

#include <fstream>
#include <unordered_map>

#include <gtest/gtest.h>
#include <rapidjson/document.h>
#include <rapidjson/filewritestream.h>
#include <rapidjson/istreamwrapper.h>
#include <rapidjson/prettywriter.h>

// We directly call into a function to register the parameterized tests. This saves spinning up
// a subprocess with a new gtest filter.
#include <gtest/../../src/gtest-internal-inl.h>

namespace js = rapidjson;

namespace angle
{
namespace
{
constexpr char kBatchId[]              = "--batch-id=";
constexpr char kFilterFileArg[]        = "--filter-file=";
constexpr char kFlakyRetries[]         = "--flaky-retries=";
constexpr char kGTestListTests[]       = "--gtest_list_tests";
constexpr char kHistogramJsonFileArg[] = "--histogram-json-file=";
constexpr char kListTests[]            = "--list-tests";
constexpr char kPrintTestStdout[]      = "--print-test-stdout";
constexpr char kResultFileArg[]        = "--results-file=";
constexpr char kTestTimeoutArg[]       = "--test-timeout=";
constexpr char kDisableCrashHandler[]  = "--disable-crash-handler";
constexpr char kIsolatedOutDir[]       = "--isolated-outdir=";
constexpr char kMaxFailures[]          = "--max-failures=";

constexpr char kStartedTestString[] = "[ RUN      ] ";
constexpr char kPassedTestString[]  = "[       OK ] ";
constexpr char kFailedTestString[]  = "[  FAILED  ] ";
constexpr char kSkippedTestString[] = "[  SKIPPED ] ";

constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";

// Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
// Ideally we could use a separate timeout for the slow first test.
#if defined(NDEBUG)
constexpr int kDefaultTestTimeout = 60;
#else
constexpr int kDefaultTestTimeout = 120;
#endif
constexpr int kSlowTestTimeoutScale = 3;
#if defined(NDEBUG)
constexpr int kDefaultBatchTimeout = 300;
#else
constexpr int kDefaultBatchTimeout = 600;
#endif
constexpr int kDefaultBatchSize      = 256;
constexpr double kIdleMessageTimeout = 15.0;
constexpr int kDefaultMaxProcesses   = 16;
constexpr int kDefaultMaxFailures    = 100;

const char *ParseFlagValue(const char *flag, const char *argument)
{
    if (strstr(argument, flag) == argument)
    {
        return argument + strlen(flag);
    }

    return nullptr;
}

bool ParseIntArg(const char *flag, const char *argument, int *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    char *end            = nullptr;
    const long longValue = strtol(value, &end, 10);

    if (*end != '\0')
    {
        printf("Error parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    if (longValue == LONG_MAX || longValue == LONG_MIN || static_cast<int>(longValue) != longValue)
    {
        printf("Overflow when parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    *valueOut = static_cast<int>(longValue);
    return true;
}

bool ParseIntArgNoDelete(const char *flag, const char *argument, int *valueOut)
{
    ParseIntArg(flag, argument, valueOut);
    return false;
}

bool ParseFlag(const char *expected, const char *actual, bool *flagOut)
{
    if (strcmp(expected, actual) == 0)
    {
        *flagOut = true;
        return true;
    }
    return false;
}

bool ParseStringArg(const char *flag, const char *argument, std::string *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    *valueOut = value;
    return true;
}

void DeleteArg(int *argc, char **argv, int argIndex)
{
    // Shift the remainder of the argv list left by one. Note that argv has (*argc + 1) elements,
    // the last one always being NULL. The following loop moves the trailing NULL element as well.
    for (int index = argIndex; index < *argc; ++index)
    {
        argv[index] = argv[index + 1];
    }
    (*argc)--;
}

void AddArg(int *argc, char **argv, const char *arg)
{
    // This unsafe const_cast is necessary to work around gtest limitations.
    argv[*argc]     = const_cast<char *>(arg);
    argv[*argc + 1] = nullptr;
    (*argc)++;
}
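
// Illustrative note (not part of the original file): TestSuite::parseSingleArg() below tries
// these helpers in turn, so a hypothetical invocation like
//
//   angle_tests --shard-count=4 --bot-mode --gtest_filter=Foo.*
//
// is consumed as ParseIntArg("--shard-count=", ...) and ParseFlag("--bot-mode", ...); each
// recognized argument is then removed from argv with DeleteArg() so gtest never sees it, while
// --gtest_filter= passes through untouched.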

const char *ResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "CRASH";
        case TestResultType::Fail:
            return "FAIL";
        case TestResultType::NoResult:
            return "NOTRUN";
        case TestResultType::Pass:
            return "PASS";
        case TestResultType::Skip:
            return "SKIP";
        case TestResultType::Timeout:
            return "TIMEOUT";
        case TestResultType::Unknown:
            return "UNKNOWN";
    }
}

TestResultType GetResultTypeFromString(const std::string &str)
{
    if (str == "CRASH")
        return TestResultType::Crash;
    if (str == "FAIL")
        return TestResultType::Fail;
    if (str == "PASS")
        return TestResultType::Pass;
    if (str == "NOTRUN")
        return TestResultType::NoResult;
    if (str == "SKIP")
        return TestResultType::Skip;
    if (str == "TIMEOUT")
        return TestResultType::Timeout;
    return TestResultType::Unknown;
}

bool IsFailedResult(TestResultType resultType)
{
    return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
}

js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
{
    js::Value jsName;
    jsName.SetString(ResultTypeToString(type), *allocator);
    return jsName;
}

bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
{
    FILE *fp = fopen(outputFile.c_str(), "w");
    if (!fp)
    {
        return false;
    }

    constexpr size_t kBufferSize = 0xFFFF;
    std::vector<char> writeBuffer(kBufferSize);
    js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
    js::PrettyWriter<js::FileWriteStream> writer(os);
    if (!doc->Accept(writer))
    {
        fclose(fp);
        return false;
    }
    fclose(fp);
    return true;
}

// Writes out a TestResults to the Chromium JSON Test Results format.
// https://chromium.googlesource.com/chromium/src.git/+/master/docs/testing/json_test_results_format.md
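
// For reference, a minimal sketch of the document produced below (values illustrative):
//
//   {
//     "interrupted": false,
//     "path_delimiter": ".",
//     "version": 3,
//     "seconds_since_epoch": 1600000000,
//     "num_failures_by_type": { "PASS": 1 },
//     "tests": { "MySuite.MyTest": { "actual": "PASS", "expected": "PASS", "times": [0.1] } }
//   }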
void WriteResultsFile(bool interrupted,
                      const TestResults &testResults,
                      const std::string &outputFile,
                      const char *testSuiteName)
{
    time_t ltime;
    time(&ltime);
    struct tm *timeinfo = gmtime(&ltime);
    ltime               = mktime(timeinfo);

    uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);

    js::Document doc;
    doc.SetObject();

    js::Document::AllocatorType &allocator = doc.GetAllocator();

    doc.AddMember("interrupted", interrupted, allocator);
    doc.AddMember("path_delimiter", ".", allocator);
    doc.AddMember("version", 3, allocator);
    doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);

    js::Value tests;
    tests.SetObject();

    // If we have any test artifacts, make a fake test to house them.
    if (!testResults.testArtifactPaths.empty())
    {
        js::Value artifactsTest;
        artifactsTest.SetObject();

        artifactsTest.AddMember("actual", "PASS", allocator);
        artifactsTest.AddMember("expected", "PASS", allocator);

        js::Value artifacts;
        artifacts.SetObject();

        for (const std::string &testArtifactPath : testResults.testArtifactPaths)
        {
            std::vector<std::string> pieces =
                SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
                            SplitResult::SPLIT_WANT_NONEMPTY);
            ASSERT(!pieces.empty());

            js::Value basename;
            basename.SetString(pieces.back(), allocator);

            js::Value artifactPath;
            artifactPath.SetString(testArtifactPath, allocator);

            js::Value artifactArray;
            artifactArray.SetArray();
            artifactArray.PushBack(artifactPath, allocator);

            artifacts.AddMember(basename, artifactArray, allocator);
        }

        artifactsTest.AddMember("artifacts", artifacts, allocator);

        js::Value fakeTestName;
        fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
        tests.AddMember(fakeTestName, artifactsTest, allocator);
    }

    std::map<TestResultType, uint32_t> counts;

    for (const auto &resultIter : testResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        js::Value jsResult;
        jsResult.SetObject();

        counts[result.type]++;

        std::string actualResult;
        for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
        {
            actualResult += "FAIL ";
        }

        actualResult += ResultTypeToString(result.type);

        std::string expectedResult = "PASS";
        if (result.type == TestResultType::Skip)
        {
            expectedResult = "SKIP";
        }

        // Handle flaky passing tests.
        if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
        {
            expectedResult = "FAIL PASS";
            jsResult.AddMember("is_flaky", true, allocator);
        }

        jsResult.AddMember("actual", actualResult, allocator);
        jsResult.AddMember("expected", expectedResult, allocator);

        if (IsFailedResult(result.type))
        {
            jsResult.AddMember("is_unexpected", true, allocator);
        }

        js::Value times;
        times.SetArray();
        times.PushBack(result.elapsedTimeSeconds, allocator);

        jsResult.AddMember("times", times, allocator);

        char testName[500];
        id.sprintfName(testName);
        js::Value jsName;
        jsName.SetString(testName, allocator);

        tests.AddMember(jsName, jsResult, allocator);
    }

    js::Value numFailuresByType;
    numFailuresByType.SetObject();

    for (const auto &countIter : counts)
    {
        TestResultType type = countIter.first;
        uint32_t count      = countIter.second;

        js::Value jsCount(count);
        numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
    }

    doc.AddMember("num_failures_by_type", numFailuresByType, allocator);

    doc.AddMember("tests", tests, allocator);

    printf("Writing test results to %s\n", outputFile.c_str());

    if (!WriteJsonFile(outputFile, &doc))
    {
        printf("Error writing test results file.\n");
    }
}

void WriteHistogramJson(const HistogramWriter &histogramWriter,
                        const std::string &outputFile,
                        const char *testSuiteName)
{
    js::Document doc;
    doc.SetArray();

    histogramWriter.getAsJSON(&doc);

    printf("Writing histogram json to %s\n", outputFile.c_str());

    if (!WriteJsonFile(outputFile, &doc))
    {
        printf("Error writing histogram json file.\n");
    }
}

void WriteOutputFiles(bool interrupted,
                      const TestResults &testResults,
                      const std::string &resultsFile,
                      const HistogramWriter &histogramWriter,
                      const std::string &histogramJsonOutputFile,
                      const char *testSuiteName)
{
    if (!resultsFile.empty())
    {
        WriteResultsFile(interrupted, testResults, resultsFile, testSuiteName);
    }

    if (!histogramJsonOutputFile.empty())
    {
        WriteHistogramJson(histogramWriter, histogramJsonOutputFile, testSuiteName);
    }
}

void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
{
    TestResult &resultOut = resultsOut->results[resultsOut->currentTest];

    // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
    if (resultIn.Skipped())
    {
        resultOut.type = TestResultType::Skip;
    }
    else if (resultIn.Failed())
    {
        resultOut.type = TestResultType::Fail;
    }
    else
    {
        resultOut.type = TestResultType::Pass;
    }

    resultOut.elapsedTimeSeconds = resultsOut->currentTestTimer.getElapsedTime();
}

TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
{
    return {testInfo.test_suite_name(), testInfo.name()};
}

class TestEventListener : public testing::EmptyTestEventListener
{
  public:
    // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
    TestEventListener(const std::string &resultsFile,
                      const std::string &histogramJsonFile,
                      const char *testSuiteName,
                      TestResults *testResults,
                      HistogramWriter *histogramWriter)
        : mResultsFile(resultsFile),
          mHistogramJsonFile(histogramJsonFile),
          mTestSuiteName(testSuiteName),
          mTestResults(testResults),
          mHistogramWriter(histogramWriter)
    {}

    void OnTestStart(const testing::TestInfo &testInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->currentTest = GetTestIdentifier(testInfo);
        mTestResults->currentTestTimer.start();
    }

    void OnTestEnd(const testing::TestInfo &testInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->currentTestTimer.stop();
        const testing::TestResult &resultIn = *testInfo.result();
        UpdateCurrentTestResult(resultIn, mTestResults);
        mTestResults->currentTest = TestIdentifier();
    }

    void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->allDone = true;
        WriteOutputFiles(false, *mTestResults, mResultsFile, *mHistogramWriter,
                         mHistogramJsonFile, mTestSuiteName);
    }

  private:
    std::string mResultsFile;
    std::string mHistogramJsonFile;
    const char *mTestSuiteName;
    TestResults *mTestResults;
    HistogramWriter *mHistogramWriter;
};

bool IsTestDisabled(const testing::TestInfo &testInfo)
{
    return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
}

using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;

std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
                                        TestIdentifierFilter filter,
                                        bool alsoRunDisabledTests)
{
    std::vector<TestIdentifier> tests;

    const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
    for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
    {
        const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
        for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
        {
            const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
            TestIdentifier id                 = GetTestIdentifier(testInfo);
            if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
            {
                tests.emplace_back(id);

                if (fileLinesOut)
                {
                    (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
                }
            }
        }
    }

    return tests;
}

std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
                                             bool alsoRunDisabledTests)
{
    TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
        return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName,
                                                                     id.testName);
    };

    return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
}

std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
                                          int shardIndex,
                                          int shardCount,
                                          std::map<TestIdentifier, FileLine> *fileLinesOut,
                                          bool alsoRunDisabledTests)
{
    std::vector<TestIdentifier> shardTests;

    for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
         testIndex += shardCount)
    {
        shardTests.emplace_back(allTests[testIndex]);
    }

    return shardTests;
}
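
// Illustrative note (not in the original): sharding stripes rather than slices. With
// shardIndex == 1 and shardCount == 4, a ten-test list yields tests 1, 5 and 9, so every
// shard receives a similar mix of fast and slow tests.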

std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
{
    std::stringstream filterStream;

    filterStream << "--gtest_filter=";

    for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
    {
        if (testIndex != 0)
        {
            filterStream << ":";
        }

        filterStream << tests[testIndex];
    }

    return filterStream.str();
}

std::string ParseTestSuiteName(const char *executable)
{
    const char *baseNameStart = strrchr(executable, GetPathSeparator());
    if (!baseNameStart)
    {
        baseNameStart = executable;
    }
    else
    {
        baseNameStart++;
    }

    const char *suffix = GetExecutableExtension();
    size_t suffixLen   = strlen(suffix);
    if (suffixLen == 0)
    {
        return baseNameStart;
    }

    if (!EndsWith(baseNameStart, suffix))
    {
        return baseNameStart;
    }

    return std::string(baseNameStart, baseNameStart + strlen(baseNameStart) - suffixLen);
}
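
// Illustrative example (hypothetical path): on Windows,
// ParseTestSuiteName("C:\\out\\angle_end2end_tests.exe") strips the directory and the
// ".exe" suffix and returns "angle_end2end_tests".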

bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
                              std::vector<std::string> *testArtifactPathsOut)
{
    if (!obj.HasMember("artifacts"))
    {
        printf("No artifacts member.\n");
        return false;
    }

    const js::Value &jsArtifacts = obj["artifacts"];
    if (!jsArtifacts.IsObject())
    {
        printf("Artifacts are not an object.\n");
        return false;
    }

    const js::Value::ConstObject &artifacts = jsArtifacts.GetObject();
    for (const auto &artifactMember : artifacts)
    {
        const js::Value &artifact = artifactMember.value;
        if (!artifact.IsArray())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value::ConstArray &artifactArray = artifact.GetArray();
        if (artifactArray.Size() != 1)
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value &artifactName = artifactArray[0];
        if (!artifactName.IsString())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        testArtifactPathsOut->push_back(artifactName.GetString());
    }

    return true;
}
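
// For reference, the "artifacts" member this accepts mirrors what WriteResultsFile emits
// (values illustrative): each key is a basename mapping to a one-element array holding the
// full path, e.g.
//
//   "artifacts": { "trace.json": ["artifacts/trace.json"] }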

bool GetSingleTestResultFromJSON(const js::Value &name,
                                 const js::Value::ConstObject &obj,
                                 TestResults *resultsOut)
{
    TestIdentifier id;
    if (!TestIdentifier::ParseFromString(name.GetString(), &id))
    {
        printf("Could not parse test identifier.\n");
        return false;
    }

    if (!obj.HasMember("expected") || !obj.HasMember("actual"))
    {
        printf("No expected or actual member.\n");
        return false;
    }

    const js::Value &expected = obj["expected"];
    const js::Value &actual   = obj["actual"];

    if (!expected.IsString() || !actual.IsString())
    {
        printf("Expected or actual member is not a string.\n");
        return false;
    }

    const std::string actualStr = actual.GetString();

    TestResultType resultType = TestResultType::Unknown;
    int flakyFailures         = 0;
    if (actualStr.find(' ') != std::string::npos)
    {
        std::istringstream strstr(actualStr);
        std::string token;
        while (std::getline(strstr, token, ' '))
        {
            resultType = GetResultTypeFromString(token);
            if (resultType == TestResultType::Unknown)
            {
                printf("Failed to parse result type.\n");
                return false;
            }
            if (IsFailedResult(resultType))
            {
                flakyFailures++;
            }
        }
    }
    else
    {
        resultType = GetResultTypeFromString(actualStr);
        if (resultType == TestResultType::Unknown)
        {
            printf("Failed to parse result type.\n");
            return false;
        }
    }

    double elapsedTimeSeconds = 0.0;
    if (obj.HasMember("times"))
    {
        const js::Value &times = obj["times"];
        if (!times.IsArray())
        {
            return false;
        }

        const js::Value::ConstArray &timesArray = times.GetArray();
        if (timesArray.Size() != 1 || !timesArray[0].IsDouble())
        {
            return false;
        }

        elapsedTimeSeconds = timesArray[0].GetDouble();
    }

    TestResult &result        = resultsOut->results[id];
    result.elapsedTimeSeconds = elapsedTimeSeconds;
    result.type               = resultType;
    result.flakyFailures      = flakyFailures;
    return true;
}
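
// Illustrative parse (not in the original): an "actual" value of "FAIL FAIL PASS" walks the
// space-separated tokens, counting two failed attempts (flakyFailures == 2) and keeping the
// last token, PASS, as the test's final result type.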

bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
{
    if (!document.HasMember("tests") || !document["tests"].IsObject())
    {
        printf("JSON document has no tests member.\n");
        return false;
    }

    const js::Value::ConstObject &tests = document["tests"].GetObject();
    for (const auto &testMember : tests)
    {
        // Get test identifier.
        const js::Value &name = testMember.name;
        if (!name.IsString())
        {
            printf("Name is not a string.\n");
            return false;
        }

        // Get test result.
        const js::Value &value = testMember.value;
        if (!value.IsObject())
        {
            printf("Test result is not an object.\n");
            return false;
        }

        const js::Value::ConstObject &obj = value.GetObject();

        if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
        {
            if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
            {
                return false;
            }
        }
        else
        {
            if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
            {
                return false;
            }
        }
    }

    return true;
}

bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
{
    for (auto &resultsIter : input->results)
    {
        const TestIdentifier &id = resultsIter.first;
        TestResult &inputResult  = resultsIter.second;
        TestResult &outputResult = output->results[id];

        if (inputResult.type != TestResultType::NoResult)
        {
            if (outputResult.type != TestResultType::NoResult)
            {
                printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
                       id.testName.c_str());
                return false;
            }

            // Mark the tests that haven't exhausted their retries as 'NOTRUN'. This makes ANGLE
            // attempt the test again.
            uint32_t runCount = outputResult.flakyFailures + 1;
            if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
            {
                printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
                       id.testName.c_str());
                inputResult.type = TestResultType::NoResult;
                outputResult.flakyFailures++;
            }
            else
            {
                outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
                outputResult.type               = inputResult.type;
            }
        }
    }

    output->testArtifactPaths.insert(output->testArtifactPaths.end(),
                                     input->testArtifactPaths.begin(),
                                     input->testArtifactPaths.end());

    return true;
}

void PrintTestOutputSnippet(const TestIdentifier &id,
                            const TestResult &result,
                            const std::string &fullOutput)
{
    std::stringstream nameStream;
    nameStream << id;
    std::string fullName = nameStream.str();

    size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
    if (runPos == std::string::npos)
    {
        printf("Cannot locate test output snippet.\n");
        return;
    }

    size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
    // Only clip the snippet to the "OK" message if the test really
    // succeeded. It still might have e.g. crashed after printing it.
    if (endPos == std::string::npos && result.type == TestResultType::Pass)
    {
        endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
    }
    if (endPos != std::string::npos)
    {
        size_t newline_pos = fullOutput.find("\n", endPos);
        if (newline_pos != std::string::npos)
            endPos = newline_pos + 1;
    }

    std::cout << "\n";
    if (endPos != std::string::npos)
    {
        std::cout << fullOutput.substr(runPos, endPos - runPos);
    }
    else
    {
        std::cout << fullOutput.substr(runPos);
    }
}

std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
{
    size_t slashPos = id.testName.find('/');
    if (slashPos == std::string::npos)
    {
        return "default";
    }

    size_t doubleUnderscorePos = id.testName.find("__");
    if (doubleUnderscorePos == std::string::npos)
    {
        std::string configName = id.testName.substr(slashPos + 1);

        if (!BeginsWith(configName, "ES"))
        {
            return "default";
        }

        return configName;
    }
    else
    {
        return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
    }
}

TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
{
    // First sort tests by configuration.
    angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
    for (const TestIdentifier &id : tests)
    {
        std::string config = GetConfigNameFromTestIdentifier(id);
        testsSortedByConfig[config].push_back(id);
    }

    // Then group into batches by 'batchSize'.
    TestQueue testQueue;
    for (const auto &configAndIds : testsSortedByConfig)
    {
        const std::vector<TestIdentifier> &configTests = configAndIds.second;

        // Count the number of batches needed for this config.
        int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;

        // Create batches with striping to split up slow tests.
        for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
        {
            std::vector<TestIdentifier> batchTests;
            for (size_t testIndex = batchIndex; testIndex < configTests.size();
                 testIndex += batchesForConfig)
            {
                batchTests.push_back(configTests[testIndex]);
            }
            testQueue.emplace(std::move(batchTests));
            ASSERT(batchTests.empty());
        }
    }

    return testQueue;
}
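
// Worked example (illustrative): 512 tests in config "ES2_Vulkan" with batchSize == 256 need
// two batches; striping then sends even indices to batch 0 and odd indices to batch 1, so
// adjacent (often similarly slow) tests land in different batches.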

void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
{
    std::cout << "Tests list:\n";

    for (const auto &resultIt : resultsMap)
    {
        const TestIdentifier &id = resultIt.first;
        std::cout << id << "\n";
    }

    std::cout << "End tests list.\n";
}

// Prints the names of the tests matching the user-specified filter flag.
// This matches the output from googletest/src/gtest.cc but is much much faster for large filters.
// See http://anglebug.com/5164
void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
{
    std::map<std::string, std::vector<std::string>> suites;

    for (const auto &resultIt : resultsMap)
    {
        const TestIdentifier &id = resultIt.first;
        suites[id.testSuiteName].push_back(id.testName);
    }

    for (const auto &testSuiteIt : suites)
    {
        bool printedTestSuiteName = false;

        const std::string &suiteName              = testSuiteIt.first;
        const std::vector<std::string> &testNames = testSuiteIt.second;

        for (const std::string &testName : testNames)
        {
            if (!printedTestSuiteName)
            {
                printedTestSuiteName = true;
                printf("%s.\n", suiteName.c_str());
            }
            printf("  %s\n", testName.c_str());
        }
    }
}
}  // namespace

// static
TestSuite *TestSuite::mInstance = nullptr;

TestIdentifier::TestIdentifier() = default;

TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
    : testSuiteName(suiteNameIn), testName(nameIn)
{}

TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;

TestIdentifier::~TestIdentifier() = default;

TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;

void TestIdentifier::sprintfName(char *outBuffer) const
{
    sprintf(outBuffer, "%s.%s", testSuiteName.c_str(), testName.c_str());
}

// static
bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
{
    size_t separator = str.find(".");
    if (separator == std::string::npos)
    {
        return false;
    }

    idOut->testSuiteName = str.substr(0, separator);
    idOut->testName      = str.substr(separator + 1, str.length() - separator - 1);
    return true;
}
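
// Illustrative parse (not in the original): "SimpleOperationTest.DrawQuad/ES2_Vulkan" splits at
// the first '.', yielding testSuiteName "SimpleOperationTest" and testName "DrawQuad/ES2_Vulkan".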

TestResults::TestResults() = default;

TestResults::~TestResults() = default;

ProcessInfo::ProcessInfo() = default;

ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
{
    process         = std::move(rhs.process);
    testsInBatch    = std::move(rhs.testsInBatch);
    resultsFileName = std::move(rhs.resultsFileName);
    filterFileName  = std::move(rhs.filterFileName);
    commandLine     = std::move(rhs.commandLine);
    filterString    = std::move(rhs.filterString);
    return *this;
}

ProcessInfo::~ProcessInfo() = default;

ProcessInfo::ProcessInfo(ProcessInfo &&other)
{
    *this = std::move(other);
}

TestSuite::TestSuite(int *argc, char **argv)
    : mShardCount(-1),
      mShardIndex(-1),
      mBotMode(false),
      mDebugTestGroups(false),
      mGTestListTests(false),
      mListTests(false),
      mPrintTestStdout(false),
      mDisableCrashHandler(false),
      mBatchSize(kDefaultBatchSize),
      mCurrentResultCount(0),
      mTotalResultCount(0),
      mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
      mTestTimeout(kDefaultTestTimeout),
      mBatchTimeout(kDefaultBatchTimeout),
      mBatchId(-1),
      mFlakyRetries(0),
      mMaxFailures(kDefaultMaxFailures),
      mFailureCount(0)
{
    ASSERT(mInstance == nullptr);
    mInstance = this;

    Optional<int> filterArgIndex;
    bool alsoRunDisabledTests = false;

#if defined(ANGLE_PLATFORM_MACOS)
    // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
    // file access.
    angle::InitMetalFileAPIHooking(*argc, argv);
#endif

#if defined(ANGLE_PLATFORM_WINDOWS)
    testing::GTEST_FLAG(catch_exceptions) = false;
#endif

    if (*argc <= 0)
    {
        printf("Missing test arguments.\n");
        exit(EXIT_FAILURE);
    }

    mTestExecutableName = argv[0];
    mTestSuiteName      = ParseTestSuiteName(mTestExecutableName.c_str());

    for (int argIndex = 1; argIndex < *argc;)
    {
        if (parseSingleArg(argv[argIndex]))
        {
            DeleteArg(argc, argv, argIndex);
            continue;
        }

        if (ParseFlagValue("--gtest_filter=", argv[argIndex]))
        {
            filterArgIndex = argIndex;
        }
        else
        {
            // Don't include disabled tests in test lists unless the user asks for them.
            if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
            {
                alsoRunDisabledTests = true;
            }

            mChildProcessArgs.push_back(argv[argIndex]);
        }
        ++argIndex;
    }

    mTestResults.currentTestTimeout = mTestTimeout;

#if defined(ANGLE_PLATFORM_ANDROID)
    // Workaround for the Android test runner requiring a GTest test list.
    if (mListTests && filterArgIndex.valid())
    {
        DeleteArg(argc, argv, filterArgIndex.value());
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if (!mDisableCrashHandler)
    {
        // Note that the crash callback must be owned and not use global constructors.
        mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
        InitCrashHandler(&mCrashCallback);
    }

    std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
    if (!envShardIndex.empty())
    {
        angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
        if (mShardIndex == -1)
        {
            std::stringstream shardIndexStream(envShardIndex);
            shardIndexStream >> mShardIndex;
        }
    }

    std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
    if (!envTotalShards.empty())
    {
        angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
        if (mShardCount == -1)
        {
            std::stringstream shardCountStream(envTotalShards);
            shardCountStream >> mShardCount;
        }
    }

    if ((mShardIndex == -1) != (mShardCount == -1))
    {
        printf("Shard index and shard count must be specified together.\n");
        exit(EXIT_FAILURE);
    }
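
    // Illustrative note (not in the original): infrastructure typically passes sharding through
    // the standard GoogleTest environment, e.g. GTEST_SHARD_INDEX=1 GTEST_TOTAL_SHARDS=4. The
    // variables are unset above so gtest itself won't shard a second time, and the explicit
    // --shard-index=/--shard-count= flags win when both are present.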

    if (!mFilterFile.empty())
    {
        if (filterArgIndex.valid())
        {
            printf("Cannot use gtest_filter in conjunction with a filter file.\n");
            exit(EXIT_FAILURE);
        }

        uint32_t fileSize = 0;
        if (!GetFileSize(mFilterFile.c_str(), &fileSize))
        {
            printf("Error getting filter file size: %s\n", mFilterFile.c_str());
            exit(EXIT_FAILURE);
        }

        std::vector<char> fileContents(fileSize + 1, 0);
        if (!ReadEntireFileToString(mFilterFile.c_str(), fileContents.data(), fileSize))
        {
            printf("Error loading filter file: %s\n", mFilterFile.c_str());
            exit(EXIT_FAILURE);
        }
        mFilterString.assign(fileContents.data());

        if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
        {
            printf("Filter file must start with \"--gtest_filter=\".\n");
            exit(EXIT_FAILURE);
        }

        // Note that we only add a filter string if we previously deleted a filter file
        // argument. So we will have space for the new filter string in argv.
        AddArg(argc, argv, mFilterString.c_str());
    }

    // Call into gtest internals to force parameterized test name registration.
    testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
    impl->RegisterParameterizedTests();

    // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
    testing::internal::ParseGoogleTestFlagsOnly(argc, argv);

    std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);

    if (mShardCount == 0)
    {
        printf("Shard count must be > 0.\n");
        exit(EXIT_FAILURE);
    }
    else if (mShardCount > 0)
    {
        if (mShardIndex >= mShardCount)
        {
            printf("Shard index must be less than shard count.\n");
            exit(EXIT_FAILURE);
        }

        // If there's only one shard, we can use the testSet as defined above.
        if (mShardCount > 1)
        {
            testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
                                    alsoRunDisabledTests);

            if (!mBotMode)
            {
                mFilterString = GetTestFilter(testSet);

                if (filterArgIndex.valid())
                {
                    argv[filterArgIndex.value()] = const_cast<char *>(mFilterString.c_str());
                }
                else
                {
                    // Note that we only add a filter string if we previously deleted a shard
                    // index/count argument. So we will have space for the new filter string in
                    // argv.
                    AddArg(argc, argv, mFilterString.c_str());
                }

                // Force-re-initialize GoogleTest flags to load the shard filter.
                testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
            }
        }
    }

    {
        std::stringstream fakeTestName;
        fakeTestName << kArtifactsFakeTestName;
        if (mShardIndex != -1)
        {
            fakeTestName << "-Shard" << std::setfill('0') << std::setw(2) << mShardIndex;
        }
        mTestResults.testArtifactsFakeTestName = fakeTestName.str();
    }

    if (mBotMode)
    {
        // Split up test batches.
        mTestQueue = BatchTests(testSet, mBatchSize);

        if (mDebugTestGroups)
        {
            std::cout << "Test Groups:\n";

            while (!mTestQueue.empty())
            {
                const std::vector<TestIdentifier> &tests = mTestQueue.front();
                std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
                          << static_cast<int>(tests.size()) << ")\n";
                mTestQueue.pop();
            }

            exit(EXIT_SUCCESS);
        }
    }

    testing::InitGoogleTest(argc, argv);

    mTotalResultCount = testSet.size();

    if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
    {
        // Create a default output file in bot mode.
        mResultsFile = "output.json";
    }

    if (!mResultsDirectory.empty())
    {
        std::stringstream resultFileName;
        resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
        mResultsFile = resultFileName.str();
    }

    if (!mBotMode)
    {
        testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
        listeners.Append(new TestEventListener(mResultsFile, mHistogramJsonFile,
                                               mTestSuiteName.c_str(), &mTestResults,
                                               &mHistogramWriter));

        for (const TestIdentifier &id : testSet)
        {
            mTestResults.results[id].type = TestResultType::NoResult;
        }
    }
}

TestSuite::~TestSuite()
{
    if (mWatchdogThread.joinable())
    {
        mWatchdogThread.detach();
    }
    TerminateCrashHandler();
}

bool TestSuite::parseSingleArg(const char *argument)
{
    // Note: Flags should be documented in README.md.
    return (ParseIntArg("--shard-count=", argument, &mShardCount) ||
            ParseIntArg("--shard-index=", argument, &mShardIndex) ||
            ParseIntArg("--batch-size=", argument, &mBatchSize) ||
            ParseIntArg("--max-processes=", argument, &mMaxProcesses) ||
            ParseIntArg(kTestTimeoutArg, argument, &mTestTimeout) ||
            ParseIntArg("--batch-timeout=", argument, &mBatchTimeout) ||
            ParseIntArg(kFlakyRetries, argument, &mFlakyRetries) ||
            ParseIntArg(kMaxFailures, argument, &mMaxFailures) ||
            // Other test functions consume the batch ID, so keep it in the list.
            ParseIntArgNoDelete(kBatchId, argument, &mBatchId) ||
            ParseStringArg("--results-directory=", argument, &mResultsDirectory) ||
            ParseStringArg(kResultFileArg, argument, &mResultsFile) ||
            ParseStringArg("--isolated-script-test-output=", argument, &mResultsFile) ||
            ParseStringArg(kFilterFileArg, argument, &mFilterFile) ||
            ParseStringArg(kHistogramJsonFileArg, argument, &mHistogramJsonFile) ||
            // We need these overloads to work around technical debt in the Android test runner.
            ParseStringArg("--isolated-script-test-perf-output=", argument, &mHistogramJsonFile) ||
            ParseStringArg("--isolated_script_test_perf_output=", argument, &mHistogramJsonFile) ||
            ParseStringArg(kIsolatedOutDir, argument, &mTestArtifactDirectory) ||
            ParseFlag("--bot-mode", argument, &mBotMode) ||
            ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) ||
            ParseFlag(kGTestListTests, argument, &mGTestListTests) ||
            ParseFlag(kListTests, argument, &mListTests) ||
            ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout) ||
            ParseFlag(kDisableCrashHandler, argument, &mDisableCrashHandler));
}

void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
{
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    if (mTestResults.currentTest.valid())
    {
        TestResult &result        = mTestResults.results[mTestResults.currentTest];
        result.type               = crashOrTimeout;
        result.elapsedTimeSeconds = mTestResults.currentTestTimer.getElapsedTime();
    }

    if (mResultsFile.empty())
    {
        printf("No results file specified.\n");
        return;
    }

    WriteOutputFiles(true, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                     mTestSuiteName.c_str());
}

bool TestSuite::launchChildTestProcess(uint32_t batchId,
                                       const std::vector<TestIdentifier> &testsInBatch)
{
    constexpr uint32_t kMaxPath = 1000;

    // Create a temporary file to store the test list.
    ProcessInfo processInfo;

    char filterBuffer[kMaxPath] = {};
    if (!CreateTemporaryFile(filterBuffer, kMaxPath))
    {
        std::cerr << "Error creating temporary file for test list.\n";
        return false;
    }
    processInfo.filterFileName.assign(filterBuffer);

    std::string filterString = GetTestFilter(testsInBatch);

    FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
    if (!fp)
    {
        std::cerr << "Error opening temporary file for test list.\n";
        return false;
    }
    fprintf(fp, "%s", filterString.c_str());
    fclose(fp);

    processInfo.filterString = filterString;

    std::string filterFileArg = kFilterFileArg + processInfo.filterFileName;

    // Create a temporary file to store the test output.
    char resultsBuffer[kMaxPath] = {};
    if (!CreateTemporaryFile(resultsBuffer, kMaxPath))
    {
        std::cerr << "Error creating temporary file for test output.\n";
        return false;
    }
    processInfo.resultsFileName.assign(resultsBuffer);

    std::string resultsFileArg = kResultFileArg + processInfo.resultsFileName;

    // Construct command line for child process.
    std::vector<const char *> args;

    args.push_back(mTestExecutableName.c_str());
    args.push_back(filterFileArg.c_str());
    args.push_back(resultsFileArg.c_str());

    std::stringstream batchIdStream;
    batchIdStream << kBatchId << batchId;
    std::string batchIdString = batchIdStream.str();
    args.push_back(batchIdString.c_str());

    for (const std::string &arg : mChildProcessArgs)
    {
        args.push_back(arg.c_str());
    }

    if (mDisableCrashHandler)
    {
        args.push_back(kDisableCrashHandler);
    }

    std::string timeoutStr;
    if (mTestTimeout != kDefaultTestTimeout)
    {
        std::stringstream timeoutStream;
        timeoutStream << kTestTimeoutArg << mTestTimeout;
        timeoutStr = timeoutStream.str();
        args.push_back(timeoutStr.c_str());
    }

    std::string artifactsDir;
    if (!mTestArtifactDirectory.empty())
    {
        std::stringstream artifactsDirStream;
        artifactsDirStream << kIsolatedOutDir << mTestArtifactDirectory;
        artifactsDir = artifactsDirStream.str();
        args.push_back(artifactsDir.c_str());
    }

    // Launch child process and wait for completion.
    processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);

    if (!processInfo.process->started())
    {
        std::cerr << "Error launching child process.\n";
        return false;
    }

    std::stringstream commandLineStr;
    for (const char *arg : args)
    {
        commandLineStr << arg << " ";
    }

    processInfo.commandLine  = commandLineStr.str();
    processInfo.testsInBatch = testsInBatch;
    mCurrentProcesses.emplace_back(std::move(processInfo));
    return true;
}
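
// Illustrative child command line assembled by launchChildTestProcess() (paths hypothetical):
//
//   angle_end2end_tests --filter-file=/tmp/fltrXXXX --results-file=/tmp/rsltXXXX --batch-id=7
//
// plus any pass-through arguments from mChildProcessArgs, --test-timeout=N when non-default,
// and --isolated-outdir=DIR when an artifact directory is set.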

void ParseTestIdentifierAndSetResult(const std::string &testName,
                                     TestResultType result,
                                     TestResults *results)
{
    // Trim off any whitespace + extra stuff at the end of the string.
    std::string modifiedTestName = testName.substr(0, testName.find(' '));
    modifiedTestName             = modifiedTestName.substr(0, modifiedTestName.find('\r'));
    TestIdentifier id;
    bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
    ASSERT(ok);
    results->results[id] = {result};
}

bool TestSuite::finishProcess(ProcessInfo *processInfo)
{
    // Get test results and merge into the master list.
    TestResults batchResults;

    if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
    {
        std::cerr << "Warning: could not find test results file from child process.\n";

        // First assume all tests get skipped.
        for (const TestIdentifier &id : processInfo->testsInBatch)
        {
            batchResults.results[id] = {TestResultType::NoResult};
        }

        // Attempt to reconstruct passing list from stdout snippets.
        const std::string &batchStdout = processInfo->process->getStdout();
        std::istringstream linesStream(batchStdout);

        std::string line;
        while (std::getline(linesStream, line))
        {
            size_t startPos   = line.find(kStartedTestString);
            size_t failPos    = line.find(kFailedTestString);
            size_t passPos    = line.find(kPassedTestString);
            size_t skippedPos = line.find(kSkippedTestString);

            if (startPos != std::string::npos)
            {
                // Assume a test that's started crashed until we see it completed.
                std::string testName = line.substr(strlen(kStartedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
            }
            else if (failPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kFailedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
            }
            else if (passPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kPassedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
            }
            else if (skippedPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kSkippedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
            }
        }
    }

    if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
    {
        std::cerr << "Error merging batch test results.\n";
        return false;
    }

    if (!batchResults.results.empty())
    {
        const TestIdentifier &id = batchResults.results.begin()->first;
        std::string config       = GetConfigNameFromTestIdentifier(id);
        printf("Completed batch with config: %s\n", config.c_str());

        for (const auto &resultIter : batchResults.results)
        {
            const TestResult &result = resultIter.second;
            if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
            {
                printf("To reproduce the batch, use filter:\n%s\n",
                       processInfo->filterString.c_str());
                break;
            }
        }
    }

    // Process results and print unexpected errors.
    for (const auto &resultIter : batchResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        // NoResult entries aren't processed since they're added back to the test queue below.
        if (result.type == TestResultType::NoResult)
        {
            continue;
        }

        mCurrentResultCount++;

        printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
               id.testName.c_str());

        if (mPrintTestStdout)
        {
            const std::string &batchStdout = processInfo->process->getStdout();
            PrintTestOutputSnippet(id, result, batchStdout);
        }
        else if (result.type == TestResultType::Pass)
        {
            printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds * 1000.0);
        }
        else if (result.type == TestResultType::Skip)
        {
            printf(" (skipped)\n");
        }
        else if (result.type == TestResultType::Timeout)
        {
            printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds);
            mFailureCount++;
        }
        else
        {
            printf(" (%s)\n", ResultTypeToString(result.type));
            mFailureCount++;

            const std::string &batchStdout = processInfo->process->getStdout();
            PrintTestOutputSnippet(id, result, batchStdout);
        }
    }

    // On unexpected exit, re-queue any unfinished tests.
    std::vector<TestIdentifier> unfinishedTests;
    for (const auto &resultIter : batchResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::NoResult)
        {
            unfinishedTests.push_back(id);
        }
    }

    if (!unfinishedTests.empty())
    {
        mTestQueue.emplace(std::move(unfinishedTests));
    }

    // Clean up any dirty temporary files.
    for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
    {
        // Note: we should be aware that this cleanup won't happen if the harness itself
        // crashes. If this situation comes up in the future we should add crash cleanup to the
        // harness.
        if (!angle::DeleteFile(tempFile.c_str()))
        {
            std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
        }
    }

    processInfo->process.reset();
    return true;
}
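
// Illustrative reconstruction (hypothetical log lines): if a child crashes mid-batch and leaves
// no results file, its captured stdout might end with
//
//   [ RUN      ] MySuite.First
//   [       OK ] MySuite.First (12 ms)
//   [ RUN      ] MySuite.Second
//
// The loop above first marks each started test as Crash, then upgrades First to Pass when its
// OK line appears; Second stays Crash because it started but never finished.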

int TestSuite::run()
{
#if defined(ANGLE_PLATFORM_ANDROID)
    if (mListTests && mGTestListTests)
    {
        // Workaround for the Android test runner requiring a GTest test list.
        printf("PlaceholderTest.\n  Placeholder\n");
        return EXIT_SUCCESS;
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if (mListTests)
    {
        ListTests(mTestResults.results);

#if defined(ANGLE_PLATFORM_ANDROID)
        // Because of quirks with the Chromium-provided Android test runner, we need to use a few
        // tricks to get the test list output. We add placeholder output for a single test to trick
        // the test runner into thinking it ran the tests successfully. We also add an end marker
        // for the tests list so we can parse the list from the more spammy Android stdout log.
        static constexpr char kPlaceholderTestTest[] = R"(
[==========] Running 1 test from 1 test suite.
[----------] Global test environment set-up.
[----------] 1 test from PlaceholderTest
[ RUN      ] PlaceholderTest.Placeholder
[       OK ] PlaceholderTest.Placeholder (0 ms)
[----------] 1 test from APITest (0 ms total)

[----------] Global test environment tear-down
[==========] 1 test from 1 test suite ran. (24 ms total)
[  PASSED  ] 1 test.
)";
        printf(kPlaceholderTestTest);
#endif  // defined(ANGLE_PLATFORM_ANDROID)

        return EXIT_SUCCESS;
    }

    if (mGTestListTests)
    {
        GTestListTests(mTestResults.results);
        return EXIT_SUCCESS;
    }

    // Run tests serially.
    if (!mBotMode)
    {
        // Only start the watchdog if the debugger is not attached and we're a child process.
        if (!angle::IsDebuggerAttached() && mBatchId != -1)
        {
            startWatchdog();
        }

        int retVal = RUN_ALL_TESTS();
        {
            std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
            mTestResults.allDone = true;
        }

        if (mWatchdogThread.joinable())
        {
            mWatchdogThread.join();
        }
        return retVal;
    }

    Timer totalRunTime;
    totalRunTime.start();

    Timer messageTimer;
    messageTimer.start();

    uint32_t batchId = 0;

    while (!mTestQueue.empty() || !mCurrentProcesses.empty())
    {
        bool progress = false;

        // Spawn a process if needed and possible.
        if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
        {
            std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
            mTestQueue.pop();

            if (!launchChildTestProcess(++batchId, testsInBatch))
            {
                return 1;
            }

            progress = true;
        }

        // Check for process completion.
        uint32_t totalTestCount = 0;
        for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
        {
            ProcessInfo &processInfo = *processIter;
            if (processInfo.process->finished())
            {
                if (!finishProcess(&processInfo))
                {
                    return 1;
                }
                processIter = mCurrentProcesses.erase(processIter);
                progress    = true;
            }
            else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
            {
                // Terminate the process and record timeouts for the batch.
                // Because we can't determine which sub-test caused a timeout, record the whole
                // batch as a timeout failure. Can be improved by using socket message passing.
                if (!processInfo.process->kill())
                {
                    return 1;
                }
                for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
                {
                    // Because the whole batch failed we can't know how long each test took.
                    mTestResults.results[testIdentifier].type = TestResultType::Timeout;
                    mFailureCount++;
                }

                processIter = mCurrentProcesses.erase(processIter);
                progress    = true;
            }
            else
            {
                totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
                processIter++;
            }
        }

        if (progress)
        {
            messageTimer.start();
        }
        else if (messageTimer.getElapsedTime() > kIdleMessageTimeout)
        {
            const ProcessInfo &processInfo = mCurrentProcesses[0];
            double processTime             = processInfo.process->getElapsedTimeSeconds();
            printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
                   static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
            messageTimer.start();
        }

        // Early exit if we passed the maximum failure threshold. Still wait for current tests.
        if (mFailureCount > mMaxFailures && !mTestQueue.empty())
        {
            printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
            TestQueue emptyTestQueue;
            std::swap(mTestQueue, emptyTestQueue);
        }

        // Sleep briefly and continue.
        angle::Sleep(100);
    }

    // Dump combined results.
    if (mFailureCount > mMaxFailures)
    {
        printf(
            "Omitted results files because the failure count (%d) exceeded the maximum number of "
            "failures (%d).\n",
            mFailureCount, mMaxFailures);
    }
    else
    {
        WriteOutputFiles(false, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                         mTestSuiteName.c_str());
    }

    totalRunTime.stop();
    printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedTime());

    return printFailuresAndReturnCount() == 0 ? 0 : 1;
}

int TestSuite::printFailuresAndReturnCount() const
{
    std::vector<std::string> failures;
    uint32_t skipCount = 0;

    for (const auto &resultIter : mTestResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::Skip)
        {
            skipCount++;
        }
        else if (result.type != TestResultType::Pass)
        {
            const FileLine &fileLine = mTestFileLines.find(id)->second;

            std::stringstream failureMessage;
            failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
                           << ResultTypeToString(result.type) << ")";
            failures.emplace_back(failureMessage.str());
        }
    }

    if (failures.empty())
        return 0;

    printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
    for (const std::string &failure : failures)
    {
        printf("    %s\n", failure.c_str());
    }
    if (skipCount > 0)
    {
        printf("%u tests skipped.\n", skipCount);
    }

    return static_cast<int>(failures.size());
}

void TestSuite::startWatchdog()
{
    auto watchdogMain = [this]() {
        do
        {
            {
                std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
                if (mTestResults.currentTestTimer.getElapsedTime() >
                    mTestResults.currentTestTimeout)
                {
                    break;
                }

                if (mTestResults.allDone)
                    return;
            }

            angle::Sleep(500);
        } while (true);
        onCrashOrTimeout(TestResultType::Timeout);
        ::_Exit(EXIT_FAILURE);
    };
    mWatchdogThread = std::thread(watchdogMain);
}

void TestSuite::addHistogramSample(const std::string &measurement,
                                   const std::string &story,
                                   double value,
                                   const std::string &units)
{
    mHistogramWriter.addSample(measurement, story, value, units);
}

std::string TestSuite::addTestArtifact(const std::string &artifactName)
{
    mTestResults.testArtifactPaths.push_back(artifactName);

    if (mTestArtifactDirectory.empty())
    {
        return artifactName;
    }

    std::stringstream pathStream;
    pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
    return pathStream.str();
}

bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
{
    std::ifstream ifs(fileName);
    if (!ifs.is_open())
    {
        std::cerr << "Error opening " << fileName << "\n";
        return false;
    }

    js::IStreamWrapper ifsWrapper(ifs);
    js::Document document;
    document.ParseStream(ifsWrapper);

    if (document.HasParseError())
    {
        std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
        return false;
    }

    if (!GetTestResultsFromJSON(document, resultsOut))
    {
        std::cerr << "Error getting test results from JSON.\n";
        return false;
    }

    return true;
}
"s" : "");1816for (const std::string &failure : failures)1817{1818printf(" %s\n", failure.c_str());1819}1820if (skipCount > 0)1821{1822printf("%u tests skipped.\n", skipCount);1823}18241825return static_cast<int>(failures.size());1826}18271828void TestSuite::startWatchdog()1829{1830auto watchdogMain = [this]() {1831do1832{1833{1834std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);1835if (mTestResults.currentTestTimer.getElapsedTime() >1836mTestResults.currentTestTimeout)1837{1838break;1839}18401841if (mTestResults.allDone)1842return;1843}18441845angle::Sleep(500);1846} while (true);1847onCrashOrTimeout(TestResultType::Timeout);1848::_Exit(EXIT_FAILURE);1849};1850mWatchdogThread = std::thread(watchdogMain);1851}18521853void TestSuite::addHistogramSample(const std::string &measurement,1854const std::string &story,1855double value,1856const std::string &units)1857{1858mHistogramWriter.addSample(measurement, story, value, units);1859}18601861std::string TestSuite::addTestArtifact(const std::string &artifactName)1862{1863mTestResults.testArtifactPaths.push_back(artifactName);18641865if (mTestArtifactDirectory.empty())1866{1867return artifactName;1868}18691870std::stringstream pathStream;1871pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;1872return pathStream.str();1873}18741875bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)1876{1877std::ifstream ifs(fileName);1878if (!ifs.is_open())1879{1880std::cerr << "Error opening " << fileName << "\n";1881return false;1882}18831884js::IStreamWrapper ifsWrapper(ifs);1885js::Document document;1886document.ParseStream(ifsWrapper);18871888if (document.HasParseError())1889{1890std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";1891return false;1892}18931894if (!GetTestResultsFromJSON(document, resultsOut))1895{1896std::cerr << "Error getting test results from JSON.\n";1897return false;1898}18991900return true;1901}19021903void TestSuite::dumpTestExpectationsErrorMessages()1904{1905std::stringstream errorMsgStream;1906for (const auto &message : mTestExpectationsParser.getErrorMessages())1907{1908errorMsgStream << std::endl << " " << message;1909}19101911std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;1912}19131914bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,1915const std::string &fileName)1916{1917if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))1918{1919dumpTestExpectationsErrorMessages();1920return false;1921}1922return true;1923}19241925bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)1926{1927if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))1928{1929dumpTestExpectationsErrorMessages();1930return false;1931}1932return true;1933}19341935bool TestSuite::logAnyUnusedTestExpectations()1936{1937std::stringstream unusedMsgStream;1938bool anyUnused = false;1939for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())1940{1941anyUnused = true;1942unusedMsgStream << std::endl << " " << message;1943}1944if (anyUnused)1945{1946std::cerr << "Failed to validate test expectations." 

bool TestSuite::logAnyUnusedTestExpectations()
{
    std::stringstream unusedMsgStream;
    bool anyUnused = false;
    for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
    {
        anyUnused = true;
        unusedMsgStream << std::endl << " " << message;
    }
    if (anyUnused)
    {
        std::cerr << "Failed to validate test expectations." << unusedMsgStream.str() << std::endl;
        return true;
    }
    return false;
}

int32_t TestSuite::getTestExpectation(const std::string &testName)
{
    return mTestExpectationsParser.getTestExpectation(testName);
}

void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
{
    double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
                             ? getSlowTestTimeout()
                             : mTestTimeout;
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    mTestResults.currentTestTimeout = testTimeout;
}

int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
                                                                const std::string &testName)
{
    uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
    maybeUpdateTestTimeout(expectation);
    return expectation;
}

int TestSuite::getSlowTestTimeout() const
{
    return mTestTimeout * kSlowTestTimeoutScale;
}

const char *TestResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "Crash";
        case TestResultType::Fail:
            return "Fail";
        case TestResultType::NoResult:
            return "NoResult";
        case TestResultType::Pass:
            return "Pass";
        case TestResultType::Skip:
            return "Skip";
        case TestResultType::Timeout:
            return "Timeout";
        case TestResultType::Unknown:
            return "Unknown";
    }
}

}  // namespace angle