Commit c06a424f by Jamie Madill Committed by Commit Bot

Test Runner: Robustly handle unexpected crashes.

This adds the ability for the test runner to parse unexpected crashes from the stdout. It also processes the stdout to determine which tests failed. Tests that run after the crash are re-tried in a follow-up child process. This will allow the test runner to handle very crashy test suites, and also processes crashes from win-asan and other configs where it is harder to intercept crashes. Bug: angleproject:5251 Change-Id: Iee03130622571580cb7910f4fb097fe3659d75ec Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/2513288 Commit-Queue: Jamie Madill <jmadill@chromium.org> Reviewed-by: Yuly Novikov <ynovikov@chromium.org> Reviewed-by: Tim Van Patten <timvp@google.com>
parent 3fcf17e6
...@@ -28,8 +28,19 @@ class CompilerTestEnvironment : public testing::Environment ...@@ -28,8 +28,19 @@ class CompilerTestEnvironment : public testing::Environment
} }
}; };
// Enables verbose test-runner logging; set by the "-v" / "--verbose" flags in
// main(). This variable is also defined in test_utils_unittest_helper.
bool gVerbose = false;
int main(int argc, char **argv) int main(int argc, char **argv)
{ {
for (int argIndex = 1; argIndex < argc; ++argIndex)
{
if (strcmp(argv[argIndex], "-v") == 0 || strcmp(argv[argIndex], "--verbose") == 0)
{
gVerbose = true;
}
}
angle::TestSuite testSuite(&argc, argv); angle::TestSuite testSuite(&argc, argv);
testing::AddGlobalTestEnvironment(new CompilerTestEnvironment()); testing::AddGlobalTestEnvironment(new CompilerTestEnvironment());
return testSuite.run(); return testSuite.run();
......
...@@ -25,6 +25,7 @@ following additional command-line arguments: ...@@ -25,6 +25,7 @@ following additional command-line arguments:
* `--shard-count` and `--shard-index` control the test sharding * `--shard-count` and `--shard-index` control the test sharding
* `--test-timeout` limits the amount of time spent in each test * `--test-timeout` limits the amount of time spent in each test
* `--flaky-retries` allows for tests to fail a fixed number of times and still pass * `--flaky-retries` allows for tests to fail a fixed number of times and still pass
* `--disable-crash-handler` forces off OS-level crash handling
`--isolated-script-test-output` and `--isolated-script-perf-test-output` mirror `--results-file` `--isolated-script-test-output` and `--isolated-script-perf-test-output` mirror `--results-file`
and `--histogram-json-file` respectively. and `--histogram-json-file` respectively.
......
...@@ -45,6 +45,12 @@ constexpr char kListTests[] = "--list-tests"; ...@@ -45,6 +45,12 @@ constexpr char kListTests[] = "--list-tests";
constexpr char kPrintTestStdout[] = "--print-test-stdout"; constexpr char kPrintTestStdout[] = "--print-test-stdout";
constexpr char kResultFileArg[] = "--results-file="; constexpr char kResultFileArg[] = "--results-file=";
constexpr char kTestTimeoutArg[] = "--test-timeout="; constexpr char kTestTimeoutArg[] = "--test-timeout=";
// Flag that forces off OS-level crash handling in the test suite.
constexpr char kDisableCrashHandler[] = "--disable-crash-handler";

// Banners gtest prints around each test's output; used to locate output
// snippets and to reconstruct pass/fail/crash state from a crashed child
// process's stdout. NOTE(review): gtest pads these banners with spaces
// (e.g. "[ RUN      ] "); the literals here appear whitespace-collapsed by
// the diff rendering — confirm they match gtest's exact output.
constexpr char kStartedTestString[] = "[ RUN ] ";
constexpr char kPassedTestString[] = "[ OK ] ";
constexpr char kFailedTestString[] = "[ FAILED ] ";
#if defined(NDEBUG) #if defined(NDEBUG)
constexpr int kDefaultTestTimeout = 20; constexpr int kDefaultTestTimeout = 20;
#else #else
...@@ -670,19 +676,19 @@ void PrintTestOutputSnippet(const TestIdentifier &id, ...@@ -670,19 +676,19 @@ void PrintTestOutputSnippet(const TestIdentifier &id,
nameStream << id; nameStream << id;
std::string fullName = nameStream.str(); std::string fullName = nameStream.str();
size_t runPos = fullOutput.find(std::string("[ RUN ] ") + fullName); size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
if (runPos == std::string::npos) if (runPos == std::string::npos)
{ {
printf("Cannot locate test output snippet.\n"); printf("Cannot locate test output snippet.\n");
return; return;
} }
size_t endPos = fullOutput.find(std::string("[ FAILED ] ") + fullName, runPos); size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
// Only clip the snippet to the "OK" message if the test really // Only clip the snippet to the "OK" message if the test really
// succeeded. It still might have e.g. crashed after printing it. // succeeded. It still might have e.g. crashed after printing it.
if (endPos == std::string::npos && result.type == TestResultType::Pass) if (endPos == std::string::npos && result.type == TestResultType::Pass)
{ {
endPos = fullOutput.find(std::string("[ OK ] ") + fullName, runPos); endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
} }
if (endPos != std::string::npos) if (endPos != std::string::npos)
{ {
...@@ -873,6 +879,7 @@ TestSuite::TestSuite(int *argc, char **argv) ...@@ -873,6 +879,7 @@ TestSuite::TestSuite(int *argc, char **argv)
mGTestListTests(false), mGTestListTests(false),
mListTests(false), mListTests(false),
mPrintTestStdout(false), mPrintTestStdout(false),
mDisableCrashHandler(false),
mBatchSize(kDefaultBatchSize), mBatchSize(kDefaultBatchSize),
mCurrentResultCount(0), mCurrentResultCount(0),
mTotalResultCount(0), mTotalResultCount(0),
...@@ -889,10 +896,6 @@ TestSuite::TestSuite(int *argc, char **argv) ...@@ -889,10 +896,6 @@ TestSuite::TestSuite(int *argc, char **argv)
testing::GTEST_FLAG(catch_exceptions) = false; testing::GTEST_FLAG(catch_exceptions) = false;
#endif #endif
// Note that the crash callback must be owned and not use global constructors.
mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
InitCrashHandler(&mCrashCallback);
if (*argc <= 0) if (*argc <= 0)
{ {
printf("Missing test arguments.\n"); printf("Missing test arguments.\n");
...@@ -927,6 +930,13 @@ TestSuite::TestSuite(int *argc, char **argv) ...@@ -927,6 +930,13 @@ TestSuite::TestSuite(int *argc, char **argv)
++argIndex; ++argIndex;
} }
if (!mDisableCrashHandler)
{
// Note that the crash callback must be owned and not use global constructors.
mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
InitCrashHandler(&mCrashCallback);
}
std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX"); std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
if (!envShardIndex.empty()) if (!envShardIndex.empty())
{ {
...@@ -1121,7 +1131,8 @@ bool TestSuite::parseSingleArg(const char *argument) ...@@ -1121,7 +1131,8 @@ bool TestSuite::parseSingleArg(const char *argument)
ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) || ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) ||
ParseFlag(kGTestListTests, argument, &mGTestListTests) || ParseFlag(kGTestListTests, argument, &mGTestListTests) ||
ParseFlag(kListTests, argument, &mListTests) || ParseFlag(kListTests, argument, &mListTests) ||
ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout)); ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout) ||
ParseFlag(kDisableCrashHandler, argument, &mDisableCrashHandler));
} }
void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout) void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
...@@ -1202,6 +1213,11 @@ bool TestSuite::launchChildTestProcess(uint32_t batchId, ...@@ -1202,6 +1213,11 @@ bool TestSuite::launchChildTestProcess(uint32_t batchId,
args.push_back(arg.c_str()); args.push_back(arg.c_str());
} }
if (mDisableCrashHandler)
{
args.push_back(kDisableCrashHandler);
}
std::string timeoutStr; std::string timeoutStr;
if (mTestTimeout != kDefaultTestTimeout) if (mTestTimeout != kDefaultTestTimeout)
{ {
...@@ -1232,6 +1248,19 @@ bool TestSuite::launchChildTestProcess(uint32_t batchId, ...@@ -1232,6 +1248,19 @@ bool TestSuite::launchChildTestProcess(uint32_t batchId,
return true; return true;
} }
// Parses a test name captured from a gtest stdout line (the text after a
// "[ RUN ]"-style banner) and records |result| for that test in |results|.
// Asserts if the trimmed name does not parse as a valid TestIdentifier.
void ParseTestIdentifierAndSetResult(const std::string &testName,
                                     TestResultType result,
                                     TestResults *results)
{
    // Trim off any whitespace + extra stuff at the end of the string.
    std::string modifiedTestName = testName.substr(0, testName.find(' '));
    // Fix: search for '\r' in the already-trimmed string, not in |testName|.
    // The original used |testName|'s index as a length on the shorter string,
    // which is only correct by coincidence and breaks if the positions differ.
    modifiedTestName = modifiedTestName.substr(0, modifiedTestName.find('\r'));

    TestIdentifier id;
    bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
    ASSERT(ok);

    results->results[id] = {result};
}
bool TestSuite::finishProcess(ProcessInfo *processInfo) bool TestSuite::finishProcess(ProcessInfo *processInfo)
{ {
// Get test results and merge into master list. // Get test results and merge into master list.
...@@ -1239,8 +1268,42 @@ bool TestSuite::finishProcess(ProcessInfo *processInfo) ...@@ -1239,8 +1268,42 @@ bool TestSuite::finishProcess(ProcessInfo *processInfo)
if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults)) if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
{ {
std::cerr << "Error reading test results from child process.\n"; std::cerr << "Warning: could not find test results file from child process.\n";
return false;
// First assume all tests get skipped.
for (const TestIdentifier &id : processInfo->testsInBatch)
{
batchResults.results[id] = {TestResultType::NoResult};
}
// Attempt to reconstruct passing list from stdout snippets.
const std::string &batchStdout = processInfo->process->getStdout();
std::istringstream linesStream(batchStdout);
std::string line;
while (std::getline(linesStream, line))
{
size_t startPos = line.find(kStartedTestString);
size_t failPos = line.find(kFailedTestString);
size_t passPos = line.find(kPassedTestString);
if (startPos != std::string::npos)
{
// Assume a test that's started crashed until we see it completed.
std::string testName = line.substr(strlen(kStartedTestString));
ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
}
else if (failPos != std::string::npos)
{
std::string testName = line.substr(strlen(kFailedTestString));
ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
}
else if (passPos != std::string::npos)
{
std::string testName = line.substr(strlen(kPassedTestString));
ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
}
}
} }
if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries)) if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
......
...@@ -147,6 +147,7 @@ class TestSuite ...@@ -147,6 +147,7 @@ class TestSuite
bool mGTestListTests; bool mGTestListTests;
bool mListTests; bool mListTests;
bool mPrintTestStdout; bool mPrintTestStdout;
bool mDisableCrashHandler;
int mBatchSize; int mBatchSize;
int mCurrentResultCount; int mCurrentResultCount;
int mTotalResultCount; int mTotalResultCount;
......
...@@ -21,14 +21,15 @@ using namespace angle; ...@@ -21,14 +21,15 @@ using namespace angle;
namespace js = rapidjson; namespace js = rapidjson;
// This file is included in both angle_unittests and test_utils_unittest_helper. This variable is
// defined separately in each test target's main file.
extern bool gVerbose;
namespace namespace
{ {
constexpr char kTestHelperExecutable[] = "test_utils_unittest_helper"; constexpr char kTestHelperExecutable[] = "test_utils_unittest_helper";
constexpr int kFlakyRetries = 3; constexpr int kFlakyRetries = 3;
// Enable this for debugging.
constexpr bool kDebugOutput = false;
class TestSuiteTest : public testing::Test class TestSuiteTest : public testing::Test
{ {
protected: protected:
...@@ -40,7 +41,9 @@ class TestSuiteTest : public testing::Test ...@@ -40,7 +41,9 @@ class TestSuiteTest : public testing::Test
} }
} }
bool runTestSuite(const std::vector<std::string> &extraArgs, TestResults *actualResults) bool runTestSuite(const std::vector<std::string> &extraArgs,
TestResults *actualResults,
bool validateStderr)
{ {
std::string executablePath = GetExecutableDirectory(); std::string executablePath = GetExecutableDirectory();
EXPECT_NE(executablePath, ""); EXPECT_NE(executablePath, "");
...@@ -69,7 +72,7 @@ class TestSuiteTest : public testing::Test ...@@ -69,7 +72,7 @@ class TestSuiteTest : public testing::Test
args.push_back(arg.c_str()); args.push_back(arg.c_str());
} }
if (kDebugOutput) if (gVerbose)
{ {
printf("Test arguments:\n"); printf("Test arguments:\n");
for (const char *arg : args) for (const char *arg : args)
...@@ -83,9 +86,13 @@ class TestSuiteTest : public testing::Test ...@@ -83,9 +86,13 @@ class TestSuiteTest : public testing::Test
EXPECT_TRUE(process->started()); EXPECT_TRUE(process->started());
EXPECT_TRUE(process->finish()); EXPECT_TRUE(process->finish());
EXPECT_TRUE(process->finished()); EXPECT_TRUE(process->finished());
EXPECT_EQ(process->getStderr(), "");
if (kDebugOutput) if (validateStderr)
{
EXPECT_EQ(process->getStderr(), "");
}
if (gVerbose)
{ {
printf("stdout:\n%s\n", process->getStdout().c_str()); printf("stdout:\n%s\n", process->getStdout().c_str());
} }
...@@ -103,13 +110,12 @@ TEST_F(TestSuiteTest, RunMockTests) ...@@ -103,13 +110,12 @@ TEST_F(TestSuiteTest, RunMockTests)
std::vector<std::string> extraArgs = {"--gtest_filter=MockTestSuiteTest.DISABLED_*"}; std::vector<std::string> extraArgs = {"--gtest_filter=MockTestSuiteTest.DISABLED_*"};
TestResults actual; TestResults actual;
ASSERT_TRUE(runTestSuite(extraArgs, &actual)); ASSERT_TRUE(runTestSuite(extraArgs, &actual, true));
std::map<TestIdentifier, TestResult> expectedResults = { std::map<TestIdentifier, TestResult> expectedResults = {
{{"MockTestSuiteTest", "DISABLED_Pass"}, {TestResultType::Pass, 0.0}}, {{"MockTestSuiteTest", "DISABLED_Pass"}, {TestResultType::Pass, 0.0}},
{{"MockTestSuiteTest", "DISABLED_Fail"}, {TestResultType::Fail, 0.0}}, {{"MockTestSuiteTest", "DISABLED_Fail"}, {TestResultType::Fail, 0.0}},
{{"MockTestSuiteTest", "DISABLED_Timeout"}, {TestResultType::Timeout, 0.0}}, {{"MockTestSuiteTest", "DISABLED_Timeout"}, {TestResultType::Timeout, 0.0}},
// {{"MockTestSuiteTest", "DISABLED_Crash"}, {TestResultType::Crash, 0.0}},
}; };
EXPECT_EQ(expectedResults, actual.results); EXPECT_EQ(expectedResults, actual.results);
...@@ -122,7 +128,7 @@ TEST_F(TestSuiteTest, RunFlakyTests) ...@@ -122,7 +128,7 @@ TEST_F(TestSuiteTest, RunFlakyTests)
"--flaky-retries=" + std::to_string(kFlakyRetries)}; "--flaky-retries=" + std::to_string(kFlakyRetries)};
TestResults actual; TestResults actual;
ASSERT_TRUE(runTestSuite(extraArgs, &actual)); ASSERT_TRUE(runTestSuite(extraArgs, &actual, true));
std::map<TestIdentifier, TestResult> expectedResults = { std::map<TestIdentifier, TestResult> expectedResults = {
{{"MockFlakyTestSuiteTest", "DISABLED_Flaky"}, {{"MockFlakyTestSuiteTest", "DISABLED_Flaky"},
...@@ -131,6 +137,27 @@ TEST_F(TestSuiteTest, RunFlakyTests) ...@@ -131,6 +137,27 @@ TEST_F(TestSuiteTest, RunFlakyTests)
EXPECT_EQ(expectedResults, actual.results); EXPECT_EQ(expectedResults, actual.results);
} }
// Verifies that crashes are handled even without the crash handler.
TEST_F(TestSuiteTest, RunCrashingTests)
{
    // Run a pass, a fail, and the crash-suite tests with OS-level crash
    // handling disabled.
    const std::vector<std::string> runnerArgs = {
        "--gtest_filter=MockTestSuiteTest.DISABLED_Pass:MockTestSuiteTest.DISABLED_Fail:"
        "MockCrashTestSuiteTest.DISABLED_*",
        "--disable-crash-handler"};

    // Skip the stderr validation (validateStderr == false) — the crashing
    // child process presumably emits stderr output; confirm against helper.
    TestResults actualResults;
    ASSERT_TRUE(runTestSuite(runnerArgs, &actualResults, false));

    const std::map<TestIdentifier, TestResult> expectedResults = {
        {{"MockTestSuiteTest", "DISABLED_Pass"}, {TestResultType::Pass, 0.0}},
        {{"MockTestSuiteTest", "DISABLED_Fail"}, {TestResultType::Fail, 0.0}},
        {{"MockCrashTestSuiteTest", "DISABLED_Crash"}, {TestResultType::Crash, 0.0}},
        {{"MockCrashTestSuiteTest", "DISABLED_PassAfterCrash"}, {TestResultType::Pass, 0.0}},
    };

    EXPECT_EQ(expectedResults, actualResults.results);
}
// Normal passing test. // Normal passing test.
TEST(MockTestSuiteTest, DISABLED_Pass) TEST(MockTestSuiteTest, DISABLED_Pass)
{ {
...@@ -187,8 +214,14 @@ TEST(MockFlakyTestSuiteTest, DISABLED_Flaky) ...@@ -187,8 +214,14 @@ TEST(MockFlakyTestSuiteTest, DISABLED_Flaky)
} }
// Trigger a test crash. // Trigger a test crash.
// TEST(MockTestSuiteTest, DISABLED_Crash) TEST(MockCrashTestSuiteTest, DISABLED_Crash)
// { {
// ANGLE_CRASH(); ANGLE_CRASH();
// } }
// This test runs after the crash test. Tests that run after a crash are
// re-tried in a follow-up child process, so this one should still be
// reported as a Pass despite the preceding crash.
TEST(MockCrashTestSuiteTest, DISABLED_PassAfterCrash)
{
    EXPECT_TRUE(true);
}
} // namespace } // namespace
...@@ -12,17 +12,27 @@ ...@@ -12,17 +12,27 @@
#include <string.h> #include <string.h>
// Verbose-logging flag referenced via `extern` from the shared test sources.
// This variable is also defined in angle_unittest_main.
bool gVerbose = false;
int main(int argc, char **argv) int main(int argc, char **argv)
{ {
bool runTestSuite = false;
for (int argIndex = 1; argIndex < argc; ++argIndex) for (int argIndex = 1; argIndex < argc; ++argIndex)
{ {
if (strcmp(argv[argIndex], kRunTestSuite) == 0) if (strcmp(argv[argIndex], kRunTestSuite) == 0)
{ {
angle::TestSuite testSuite(&argc, argv); runTestSuite = true;
return testSuite.run();
} }
} }
if (runTestSuite)
{
angle::TestSuite testSuite(&argc, argv);
return testSuite.run();
}
if (argc != 3 || strcmp(argv[1], kRunAppTestArg1) != 0 || strcmp(argv[2], kRunAppTestArg2) != 0) if (argc != 3 || strcmp(argv[1], kRunAppTestArg1) != 0 || strcmp(argv[2], kRunAppTestArg2) != 0)
{ {
fprintf(stderr, "Expected command line:\n%s %s %s\n", argv[0], kRunAppTestArg1, fprintf(stderr, "Expected command line:\n%s %s %s\n", argv[0], kRunAppTestArg1,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment