Commit f8e5b97e by Jamie Madill Committed by Commit Bot

Additional perf test cleanup & functionality.

Adds several new command line arguments and documents all args in the README. Changes the trace tests to output milliseconds. Bug: b/169600938 Change-Id: Ie8c7840a6a8958951992c806979dde6dc7d8a709 Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/2438194 Commit-Queue: Jamie Madill <jmadill@chromium.org> Reviewed-by: Courtney Goeltzenleuchter <courtneygo@google.com> Reviewed-by: Cody Northrop <cnorthrop@google.com>
parent 34f66126
...@@ -36,11 +36,10 @@ using namespace angle; ...@@ -36,11 +36,10 @@ using namespace angle;
namespace namespace
{ {
constexpr size_t kInitialTraceEventBufferSize = 50000; constexpr size_t kInitialTraceEventBufferSize = 50000;
constexpr double kMilliSecondsPerSecond = 1e3;
constexpr double kMicroSecondsPerSecond = 1e6; constexpr double kMicroSecondsPerSecond = 1e6;
constexpr double kNanoSecondsPerSecond = 1e9; constexpr double kNanoSecondsPerSecond = 1e9;
constexpr double kCalibrationRunTimeSeconds = 1.0;
constexpr double kMaximumRunTimeSeconds = 10.0; constexpr double kMaximumRunTimeSeconds = 10.0;
constexpr uint32_t kNumTrials = 3;
struct TraceCategory struct TraceCategory
{ {
...@@ -174,7 +173,8 @@ TraceEvent::TraceEvent(char phaseIn, ...@@ -174,7 +173,8 @@ TraceEvent::TraceEvent(char phaseIn,
ANGLEPerfTest::ANGLEPerfTest(const std::string &name, ANGLEPerfTest::ANGLEPerfTest(const std::string &name,
const std::string &backend, const std::string &backend,
const std::string &story, const std::string &story,
unsigned int iterationsPerStep) unsigned int iterationsPerStep,
const char *units)
: mName(name), : mName(name),
mBackend(backend), mBackend(backend),
mStory(story), mStory(story),
...@@ -195,8 +195,8 @@ ANGLEPerfTest::ANGLEPerfTest(const std::string &name, ...@@ -195,8 +195,8 @@ ANGLEPerfTest::ANGLEPerfTest(const std::string &name,
mStory = mStory.substr(1); mStory = mStory.substr(1);
} }
mReporter = std::make_unique<perf_test::PerfResultReporter>(mName + mBackend, mStory); mReporter = std::make_unique<perf_test::PerfResultReporter>(mName + mBackend, mStory);
mReporter->RegisterImportantMetric(".wall_time", "ns"); mReporter->RegisterImportantMetric(".wall_time", units);
mReporter->RegisterImportantMetric(".gpu_time", "ns"); mReporter->RegisterImportantMetric(".gpu_time", units);
mReporter->RegisterFyiMetric(".steps", "count"); mReporter->RegisterFyiMetric(".steps", "count");
} }
...@@ -209,34 +209,46 @@ void ANGLEPerfTest::run() ...@@ -209,34 +209,46 @@ void ANGLEPerfTest::run()
return; return;
} }
// Calibrate to a fixed number of steps during an initial set time. uint32_t numTrials = OneFrame() ? 1 : gTestTrials;
if (mStepsToRun <= 0)
for (uint32_t trial = 0; trial < numTrials; ++trial)
{ {
calibrateStepsToRun(); doRunLoop(kMaximumRunTimeSeconds, mStepsToRun);
printResults();
if (gVerboseLogging)
{
double trialTime = mTimer.getElapsedTime();
printf("Trial %d time: %.2lf seconds.\n", trial + 1, trialTime);
double secondsPerStep = trialTime / static_cast<double>(mNumStepsPerformed);
double secondsPerIteration = secondsPerStep / static_cast<double>(mIterationsPerStep);
mTestTrialResults.push_back(secondsPerIteration * 1000.0);
}
} }
// Check again for early exit. if (gVerboseLogging)
if (mSkipTest)
{ {
return; double numResults = static_cast<double>(mTestTrialResults.size());
double mean = 0;
for (double trialResult : mTestTrialResults)
{
mean += trialResult;
} }
mean /= numResults;
// Do another warmup run. Seems to consistently improve results. double variance = 0;
if (gStepsToRunOverride != 1) for (double trialResult : mTestTrialResults)
{ {
doRunLoop(kMaximumRunTimeSeconds); double difference = trialResult - mean;
variance += difference * difference;
} }
variance /= numResults;
uint32_t numTrials = gStepsToRunOverride == 1 ? 1 : kNumTrials; double standardDeviation = std::sqrt(variance);
double coefficientOfVariation = standardDeviation / mean;
for (uint32_t trial = 0; trial < numTrials; ++trial) printf("Mean result time: %.4lf ms.\n", mean);
{ printf("Coefficient of variation: %.2lf%%\n", coefficientOfVariation * 100.0);
doRunLoop(kMaximumRunTimeSeconds);
printResults();
if (gVerboseLogging)
{
printf("Trial %d time: %.2lf seconds.\n", trial + 1, mTimer.getElapsedTime());
}
} }
} }
...@@ -246,7 +258,7 @@ void ANGLEPerfTest::setStepsPerRunLoopStep(int stepsPerRunLoop) ...@@ -246,7 +258,7 @@ void ANGLEPerfTest::setStepsPerRunLoopStep(int stepsPerRunLoop)
mStepsPerRunLoopStep = stepsPerRunLoop; mStepsPerRunLoopStep = stepsPerRunLoop;
} }
void ANGLEPerfTest::doRunLoop(double maxRunTime) void ANGLEPerfTest::doRunLoop(double maxRunTime, int maxStepsToRun)
{ {
mNumStepsPerformed = 0; mNumStepsPerformed = 0;
mRunning = true; mRunning = true;
...@@ -264,7 +276,7 @@ void ANGLEPerfTest::doRunLoop(double maxRunTime) ...@@ -264,7 +276,7 @@ void ANGLEPerfTest::doRunLoop(double maxRunTime)
{ {
mRunning = false; mRunning = false;
} }
else if (mNumStepsPerformed >= mStepsToRun) else if (mNumStepsPerformed >= maxStepsToRun)
{ {
mRunning = false; mRunning = false;
} }
...@@ -306,6 +318,7 @@ double ANGLEPerfTest::printResults() ...@@ -306,6 +318,7 @@ double ANGLEPerfTest::printResults()
// already registered. // already registered.
if (!mReporter->GetMetricInfo(clockNames[i], &metricInfo)) if (!mReporter->GetMetricInfo(clockNames[i], &metricInfo))
{ {
printf("Seconds per iteration: %lf\n", secondsPerIteration);
units = secondsPerIteration > 1e-3 ? "us" : "ns"; units = secondsPerIteration > 1e-3 ? "us" : "ns";
mReporter->RegisterImportantMetric(clockNames[i], units); mReporter->RegisterImportantMetric(clockNames[i], units);
} }
...@@ -314,7 +327,11 @@ double ANGLEPerfTest::printResults() ...@@ -314,7 +327,11 @@ double ANGLEPerfTest::printResults()
units = metricInfo.units; units = metricInfo.units;
} }
if (units == "us") if (units == "ms")
{
retValue = secondsPerIteration * kMilliSecondsPerSecond;
}
else if (units == "us")
{ {
retValue = secondsPerIteration * kMicroSecondsPerSecond; retValue = secondsPerIteration * kMicroSecondsPerSecond;
} }
...@@ -334,24 +351,12 @@ double ANGLEPerfTest::normalizedTime(size_t value) const ...@@ -334,24 +351,12 @@ double ANGLEPerfTest::normalizedTime(size_t value) const
void ANGLEPerfTest::calibrateStepsToRun() void ANGLEPerfTest::calibrateStepsToRun()
{ {
// First do two warmup loops. There's no science to this. Two loops was experimentally helpful doRunLoop(gTestTimeSeconds, std::numeric_limits<int>::max());
// on a Windows NVIDIA setup when testing with Vulkan and native trace tests.
for (int i = 0; i < 2; ++i)
{
doRunLoop(kCalibrationRunTimeSeconds);
if (gVerboseLogging)
{
printf("Pre-calibration warm-up took %.2lf seconds.\n", mTimer.getElapsedTime());
}
}
// Now the real computation.
doRunLoop(kCalibrationRunTimeSeconds);
double elapsedTime = mTimer.getElapsedTime(); double elapsedTime = mTimer.getElapsedTime();
// Scale steps down according to the time that exceeded one second. // Scale steps down according to the time that exceeded one second.
double scale = kCalibrationRunTimeSeconds / elapsedTime; double scale = gTestTimeSeconds / elapsedTime;
mStepsToRun = static_cast<unsigned int>(static_cast<double>(mNumStepsPerformed) * scale); mStepsToRun = static_cast<unsigned int>(static_cast<double>(mNumStepsPerformed) * scale);
if (gVerboseLogging) if (gVerboseLogging)
...@@ -430,11 +435,14 @@ std::string RenderTestParams::backendAndStory() const ...@@ -430,11 +435,14 @@ std::string RenderTestParams::backendAndStory() const
return backend() + story(); return backend() + story();
} }
ANGLERenderTest::ANGLERenderTest(const std::string &name, const RenderTestParams &testParams) ANGLERenderTest::ANGLERenderTest(const std::string &name,
const RenderTestParams &testParams,
const char *units)
: ANGLEPerfTest(name, : ANGLEPerfTest(name,
testParams.backend(), testParams.backend(),
testParams.story(), testParams.story(),
OneFrame() ? 1 : testParams.iterationsPerStep), OneFrame() ? 1 : testParams.iterationsPerStep,
units),
mTestParams(testParams), mTestParams(testParams),
mIsTimestampQueryAvailable(false), mIsTimestampQueryAvailable(false),
mGLWindow(nullptr), mGLWindow(nullptr),
...@@ -588,6 +596,8 @@ void ANGLERenderTest::SetUp() ...@@ -588,6 +596,8 @@ void ANGLERenderTest::SetUp()
// FAIL returns. // FAIL returns.
} }
mTestTrialResults.reserve(gTestTrials);
// Capture a screenshot if enabled. // Capture a screenshot if enabled.
if (gScreenShotDir != nullptr) if (gScreenShotDir != nullptr)
{ {
...@@ -598,6 +608,15 @@ void ANGLERenderTest::SetUp() ...@@ -598,6 +608,15 @@ void ANGLERenderTest::SetUp()
saveScreenshot(screenshotName); saveScreenshot(screenshotName);
} }
for (int loopIndex = 0; loopIndex < gWarmupLoops; ++loopIndex)
{
doRunLoop(gTestTimeSeconds, std::numeric_limits<int>::max());
if (gVerboseLogging)
{
printf("Warm-up loop took %.2lf seconds.\n", mTimer.getElapsedTime());
}
}
if (mStepsToRun <= 0) if (mStepsToRun <= 0)
{ {
calibrateStepsToRun(); calibrateStepsToRun();
......
...@@ -60,7 +60,8 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable ...@@ -60,7 +60,8 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable
ANGLEPerfTest(const std::string &name, ANGLEPerfTest(const std::string &name,
const std::string &backend, const std::string &backend,
const std::string &story, const std::string &story,
unsigned int iterationsPerStep); unsigned int iterationsPerStep,
const char *units = "ns");
~ANGLEPerfTest() override; ~ANGLEPerfTest() override;
virtual void step() = 0; virtual void step() = 0;
...@@ -86,7 +87,7 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable ...@@ -86,7 +87,7 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable
// Defaults to one step per run loop. Can be changed in any test. // Defaults to one step per run loop. Can be changed in any test.
void setStepsPerRunLoopStep(int stepsPerRunLoop); void setStepsPerRunLoopStep(int stepsPerRunLoop);
void doRunLoop(double maxRunTime); void doRunLoop(double maxRunTime, int maxStepsToRun);
// Overriden in trace perf tests. // Overriden in trace perf tests.
virtual void saveScreenshot(const std::string &screenshotName) {} virtual void saveScreenshot(const std::string &screenshotName) {}
...@@ -107,6 +108,7 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable ...@@ -107,6 +108,7 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable
int mStepsPerRunLoopStep; int mStepsPerRunLoopStep;
int mIterationsPerStep; int mIterationsPerStep;
bool mRunning; bool mRunning;
std::vector<double> mTestTrialResults;
}; };
enum class SurfaceType enum class SurfaceType
...@@ -133,7 +135,9 @@ struct RenderTestParams : public angle::PlatformParameters ...@@ -133,7 +135,9 @@ struct RenderTestParams : public angle::PlatformParameters
class ANGLERenderTest : public ANGLEPerfTest class ANGLERenderTest : public ANGLEPerfTest
{ {
public: public:
ANGLERenderTest(const std::string &name, const RenderTestParams &testParams); ANGLERenderTest(const std::string &name,
const RenderTestParams &testParams,
const char *units = "ns");
~ANGLERenderTest() override; ~ANGLERenderTest() override;
void addExtensionPrerequisite(const char *extensionName); void addExtensionPrerequisite(const char *extensionName);
......
...@@ -19,10 +19,26 @@ bool gEnableTrace = false; ...@@ -19,10 +19,26 @@ bool gEnableTrace = false;
const char *gTraceFile = "ANGLETrace.json"; const char *gTraceFile = "ANGLETrace.json";
const char *gScreenShotDir = nullptr; const char *gScreenShotDir = nullptr;
bool gVerboseLogging = false; bool gVerboseLogging = false;
double gTestTimeSeconds = 1.0;
int gTestTrials = 3;
// Default to three warmup loops. There's no science to this. More than two loops was experimentally
// helpful on a Windows NVIDIA setup when testing with Vulkan and native trace tests.
int gWarmupLoops = 3;
} // namespace angle } // namespace angle
namespace namespace
{ {
int ReadIntArgument(const char *arg)
{
std::stringstream strstr;
strstr << arg;
int value;
strstr >> value;
return value;
}
// The same as --screenshot-dir, but used by Chrome tests. // The same as --screenshot-dir, but used by Chrome tests.
constexpr char kRenderTestDirArg[] = "--render-test-output-dir="; constexpr char kRenderTestDirArg[] = "--render-test-output-dir=";
} // namespace } // namespace
...@@ -38,6 +54,7 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv) ...@@ -38,6 +54,7 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv)
if (strcmp("--one-frame-only", argv[argIndex]) == 0) if (strcmp("--one-frame-only", argv[argIndex]) == 0)
{ {
gStepsToRunOverride = 1; gStepsToRunOverride = 1;
gWarmupLoops = 0;
} }
else if (strcmp("--enable-trace", argv[argIndex]) == 0) else if (strcmp("--enable-trace", argv[argIndex]) == 0)
{ {
...@@ -55,11 +72,7 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv) ...@@ -55,11 +72,7 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv)
} }
else if (strcmp("--steps", argv[argIndex]) == 0 && argIndex < *argc - 1) else if (strcmp("--steps", argv[argIndex]) == 0 && argIndex < *argc - 1)
{ {
unsigned int stepsToRun = 0; gStepsToRunOverride = ReadIntArgument(argv[argIndex + 1]);
std::stringstream strstr;
strstr << argv[argIndex + 1];
strstr >> stepsToRun;
gStepsToRunOverride = stepsToRun;
// Skip an additional argument. // Skip an additional argument.
argIndex++; argIndex++;
} }
...@@ -72,10 +85,32 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv) ...@@ -72,10 +85,32 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv)
{ {
gVerboseLogging = true; gVerboseLogging = true;
} }
else if (strcmp("--warmup-loops", argv[argIndex]) == 0)
{
gWarmupLoops = ReadIntArgument(argv[argIndex + 1]);
// Skip an additional argument.
argIndex++;
}
else if (strcmp("--no-warmup", argv[argIndex]) == 0)
{
gWarmupLoops = 0;
}
else if (strncmp(kRenderTestDirArg, argv[argIndex], strlen(kRenderTestDirArg)) == 0) else if (strncmp(kRenderTestDirArg, argv[argIndex], strlen(kRenderTestDirArg)) == 0)
{ {
gScreenShotDir = argv[argIndex] + strlen(kRenderTestDirArg); gScreenShotDir = argv[argIndex] + strlen(kRenderTestDirArg);
} }
else if (strcmp("--test-time", argv[argIndex]) == 0)
{
gTestTimeSeconds = ReadIntArgument(argv[argIndex + 1]);
// Skip an additional argument.
argIndex++;
}
else if (strcmp("--trials", argv[argIndex]) == 0)
{
gTestTrials = ReadIntArgument(argv[argIndex + 1]);
// Skip an additional argument.
argIndex++;
}
else else
{ {
argv[argcOutCount++] = argv[argIndex]; argv[argcOutCount++] = argv[argIndex];
......
...@@ -20,6 +20,9 @@ extern bool gEnableTrace; ...@@ -20,6 +20,9 @@ extern bool gEnableTrace;
extern const char *gTraceFile; extern const char *gTraceFile;
extern const char *gScreenShotDir; extern const char *gScreenShotDir;
extern bool gVerboseLogging; extern bool gVerboseLogging;
extern int gWarmupLoops;
extern double gTestTimeSeconds;
extern int gTestTrials;
inline bool OneFrame() inline bool OneFrame()
{ {
......
...@@ -18,6 +18,29 @@ You can choose individual tests to run with `--gtest_filter=*TestName*`. To sele ...@@ -18,6 +18,29 @@ You can choose individual tests to run with `--gtest_filter=*TestName*`. To sele
ANGLE implements a no-op driver for OpenGL, D3D11 and Vulkan. To run on these configurations use the `gl_null`, `d3d11_null` or `vulkan_null` test configurations. These null drivers will not do any GPU work. They will skip the driver entirely. These null configs are useful for diagnosing performance overhead in ANGLE code. ANGLE implements a no-op driver for OpenGL, D3D11 and Vulkan. To run on these configurations use the `gl_null`, `d3d11_null` or `vulkan_null` test configurations. These null drivers will not do any GPU work. They will skip the driver entirely. These null configs are useful for diagnosing performance overhead in ANGLE code.
### Command-line Arguments
Several command-line arguments control how the tests run:
* `--one-frame-only`: Runs tests once and quickly exits. Used as a quick smoke test.
* `--enable-trace`: Write a JSON event log that can be loaded in Chrome.
* `--trace-file file`: Name of the JSON event log for `--enable-trace`.
* `--calibration`: Prints the number of steps a test runs in a fixed time. Used by `perf_test_runner.py`.
* `--steps x`: Fixed number of steps to run for each test.
* `--screenshot-dir dir`: Directory to store test screenshots. Only implemented in `TracePerfTest`.
* `--render-test-output-dir=dir`: Equivalent to `--screenshot-dir dir`.
* `--verbose-logging`: Print extra timing information.
* `--warmup-loops x`: Number of times to warm up the test before starting timing. Defaults to 3.
* `--no-warmup`: Skip warming up the tests. Equivalent to `--warmup-loops 0`.
* `--test-time x`: Run each test trial for `x` seconds. Defaults to 1 second.
* `--trials x`: Number of times to repeat testing. Defaults to 3.
For example, for an endless run with no warmup, run:
`angle_perftests --gtest_filter=TracePerfTest.Run/vulkan_trex_200 --steps 1000000 --no-warmup`
The command line arguments implementations are located in [`ANGLEPerfTestArgs.cpp`](ANGLEPerfTestArgs.cpp).
## Test Breakdown ## Test Breakdown
* [`DrawCallPerfBenchmark`](DrawCallPerf.cpp): Runs a tight loop around DrawArrays calls. * [`DrawCallPerfBenchmark`](DrawCallPerf.cpp): Runs a tight loop around DrawArrays calls.
......
...@@ -120,7 +120,7 @@ angle::GenericProc KHRONOS_APIENTRY TraceLoadProc(const char *procName) ...@@ -120,7 +120,7 @@ angle::GenericProc KHRONOS_APIENTRY TraceLoadProc(const char *procName)
} }
TracePerfTest::TracePerfTest() TracePerfTest::TracePerfTest()
: ANGLERenderTest("TracePerf", GetParam()), mStartFrame(0), mEndFrame(0) : ANGLERenderTest("TracePerf", GetParam(), "ms"), mStartFrame(0), mEndFrame(0)
{ {
const TracePerfParams &param = GetParam(); const TracePerfParams &param = GetParam();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment