Drop dedicated benchmark lifecycle.

Instead, roll the output scraping into the main runner. Pass a perf flag to
the runner in order to control leak checking, apply tags via the macro and
appropriately disable logging. This may be removed in the future.

PiperOrigin-RevId: 384348035
This commit is contained in:
Adin Scannell 2021-07-12 16:58:30 -07:00 committed by gVisor bot
parent f51e0486d4
commit 275932bf08
1 changed file with 11 additions and 4 deletions

View File

@@ -20,6 +20,7 @@
#include "benchmark/benchmark.h"
#include "test/util/logging.h"
extern bool FLAGS_gtest_list_tests;
extern bool FLAGS_benchmark_list_tests;
extern std::string FLAGS_benchmark_filter;
@@ -40,12 +41,18 @@ void TestInit(int* argc, char*** argv) {
}
// Runs the selected googletest tests and google-benchmark benchmarks.
//
// Listing modes short-circuit: --gtest_list_tests defers entirely to gtest,
// and --benchmark_list_tests defers entirely to the benchmark library.
// Otherwise both tests and benchmarks run in a single invocation, and the
// gtest result code is returned (benchmark output is scraped by the runner,
// so benchmarks do not contribute to the exit status).
//
// Returns: the RUN_ALL_TESTS() result, or 0 in a listing-only mode.
int RunAllTests() {
  if (::testing::FLAGS_gtest_list_tests) {
    // List tests only; RUN_ALL_TESTS() prints the test names and exits.
    return RUN_ALL_TESTS();
  }
  if (FLAGS_benchmark_list_tests) {
    // List benchmarks only; nothing is executed.
    benchmark::RunSpecifiedBenchmarks();
    return 0;
  }
  // Run selected tests & benchmarks.
  int rc = RUN_ALL_TESTS();
  benchmark::RunSpecifiedBenchmarks();
  return rc;
}
} // namespace testing