mirror of https://github.com/llvm-mirror/libcxx.git
synced 2025-10-23 10:07:41 +08:00

Summary:
This patch does the following:

1. Checks in a copy of the Google Benchmark library into the libc++ repo under `utils/google-benchmark`.
2. Teaches libc++ how to build Google Benchmark against both (A) an in-tree libc++ and (B) the platform's native STL.
3. Allows performance benchmarks to be built as part of the libc++ build.

Building the benchmarks (and Google Benchmark) is off by default. It must be enabled using the CMake option `-DLIBCXX_INCLUDE_BENCHMARKS=ON`. When this option is enabled the tests under `libcxx/benchmarks` can be built using the `libcxx-benchmarks` target.

On Linux platforms where libstdc++ is the default STL, the CMake option `-DLIBCXX_BUILD_BENCHMARKS_NATIVE_STDLIB=ON` can be used to build each benchmark test against libstdc++ as well. This is useful for comparing performance between standard libraries.

Support for benchmarks is currently very minimal. They must be run manually by the user, and there is no mechanism for detecting performance regressions.

Known Issues:
* `-DLIBCXX_INCLUDE_BENCHMARKS=ON` is only supported for Clang, not GCC, since the `-stdlib=libc++` option is needed to build Google Benchmark.

Reviewers: danalbert, dberlin, chandlerc, mclow.lists, jroelofs

Subscribers: chandlerc, dberlin, tberghammer, danalbert, srhines, hfinkel

Differential Revision: https://reviews.llvm.org/D22240

git-svn-id: https://llvm.org/svn/llvm-project/libcxx/trunk@276049 91177308-0d34-0410-b5e6-96231b3b80d8
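As an illustration (assuming Clang as the host compiler and a Makefile generator, per the Clang-only caveat above; paths are placeholders), a typical configure-and-build sequence might look like:

    cmake -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
          -DLIBCXX_INCLUDE_BENCHMARKS=ON <path-to-libcxx-source>
    make libcxx-benchmarks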
298 lines
10 KiB
C++

#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/check.h" // NOTE: check.h is for internal use only!
#include "../src/re.h"    // NOTE: re.h is for internal use only!
#include <cassert>
#include <cstring>
#include <iostream>
#include <sstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <cmath>

namespace {

// ========================================================================= //
// ---------------------------- Test Case ----------------------------------- //
// ========================================================================= //

enum MatchRules {
  MR_Default, // Skip non-matching lines until a match is found.
  MR_Next     // Match must occur on the next line.
};
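
// For example, a case with MR_Default may be preceded by any number of
// non-matching output lines, while a case with MR_Next must match the very
// next line produced by the reporter or the test aborts.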

struct TestCase {
  std::string regex;
  int match_rule;

  TestCase(std::string re, int rule = MR_Default) : regex(re), match_rule(rule) {}

  // Scan `remaining_output` one line at a time until a line matches `regex`,
  // enforcing `match_rule` along the way. Aborts if the output is exhausted
  // before a match is found.
  void Check(std::stringstream& remaining_output) const {
    benchmark::Regex r;
    std::string err_str;
    r.Init(regex, &err_str);
    CHECK(err_str.empty()) << "Could not construct regex \"" << regex << "\""
                           << " got Error: " << err_str;

    std::string line;
    while (remaining_output.eof() == false) {
      CHECK(remaining_output.good());
      std::getline(remaining_output, line);
      if (r.Match(line)) return;
      CHECK(match_rule != MR_Next) << "Expected line \"" << line
                                   << "\" to match regex \"" << regex << "\"";
    }

    CHECK(remaining_output.eof() == false)
        << "End of output reached before match for regex \"" << regex
        << "\" was found";
  }
};

std::vector<TestCase> ConsoleOutputTests;
std::vector<TestCase> JSONOutputTests;
std::vector<TestCase> CSVOutputTests;

// ========================================================================= //
// -------------------------- Test Helpers --------------------------------- //
// ========================================================================= //

// Broadcasts each reporter callback to a list of wrapped reporters and
// checks that they agree on ReportContext's return value.
class TestReporter : public benchmark::BenchmarkReporter {
public:
  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
      : reporters_(reps) {}

  virtual bool ReportContext(const Context& context) {
    bool last_ret = false;
    bool first = true;
    for (auto rep : reporters_) {
      bool new_ret = rep->ReportContext(context);
      CHECK(first || new_ret == last_ret)
          << "Reports return different values for ReportContext";
      first = false;
      last_ret = new_ret;
    }
    return last_ret;
  }

  virtual void ReportRuns(const std::vector<Run>& report) {
    for (auto rep : reporters_)
      rep->ReportRuns(report);
  }

  virtual void Finalize() {
    for (auto rep : reporters_)
      rep->Finalize();
  }

private:
  std::vector<benchmark::BenchmarkReporter*> reporters_;
};

#define CONCAT2(x, y) x##y
#define CONCAT(x, y) CONCAT2(x, y)

#define ADD_CASES(...) \
  int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
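
// ADD_CASES (above) pastes the current source line number onto "dummy" to
// produce a uniquely named global whose initializer registers the cases at
// program startup. For example, used on (hypothetical) line 42 it expands to:
//   int dummy42 = AddCases(...);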

int AddCases(std::vector<TestCase>* out, std::initializer_list<TestCase> const& v) {
  for (auto const& TC : v)
    out->push_back(TC);
  return 0;
}

template <class First>
std::string join(First f) { return f; }

template <class First, class ...Args>
std::string join(First f, Args&&... args) {
  return std::string(std::move(f)) + "[ ]+" + join(std::forward<Args>(args)...);
}
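
// For example, join("^BM_Foo", dec_re, dec_re) yields the pattern
// "^BM_Foo[ ]+[0-9]+\.[0-9]+[ ]+[0-9]+\.[0-9]+": the arguments separated by
// runs of spaces, mirroring the console reporter's column layout.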

std::string dec_re = "[0-9]+\\.[0-9]+";

#define ADD_COMPLEXITY_CASES(...) \
  int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)

int AddComplexityTest(std::vector<TestCase>* console_out, std::vector<TestCase>* json_out,
                      std::vector<TestCase>* csv_out, std::string big_o_test_name,
                      std::string rms_test_name, std::string big_o) {
  std::string big_o_str = dec_re + " " + big_o;
  AddCases(console_out, {
    {join("^" + big_o_test_name, big_o_str, big_o_str) + "[ ]*$"},
    {join("^" + rms_test_name, "[0-9]+ %", "[0-9]+ %") + "[ ]*$"}
  });
  AddCases(json_out, {
    {"\"name\": \"" + big_o_test_name + "\",$"},
    {"\"cpu_coefficient\": [0-9]+,$", MR_Next},
    {"\"real_coefficient\": [0-9]{1,5},$", MR_Next},
    {"\"big_o\": \"" + big_o + "\",$", MR_Next},
    {"\"time_unit\": \"ns\"$", MR_Next},
    {"}", MR_Next},
    {"\"name\": \"" + rms_test_name + "\",$"},
    {"\"rms\": [0-9]+%$", MR_Next},
    {"}", MR_Next}
  });
  AddCases(csv_out, {
    {"^\"" + big_o_test_name + "\",," + dec_re + "," + dec_re + "," + big_o + ",,,,,$"},
    {"^\"" + rms_test_name + "\",," + dec_re + "," + dec_re + ",,,,,,$"}
  });
  return 0;
}
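
// As an illustration (numbers invented), the console cases above would accept
// output such as:
//   BM_Complexity_O1_BigO       2.31 (1)       2.29 (1)
//   BM_Complexity_O1_RMS        3 %            3 %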

} // end namespace

// ========================================================================= //
// --------------------------- Testing BigO O(1) --------------------------- //
// ========================================================================= //

void BM_Complexity_O1(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
  state.SetComplexityN(state.range_x());
}
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity(benchmark::o1);
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity([](int){return 1.0; });
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity();
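
// The three registrations above exercise the three forms of Complexity():
// a fixed enum (benchmark::o1), a user-provided lambda, and no argument,
// which lets the library pick the best-fitting curve automatically.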

const char* big_o_1_test_name = "BM_Complexity_O1_BigO";
const char* rms_o_1_test_name = "BM_Complexity_O1_RMS";
const char* enum_auto_big_o_1 = "\\([0-9]+\\)";
const char* lambda_big_o_1 = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_1_test_name, rms_o_1_test_name, enum_auto_big_o_1);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_1_test_name, rms_o_1_test_name, lambda_big_o_1);

// ========================================================================= //
// --------------------------- Testing BigO O(N) --------------------------- //
// ========================================================================= //

std::vector<int> ConstructRandomVector(int size) {
  std::vector<int> v;
  v.reserve(size);
  for (int i = 0; i < size; ++i) {
    v.push_back(rand() % size);
  }
  return v;
}

void BM_Complexity_O_N(benchmark::State& state) {
  auto v = ConstructRandomVector(state.range_x());
  const int item_not_in_vector = state.range_x()*2; // Test worst-case scenario (item not in vector)
  while (state.KeepRunning()) {
    benchmark::DoNotOptimize(std::find(v.begin(), v.end(), item_not_in_vector));
  }
  state.SetComplexityN(state.range_x());
}
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity(benchmark::oN);
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity([](int n) -> double{return n; });
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity();

const char* big_o_n_test_name = "BM_Complexity_O_N_BigO";
const char* rms_o_n_test_name = "BM_Complexity_O_N_RMS";
const char* enum_auto_big_o_n = "N";
const char* lambda_big_o_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_test_name, rms_o_n_test_name, enum_auto_big_o_n);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_test_name, rms_o_n_test_name, lambda_big_o_n);

// ========================================================================= //
// ------------------------- Testing BigO O(N*lgN) ------------------------- //
// ========================================================================= //

static void BM_Complexity_O_N_log_N(benchmark::State& state) {
  auto v = ConstructRandomVector(state.range_x());
  while (state.KeepRunning()) {
    std::sort(v.begin(), v.end());
  }
  state.SetComplexityN(state.range_x());
}
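
// Note: v is sorted in place, so every iteration after the first re-sorts
// already-sorted input. That is fine for fitting the N*lgN curve, but the
// absolute timings reflect sorting mostly-sorted data.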
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity(benchmark::oNLogN);
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity([](int n) {return n * std::log2(n); });
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity();

const char* big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
const char* rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
const char* enum_auto_big_o_n_lg_n = "NlgN";
const char* lambda_big_o_n_lg_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, enum_auto_big_o_n_lg_n);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);

// ========================================================================= //
// --------------------------- TEST CASES END ------------------------------ //
// ========================================================================= //

int main(int argc, char* argv[]) {
  // Add --color_print=false to argv since we don't want to match color codes.
  // (The fixed 64-slot buffers assume a short command line, which is all this
  // test is ever invoked with.)
  char new_arg[64];
  char* new_argv[64];
  std::copy(argv, argv + argc, new_argv);
  new_argv[argc++] = std::strcpy(new_arg, "--color_print=false");
  benchmark::Initialize(&argc, new_argv);

  benchmark::ConsoleReporter CR;
  benchmark::JSONReporter JR;
  benchmark::CSVReporter CSVR;
  struct ReporterTest {
    const char* name;
    std::vector<TestCase>& output_cases;
    benchmark::BenchmarkReporter& reporter;
    std::stringstream out_stream;
    std::stringstream err_stream;

    ReporterTest(const char* n,
                 std::vector<TestCase>& out_tc,
                 benchmark::BenchmarkReporter& br)
        : name(n), output_cases(out_tc), reporter(br) {
      reporter.SetOutputStream(&out_stream);
      reporter.SetErrorStream(&err_stream);
    }
  } TestCases[] = {
    {"ConsoleReporter", ConsoleOutputTests, CR},
    {"JSONReporter", JSONOutputTests, JR},
    {"CSVReporter", CSVOutputTests, CSVR}
  };

  // Create the test reporter and run the benchmarks.
  std::cout << "Running benchmarks...\n";
  TestReporter test_rep({&CR, &JR, &CSVR});
  benchmark::RunSpecifiedBenchmarks(&test_rep);

  for (auto& rep_test : TestCases) {
    std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
    std::string banner(msg.size() - 1, '-');
    std::cout << banner << msg << banner << "\n";

    std::cerr << rep_test.err_stream.str();
    std::cout << rep_test.out_stream.str();

    // str() above does not consume the stream; Check() reads the same
    // buffered output line by line from the current read position.
    for (const auto& TC : rep_test.output_cases)
      TC.Check(rep_test.out_stream);

    std::cout << "\n";
  }
  return 0;
}