Diffstat (limited to 'lib/libcxx/utils/google-benchmark/src/benchmark.cc')
 lib/libcxx/utils/google-benchmark/src/benchmark.cc | 264
 1 file changed, 107 insertions(+), 157 deletions(-)
diff --git a/lib/libcxx/utils/google-benchmark/src/benchmark.cc b/lib/libcxx/utils/google-benchmark/src/benchmark.cc
index 00ffa07ff56..b14bc629143 100644
--- a/lib/libcxx/utils/google-benchmark/src/benchmark.cc
+++ b/lib/libcxx/utils/google-benchmark/src/benchmark.cc
@@ -17,7 +17,9 @@
#include "internal_macros.h"
#ifndef BENCHMARK_OS_WINDOWS
+#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
+#endif
#include <sys/time.h>
#include <unistd.h>
#endif
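
This hunk drops <sys/resource.h> on Fuchsia, which does not ship that header. A minimal sketch of the guarded call-site pattern this implies elsewhere in the library (ProcessCPUSeconds is a hypothetical illustration, not code from this diff):

    #ifndef BENCHMARK_OS_WINDOWS
    #ifndef BENCHMARK_OS_FUCHSIA
    #include <sys/resource.h>  // getrusage() lives here
    #endif
    #endif

    // Hypothetical helper: report zero where getrusage() is unavailable.
    static double ProcessCPUSeconds() {
    #if !defined(BENCHMARK_OS_WINDOWS) && !defined(BENCHMARK_OS_FUCHSIA)
      rusage ru;
      if (getrusage(RUSAGE_SELF, &ru) == 0)
        return static_cast<double>(ru.ru_utime.tv_sec) +
               static_cast<double>(ru.ru_utime.tv_usec) * 1e-6;
    #endif
      return 0.0;
    }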
@@ -27,10 +29,10 @@
#include <condition_variable>
#include <cstdio>
#include <cstdlib>
-#include <cstring>
#include <fstream>
#include <iostream>
#include <memory>
+#include <string>
#include <thread>
#include "check.h"
@@ -38,13 +40,14 @@
#include "commandlineflags.h"
#include "complexity.h"
#include "counter.h"
+#include "internal_macros.h"
#include "log.h"
#include "mutex.h"
#include "re.h"
-#include "stat.h"
+#include "statistics.h"
#include "string_util.h"
-#include "sysinfo.h"
-#include "timers.h"
+#include "thread_manager.h"
+#include "thread_timer.h"
DEFINE_bool(benchmark_list_tests, false,
"Print a list of benchmarks. This option overrides all other "
@@ -82,7 +85,7 @@ DEFINE_string(benchmark_out_format, "json",
"The format to use for file output. Valid values are "
"'console', 'json', or 'csv'.");
-DEFINE_string(benchmark_out, "", "The file to write additonal output to");
+DEFINE_string(benchmark_out, "", "The file to write additional output to");
DEFINE_string(benchmark_color, "auto",
"Whether to use colors in the output. Valid values: "
@@ -91,134 +94,28 @@ DEFINE_string(benchmark_color, "auto",
"environment variable is set to a terminal type that supports "
"colors.");
+DEFINE_bool(benchmark_counters_tabular, false,
+ "Whether to use tabular format when printing user counters to "
+ "the console. Valid values: 'true'/'yes'/1, 'false'/'no'/0."
+ "Defaults to false.");
+
DEFINE_int32(v, 0, "The level of verbose logging to output");
namespace benchmark {
-namespace internal {
-
-void UseCharPointer(char const volatile*) {}
-
-} // end namespace internal
namespace {
-
static const size_t kMaxIterations = 1000000000;
-
} // end namespace
namespace internal {
-class ThreadManager {
- public:
- ThreadManager(int num_threads)
- : alive_threads_(num_threads), start_stop_barrier_(num_threads) {}
-
- Mutex& GetBenchmarkMutex() const RETURN_CAPABILITY(benchmark_mutex_) {
- return benchmark_mutex_;
- }
-
- bool StartStopBarrier() EXCLUDES(end_cond_mutex_) {
- return start_stop_barrier_.wait();
- }
-
- void NotifyThreadComplete() EXCLUDES(end_cond_mutex_) {
- start_stop_barrier_.removeThread();
- if (--alive_threads_ == 0) {
- MutexLock lock(end_cond_mutex_);
- end_condition_.notify_all();
- }
- }
-
- void WaitForAllThreads() EXCLUDES(end_cond_mutex_) {
- MutexLock lock(end_cond_mutex_);
- end_condition_.wait(lock.native_handle(),
- [this]() { return alive_threads_ == 0; });
- }
-
- public:
- struct Result {
- double real_time_used = 0;
- double cpu_time_used = 0;
- double manual_time_used = 0;
- int64_t bytes_processed = 0;
- int64_t items_processed = 0;
- int complexity_n = 0;
- std::string report_label_;
- std::string error_message_;
- bool has_error_ = false;
- UserCounters counters;
- };
- GUARDED_BY(GetBenchmarkMutex()) Result results;
-
- private:
- mutable Mutex benchmark_mutex_;
- std::atomic<int> alive_threads_;
- Barrier start_stop_barrier_;
- Mutex end_cond_mutex_;
- Condition end_condition_;
-};
-
-// Timer management class
-class ThreadTimer {
- public:
- ThreadTimer() = default;
-
- // Called by each thread
- void StartTimer() {
- running_ = true;
- start_real_time_ = ChronoClockNow();
- start_cpu_time_ = ThreadCPUUsage();
- }
-
- // Called by each thread
- void StopTimer() {
- CHECK(running_);
- running_ = false;
- real_time_used_ += ChronoClockNow() - start_real_time_;
- cpu_time_used_ += ThreadCPUUsage() - start_cpu_time_;
- }
-
- // Called by each thread
- void SetIterationTime(double seconds) { manual_time_used_ += seconds; }
-
- bool running() const { return running_; }
-
- // REQUIRES: timer is not running
- double real_time_used() {
- CHECK(!running_);
- return real_time_used_;
- }
-
- // REQUIRES: timer is not running
- double cpu_time_used() {
- CHECK(!running_);
- return cpu_time_used_;
- }
-
- // REQUIRES: timer is not running
- double manual_time_used() {
- CHECK(!running_);
- return manual_time_used_;
- }
-
- private:
- bool running_ = false; // Is the timer running
- double start_real_time_ = 0; // If running_
- double start_cpu_time_ = 0; // If running_
-
- // Accumulated time so far (does not contain current slice if running_)
- double real_time_used_ = 0;
- double cpu_time_used_ = 0;
- // Manually set iteration time. User sets this with SetIterationTime(seconds).
- double manual_time_used_ = 0;
-};
+void UseCharPointer(char const volatile*) {}
namespace {
BenchmarkReporter::Run CreateRunReport(
const benchmark::internal::Benchmark::Instance& b,
- const internal::ThreadManager::Result& results, size_t iters,
- double seconds) {
+ const internal::ThreadManager::Result& results, double seconds) {
// Create report about this benchmark run.
BenchmarkReporter::Run report;
@@ -226,8 +123,8 @@ BenchmarkReporter::Run CreateRunReport(
report.error_occurred = results.has_error_;
report.error_message = results.error_message_;
report.report_label = results.report_label_;
- // Report the total iterations across all threads.
- report.iterations = static_cast<int64_t>(iters) * b.threads;
+ // This is the total iterations across all threads.
+ report.iterations = results.iterations;
report.time_unit = b.time_unit;
if (!report.error_occurred) {
@@ -251,7 +148,9 @@ BenchmarkReporter::Run CreateRunReport(
report.complexity_n = results.complexity_n;
report.complexity = b.complexity;
report.complexity_lambda = b.complexity_lambda;
+ report.statistics = b.statistics;
report.counters = results.counters;
+ internal::Finish(&report.counters, results.iterations, seconds, b.threads);
}
return report;
}
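
The new internal::Finish() call normalizes user counters once per run, using the total iteration count, elapsed seconds, and thread count gathered above. Its body lives in counter.cc and is not part of this diff; a sketch of the assumed per-counter shape, based on the public Counter flags (FinishOne is a hypothetical name):

    // Hypothetical sketch of the per-counter normalization behind
    // internal::Finish(); the real implementation is in counter.cc.
    double FinishOne(Counter const& c, int64_t iterations, double time,
                     double num_threads) {
      double v = c.value;
      if (c.flags & Counter::kIsRate) v /= time;             // count per second
      if (c.flags & Counter::kAvgThreads) v /= num_threads;  // mean over threads
      (void)iterations;  // iteration-scaled flags, if any, would use this
      return v;
    }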
@@ -264,11 +163,12 @@ void RunInThread(const benchmark::internal::Benchmark::Instance* b,
internal::ThreadTimer timer;
State st(iters, b->arg, thread_id, b->threads, &timer, manager);
b->benchmark->Run(st);
- CHECK(st.iterations() == st.max_iterations)
+ CHECK(st.iterations() >= st.max_iterations)
<< "Benchmark returned before State::KeepRunning() returned false!";
{
MutexLock l(manager->GetBenchmarkMutex());
internal::ThreadManager::Result& results = manager->results;
+ results.iterations += st.iterations();
results.cpu_time_used += timer.cpu_time_used();
results.real_time_used += timer.real_time_used();
results.manual_time_used += timer.manual_time_used();
@@ -333,21 +233,23 @@ std::vector<BenchmarkReporter::Run> RunBenchmark(
const double min_time =
!IsZero(b.min_time) ? b.min_time : FLAGS_benchmark_min_time;
+ // clang-format off
+ // turn off clang-format since it mangles prettiness here
// Determine if this run should be reported; either it has run
// for a sufficient amount of time or an error was reported.
const bool should_report = repetition_num > 0
- || has_explicit_iteration_count // An exact iteration count was requested
+ || has_explicit_iteration_count // An exact iteration count was requested
|| results.has_error_
- || iters >= kMaxIterations
- || seconds >= min_time // the elapsed time is large enough
+ || iters >= kMaxIterations // No chance to try again, we hit the limit.
+ || seconds >= min_time // the elapsed time is large enough
// CPU time is specified but the elapsed real time greatly exceeds the
// minimum time. Note that user-provided timers are exempt from this
// sanity check.
|| ((results.real_time_used >= 5 * min_time) && !b.use_manual_time);
+ // clang-format on
if (should_report) {
- BenchmarkReporter::Run report =
- CreateRunReport(b, results, iters, seconds);
+ BenchmarkReporter::Run report = CreateRunReport(b, results, seconds);
if (!report.error_occurred && b.complexity != oNone)
complexity_reports->push_back(report);
reports.push_back(report);
@@ -390,25 +292,50 @@ std::vector<BenchmarkReporter::Run> RunBenchmark(
} // namespace
} // namespace internal
-State::State(size_t max_iters, const std::vector<int>& ranges, int thread_i,
+State::State(size_t max_iters, const std::vector<int64_t>& ranges, int thread_i,
int n_threads, internal::ThreadTimer* timer,
internal::ThreadManager* manager)
- : started_(false),
+ : total_iterations_(0),
+ batch_leftover_(0),
+ max_iterations(max_iters),
+ started_(false),
finished_(false),
- total_iterations_(0),
+ error_occurred_(false),
range_(ranges),
bytes_processed_(0),
items_processed_(0),
complexity_n_(0),
- error_occurred_(false),
counters(),
thread_index(thread_i),
threads(n_threads),
- max_iterations(max_iters),
timer_(timer),
manager_(manager) {
CHECK(max_iterations != 0) << "At least one iteration must be run";
CHECK_LT(thread_index, threads) << "thread_index must be less than threads";
+
+ // Note: The use of offsetof below is technically undefined until C++17
+ // because State is not a standard layout type. However, all compilers
+ // currently provide well-defined behavior as an extension (which is
+ // demonstrated since constexpr evaluation must diagnose all undefined
+ // behavior). However, GCC and Clang also warn about this use of offsetof,
+ // which must be suppressed.
+#if defined(__INTEL_COMPILER)
+#pragma warning push
+#pragma warning(disable:1875)
+#elif defined(__GNUC__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Winvalid-offsetof"
+#endif
+ // Offset tests to ensure commonly accessed data is on the first cache line.
+ const int cache_line_size = 64;
+ static_assert(offsetof(State, error_occurred_) <=
+ (cache_line_size - sizeof(error_occurred_)),
+ "");
+#if defined(__INTEL_COMPILER)
+#pragma warning pop
+#elif defined(__GNUC__)
+#pragma GCC diagnostic pop
+#endif
}
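
The static_assert keeps the hot error_occurred_ flag within the first 64 bytes of State, so the per-iteration error check does not pull in a second cache line; the pragma pairs are needed only because State is not standard-layout, which makes offsetof technically ill-formed before C++17. The same check on a standard-layout type needs no suppression, as in this self-contained sketch (HotCold is hypothetical; the 64-byte line size is assumed, as in the diff):

    #include <cstddef>  // offsetof

    struct HotCold {
      bool hot_flag;        // read on every loop iteration
      long cold_stats[32];  // touched only when the run finishes
    };

    // Same layout check as above; no pragmas needed since HotCold is
    // standard-layout, so offsetof is well-defined even before C++17.
    static_assert(offsetof(HotCold, hot_flag) <= 64 - sizeof(bool),
                  "hot member must land in the first cache line");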
void State::PauseTiming() {
@@ -432,7 +359,7 @@ void State::SkipWithError(const char* msg) {
manager_->results.has_error_ = true;
}
}
- total_iterations_ = max_iterations;
+ total_iterations_ = 0;
if (timer_->running()) timer_->StopTimer();
}
@@ -448,6 +375,7 @@ void State::SetLabel(const char* label) {
void State::StartKeepRunning() {
CHECK(!started_ && !finished_);
started_ = true;
+ total_iterations_ = error_occurred_ ? 0 : max_iterations;
manager_->StartStopBarrier();
if (!error_occurred_) ResumeTiming();
}
@@ -457,8 +385,8 @@ void State::FinishKeepRunning() {
if (!error_occurred_) {
PauseTiming();
}
- // Total iterations now is one greater than max iterations. Fix this.
- total_iterations_ = max_iterations;
+ // Total iterations has now wrapped around past 0. Fix this.
+ total_iterations_ = 0;
finished_ = true;
manager_->StartStopBarrier();
}
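
Together with StartKeepRunning() above, this flips total_iterations_ from counting up to counting down: the per-iteration check becomes a single decrement-and-test, and a counter that wraps past zero signals completion. A sketch of the loop shape this enables (the real KeepRunning() is inline in benchmark.h and not part of this diff; this is an assumed simplification):

    bool State::KeepRunning() {
      if (!started_) StartKeepRunning();  // seeds total_iterations_ = max_iterations
      if (total_iterations_-- != 0)       // hot path: one decrement plus branch
        return true;
      FinishKeepRunning();                // the counter just wrapped past zero
      return false;
    }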
@@ -467,30 +395,30 @@ namespace internal {
namespace {
void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
- BenchmarkReporter* console_reporter,
- BenchmarkReporter* file_reporter) {
+ BenchmarkReporter* console_reporter,
+ BenchmarkReporter* file_reporter) {
// Note the file_reporter can be null.
CHECK(console_reporter != nullptr);
// Determine the width of the name field using a minimum width of 10.
bool has_repetitions = FLAGS_benchmark_repetitions > 1;
size_t name_field_width = 10;
+ size_t stat_field_width = 0;
for (const Benchmark::Instance& benchmark : benchmarks) {
name_field_width =
std::max<size_t>(name_field_width, benchmark.name.size());
has_repetitions |= benchmark.repetitions > 1;
+
+ for (const auto& Stat : *benchmark.statistics)
+ stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
}
- if (has_repetitions) name_field_width += std::strlen("_stddev");
+ if (has_repetitions) name_field_width += 1 + stat_field_width;
// Print header here
BenchmarkReporter::Context context;
- context.num_cpus = NumCPUs();
- context.mhz_per_cpu = CyclesPerSecond() / 1000000.0f;
-
- context.cpu_scaling_enabled = CpuScalingEnabled();
context.name_field_width = name_field_width;
- // Keep track of runing times of all instances of current benchmark
+ // Keep track of running times of all instances of current benchmark
std::vector<BenchmarkReporter::Run> complexity_reports;
// We flush streams after invoking reporter methods that write to them. This
@@ -521,10 +449,10 @@ void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
}
std::unique_ptr<BenchmarkReporter> CreateReporter(
- std::string const& name, ConsoleReporter::OutputOptions allow_color) {
+ std::string const& name, ConsoleReporter::OutputOptions output_opts) {
typedef std::unique_ptr<BenchmarkReporter> PtrType;
if (name == "console") {
- return PtrType(new ConsoleReporter(allow_color));
+ return PtrType(new ConsoleReporter(output_opts));
} else if (name == "json") {
return PtrType(new JSONReporter);
} else if (name == "csv") {
@@ -536,6 +464,30 @@ std::unique_ptr<BenchmarkReporter> CreateReporter(
}
} // end namespace
+
+bool IsZero(double n) {
+ return std::abs(n) < std::numeric_limits<double>::epsilon();
+}
+
+ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
+ int output_opts = ConsoleReporter::OO_Defaults;
+ if ((FLAGS_benchmark_color == "auto" && IsColorTerminal()) ||
+ IsTruthyFlagValue(FLAGS_benchmark_color)) {
+ output_opts |= ConsoleReporter::OO_Color;
+ } else {
+ output_opts &= ~ConsoleReporter::OO_Color;
+ }
+ if (force_no_color) {
+ output_opts &= ~ConsoleReporter::OO_Color;
+ }
+ if (FLAGS_benchmark_counters_tabular) {
+ output_opts |= ConsoleReporter::OO_Tabular;
+ } else {
+ output_opts &= ~ConsoleReporter::OO_Tabular;
+ }
+ return static_cast<ConsoleReporter::OutputOptions>(output_opts);
+}
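
GetOutputOptions() centralizes the flag-to-bitmask translation that RunSpecifiedBenchmarks() previously did inline. Each bit is explicitly set or cleared, so the result is deterministic no matter which bits OO_Defaults starts with. A usage sketch:

    // Build a console reporter that honors --benchmark_color and
    // --benchmark_counters_tabular (mirrors the call site further below):
    std::unique_ptr<BenchmarkReporter> reporter = internal::CreateReporter(
        "console", internal::GetOutputOptions(/*force_no_color=*/false));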
+
} // end namespace internal
size_t RunSpecifiedBenchmarks() {
@@ -557,29 +509,21 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
std::unique_ptr<BenchmarkReporter> default_console_reporter;
std::unique_ptr<BenchmarkReporter> default_file_reporter;
if (!console_reporter) {
- auto output_opts = ConsoleReporter::OO_None;
- if (FLAGS_benchmark_color == "auto")
- output_opts = IsColorTerminal() ? ConsoleReporter::OO_Color
- : ConsoleReporter::OO_None;
- else
- output_opts = IsTruthyFlagValue(FLAGS_benchmark_color)
- ? ConsoleReporter::OO_Color
- : ConsoleReporter::OO_None;
- default_console_reporter =
- internal::CreateReporter(FLAGS_benchmark_format, output_opts);
+ default_console_reporter = internal::CreateReporter(
+ FLAGS_benchmark_format, internal::GetOutputOptions());
console_reporter = default_console_reporter.get();
}
auto& Out = console_reporter->GetOutputStream();
auto& Err = console_reporter->GetErrorStream();
std::string const& fname = FLAGS_benchmark_out;
- if (fname == "" && file_reporter) {
+ if (fname.empty() && file_reporter) {
Err << "A custom file reporter was provided but "
"--benchmark_out=<file> was not specified."
<< std::endl;
std::exit(1);
}
- if (fname != "") {
+ if (!fname.empty()) {
output_file.open(fname);
if (!output_file.is_open()) {
Err << "invalid file name: '" << fname << std::endl;
@@ -625,12 +569,15 @@ void PrintUsageAndExit() {
" [--benchmark_out=<filename>]\n"
" [--benchmark_out_format=<json|console|csv>]\n"
" [--benchmark_color={auto|true|false}]\n"
+ " [--benchmark_counters_tabular={true|false}]\n"
" [--v=<verbosity>]\n");
exit(0);
}
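
For reference, an invocation exercising the new flag, in the same notation as the usage text above:

    benchmark --benchmark_counters_tabular=true --benchmark_color=auto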
void ParseCommandLineFlags(int* argc, char** argv) {
using namespace benchmark;
+ BenchmarkReporter::Context::executable_name =
+ (argc && *argc > 0) ? argv[0] : "unknown";
for (int i = 1; i < *argc; ++i) {
if (ParseBoolFlag(argv[i], "benchmark_list_tests",
&FLAGS_benchmark_list_tests) ||
@@ -649,6 +596,8 @@ void ParseCommandLineFlags(int* argc, char** argv) {
// "color_print" is the deprecated name for "benchmark_color".
// TODO: Remove this.
ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
+ ParseBoolFlag(argv[i], "benchmark_counters_tabular",
+ &FLAGS_benchmark_counters_tabular) ||
ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];
@@ -682,7 +631,8 @@ void Initialize(int* argc, char** argv) {
bool ReportUnrecognizedArguments(int argc, char** argv) {
for (int i = 1; i < argc; ++i) {
- fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0], argv[i]);
+ fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
+ argv[i]);
}
return argc > 1;
}