author    patrick <patrick@openbsd.org>  2021-01-11 15:31:56 +0000
committer patrick <patrick@openbsd.org>  2021-01-11 15:31:56 +0000
commit    16ff81ed8b1ed9aa06fb1a731a2446b66cc49bef (patch)
tree      1a7dd8152b94f6f8cd318bfaf85aa40882854583 /lib/libcxx/utils/google-benchmark/test
parent    sync (diff)
Remove libc++ and libc++abi 8.0.0 now that we switched to version 10.0.1
in the gnu/ directory.
Diffstat (limited to 'lib/libcxx/utils/google-benchmark/test')
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/AssemblyTests.cmake | 46
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/CMakeLists.txt | 260
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/basic_test.cc | 136
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/benchmark_gtest.cc | 33
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/benchmark_test.cc | 245
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/clobber_memory_assembly_test.cc | 64
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/complexity_test.cc | 183
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/cxx03_test.cc | 63
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/diagnostics_test.cc | 80
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/display_aggregates_only_test.cc | 43
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/donotoptimize_assembly_test.cc | 163
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/donotoptimize_test.cc | 52
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/filter_test.cc | 104
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/fixture_test.cc | 49
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/link_main_test.cc | 8
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/map_test.cc | 57
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/memory_manager_test.cc | 42
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/multiple_ranges_test.cc | 97
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/options_test.cc | 65
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/output_test.h | 213
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/output_test_helper.cc | 505
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/register_benchmark_test.cc | 184
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/report_aggregates_only_test.cc | 39
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/reporter_output_test.cc | 604
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/skip_with_error_test.cc | 189
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/state_assembly_test.cc | 68
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/statistics_gtest.cc | 28
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/string_util_gtest.cc | 146
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/templated_fixture_test.cc | 28
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/user_counters_tabular_test.cc | 268
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/user_counters_test.cc | 408
-rw-r--r--  lib/libcxx/utils/google-benchmark/test/user_counters_thousands_test.cc | 161
32 files changed, 0 insertions, 4631 deletions
diff --git a/lib/libcxx/utils/google-benchmark/test/AssemblyTests.cmake b/lib/libcxx/utils/google-benchmark/test/AssemblyTests.cmake
deleted file mode 100644
index 3d078586f1d..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/AssemblyTests.cmake
+++ /dev/null
@@ -1,46 +0,0 @@
-
-include(split_list)
-
-set(ASM_TEST_FLAGS "")
-check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
-if (BENCHMARK_HAS_O3_FLAG)
- list(APPEND ASM_TEST_FLAGS -O3)
-endif()
-
-check_cxx_compiler_flag(-g0 BENCHMARK_HAS_G0_FLAG)
-if (BENCHMARK_HAS_G0_FLAG)
- list(APPEND ASM_TEST_FLAGS -g0)
-endif()
-
-check_cxx_compiler_flag(-fno-stack-protector BENCHMARK_HAS_FNO_STACK_PROTECTOR_FLAG)
-if (BENCHMARK_HAS_FNO_STACK_PROTECTOR_FLAG)
- list(APPEND ASM_TEST_FLAGS -fno-stack-protector)
-endif()
-
-split_list(ASM_TEST_FLAGS)
-string(TOUPPER "${CMAKE_CXX_COMPILER_ID}" ASM_TEST_COMPILER)
-
-macro(add_filecheck_test name)
- cmake_parse_arguments(ARG "" "" "CHECK_PREFIXES" ${ARGV})
- add_library(${name} OBJECT ${name}.cc)
- set_target_properties(${name} PROPERTIES COMPILE_FLAGS "-S ${ASM_TEST_FLAGS}")
- set(ASM_OUTPUT_FILE "${CMAKE_CURRENT_BINARY_DIR}/${name}.s")
- add_custom_target(copy_${name} ALL
- COMMAND ${PROJECT_SOURCE_DIR}/tools/strip_asm.py
- $<TARGET_OBJECTS:${name}>
- ${ASM_OUTPUT_FILE}
- BYPRODUCTS ${ASM_OUTPUT_FILE})
- add_dependencies(copy_${name} ${name})
- if (NOT ARG_CHECK_PREFIXES)
- set(ARG_CHECK_PREFIXES "CHECK")
- endif()
- foreach(prefix ${ARG_CHECK_PREFIXES})
- add_test(NAME run_${name}_${prefix}
- COMMAND
- ${LLVM_FILECHECK_EXE} ${name}.cc
- --input-file=${ASM_OUTPUT_FILE}
- --check-prefixes=CHECK,CHECK-${ASM_TEST_COMPILER}
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
- endforeach()
-endmacro()
-
diff --git a/lib/libcxx/utils/google-benchmark/test/CMakeLists.txt b/lib/libcxx/utils/google-benchmark/test/CMakeLists.txt
deleted file mode 100644
index f15ce208189..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/CMakeLists.txt
+++ /dev/null
@@ -1,260 +0,0 @@
-# Enable the tests
-
-find_package(Threads REQUIRED)
-include(CheckCXXCompilerFlag)
-
-# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
-# strip -DNDEBUG from the default CMake flags in DEBUG mode.
-string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
-if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
- add_definitions( -UNDEBUG )
- add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
- # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
- foreach (flags_var_to_scrub
- CMAKE_CXX_FLAGS_RELEASE
- CMAKE_CXX_FLAGS_RELWITHDEBINFO
- CMAKE_CXX_FLAGS_MINSIZEREL
- CMAKE_C_FLAGS_RELEASE
- CMAKE_C_FLAGS_RELWITHDEBINFO
- CMAKE_C_FLAGS_MINSIZEREL)
- string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
- "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
- endforeach()
-endif()
-
-check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
-set(BENCHMARK_O3_FLAG "")
-if (BENCHMARK_HAS_O3_FLAG)
- set(BENCHMARK_O3_FLAG "-O3")
-endif()
-
-# NOTE: These flags must be added after find_package(Threads REQUIRED) otherwise
-# they will break the configuration check.
-if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
- list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
-endif()
-
-add_library(output_test_helper STATIC output_test_helper.cc output_test.h)
-
-macro(compile_benchmark_test name)
- add_executable(${name} "${name}.cc")
- target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
-endmacro(compile_benchmark_test)
-
-macro(compile_benchmark_test_with_main name)
- add_executable(${name} "${name}.cc")
- target_link_libraries(${name} benchmark_main)
-endmacro(compile_benchmark_test_with_main)
-
-macro(compile_output_test name)
- add_executable(${name} "${name}.cc" output_test.h)
- target_link_libraries(${name} output_test_helper benchmark
- ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
-endmacro(compile_output_test)
-
-# Demonstration executable
-compile_benchmark_test(benchmark_test)
-add_test(benchmark benchmark_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(filter_test)
-macro(add_filter_test name filter expect)
- add_test(${name} filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
- add_test(${name}_list_only filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
-endmacro(add_filter_test)
-
-add_filter_test(filter_simple "Foo" 3)
-add_filter_test(filter_simple_negative "-Foo" 2)
-add_filter_test(filter_suffix "BM_.*" 4)
-add_filter_test(filter_suffix_negative "-BM_.*" 1)
-add_filter_test(filter_regex_all ".*" 5)
-add_filter_test(filter_regex_all_negative "-.*" 0)
-add_filter_test(filter_regex_blank "" 5)
-add_filter_test(filter_regex_blank_negative "-" 0)
-add_filter_test(filter_regex_none "monkey" 0)
-add_filter_test(filter_regex_none_negative "-monkey" 5)
-add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
-add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
-add_filter_test(filter_regex_begin "^BM_.*" 4)
-add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
-add_filter_test(filter_regex_begin2 "^N" 1)
-add_filter_test(filter_regex_begin2_negative "-^N" 4)
-add_filter_test(filter_regex_end ".*Ba$" 1)
-add_filter_test(filter_regex_end_negative "-.*Ba$" 4)
-
-compile_benchmark_test(options_test)
-add_test(options_benchmarks options_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(basic_test)
-add_test(basic_benchmark basic_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(diagnostics_test)
-add_test(diagnostics_test diagnostics_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(skip_with_error_test)
-add_test(skip_with_error_test skip_with_error_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(donotoptimize_test)
-# Some of the issues with DoNotOptimize only occur when optimization is enabled
-check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
-if (BENCHMARK_HAS_O3_FLAG)
- set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
-endif()
-add_test(donotoptimize_test donotoptimize_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(fixture_test)
-add_test(fixture_test fixture_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(register_benchmark_test)
-add_test(register_benchmark_test register_benchmark_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(map_test)
-add_test(map_test map_test --benchmark_min_time=0.01)
-
-compile_benchmark_test(multiple_ranges_test)
-add_test(multiple_ranges_test multiple_ranges_test --benchmark_min_time=0.01)
-
-compile_benchmark_test_with_main(link_main_test)
-add_test(link_main_test link_main_test --benchmark_min_time=0.01)
-
-compile_output_test(reporter_output_test)
-add_test(reporter_output_test reporter_output_test --benchmark_min_time=0.01)
-
-compile_output_test(templated_fixture_test)
-add_test(templated_fixture_test templated_fixture_test --benchmark_min_time=0.01)
-
-compile_output_test(user_counters_test)
-add_test(user_counters_test user_counters_test --benchmark_min_time=0.01)
-
-compile_output_test(report_aggregates_only_test)
-add_test(report_aggregates_only_test report_aggregates_only_test --benchmark_min_time=0.01)
-
-compile_output_test(display_aggregates_only_test)
-add_test(display_aggregates_only_test display_aggregates_only_test --benchmark_min_time=0.01)
-
-compile_output_test(user_counters_tabular_test)
-add_test(user_counters_tabular_test user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)
-
-compile_output_test(user_counters_thousands_test)
-add_test(user_counters_thousands_test user_counters_thousands_test --benchmark_min_time=0.01)
-
-compile_output_test(memory_manager_test)
-add_test(memory_manager_test memory_manager_test --benchmark_min_time=0.01)
-
-check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
-if (BENCHMARK_HAS_CXX03_FLAG)
- compile_benchmark_test(cxx03_test)
- set_target_properties(cxx03_test
- PROPERTIES
- COMPILE_FLAGS "-std=c++03")
- # libstdc++ provides different definitions within <map> between dialects. When
- # LTO is enabled and -Werror is specified GCC diagnoses this ODR violation
- # causing the test to fail to compile. To prevent this we explicitly disable
- # the warning.
- check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
- if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
- set_target_properties(cxx03_test
- PROPERTIES
- LINK_FLAGS "-Wno-odr")
- endif()
- add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
-endif()
-
-# Attempt to work around flaky test failures when running on Appveyor servers.
-if (DEFINED ENV{APPVEYOR})
- set(COMPLEXITY_MIN_TIME "0.5")
-else()
- set(COMPLEXITY_MIN_TIME "0.01")
-endif()
-compile_output_test(complexity_test)
-add_test(complexity_benchmark complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})
-
-###############################################################################
-# GoogleTest Unit Tests
-###############################################################################
-
-if (BENCHMARK_ENABLE_GTEST_TESTS)
- macro(compile_gtest name)
- add_executable(${name} "${name}.cc")
- if (TARGET googletest)
- add_dependencies(${name} googletest)
- endif()
- if (GTEST_INCLUDE_DIRS)
- target_include_directories(${name} PRIVATE ${GTEST_INCLUDE_DIRS})
- endif()
- target_link_libraries(${name} benchmark
- ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
- endmacro(compile_gtest)
-
- macro(add_gtest name)
- compile_gtest(${name})
- add_test(${name} ${name})
- endmacro()
-
- add_gtest(benchmark_gtest)
- add_gtest(statistics_gtest)
- add_gtest(string_util_gtest)
-endif(BENCHMARK_ENABLE_GTEST_TESTS)
-
-###############################################################################
-# Assembly Unit Tests
-###############################################################################
-
-if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
- if (NOT LLVM_FILECHECK_EXE)
- message(FATAL_ERROR "LLVM FileCheck is required when including this file")
- endif()
- include(AssemblyTests.cmake)
- add_filecheck_test(donotoptimize_assembly_test)
- add_filecheck_test(state_assembly_test)
- add_filecheck_test(clobber_memory_assembly_test)
-endif()
-
-
-
-###############################################################################
-# Code Coverage Configuration
-###############################################################################
-
-# Add the coverage command(s)
-if(CMAKE_BUILD_TYPE)
- string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
-endif()
-if (${CMAKE_BUILD_TYPE_LOWER} MATCHES "coverage")
- find_program(GCOV gcov)
- find_program(LCOV lcov)
- find_program(GENHTML genhtml)
- find_program(CTEST ctest)
- if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
- add_custom_command(
- OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
- COMMAND ${LCOV} -q -z -d .
- COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
- COMMAND ${CTEST} --force-new-ctest-process
- COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
- COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
- COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
- COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
- DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
- COMMENT "Running LCOV"
- )
- add_custom_target(coverage
- DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
- COMMENT "LCOV report at lcov/index.html"
- )
- message(STATUS "Coverage command added")
- else()
- if (HAVE_CXX_FLAG_COVERAGE)
- set(CXX_FLAG_COVERAGE_MESSAGE supported)
- else()
- set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
- endif()
- message(WARNING
- "Coverage not available:\n"
- " gcov: ${GCOV}\n"
- " lcov: ${LCOV}\n"
- " genhtml: ${GENHTML}\n"
- " ctest: ${CTEST}\n"
- " --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
- endif()
-endif()
diff --git a/lib/libcxx/utils/google-benchmark/test/basic_test.cc b/lib/libcxx/utils/google-benchmark/test/basic_test.cc
deleted file mode 100644
index d07fbc00b15..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/basic_test.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-
-#include "benchmark/benchmark.h"
-
-#define BASIC_BENCHMARK_TEST(x) BENCHMARK(x)->Arg(8)->Arg(512)->Arg(8192)
-
-void BM_empty(benchmark::State& state) {
- for (auto _ : state) {
- benchmark::DoNotOptimize(state.iterations());
- }
-}
-BENCHMARK(BM_empty);
-BENCHMARK(BM_empty)->ThreadPerCpu();
-
-void BM_spin_empty(benchmark::State& state) {
- for (auto _ : state) {
- for (int x = 0; x < state.range(0); ++x) {
- benchmark::DoNotOptimize(x);
- }
- }
-}
-BASIC_BENCHMARK_TEST(BM_spin_empty);
-BASIC_BENCHMARK_TEST(BM_spin_empty)->ThreadPerCpu();
-
-void BM_spin_pause_before(benchmark::State& state) {
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- for (auto _ : state) {
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- }
-}
-BASIC_BENCHMARK_TEST(BM_spin_pause_before);
-BASIC_BENCHMARK_TEST(BM_spin_pause_before)->ThreadPerCpu();
-
-void BM_spin_pause_during(benchmark::State& state) {
- for (auto _ : state) {
- state.PauseTiming();
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- state.ResumeTiming();
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- }
-}
-BASIC_BENCHMARK_TEST(BM_spin_pause_during);
-BASIC_BENCHMARK_TEST(BM_spin_pause_during)->ThreadPerCpu();
-
-void BM_pause_during(benchmark::State& state) {
- for (auto _ : state) {
- state.PauseTiming();
- state.ResumeTiming();
- }
-}
-BENCHMARK(BM_pause_during);
-BENCHMARK(BM_pause_during)->ThreadPerCpu();
-BENCHMARK(BM_pause_during)->UseRealTime();
-BENCHMARK(BM_pause_during)->UseRealTime()->ThreadPerCpu();
-
-void BM_spin_pause_after(benchmark::State& state) {
- for (auto _ : state) {
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- }
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
-}
-BASIC_BENCHMARK_TEST(BM_spin_pause_after);
-BASIC_BENCHMARK_TEST(BM_spin_pause_after)->ThreadPerCpu();
-
-void BM_spin_pause_before_and_after(benchmark::State& state) {
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- for (auto _ : state) {
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
- }
- for (int i = 0; i < state.range(0); ++i) {
- benchmark::DoNotOptimize(i);
- }
-}
-BASIC_BENCHMARK_TEST(BM_spin_pause_before_and_after);
-BASIC_BENCHMARK_TEST(BM_spin_pause_before_and_after)->ThreadPerCpu();
-
-void BM_empty_stop_start(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_empty_stop_start);
-BENCHMARK(BM_empty_stop_start)->ThreadPerCpu();
-
-
-void BM_KeepRunning(benchmark::State& state) {
- size_t iter_count = 0;
- assert(iter_count == state.iterations());
- while (state.KeepRunning()) {
- ++iter_count;
- }
- assert(iter_count == state.iterations());
-}
-BENCHMARK(BM_KeepRunning);
-
-void BM_KeepRunningBatch(benchmark::State& state) {
- // Choose a prime batch size to avoid evenly dividing max_iterations.
- const size_t batch_size = 101;
- size_t iter_count = 0;
- while (state.KeepRunningBatch(batch_size)) {
- iter_count += batch_size;
- }
- assert(state.iterations() == iter_count);
-}
-BENCHMARK(BM_KeepRunningBatch);
-
-void BM_RangedFor(benchmark::State& state) {
- size_t iter_count = 0;
- for (auto _ : state) {
- ++iter_count;
- }
- assert(iter_count == state.max_iterations);
-}
-BENCHMARK(BM_RangedFor);
-
-// Ensure that StateIterator provides all the necessary typedefs required to
-// instantiate std::iterator_traits.
-static_assert(std::is_same<
- typename std::iterator_traits<benchmark::State::StateIterator>::value_type,
- typename benchmark::State::StateIterator::value_type>::value, "");
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/benchmark_gtest.cc b/lib/libcxx/utils/google-benchmark/test/benchmark_gtest.cc
deleted file mode 100644
index 10683b433ab..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/benchmark_gtest.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-#include <vector>
-
-#include "../src/benchmark_register.h"
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-namespace {
-
-TEST(AddRangeTest, Simple) {
- std::vector<int> dst;
- AddRange(&dst, 1, 2, 2);
- EXPECT_THAT(dst, testing::ElementsAre(1, 2));
-}
-
-TEST(AddRangeTest, Simple64) {
- std::vector<int64_t> dst;
- AddRange(&dst, static_cast<int64_t>(1), static_cast<int64_t>(2), 2);
- EXPECT_THAT(dst, testing::ElementsAre(1, 2));
-}
-
-TEST(AddRangeTest, Advanced) {
- std::vector<int> dst;
- AddRange(&dst, 5, 15, 2);
- EXPECT_THAT(dst, testing::ElementsAre(5, 8, 15));
-}
-
-TEST(AddRangeTest, Advanced64) {
- std::vector<int64_t> dst;
- AddRange(&dst, static_cast<int64_t>(5), static_cast<int64_t>(15), 2);
- EXPECT_THAT(dst, testing::ElementsAre(5, 8, 15));
-}
-
-} // end namespace
diff --git a/lib/libcxx/utils/google-benchmark/test/benchmark_test.cc b/lib/libcxx/utils/google-benchmark/test/benchmark_test.cc
deleted file mode 100644
index 3cd4f5565fa..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/benchmark_test.cc
+++ /dev/null
@@ -1,245 +0,0 @@
-#include "benchmark/benchmark.h"
-
-#include <assert.h>
-#include <math.h>
-#include <stdint.h>
-
-#include <chrono>
-#include <cstdlib>
-#include <iostream>
-#include <limits>
-#include <list>
-#include <map>
-#include <mutex>
-#include <set>
-#include <sstream>
-#include <string>
-#include <thread>
-#include <utility>
-#include <vector>
-
-#if defined(__GNUC__)
-#define BENCHMARK_NOINLINE __attribute__((noinline))
-#else
-#define BENCHMARK_NOINLINE
-#endif
-
-namespace {
-
-int BENCHMARK_NOINLINE Factorial(uint32_t n) {
- return (n == 1) ? 1 : n * Factorial(n - 1);
-}
-
-double CalculatePi(int depth) {
- double pi = 0.0;
- for (int i = 0; i < depth; ++i) {
- double numerator = static_cast<double>(((i % 2) * 2) - 1);
- double denominator = static_cast<double>((2 * i) - 1);
- pi += numerator / denominator;
- }
- return (pi - 1.0) * 4;
-}
-
-std::set<int64_t> ConstructRandomSet(int64_t size) {
- std::set<int64_t> s;
- for (int i = 0; i < size; ++i) s.insert(s.end(), i);
- return s;
-}
-
-std::mutex test_vector_mu;
-std::vector<int>* test_vector = nullptr;
-
-} // end namespace
-
-static void BM_Factorial(benchmark::State& state) {
- int fac_42 = 0;
- for (auto _ : state) fac_42 = Factorial(8);
- // Prevent compiler optimizations
- std::stringstream ss;
- ss << fac_42;
- state.SetLabel(ss.str());
-}
-BENCHMARK(BM_Factorial);
-BENCHMARK(BM_Factorial)->UseRealTime();
-
-static void BM_CalculatePiRange(benchmark::State& state) {
- double pi = 0.0;
- for (auto _ : state) pi = CalculatePi(static_cast<int>(state.range(0)));
- std::stringstream ss;
- ss << pi;
- state.SetLabel(ss.str());
-}
-BENCHMARK_RANGE(BM_CalculatePiRange, 1, 1024 * 1024);
-
-static void BM_CalculatePi(benchmark::State& state) {
- static const int depth = 1024;
- for (auto _ : state) {
- benchmark::DoNotOptimize(CalculatePi(static_cast<int>(depth)));
- }
-}
-BENCHMARK(BM_CalculatePi)->Threads(8);
-BENCHMARK(BM_CalculatePi)->ThreadRange(1, 32);
-BENCHMARK(BM_CalculatePi)->ThreadPerCpu();
-
-static void BM_SetInsert(benchmark::State& state) {
- std::set<int64_t> data;
- for (auto _ : state) {
- state.PauseTiming();
- data = ConstructRandomSet(state.range(0));
- state.ResumeTiming();
- for (int j = 0; j < state.range(1); ++j) data.insert(rand());
- }
- state.SetItemsProcessed(state.iterations() * state.range(1));
- state.SetBytesProcessed(state.iterations() * state.range(1) * sizeof(int));
-}
-
-// Test many inserts at once to reduce the total iterations needed. Otherwise, the slower,
-// non-timed part of each iteration will make the benchmark take forever.
-BENCHMARK(BM_SetInsert)->Ranges({{1 << 10, 8 << 10}, {128, 512}});
-
-template <typename Container,
- typename ValueType = typename Container::value_type>
-static void BM_Sequential(benchmark::State& state) {
- ValueType v = 42;
- for (auto _ : state) {
- Container c;
- for (int64_t i = state.range(0); --i;) c.push_back(v);
- }
- const int64_t items_processed = state.iterations() * state.range(0);
- state.SetItemsProcessed(items_processed);
- state.SetBytesProcessed(items_processed * sizeof(v));
-}
-BENCHMARK_TEMPLATE2(BM_Sequential, std::vector<int>, int)
- ->Range(1 << 0, 1 << 10);
-BENCHMARK_TEMPLATE(BM_Sequential, std::list<int>)->Range(1 << 0, 1 << 10);
-// Test the variadic version of BENCHMARK_TEMPLATE in C++11 and beyond.
-#ifdef BENCHMARK_HAS_CXX11
-BENCHMARK_TEMPLATE(BM_Sequential, std::vector<int>, int)->Arg(512);
-#endif
-
-static void BM_StringCompare(benchmark::State& state) {
- size_t len = static_cast<size_t>(state.range(0));
- std::string s1(len, '-');
- std::string s2(len, '-');
- for (auto _ : state) benchmark::DoNotOptimize(s1.compare(s2));
-}
-BENCHMARK(BM_StringCompare)->Range(1, 1 << 20);
-
-static void BM_SetupTeardown(benchmark::State& state) {
- if (state.thread_index == 0) {
- // No need to lock test_vector_mu here as this is running single-threaded.
- test_vector = new std::vector<int>();
- }
- int i = 0;
- for (auto _ : state) {
- std::lock_guard<std::mutex> l(test_vector_mu);
- if (i % 2 == 0)
- test_vector->push_back(i);
- else
- test_vector->pop_back();
- ++i;
- }
- if (state.thread_index == 0) {
- delete test_vector;
- }
-}
-BENCHMARK(BM_SetupTeardown)->ThreadPerCpu();
-
-static void BM_LongTest(benchmark::State& state) {
- double tracker = 0.0;
- for (auto _ : state) {
- for (int i = 0; i < state.range(0); ++i)
- benchmark::DoNotOptimize(tracker += i);
- }
-}
-BENCHMARK(BM_LongTest)->Range(1 << 16, 1 << 28);
-
-static void BM_ParallelMemset(benchmark::State& state) {
- int64_t size = state.range(0) / static_cast<int64_t>(sizeof(int));
- int thread_size = static_cast<int>(size) / state.threads;
- int from = thread_size * state.thread_index;
- int to = from + thread_size;
-
- if (state.thread_index == 0) {
- test_vector = new std::vector<int>(static_cast<size_t>(size));
- }
-
- for (auto _ : state) {
- for (int i = from; i < to; i++) {
- // No need to lock test_vector_mu as ranges
- // do not overlap between threads.
- benchmark::DoNotOptimize(test_vector->at(i) = 1);
- }
- }
-
- if (state.thread_index == 0) {
- delete test_vector;
- }
-}
-BENCHMARK(BM_ParallelMemset)->Arg(10 << 20)->ThreadRange(1, 4);
-
-static void BM_ManualTiming(benchmark::State& state) {
- int64_t slept_for = 0;
- int64_t microseconds = state.range(0);
- std::chrono::duration<double, std::micro> sleep_duration{
- static_cast<double>(microseconds)};
-
- for (auto _ : state) {
- auto start = std::chrono::high_resolution_clock::now();
- // Simulate some useful workload with a sleep
- std::this_thread::sleep_for(
- std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration));
- auto end = std::chrono::high_resolution_clock::now();
-
- auto elapsed =
- std::chrono::duration_cast<std::chrono::duration<double>>(end - start);
-
- state.SetIterationTime(elapsed.count());
- slept_for += microseconds;
- }
- state.SetItemsProcessed(slept_for);
-}
-BENCHMARK(BM_ManualTiming)->Range(1, 1 << 14)->UseRealTime();
-BENCHMARK(BM_ManualTiming)->Range(1, 1 << 14)->UseManualTime();
-
-#ifdef BENCHMARK_HAS_CXX11
-
-template <class... Args>
-void BM_with_args(benchmark::State& state, Args&&...) {
- for (auto _ : state) {
- }
-}
-BENCHMARK_CAPTURE(BM_with_args, int_test, 42, 43, 44);
-BENCHMARK_CAPTURE(BM_with_args, string_and_pair_test, std::string("abc"),
- std::pair<int, double>(42, 3.8));
-
-void BM_non_template_args(benchmark::State& state, int, double) {
- while(state.KeepRunning()) {}
-}
-BENCHMARK_CAPTURE(BM_non_template_args, basic_test, 0, 0);
-
-#endif // BENCHMARK_HAS_CXX11
-
-static void BM_DenseThreadRanges(benchmark::State& st) {
- switch (st.range(0)) {
- case 1:
- assert(st.threads == 1 || st.threads == 2 || st.threads == 3);
- break;
- case 2:
- assert(st.threads == 1 || st.threads == 3 || st.threads == 4);
- break;
- case 3:
- assert(st.threads == 5 || st.threads == 8 || st.threads == 11 ||
- st.threads == 14);
- break;
- default:
- assert(false && "Invalid test case number");
- }
- while (st.KeepRunning()) {
- }
-}
-BENCHMARK(BM_DenseThreadRanges)->Arg(1)->DenseThreadRange(1, 3);
-BENCHMARK(BM_DenseThreadRanges)->Arg(2)->DenseThreadRange(1, 4, 2);
-BENCHMARK(BM_DenseThreadRanges)->Arg(3)->DenseThreadRange(5, 14, 3);
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/clobber_memory_assembly_test.cc b/lib/libcxx/utils/google-benchmark/test/clobber_memory_assembly_test.cc
deleted file mode 100644
index f41911a39ce..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/clobber_memory_assembly_test.cc
+++ /dev/null
@@ -1,64 +0,0 @@
-#include <benchmark/benchmark.h>
-
-#ifdef __clang__
-#pragma clang diagnostic ignored "-Wreturn-type"
-#endif
-
-extern "C" {
-
-extern int ExternInt;
-extern int ExternInt2;
-extern int ExternInt3;
-
-}
-
-// CHECK-LABEL: test_basic:
-extern "C" void test_basic() {
- int x;
- benchmark::DoNotOptimize(&x);
- x = 101;
- benchmark::ClobberMemory();
- // CHECK: leaq [[DEST:[^,]+]], %rax
- // CHECK: movl $101, [[DEST]]
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_redundant_store:
-extern "C" void test_redundant_store() {
- ExternInt = 3;
- benchmark::ClobberMemory();
- ExternInt = 51;
- // CHECK-DAG: ExternInt
- // CHECK-DAG: movl $3
- // CHECK: movl $51
-}
-
-// CHECK-LABEL: test_redundant_read:
-extern "C" void test_redundant_read() {
- int x;
- benchmark::DoNotOptimize(&x);
- x = ExternInt;
- benchmark::ClobberMemory();
- x = ExternInt2;
- // CHECK: leaq [[DEST:[^,]+]], %rax
- // CHECK: ExternInt(%rip)
- // CHECK: movl %eax, [[DEST]]
- // CHECK-NOT: ExternInt2
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_redundant_read2:
-extern "C" void test_redundant_read2() {
- int x;
- benchmark::DoNotOptimize(&x);
- x = ExternInt;
- benchmark::ClobberMemory();
- x = ExternInt2;
- benchmark::ClobberMemory();
- // CHECK: leaq [[DEST:[^,]+]], %rax
- // CHECK: ExternInt(%rip)
- // CHECK: movl %eax, [[DEST]]
- // CHECK: ExternInt2(%rip)
- // CHECK: movl %eax, [[DEST]]
- // CHECK: ret
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/complexity_test.cc b/lib/libcxx/utils/google-benchmark/test/complexity_test.cc
deleted file mode 100644
index 323ddfe7ac5..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/complexity_test.cc
+++ /dev/null
@@ -1,183 +0,0 @@
-#undef NDEBUG
-#include <algorithm>
-#include <cassert>
-#include <cmath>
-#include <cstdlib>
-#include <vector>
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-namespace {
-
-#define ADD_COMPLEXITY_CASES(...) \
- int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)
-
-int AddComplexityTest(std::string test_name, std::string big_o_test_name,
- std::string rms_test_name, std::string big_o) {
- SetSubstitutions({{"%name", test_name},
- {"%bigo_name", big_o_test_name},
- {"%rms_name", rms_test_name},
- {"%bigo_str", "[ ]* %float " + big_o},
- {"%bigo", big_o},
- {"%rms", "[ ]*[0-9]+ %"}});
- AddCases(
- TC_ConsoleOut,
- {{"^%bigo_name %bigo_str %bigo_str[ ]*$"},
- {"^%bigo_name", MR_Not}, // Assert that we didn't only match a name.
- {"^%rms_name %rms %rms[ ]*$", MR_Next}});
- AddCases(TC_JSONOut, {{"\"name\": \"%bigo_name\",$"},
- {"\"run_name\": \"%name\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"BigO\",$", MR_Next},
- {"\"cpu_coefficient\": %float,$", MR_Next},
- {"\"real_coefficient\": %float,$", MR_Next},
- {"\"big_o\": \"%bigo\",$", MR_Next},
- {"\"time_unit\": \"ns\"$", MR_Next},
- {"}", MR_Next},
- {"\"name\": \"%rms_name\",$"},
- {"\"run_name\": \"%name\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"RMS\",$", MR_Next},
- {"\"rms\": %float$", MR_Next},
- {"}", MR_Next}});
- AddCases(TC_CSVOut, {{"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
- {"^\"%bigo_name\"", MR_Not},
- {"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}});
- return 0;
-}
-
-} // end namespace
-
-// ========================================================================= //
-// --------------------------- Testing BigO O(1) --------------------------- //
-// ========================================================================= //
-
-void BM_Complexity_O1(benchmark::State& state) {
- for (auto _ : state) {
- for (int i = 0; i < 1024; ++i) {
- benchmark::DoNotOptimize(&i);
- }
- }
- state.SetComplexityN(state.range(0));
-}
-BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
-BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
-BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int64_t) {
- return 1.0;
-});
-
-const char *one_test_name = "BM_Complexity_O1";
-const char *big_o_1_test_name = "BM_Complexity_O1_BigO";
-const char *rms_o_1_test_name = "BM_Complexity_O1_RMS";
-const char *enum_big_o_1 = "\\([0-9]+\\)";
-// FIXME: Tolerate both '(1)' and 'lgN' as output when the complexity is auto
-// deduced.
-// See https://github.com/google/benchmark/issues/272
-const char *auto_big_o_1 = "(\\([0-9]+\\))|(lgN)";
-const char *lambda_big_o_1 = "f\\(N\\)";
-
-// Add enum tests
-ADD_COMPLEXITY_CASES(one_test_name, big_o_1_test_name, rms_o_1_test_name,
- enum_big_o_1);
-
-// Add auto enum tests
-ADD_COMPLEXITY_CASES(one_test_name, big_o_1_test_name, rms_o_1_test_name,
- auto_big_o_1);
-
-// Add lambda tests
-ADD_COMPLEXITY_CASES(one_test_name, big_o_1_test_name, rms_o_1_test_name,
- lambda_big_o_1);
-
-// ========================================================================= //
-// --------------------------- Testing BigO O(N) --------------------------- //
-// ========================================================================= //
-
-std::vector<int> ConstructRandomVector(int64_t size) {
- std::vector<int> v;
- v.reserve(static_cast<int>(size));
- for (int i = 0; i < size; ++i) {
- v.push_back(static_cast<int>(std::rand() % size));
- }
- return v;
-}
-
-void BM_Complexity_O_N(benchmark::State& state) {
- auto v = ConstructRandomVector(state.range(0));
- // Test worst case scenario (item not in vector)
- const int64_t item_not_in_vector = state.range(0) * 2;
- for (auto _ : state) {
- benchmark::DoNotOptimize(std::find(v.begin(), v.end(), item_not_in_vector));
- }
- state.SetComplexityN(state.range(0));
-}
-BENCHMARK(BM_Complexity_O_N)
- ->RangeMultiplier(2)
- ->Range(1 << 10, 1 << 16)
- ->Complexity(benchmark::oN);
-BENCHMARK(BM_Complexity_O_N)
- ->RangeMultiplier(2)
- ->Range(1 << 10, 1 << 16)
- ->Complexity([](int64_t n) -> double { return static_cast<double>(n); });
-BENCHMARK(BM_Complexity_O_N)
- ->RangeMultiplier(2)
- ->Range(1 << 10, 1 << 16)
- ->Complexity();
-
-const char *n_test_name = "BM_Complexity_O_N";
-const char *big_o_n_test_name = "BM_Complexity_O_N_BigO";
-const char *rms_o_n_test_name = "BM_Complexity_O_N_RMS";
-const char *enum_auto_big_o_n = "N";
-const char *lambda_big_o_n = "f\\(N\\)";
-
-// Add enum tests
-ADD_COMPLEXITY_CASES(n_test_name, big_o_n_test_name, rms_o_n_test_name,
- enum_auto_big_o_n);
-
-// Add lambda tests
-ADD_COMPLEXITY_CASES(n_test_name, big_o_n_test_name, rms_o_n_test_name,
- lambda_big_o_n);
-
-// ========================================================================= //
-// ------------------------- Testing BigO O(N*lgN) ------------------------- //
-// ========================================================================= //
-
-static void BM_Complexity_O_N_log_N(benchmark::State& state) {
- auto v = ConstructRandomVector(state.range(0));
- for (auto _ : state) {
- std::sort(v.begin(), v.end());
- }
- state.SetComplexityN(state.range(0));
-}
-static const double kLog2E = 1.44269504088896340736;
-BENCHMARK(BM_Complexity_O_N_log_N)
- ->RangeMultiplier(2)
- ->Range(1 << 10, 1 << 16)
- ->Complexity(benchmark::oNLogN);
-BENCHMARK(BM_Complexity_O_N_log_N)
- ->RangeMultiplier(2)
- ->Range(1 << 10, 1 << 16)
- ->Complexity([](int64_t n) { return kLog2E * n * log(static_cast<double>(n)); });
-BENCHMARK(BM_Complexity_O_N_log_N)
- ->RangeMultiplier(2)
- ->Range(1 << 10, 1 << 16)
- ->Complexity();
-
-const char *n_lg_n_test_name = "BM_Complexity_O_N_log_N";
-const char *big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
-const char *rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
-const char *enum_auto_big_o_n_lg_n = "NlgN";
-const char *lambda_big_o_n_lg_n = "f\\(N\\)";
-
-// Add enum tests
-ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
- rms_o_n_lg_n_test_name, enum_auto_big_o_n_lg_n);
-
-// Add lambda tests
-ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
- rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);
-
-// ========================================================================= //
-// --------------------------- TEST CASES END ------------------------------ //
-// ========================================================================= //
-
-int main(int argc, char *argv[]) { RunOutputTests(argc, argv); }
diff --git a/lib/libcxx/utils/google-benchmark/test/cxx03_test.cc b/lib/libcxx/utils/google-benchmark/test/cxx03_test.cc
deleted file mode 100644
index baa9ed9262b..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/cxx03_test.cc
+++ /dev/null
@@ -1,63 +0,0 @@
-#undef NDEBUG
-#include <cassert>
-#include <cstddef>
-
-#include "benchmark/benchmark.h"
-
-#if __cplusplus >= 201103L
-#error C++11 or greater detected. Should be C++03.
-#endif
-
-#ifdef BENCHMARK_HAS_CXX11
-#error C++11 or greater detected by the library. BENCHMARK_HAS_CXX11 is defined.
-#endif
-
-void BM_empty(benchmark::State& state) {
- while (state.KeepRunning()) {
- volatile std::size_t x = state.iterations();
- ((void)x);
- }
-}
-BENCHMARK(BM_empty);
-
-// The new C++11 interface for args/ranges requires initializer list support.
-// Therefore we provide the old interface to support C++03.
-void BM_old_arg_range_interface(benchmark::State& state) {
- assert((state.range(0) == 1 && state.range(1) == 2) ||
- (state.range(0) == 5 && state.range(1) == 6));
- while (state.KeepRunning()) {
- }
-}
-BENCHMARK(BM_old_arg_range_interface)->ArgPair(1, 2)->RangePair(5, 5, 6, 6);
-
-template <class T, class U>
-void BM_template2(benchmark::State& state) {
- BM_empty(state);
-}
-BENCHMARK_TEMPLATE2(BM_template2, int, long);
-
-template <class T>
-void BM_template1(benchmark::State& state) {
- BM_empty(state);
-}
-BENCHMARK_TEMPLATE(BM_template1, long);
-BENCHMARK_TEMPLATE1(BM_template1, int);
-
-template <class T>
-struct BM_Fixture : public ::benchmark::Fixture {
-};
-
-BENCHMARK_TEMPLATE_F(BM_Fixture, BM_template1, long)(benchmark::State& state) {
- BM_empty(state);
-}
-BENCHMARK_TEMPLATE1_F(BM_Fixture, BM_template2, int)(benchmark::State& state) {
- BM_empty(state);
-}
-
-void BM_counters(benchmark::State& state) {
- BM_empty(state);
- state.counters["Foo"] = 2;
-}
-BENCHMARK(BM_counters);
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/diagnostics_test.cc b/lib/libcxx/utils/google-benchmark/test/diagnostics_test.cc
deleted file mode 100644
index dd64a336553..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/diagnostics_test.cc
+++ /dev/null
@@ -1,80 +0,0 @@
-// Testing:
-// State::PauseTiming()
-// State::ResumeTiming()
-// Test that CHECKs within these functions diagnose when they are called
-// outside of the KeepRunning() loop.
-//
-// NOTE: Users should NOT include or use src/check.h. This is only done in
-// order to test library internals.
-
-#include <cstdlib>
-#include <stdexcept>
-
-#include "../src/check.h"
-#include "benchmark/benchmark.h"
-
-#if defined(__GNUC__) && !defined(__EXCEPTIONS)
-#define TEST_HAS_NO_EXCEPTIONS
-#endif
-
-void TestHandler() {
-#ifndef TEST_HAS_NO_EXCEPTIONS
- throw std::logic_error("");
-#else
- std::abort();
-#endif
-}
-
-void try_invalid_pause_resume(benchmark::State& state) {
-#if !defined(TEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS) && !defined(TEST_HAS_NO_EXCEPTIONS)
- try {
- state.PauseTiming();
- std::abort();
- } catch (std::logic_error const&) {
- }
- try {
- state.ResumeTiming();
- std::abort();
- } catch (std::logic_error const&) {
- }
-#else
- (void)state; // avoid unused warning
-#endif
-}
-
-void BM_diagnostic_test(benchmark::State& state) {
- static bool called_once = false;
-
- if (called_once == false) try_invalid_pause_resume(state);
-
- for (auto _ : state) {
- benchmark::DoNotOptimize(state.iterations());
- }
-
- if (called_once == false) try_invalid_pause_resume(state);
-
- called_once = true;
-}
-BENCHMARK(BM_diagnostic_test);
-
-
-void BM_diagnostic_test_keep_running(benchmark::State& state) {
- static bool called_once = false;
-
- if (called_once == false) try_invalid_pause_resume(state);
-
- while(state.KeepRunning()) {
- benchmark::DoNotOptimize(state.iterations());
- }
-
- if (called_once == false) try_invalid_pause_resume(state);
-
- called_once = true;
-}
-BENCHMARK(BM_diagnostic_test_keep_running);
-
-int main(int argc, char* argv[]) {
- benchmark::internal::GetAbortHandler() = &TestHandler;
- benchmark::Initialize(&argc, argv);
- benchmark::RunSpecifiedBenchmarks();
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/display_aggregates_only_test.cc b/lib/libcxx/utils/google-benchmark/test/display_aggregates_only_test.cc
deleted file mode 100644
index 3c36d3f03c1..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/display_aggregates_only_test.cc
+++ /dev/null
@@ -1,43 +0,0 @@
-
-#undef NDEBUG
-#include <cstdio>
-#include <string>
-
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-// Ok this test is super ugly. We want to check what happens with the file
-// reporter in the presence of DisplayAggregatesOnly().
-// We do not care about console output, the normal tests check that already.
-
-void BM_SummaryRepeat(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->DisplayAggregatesOnly();
-
-int main(int argc, char* argv[]) {
- const std::string output = GetFileReporterOutput(argc, argv);
-
- if (SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3") != 6 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3\"") != 3 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_mean\"") != 1 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_median\"") !=
- 1 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"") !=
- 1) {
- std::cout << "Precondition mismatch. Expected to only find 6 "
- "occurrences of \"BM_SummaryRepeat/repeats:3\" substring:\n"
- "\"name\": \"BM_SummaryRepeat/repeats:3\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3_mean\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3_median\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"\nThe entire "
- "output:\n";
- std::cout << output;
- return 1;
- }
-
- return 0;
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/donotoptimize_assembly_test.cc b/lib/libcxx/utils/google-benchmark/test/donotoptimize_assembly_test.cc
deleted file mode 100644
index d4b0bab70e7..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/donotoptimize_assembly_test.cc
+++ /dev/null
@@ -1,163 +0,0 @@
-#include <benchmark/benchmark.h>
-
-#ifdef __clang__
-#pragma clang diagnostic ignored "-Wreturn-type"
-#endif
-
-extern "C" {
-
-extern int ExternInt;
-extern int ExternInt2;
-extern int ExternInt3;
-
-inline int Add42(int x) { return x + 42; }
-
-struct NotTriviallyCopyable {
- NotTriviallyCopyable();
- explicit NotTriviallyCopyable(int x) : value(x) {}
- NotTriviallyCopyable(NotTriviallyCopyable const&);
- int value;
-};
-
-struct Large {
- int value;
- int data[2];
-};
-
-}
-// CHECK-LABEL: test_with_rvalue:
-extern "C" void test_with_rvalue() {
- benchmark::DoNotOptimize(Add42(0));
- // CHECK: movl $42, %eax
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_large_rvalue:
-extern "C" void test_with_large_rvalue() {
- benchmark::DoNotOptimize(Large{ExternInt, {ExternInt, ExternInt}});
- // CHECK: ExternInt(%rip)
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]]
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_non_trivial_rvalue:
-extern "C" void test_with_non_trivial_rvalue() {
- benchmark::DoNotOptimize(NotTriviallyCopyable(ExternInt));
- // CHECK: mov{{l|q}} ExternInt(%rip)
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_lvalue:
-extern "C" void test_with_lvalue() {
- int x = 101;
- benchmark::DoNotOptimize(x);
- // CHECK-GNU: movl $101, %eax
- // CHECK-CLANG: movl $101, -{{[0-9]+}}(%[[REG:[a-z]+]])
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_large_lvalue:
-extern "C" void test_with_large_lvalue() {
- Large L{ExternInt, {ExternInt, ExternInt}};
- benchmark::DoNotOptimize(L);
- // CHECK: ExternInt(%rip)
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]])
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_non_trivial_lvalue:
-extern "C" void test_with_non_trivial_lvalue() {
- NotTriviallyCopyable NTC(ExternInt);
- benchmark::DoNotOptimize(NTC);
- // CHECK: ExternInt(%rip)
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]])
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_const_lvalue:
-extern "C" void test_with_const_lvalue() {
- const int x = 123;
- benchmark::DoNotOptimize(x);
- // CHECK: movl $123, %eax
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_large_const_lvalue:
-extern "C" void test_with_large_const_lvalue() {
- const Large L{ExternInt, {ExternInt, ExternInt}};
- benchmark::DoNotOptimize(L);
- // CHECK: ExternInt(%rip)
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]])
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
- // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_with_non_trivial_const_lvalue:
-extern "C" void test_with_non_trivial_const_lvalue() {
- const NotTriviallyCopyable Obj(ExternInt);
- benchmark::DoNotOptimize(Obj);
- // CHECK: mov{{q|l}} ExternInt(%rip)
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_div_by_two:
-extern "C" int test_div_by_two(int input) {
- int divisor = 2;
- benchmark::DoNotOptimize(divisor);
- return input / divisor;
- // CHECK: movl $2, [[DEST:.*]]
- // CHECK: idivl [[DEST]]
- // CHECK: ret
-}
-
-// CHECK-LABEL: test_inc_integer:
-extern "C" int test_inc_integer() {
- int x = 0;
- for (int i=0; i < 5; ++i)
- benchmark::DoNotOptimize(++x);
- // CHECK: movl $1, [[DEST:.*]]
- // CHECK: {{(addl \$1,|incl)}} [[DEST]]
- // CHECK: {{(addl \$1,|incl)}} [[DEST]]
- // CHECK: {{(addl \$1,|incl)}} [[DEST]]
- // CHECK: {{(addl \$1,|incl)}} [[DEST]]
- // CHECK-CLANG: movl [[DEST]], %eax
- // CHECK: ret
- return x;
-}
-
-// CHECK-LABEL: test_pointer_rvalue
-extern "C" void test_pointer_rvalue() {
- // CHECK: movl $42, [[DEST:.*]]
- // CHECK: leaq [[DEST]], %rax
- // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z]+]])
- // CHECK: ret
- int x = 42;
- benchmark::DoNotOptimize(&x);
-}
-
-// CHECK-LABEL: test_pointer_const_lvalue:
-extern "C" void test_pointer_const_lvalue() {
- // CHECK: movl $42, [[DEST:.*]]
- // CHECK: leaq [[DEST]], %rax
- // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z]+]])
- // CHECK: ret
- int x = 42;
- int * const xp = &x;
- benchmark::DoNotOptimize(xp);
-}
-
-// CHECK-LABEL: test_pointer_lvalue:
-extern "C" void test_pointer_lvalue() {
- // CHECK: movl $42, [[DEST:.*]]
- // CHECK: leaq [[DEST]], %rax
- // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z+]+]])
- // CHECK: ret
- int x = 42;
- int *xp = &x;
- benchmark::DoNotOptimize(xp);
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/donotoptimize_test.cc b/lib/libcxx/utils/google-benchmark/test/donotoptimize_test.cc
deleted file mode 100644
index 2ce92d1c72b..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/donotoptimize_test.cc
+++ /dev/null
@@ -1,52 +0,0 @@
-#include "benchmark/benchmark.h"
-
-#include <cstdint>
-
-namespace {
-#if defined(__GNUC__)
-std::uint64_t double_up(const std::uint64_t x) __attribute__((const));
-#endif
-std::uint64_t double_up(const std::uint64_t x) { return x * 2; }
-}
-
-// Using DoNotOptimize on types like BitRef seems to cause a lot of problems
-// with the inline assembly on both GCC and Clang.
-struct BitRef {
- int index;
- unsigned char &byte;
-
-public:
- static BitRef Make() {
- static unsigned char arr[2] = {};
- BitRef b(1, arr[0]);
- return b;
- }
-private:
- BitRef(int i, unsigned char& b) : index(i), byte(b) {}
-};
-
-int main(int, char*[]) {
- // this test verifies compilation of DoNotOptimize() for some types
-
- char buffer8[8] = "";
- benchmark::DoNotOptimize(buffer8);
-
- char buffer20[20] = "";
- benchmark::DoNotOptimize(buffer20);
-
- char buffer1024[1024] = "";
- benchmark::DoNotOptimize(buffer1024);
- benchmark::DoNotOptimize(&buffer1024[0]);
-
- int x = 123;
- benchmark::DoNotOptimize(x);
- benchmark::DoNotOptimize(&x);
- benchmark::DoNotOptimize(x += 42);
-
- benchmark::DoNotOptimize(double_up(x));
-
- // These tests are to ensure that DoNotOptimize() can be used with types like BitRef.
- benchmark::DoNotOptimize(BitRef::Make());
- BitRef lval = BitRef::Make();
- benchmark::DoNotOptimize(lval);
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/filter_test.cc b/lib/libcxx/utils/google-benchmark/test/filter_test.cc
deleted file mode 100644
index 0e27065c155..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/filter_test.cc
+++ /dev/null
@@ -1,104 +0,0 @@
-#include "benchmark/benchmark.h"
-
-#include <cassert>
-#include <cmath>
-#include <cstdint>
-#include <cstdlib>
-
-#include <iostream>
-#include <limits>
-#include <sstream>
-#include <string>
-
-namespace {
-
-class TestReporter : public benchmark::ConsoleReporter {
- public:
- virtual bool ReportContext(const Context& context) {
- return ConsoleReporter::ReportContext(context);
- };
-
- virtual void ReportRuns(const std::vector<Run>& report) {
- ++count_;
- ConsoleReporter::ReportRuns(report);
- };
-
- TestReporter() : count_(0) {}
-
- virtual ~TestReporter() {}
-
- size_t GetCount() const { return count_; }
-
- private:
- mutable size_t count_;
-};
-
-} // end namespace
-
-static void NoPrefix(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(NoPrefix);
-
-static void BM_Foo(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_Foo);
-
-static void BM_Bar(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_Bar);
-
-static void BM_FooBar(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_FooBar);
-
-static void BM_FooBa(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_FooBa);
-
-int main(int argc, char **argv) {
- bool list_only = false;
- for (int i = 0; i < argc; ++i)
- list_only |= std::string(argv[i]).find("--benchmark_list_tests") !=
- std::string::npos;
-
- benchmark::Initialize(&argc, argv);
-
- TestReporter test_reporter;
- const size_t returned_count =
- benchmark::RunSpecifiedBenchmarks(&test_reporter);
-
- if (argc == 2) {
- // Make sure we ran all of the tests
- std::stringstream ss(argv[1]);
- size_t expected_return;
- ss >> expected_return;
-
- if (returned_count != expected_return) {
- std::cerr << "ERROR: Expected " << expected_return
- << " tests to match the filter but returned_count = "
- << returned_count << std::endl;
- return -1;
- }
-
- const size_t expected_reports = list_only ? 0 : expected_return;
- const size_t reports_count = test_reporter.GetCount();
- if (reports_count != expected_reports) {
- std::cerr << "ERROR: Expected " << expected_reports
- << " tests to be run but reported_count = " << reports_count
- << std::endl;
- return -1;
- }
- }
-
- return 0;
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/fixture_test.cc b/lib/libcxx/utils/google-benchmark/test/fixture_test.cc
deleted file mode 100644
index 1462b10f02f..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/fixture_test.cc
+++ /dev/null
@@ -1,49 +0,0 @@
-
-#include "benchmark/benchmark.h"
-
-#include <cassert>
-#include <memory>
-
-class MyFixture : public ::benchmark::Fixture {
- public:
- void SetUp(const ::benchmark::State& state) {
- if (state.thread_index == 0) {
- assert(data.get() == nullptr);
- data.reset(new int(42));
- }
- }
-
- void TearDown(const ::benchmark::State& state) {
- if (state.thread_index == 0) {
- assert(data.get() != nullptr);
- data.reset();
- }
- }
-
- ~MyFixture() { assert(data == nullptr); }
-
- std::unique_ptr<int> data;
-};
-
-BENCHMARK_F(MyFixture, Foo)(benchmark::State &st) {
- assert(data.get() != nullptr);
- assert(*data == 42);
- for (auto _ : st) {
- }
-}
-
-BENCHMARK_DEFINE_F(MyFixture, Bar)(benchmark::State& st) {
- if (st.thread_index == 0) {
- assert(data.get() != nullptr);
- assert(*data == 42);
- }
- for (auto _ : st) {
- assert(data.get() != nullptr);
- assert(*data == 42);
- }
- st.SetItemsProcessed(st.range(0));
-}
-BENCHMARK_REGISTER_F(MyFixture, Bar)->Arg(42);
-BENCHMARK_REGISTER_F(MyFixture, Bar)->Arg(42)->ThreadPerCpu();
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/link_main_test.cc b/lib/libcxx/utils/google-benchmark/test/link_main_test.cc
deleted file mode 100644
index 241ad5c3905..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/link_main_test.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-#include "benchmark/benchmark.h"
-
-void BM_empty(benchmark::State& state) {
- for (auto _ : state) {
- benchmark::DoNotOptimize(state.iterations());
- }
-}
-BENCHMARK(BM_empty);
diff --git a/lib/libcxx/utils/google-benchmark/test/map_test.cc b/lib/libcxx/utils/google-benchmark/test/map_test.cc
deleted file mode 100644
index dbf7982a368..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/map_test.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-#include "benchmark/benchmark.h"
-
-#include <cstdlib>
-#include <map>
-
-namespace {
-
-std::map<int, int> ConstructRandomMap(int size) {
- std::map<int, int> m;
- for (int i = 0; i < size; ++i) {
- m.insert(std::make_pair(std::rand() % size, std::rand() % size));
- }
- return m;
-}
-
-} // namespace
-
-// Basic version.
-static void BM_MapLookup(benchmark::State& state) {
- const int size = static_cast<int>(state.range(0));
- std::map<int, int> m;
- for (auto _ : state) {
- state.PauseTiming();
- m = ConstructRandomMap(size);
- state.ResumeTiming();
- for (int i = 0; i < size; ++i) {
- benchmark::DoNotOptimize(m.find(std::rand() % size));
- }
- }
- state.SetItemsProcessed(state.iterations() * size);
-}
-BENCHMARK(BM_MapLookup)->Range(1 << 3, 1 << 12);
-
-// Using fixtures.
-class MapFixture : public ::benchmark::Fixture {
- public:
- void SetUp(const ::benchmark::State& st) {
- m = ConstructRandomMap(static_cast<int>(st.range(0)));
- }
-
- void TearDown(const ::benchmark::State&) { m.clear(); }
-
- std::map<int, int> m;
-};
-
-BENCHMARK_DEFINE_F(MapFixture, Lookup)(benchmark::State& state) {
- const int size = static_cast<int>(state.range(0));
- for (auto _ : state) {
- for (int i = 0; i < size; ++i) {
- benchmark::DoNotOptimize(m.find(std::rand() % size));
- }
- }
- state.SetItemsProcessed(state.iterations() * size);
-}
-BENCHMARK_REGISTER_F(MapFixture, Lookup)->Range(1 << 3, 1 << 12);
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/memory_manager_test.cc b/lib/libcxx/utils/google-benchmark/test/memory_manager_test.cc
deleted file mode 100644
index 94be6083795..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/memory_manager_test.cc
+++ /dev/null
@@ -1,42 +0,0 @@
-#include <memory>
-
-#include "../src/check.h"
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-class TestMemoryManager : public benchmark::MemoryManager {
- void Start() {}
- void Stop(Result* result) {
- result->num_allocs = 42;
- result->max_bytes_used = 42000;
- }
-};
-
-void BM_empty(benchmark::State& state) {
- for (auto _ : state) {
- benchmark::DoNotOptimize(state.iterations());
- }
-}
-BENCHMARK(BM_empty);
-
-ADD_CASES(TC_ConsoleOut, {{"^BM_empty %console_report$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_empty\",$"},
- {"\"run_name\": \"BM_empty\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"allocs_per_iter\": %float,$", MR_Next},
- {"\"max_bytes_used\": 42000$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_empty\",%csv_report$"}});
-
-
-int main(int argc, char *argv[]) {
- std::unique_ptr<benchmark::MemoryManager> mm(new TestMemoryManager());
-
- benchmark::RegisterMemoryManager(mm.get());
- RunOutputTests(argc, argv);
- benchmark::RegisterMemoryManager(nullptr);
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/multiple_ranges_test.cc b/lib/libcxx/utils/google-benchmark/test/multiple_ranges_test.cc
deleted file mode 100644
index c64acabc25c..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/multiple_ranges_test.cc
+++ /dev/null
@@ -1,97 +0,0 @@
-#include "benchmark/benchmark.h"
-
-#include <cassert>
-#include <iostream>
-#include <set>
-#include <vector>
-
-class MultipleRangesFixture : public ::benchmark::Fixture {
- public:
- MultipleRangesFixture()
- : expectedValues({{1, 3, 5},
- {1, 3, 8},
- {1, 3, 15},
- {2, 3, 5},
- {2, 3, 8},
- {2, 3, 15},
- {1, 4, 5},
- {1, 4, 8},
- {1, 4, 15},
- {2, 4, 5},
- {2, 4, 8},
- {2, 4, 15},
- {1, 7, 5},
- {1, 7, 8},
- {1, 7, 15},
- {2, 7, 5},
- {2, 7, 8},
- {2, 7, 15},
- {7, 6, 3}}) {}
-
- void SetUp(const ::benchmark::State& state) {
- std::vector<int64_t> ranges = {state.range(0), state.range(1),
- state.range(2)};
-
- assert(expectedValues.find(ranges) != expectedValues.end());
-
- actualValues.insert(ranges);
- }
-
- // NOTE: This is not TearDown as we want to check after _all_ runs are
- // complete.
- virtual ~MultipleRangesFixture() {
- assert(actualValues.size() == expectedValues.size());
- if (actualValues.size() != expectedValues.size()) {
- std::cout << "EXPECTED\n";
- for (auto v : expectedValues) {
- std::cout << "{";
- for (int64_t iv : v) {
- std::cout << iv << ", ";
- }
- std::cout << "}\n";
- }
- std::cout << "ACTUAL\n";
- for (auto v : actualValues) {
- std::cout << "{";
- for (int64_t iv : v) {
- std::cout << iv << ", ";
- }
- std::cout << "}\n";
- }
- }
- }
-
- std::set<std::vector<int64_t>> expectedValues;
- std::set<std::vector<int64_t>> actualValues;
-};
-
-BENCHMARK_DEFINE_F(MultipleRangesFixture, Empty)(benchmark::State& state) {
- for (auto _ : state) {
- int64_t product = state.range(0) * state.range(1) * state.range(2);
- for (int64_t x = 0; x < product; x++) {
- benchmark::DoNotOptimize(x);
- }
- }
-}
-
-BENCHMARK_REGISTER_F(MultipleRangesFixture, Empty)
- ->RangeMultiplier(2)
- ->Ranges({{1, 2}, {3, 7}, {5, 15}})
- ->Args({7, 6, 3});
-
-void BM_CheckDefaultArgument(benchmark::State& state) {
- // Test that 'range()' without an argument is the same as 'range(0)'.
- assert(state.range() == state.range(0));
- assert(state.range() != state.range(1));
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_CheckDefaultArgument)->Ranges({{1, 5}, {6, 10}});
-
-static void BM_MultipleRanges(benchmark::State& st) {
- for (auto _ : st) {
- }
-}
-BENCHMARK(BM_MultipleRanges)->Ranges({{5, 5}, {6, 6}});
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/options_test.cc b/lib/libcxx/utils/google-benchmark/test/options_test.cc
deleted file mode 100644
index fdec69174ee..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/options_test.cc
+++ /dev/null
@@ -1,65 +0,0 @@
-#include "benchmark/benchmark.h"
-#include <chrono>
-#include <thread>
-
-#if defined(NDEBUG)
-#undef NDEBUG
-#endif
-#include <cassert>
-
-void BM_basic(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-
-void BM_basic_slow(benchmark::State& state) {
- std::chrono::milliseconds sleep_duration(state.range(0));
- for (auto _ : state) {
- std::this_thread::sleep_for(
- std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration));
- }
-}
-
-BENCHMARK(BM_basic);
-BENCHMARK(BM_basic)->Arg(42);
-BENCHMARK(BM_basic_slow)->Arg(10)->Unit(benchmark::kNanosecond);
-BENCHMARK(BM_basic_slow)->Arg(100)->Unit(benchmark::kMicrosecond);
-BENCHMARK(BM_basic_slow)->Arg(1000)->Unit(benchmark::kMillisecond);
-BENCHMARK(BM_basic)->Range(1, 8);
-BENCHMARK(BM_basic)->RangeMultiplier(2)->Range(1, 8);
-BENCHMARK(BM_basic)->DenseRange(10, 15);
-BENCHMARK(BM_basic)->Args({42, 42});
-BENCHMARK(BM_basic)->Ranges({{64, 512}, {64, 512}});
-BENCHMARK(BM_basic)->MinTime(0.7);
-BENCHMARK(BM_basic)->UseRealTime();
-BENCHMARK(BM_basic)->ThreadRange(2, 4);
-BENCHMARK(BM_basic)->ThreadPerCpu();
-BENCHMARK(BM_basic)->Repetitions(3);
-
-void CustomArgs(benchmark::internal::Benchmark* b) {
- for (int i = 0; i < 10; ++i) {
- b->Arg(i);
- }
-}
-
-BENCHMARK(BM_basic)->Apply(CustomArgs);
-
-void BM_explicit_iteration_count(benchmark::State& state) {
- // Test that benchmarks specified with an explicit iteration count are
- // only run once.
- static bool invoked_before = false;
- assert(!invoked_before);
- invoked_before = true;
-
- // Test that the requested iteration count is respected.
- assert(state.max_iterations == 42);
- size_t actual_iterations = 0;
- for (auto _ : state)
- ++actual_iterations;
- assert(state.iterations() == state.max_iterations);
- assert(state.iterations() == 42);
-
-}
-BENCHMARK(BM_explicit_iteration_count)->Iterations(42);
-
-BENCHMARK_MAIN();
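Apply(), exercised above with CustomArgs, accepts any function taking a benchmark::internal::Benchmark*, so ad-hoc generators can combine sweeps that Range()/DenseRange() alone cannot express. A small hypothetical sketch in the same spirit, reusing BM_basic from the file above:

// Hypothetical generator: a dense sweep followed by an exponential sweep.
static void MixedArgs(benchmark::internal::Benchmark* b) {
  for (int i = 0; i <= 4; ++i) b->Arg(i);      // 0, 1, 2, 3, 4
  for (int i = 8; i <= 64; i *= 2) b->Arg(i);  // 8, 16, 32, 64
}
BENCHMARK(BM_basic)->Apply(MixedArgs);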
diff --git a/lib/libcxx/utils/google-benchmark/test/output_test.h b/lib/libcxx/utils/google-benchmark/test/output_test.h
deleted file mode 100644
index 9385761b214..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/output_test.h
+++ /dev/null
@@ -1,213 +0,0 @@
-#ifndef TEST_OUTPUT_TEST_H
-#define TEST_OUTPUT_TEST_H
-
-#undef NDEBUG
-#include <functional>
-#include <initializer_list>
-#include <memory>
-#include <sstream>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "../src/re.h"
-#include "benchmark/benchmark.h"
-
-#define CONCAT2(x, y) x##y
-#define CONCAT(x, y) CONCAT2(x, y)
-
-#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = ::AddCases(__VA_ARGS__)
-
-#define SET_SUBSTITUTIONS(...) \
- int CONCAT(dummy, __LINE__) = ::SetSubstitutions(__VA_ARGS__)
-
-enum MatchRules {
- MR_Default, // Skip non-matching lines until a match is found.
- MR_Next, // Match must occur on the next line.
- MR_Not // No line between the current position and the next match matches
- // the regex
-};
-
-struct TestCase {
- TestCase(std::string re, int rule = MR_Default);
-
- std::string regex_str;
- int match_rule;
- std::string substituted_regex;
- std::shared_ptr<benchmark::Regex> regex;
-};
-
-enum TestCaseID {
- TC_ConsoleOut,
- TC_ConsoleErr,
- TC_JSONOut,
- TC_JSONErr,
- TC_CSVOut,
- TC_CSVErr,
-
- TC_NumID // PRIVATE
-};
-
-// Add a list of test cases to be run against the output specified by
-// 'ID'
-int AddCases(TestCaseID ID, std::initializer_list<TestCase> il);
-
-// Add or set a list of substitutions to be performed on constructed regexes.
-// See 'output_test_helper.cc' for a list of default substitutions.
-int SetSubstitutions(
- std::initializer_list<std::pair<std::string, std::string>> il);
-
-// Run all output tests.
-void RunOutputTests(int argc, char* argv[]);
-
-// Count the number of 'pat' substrings in the 'haystack' string.
-int SubstrCnt(const std::string& haystack, const std::string& pat);
-
-// Run registered benchmarks with file reporter enabled, and return the content
-// outputted by the file reporter.
-std::string GetFileReporterOutput(int argc, char* argv[]);
-
-// ========================================================================= //
-// ------------------------- Results checking ------------------------------ //
-// ========================================================================= //
-
-// Call this macro to register a benchmark for checking its results. This
-// should be all that's needed. It subscribes a function to check the (CSV)
-// results of a benchmark. This is done only after verifying that the output
-// strings are really as expected.
-// bm_name_pattern: a name or a regex pattern which will be matched against
-// all the benchmark names. Matching benchmarks
-// will be the subject of a call to checker_function
-// checker_function: should be of type ResultsCheckFn (see below)
-#define CHECK_BENCHMARK_RESULTS(bm_name_pattern, checker_function) \
- size_t CONCAT(dummy, __LINE__) = AddChecker(bm_name_pattern, checker_function)
-
-struct Results;
-typedef std::function<void(Results const&)> ResultsCheckFn;
-
-size_t AddChecker(const char* bm_name_pattern, ResultsCheckFn fn);
-
-// Class holding the results of a benchmark.
-// It is passed in calls to checker functions.
-struct Results {
- // the benchmark name
- std::string name;
- // the benchmark fields
- std::map<std::string, std::string> values;
-
- Results(const std::string& n) : name(n) {}
-
- int NumThreads() const;
-
- double NumIterations() const;
-
- typedef enum { kCpuTime, kRealTime } BenchmarkTime;
-
- // get cpu_time or real_time in seconds
- double GetTime(BenchmarkTime which) const;
-
- // get the real_time duration of the benchmark in seconds.
- // it is better to use fuzzy float checks for this, as the float
- // ASCII formatting is lossy.
- double DurationRealTime() const {
- return NumIterations() * GetTime(kRealTime);
- }
- // get the cpu_time duration of the benchmark in seconds
- double DurationCPUTime() const {
- return NumIterations() * GetTime(kCpuTime);
- }
-
- // get the string for a result by name, or nullptr if the name
- // is not found
- const std::string* Get(const char* entry_name) const {
- auto it = values.find(entry_name);
- if (it == values.end()) return nullptr;
- return &it->second;
- }
-
- // get a result by name, parsed as a specific type.
- // NOTE: for counters, use GetCounterAs instead.
- template <class T>
- T GetAs(const char* entry_name) const;
-
- // counters are written as doubles, so they have to be read first
- // as a double, and only then converted to the requested type.
- template <class T>
- T GetCounterAs(const char* entry_name) const {
- double dval = GetAs<double>(entry_name);
- T tval = static_cast<T>(dval);
- return tval;
- }
-};
-
-template <class T>
-T Results::GetAs(const char* entry_name) const {
- auto* sv = Get(entry_name);
- CHECK(sv != nullptr && !sv->empty());
- std::stringstream ss;
- ss << *sv;
- T out;
- ss >> out;
- CHECK(!ss.fail());
- return out;
-}
-
-//----------------------------------
-// Macros to help in result checking. Do not use them with arguments causing
-// side-effects.
-
-// clang-format off
-
-#define _CHECK_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value) \
- CONCAT(CHECK_, relationship) \
- (entry.getfn< var_type >(var_name), (value)) << "\n" \
- << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
- << __FILE__ << ":" << __LINE__ << ": " \
- << "expected (" << #var_type << ")" << (var_name) \
- << "=" << (entry).getfn< var_type >(var_name) \
- << " to be " #relationship " to " << (value) << "\n"
-
-// Check with tolerance. eps_factor is the tolerance window, which is
-// interpreted relative to value (e.g., 0.1 means 10% of value).
-#define _CHECK_FLOAT_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value, eps_factor) \
- CONCAT(CHECK_FLOAT_, relationship) \
- (entry.getfn< var_type >(var_name), (value), (eps_factor) * (value)) << "\n" \
- << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
- << __FILE__ << ":" << __LINE__ << ": " \
- << "expected (" << #var_type << ")" << (var_name) \
- << "=" << (entry).getfn< var_type >(var_name) \
- << " to be " #relationship " to " << (value) << "\n" \
- << __FILE__ << ":" << __LINE__ << ": " \
- << "with tolerance of " << (eps_factor) * (value) \
- << " (" << (eps_factor)*100. << "%), " \
- << "but delta was " << ((entry).getfn< var_type >(var_name) - (value)) \
- << " (" << (((entry).getfn< var_type >(var_name) - (value)) \
- / \
- ((value) > 1.e-5 || value < -1.e-5 ? value : 1.e-5)*100.) \
- << "%)"
-
-#define CHECK_RESULT_VALUE(entry, var_type, var_name, relationship, value) \
- _CHECK_RESULT_VALUE(entry, GetAs, var_type, var_name, relationship, value)
-
-#define CHECK_COUNTER_VALUE(entry, var_type, var_name, relationship, value) \
- _CHECK_RESULT_VALUE(entry, GetCounterAs, var_type, var_name, relationship, value)
-
-#define CHECK_FLOAT_RESULT_VALUE(entry, var_name, relationship, value, eps_factor) \
- _CHECK_FLOAT_RESULT_VALUE(entry, GetAs, double, var_name, relationship, value, eps_factor)
-
-#define CHECK_FLOAT_COUNTER_VALUE(entry, var_name, relationship, value, eps_factor) \
- _CHECK_FLOAT_RESULT_VALUE(entry, GetCounterAs, double, var_name, relationship, value, eps_factor)
-
-// clang-format on
-
-// ========================================================================= //
-// --------------------------- Misc Utilities ------------------------------ //
-// ========================================================================= //
-
-namespace {
-
-const char* const dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
-
-} // end namespace
-
-#endif // TEST_OUTPUT_TEST_H
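For orientation, the pieces declared in this header are typically combined in the *_test.cc files of this directory roughly as in the following sketch (the benchmark, counter, and checker names are illustrative only):

#include "benchmark/benchmark.h"
#include "output_test.h"

void BM_example(benchmark::State& state) {
  for (auto _ : state) {
  }
  state.counters["foo"] = 2;
}
BENCHMARK(BM_example);

// Match reporter output line by line against substituted regexes...
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_example\",$"},
                       {"\"run_name\": \"BM_example\",$", MR_Next}});
ADD_CASES(TC_CSVOut, {{"^\"BM_example\",%csv_report"}});

// ...and subscribe a checker for the parsed CSV results of matching runs.
void CheckExample(Results const& e) {
  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 2);
}
CHECK_BENCHMARK_RESULTS("BM_example", &CheckExample);

int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }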
diff --git a/lib/libcxx/utils/google-benchmark/test/output_test_helper.cc b/lib/libcxx/utils/google-benchmark/test/output_test_helper.cc
deleted file mode 100644
index 5dc951d2bca..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/output_test_helper.cc
+++ /dev/null
@@ -1,505 +0,0 @@
-#include <cstdio>
-#include <cstring>
-#include <fstream>
-#include <iostream>
-#include <map>
-#include <memory>
-#include <random>
-#include <sstream>
-#include <streambuf>
-
-#include "../src/benchmark_api_internal.h"
-#include "../src/check.h" // NOTE: check.h is for internal use only!
-#include "../src/re.h" // NOTE: re.h is for internal use only
-#include "output_test.h"
-
-// ========================================================================= //
-// ------------------------------ Internals -------------------------------- //
-// ========================================================================= //
-namespace internal {
-namespace {
-
-using TestCaseList = std::vector<TestCase>;
-
-// Use a vector because the order in which elements are added matters during iteration.
-// std::map/unordered_map don't guarantee that.
-// For example:
-// SetSubstitutions({{"%HelloWorld", "Hello"}, {"%Hello", "Hi"}});
-// Substitute("%HelloWorld") // Always expands to Hello.
-using SubMap = std::vector<std::pair<std::string, std::string>>;
-
-TestCaseList& GetTestCaseList(TestCaseID ID) {
- // Uses function-local statics to ensure initialization occurs
- // before first use.
- static TestCaseList lists[TC_NumID];
- return lists[ID];
-}
-
-SubMap& GetSubstitutions() {
- // Don't use 'dec_re' from header because it may not yet be initialized.
- // clang-format off
- static std::string safe_dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
- static std::string time_re = "([0-9]+[.])?[0-9]+";
- static SubMap map = {
- {"%float", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?"},
- // human-readable float
- {"%hrfloat", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?[kMGTPEZYmunpfazy]?"},
- {"%int", "[ ]*[0-9]+"},
- {" %s ", "[ ]+"},
- {"%time", "[ ]*" + time_re + "[ ]+ns"},
- {"%console_report", "[ ]*" + time_re + "[ ]+ns [ ]*" + time_re + "[ ]+ns [ ]*[0-9]+"},
- {"%console_time_only_report", "[ ]*" + time_re + "[ ]+ns [ ]*" + time_re + "[ ]+ns"},
- {"%console_us_report", "[ ]*" + time_re + "[ ]+us [ ]*" + time_re + "[ ]+us [ ]*[0-9]+"},
- {"%console_us_time_only_report", "[ ]*" + time_re + "[ ]+us [ ]*" + time_re + "[ ]+us"},
- {"%csv_header",
- "name,iterations,real_time,cpu_time,time_unit,bytes_per_second,"
- "items_per_second,label,error_occurred,error_message"},
- {"%csv_report", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,,,"},
- {"%csv_us_report", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",us,,,,,"},
- {"%csv_bytes_report",
- "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns," + safe_dec_re + ",,,,"},
- {"%csv_items_report",
- "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,," + safe_dec_re + ",,,"},
- {"%csv_bytes_items_report",
- "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns," + safe_dec_re +
- "," + safe_dec_re + ",,,"},
- {"%csv_label_report_begin", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,"},
- {"%csv_label_report_end", ",,"}};
- // clang-format on
- return map;
-}
-
-std::string PerformSubstitutions(std::string source) {
- SubMap const& subs = GetSubstitutions();
- using SizeT = std::string::size_type;
- for (auto const& KV : subs) {
- SizeT pos;
- SizeT next_start = 0;
- while ((pos = source.find(KV.first, next_start)) != std::string::npos) {
- next_start = pos + KV.second.size();
- source.replace(pos, KV.first.size(), KV.second);
- }
- }
- return source;
-}
-
-void CheckCase(std::stringstream& remaining_output, TestCase const& TC,
- TestCaseList const& not_checks) {
- std::string first_line;
- bool on_first = true;
- std::string line;
- while (remaining_output.eof() == false) {
- CHECK(remaining_output.good());
- std::getline(remaining_output, line);
- if (on_first) {
- first_line = line;
- on_first = false;
- }
- for (const auto& NC : not_checks) {
- CHECK(!NC.regex->Match(line))
- << "Unexpected match for line \"" << line << "\" for MR_Not regex \""
- << NC.regex_str << "\""
- << "\n actual regex string \"" << TC.substituted_regex << "\""
- << "\n started matching near: " << first_line;
- }
- if (TC.regex->Match(line)) return;
- CHECK(TC.match_rule != MR_Next)
- << "Expected line \"" << line << "\" to match regex \"" << TC.regex_str
- << "\""
- << "\n actual regex string \"" << TC.substituted_regex << "\""
- << "\n started matching near: " << first_line;
- }
- CHECK(remaining_output.eof() == false)
- << "End of output reached before match for regex \"" << TC.regex_str
- << "\" was found"
- << "\n actual regex string \"" << TC.substituted_regex << "\""
- << "\n started matching near: " << first_line;
-}
-
-void CheckCases(TestCaseList const& checks, std::stringstream& output) {
- std::vector<TestCase> not_checks;
- for (size_t i = 0; i < checks.size(); ++i) {
- const auto& TC = checks[i];
- if (TC.match_rule == MR_Not) {
- not_checks.push_back(TC);
- continue;
- }
- CheckCase(output, TC, not_checks);
- not_checks.clear();
- }
-}
-
-class TestReporter : public benchmark::BenchmarkReporter {
- public:
- TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
- : reporters_(reps) {}
-
- virtual bool ReportContext(const Context& context) {
- bool last_ret = false;
- bool first = true;
- for (auto rep : reporters_) {
- bool new_ret = rep->ReportContext(context);
- CHECK(first || new_ret == last_ret)
- << "Reports return different values for ReportContext";
- first = false;
- last_ret = new_ret;
- }
- (void)first;
- return last_ret;
- }
-
- void ReportRuns(const std::vector<Run>& report) {
- for (auto rep : reporters_) rep->ReportRuns(report);
- }
- void Finalize() {
- for (auto rep : reporters_) rep->Finalize();
- }
-
- private:
- std::vector<benchmark::BenchmarkReporter*> reporters_;
-};
-} // namespace
-
-} // end namespace internal
-
-// ========================================================================= //
-// -------------------------- Results checking ----------------------------- //
-// ========================================================================= //
-
-namespace internal {
-
-// Utility class to manage subscribers for checking benchmark results.
-// It works by parsing the CSV output to read the results.
-class ResultsChecker {
- public:
- struct PatternAndFn : public TestCase { // reusing TestCase for its regexes
- PatternAndFn(const std::string& rx, ResultsCheckFn fn_)
- : TestCase(rx), fn(fn_) {}
- ResultsCheckFn fn;
- };
-
- std::vector<PatternAndFn> check_patterns;
- std::vector<Results> results;
- std::vector<std::string> field_names;
-
- void Add(const std::string& entry_pattern, ResultsCheckFn fn);
-
- void CheckResults(std::stringstream& output);
-
- private:
- void SetHeader_(const std::string& csv_header);
- void SetValues_(const std::string& entry_csv_line);
-
- std::vector<std::string> SplitCsv_(const std::string& line);
-};
-
-// store the static ResultsChecker in a function to prevent initialization
-// order problems
-ResultsChecker& GetResultsChecker() {
- static ResultsChecker rc;
- return rc;
-}
-
-// add a results checker for a benchmark
-void ResultsChecker::Add(const std::string& entry_pattern, ResultsCheckFn fn) {
- check_patterns.emplace_back(entry_pattern, fn);
-}
-
-// check the results of all subscribed benchmarks
-void ResultsChecker::CheckResults(std::stringstream& output) {
- // first reset the stream to the start
- {
- auto start = std::stringstream::pos_type(0);
- // clear before calling tellg()
- output.clear();
- // seek to zero only when needed
- if (output.tellg() > start) output.seekg(start);
- // and just in case
- output.clear();
- }
- // now go over every line and publish it to the ResultsChecker
- std::string line;
- bool on_first = true;
- while (output.eof() == false) {
- CHECK(output.good());
- std::getline(output, line);
- if (on_first) {
- SetHeader_(line); // this is important
- on_first = false;
- continue;
- }
- SetValues_(line);
- }
- // finally we can call the subscribed check functions
- for (const auto& p : check_patterns) {
- VLOG(2) << "--------------------------------\n";
- VLOG(2) << "checking for benchmarks matching " << p.regex_str << "...\n";
- for (const auto& r : results) {
- if (!p.regex->Match(r.name)) {
- VLOG(2) << p.regex_str << " is not matched by " << r.name << "\n";
- continue;
- } else {
- VLOG(2) << p.regex_str << " is matched by " << r.name << "\n";
- }
- VLOG(1) << "Checking results of " << r.name << ": ... \n";
- p.fn(r);
- VLOG(1) << "Checking results of " << r.name << ": OK.\n";
- }
- }
-}
-
-// prepare for the names in this header
-void ResultsChecker::SetHeader_(const std::string& csv_header) {
- field_names = SplitCsv_(csv_header);
-}
-
-// set the values for a benchmark
-void ResultsChecker::SetValues_(const std::string& entry_csv_line) {
- if (entry_csv_line.empty()) return; // some lines are empty
- CHECK(!field_names.empty());
- auto vals = SplitCsv_(entry_csv_line);
- CHECK_EQ(vals.size(), field_names.size());
- results.emplace_back(vals[0]); // vals[0] is the benchmark name
- auto& entry = results.back();
- for (size_t i = 1, e = vals.size(); i < e; ++i) {
- entry.values[field_names[i]] = vals[i];
- }
-}
-
-// a quick'n'dirty csv splitter (eliminating quotes)
-std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
- std::vector<std::string> out;
- if (line.empty()) return out;
- if (!field_names.empty()) out.reserve(field_names.size());
- size_t prev = 0, pos = line.find_first_of(','), curr = pos;
- while (pos != line.npos) {
- CHECK(curr > 0);
- if (line[prev] == '"') ++prev;
- if (line[curr - 1] == '"') --curr;
- out.push_back(line.substr(prev, curr - prev));
- prev = pos + 1;
- pos = line.find_first_of(',', pos + 1);
- curr = pos;
- }
- curr = line.size();
- if (line[prev] == '"') ++prev;
- if (line[curr - 1] == '"') --curr;
- out.push_back(line.substr(prev, curr - prev));
- return out;
-}
-
-} // end namespace internal
-
-size_t AddChecker(const char* bm_name, ResultsCheckFn fn) {
- auto& rc = internal::GetResultsChecker();
- rc.Add(bm_name, fn);
- return rc.results.size();
-}
-
-int Results::NumThreads() const {
- auto pos = name.find("/threads:");
- if (pos == name.npos) return 1;
- auto end = name.find('/', pos + 9);
- std::stringstream ss;
- ss << name.substr(pos + 9, end);
- int num = 1;
- ss >> num;
- CHECK(!ss.fail());
- return num;
-}
-
-double Results::NumIterations() const {
- return GetAs<double>("iterations");
-}
-
-double Results::GetTime(BenchmarkTime which) const {
- CHECK(which == kCpuTime || which == kRealTime);
- const char* which_str = which == kCpuTime ? "cpu_time" : "real_time";
- double val = GetAs<double>(which_str);
- auto unit = Get("time_unit");
- CHECK(unit);
- if (*unit == "ns") {
- return val * 1.e-9;
- } else if (*unit == "us") {
- return val * 1.e-6;
- } else if (*unit == "ms") {
- return val * 1.e-3;
- } else if (*unit == "s") {
- return val;
- } else {
- CHECK(1 == 0) << "unknown time unit: " << *unit;
- return 0;
- }
-}
-
-// ========================================================================= //
-// -------------------------- Public API Definitions------------------------ //
-// ========================================================================= //
-
-TestCase::TestCase(std::string re, int rule)
- : regex_str(std::move(re)),
- match_rule(rule),
- substituted_regex(internal::PerformSubstitutions(regex_str)),
- regex(std::make_shared<benchmark::Regex>()) {
- std::string err_str;
- regex->Init(substituted_regex, &err_str);
- CHECK(err_str.empty()) << "Could not construct regex \"" << substituted_regex
- << "\""
- << "\n originally \"" << regex_str << "\""
- << "\n got error: " << err_str;
-}
-
-int AddCases(TestCaseID ID, std::initializer_list<TestCase> il) {
- auto& L = internal::GetTestCaseList(ID);
- L.insert(L.end(), il);
- return 0;
-}
-
-int SetSubstitutions(
- std::initializer_list<std::pair<std::string, std::string>> il) {
- auto& subs = internal::GetSubstitutions();
- for (auto KV : il) {
- bool exists = false;
- KV.second = internal::PerformSubstitutions(KV.second);
- for (auto& EKV : subs) {
- if (EKV.first == KV.first) {
- EKV.second = std::move(KV.second);
- exists = true;
- break;
- }
- }
- if (!exists) subs.push_back(std::move(KV));
- }
- return 0;
-}
-
-void RunOutputTests(int argc, char* argv[]) {
- using internal::GetTestCaseList;
- benchmark::Initialize(&argc, argv);
- auto options = benchmark::internal::GetOutputOptions(/*force_no_color*/ true);
- benchmark::ConsoleReporter CR(options);
- benchmark::JSONReporter JR;
- benchmark::CSVReporter CSVR;
- struct ReporterTest {
- const char* name;
- std::vector<TestCase>& output_cases;
- std::vector<TestCase>& error_cases;
- benchmark::BenchmarkReporter& reporter;
- std::stringstream out_stream;
- std::stringstream err_stream;
-
- ReporterTest(const char* n, std::vector<TestCase>& out_tc,
- std::vector<TestCase>& err_tc,
- benchmark::BenchmarkReporter& br)
- : name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
- reporter.SetOutputStream(&out_stream);
- reporter.SetErrorStream(&err_stream);
- }
- } TestCases[] = {
- {"ConsoleReporter", GetTestCaseList(TC_ConsoleOut),
- GetTestCaseList(TC_ConsoleErr), CR},
- {"JSONReporter", GetTestCaseList(TC_JSONOut), GetTestCaseList(TC_JSONErr),
- JR},
- {"CSVReporter", GetTestCaseList(TC_CSVOut), GetTestCaseList(TC_CSVErr),
- CSVR},
- };
-
- // Create the test reporter and run the benchmarks.
- std::cout << "Running benchmarks...\n";
- internal::TestReporter test_rep({&CR, &JR, &CSVR});
- benchmark::RunSpecifiedBenchmarks(&test_rep);
-
- for (auto& rep_test : TestCases) {
- std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
- std::string banner(msg.size() - 1, '-');
- std::cout << banner << msg << banner << "\n";
-
- std::cerr << rep_test.err_stream.str();
- std::cout << rep_test.out_stream.str();
-
- internal::CheckCases(rep_test.error_cases, rep_test.err_stream);
- internal::CheckCases(rep_test.output_cases, rep_test.out_stream);
-
- std::cout << "\n";
- }
-
- // now that we know the output is as expected, we can dispatch
- // the checks to subscribers.
- auto& csv = TestCases[2];
- // would use == but gcc spits a warning
- CHECK(std::strcmp(csv.name, "CSVReporter") == 0);
- internal::GetResultsChecker().CheckResults(csv.out_stream);
-}
-
-int SubstrCnt(const std::string& haystack, const std::string& pat) {
- if (pat.length() == 0) return 0;
- int count = 0;
- for (size_t offset = haystack.find(pat); offset != std::string::npos;
- offset = haystack.find(pat, offset + pat.length()))
- ++count;
- return count;
-}
-
-static char ToHex(int ch) {
- return ch < 10 ? static_cast<char>('0' + ch)
- : static_cast<char>('a' + (ch - 10));
-}
-
-static char RandomHexChar() {
- static std::mt19937 rd{std::random_device{}()};
- static std::uniform_int_distribution<int> mrand{0, 15};
- return ToHex(mrand(rd));
-}
-
-static std::string GetRandomFileName() {
- std::string model = "test.%%%%%%";
- for (auto & ch : model) {
- if (ch == '%')
- ch = RandomHexChar();
- }
- return model;
-}
-
-static bool FileExists(std::string const& name) {
- std::ifstream in(name.c_str());
- return in.good();
-}
-
-static std::string GetTempFileName() {
- // This function attempts to avoid race conditions where two tests
- // create the same file at the same time. However, it still introduces races
- // similar to tmpnam.
- int retries = 3;
- while (--retries) {
- std::string name = GetRandomFileName();
- if (!FileExists(name))
- return name;
- }
- std::cerr << "Failed to create unique temporary file name" << std::endl;
- std::abort();
-}
-
-std::string GetFileReporterOutput(int argc, char* argv[]) {
- std::vector<char*> new_argv(argv, argv + argc);
- assert(static_cast<decltype(new_argv)::size_type>(argc) == new_argv.size());
-
- std::string tmp_file_name = GetTempFileName();
- std::cout << "Will be using this as the tmp file: " << tmp_file_name << '\n';
-
- std::string tmp = "--benchmark_out=";
- tmp += tmp_file_name;
- new_argv.emplace_back(const_cast<char*>(tmp.c_str()));
-
- argc = int(new_argv.size());
-
- benchmark::Initialize(&argc, new_argv.data());
- benchmark::RunSpecifiedBenchmarks();
-
- // Read the output back from the file, and delete the file.
- std::ifstream tmp_stream(tmp_file_name);
- std::string output = std::string((std::istreambuf_iterator<char>(tmp_stream)),
- std::istreambuf_iterator<char>());
- std::remove(tmp_file_name.c_str());
-
- return output;
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/register_benchmark_test.cc b/lib/libcxx/utils/google-benchmark/test/register_benchmark_test.cc
deleted file mode 100644
index 3ac5b21fb34..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/register_benchmark_test.cc
+++ /dev/null
@@ -1,184 +0,0 @@
-
-#undef NDEBUG
-#include <cassert>
-#include <vector>
-
-#include "../src/check.h" // NOTE: check.h is for internal use only!
-#include "benchmark/benchmark.h"
-
-namespace {
-
-class TestReporter : public benchmark::ConsoleReporter {
- public:
- virtual void ReportRuns(const std::vector<Run>& report) {
- all_runs_.insert(all_runs_.end(), begin(report), end(report));
- ConsoleReporter::ReportRuns(report);
- }
-
- std::vector<Run> all_runs_;
-};
-
-struct TestCase {
- std::string name;
- const char* label;
- // Note: not explicit as we rely on it being converted through ADD_CASES.
- TestCase(const char* xname) : TestCase(xname, nullptr) {}
- TestCase(const char* xname, const char* xlabel)
- : name(xname), label(xlabel) {}
-
- typedef benchmark::BenchmarkReporter::Run Run;
-
- void CheckRun(Run const& run) const {
- // clang-format off
- CHECK(name == run.benchmark_name()) << "expected " << name << " got "
- << run.benchmark_name();
- if (label) {
- CHECK(run.report_label == label) << "expected " << label << " got "
- << run.report_label;
- } else {
- CHECK(run.report_label == "");
- }
- // clang-format on
- }
-};
-
-std::vector<TestCase> ExpectedResults;
-
-int AddCases(std::initializer_list<TestCase> const& v) {
- for (auto N : v) {
- ExpectedResults.push_back(N);
- }
- return 0;
-}
-
-#define CONCAT(x, y) CONCAT2(x, y)
-#define CONCAT2(x, y) x##y
-#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases({__VA_ARGS__})
-
-} // end namespace
-
-typedef benchmark::internal::Benchmark* ReturnVal;
-
-//----------------------------------------------------------------------------//
-// Test RegisterBenchmark with no additional arguments
-//----------------------------------------------------------------------------//
-void BM_function(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_function);
-ReturnVal dummy = benchmark::RegisterBenchmark(
- "BM_function_manual_registration", BM_function);
-ADD_CASES({"BM_function"}, {"BM_function_manual_registration"});
-
-//----------------------------------------------------------------------------//
-// Test RegisterBenchmark with additional arguments
-// Note: GCC <= 4.8 does not support this form of RegisterBenchmark because it
-// rejects the variadic pack expansion of lambda captures.
-//----------------------------------------------------------------------------//
-#ifndef BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
-
-void BM_extra_args(benchmark::State& st, const char* label) {
- for (auto _ : st) {
- }
- st.SetLabel(label);
-}
-int RegisterFromFunction() {
- std::pair<const char*, const char*> cases[] = {
- {"test1", "One"}, {"test2", "Two"}, {"test3", "Three"}};
- for (auto const& c : cases)
- benchmark::RegisterBenchmark(c.first, &BM_extra_args, c.second);
- return 0;
-}
-int dummy2 = RegisterFromFunction();
-ADD_CASES({"test1", "One"}, {"test2", "Two"}, {"test3", "Three"});
-
-#endif // BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
-
-//----------------------------------------------------------------------------//
-// Test RegisterBenchmark with different callable types
-//----------------------------------------------------------------------------//
-
-struct CustomFixture {
- void operator()(benchmark::State& st) {
- for (auto _ : st) {
- }
- }
-};
-
-void TestRegistrationAtRuntime() {
-#ifdef BENCHMARK_HAS_CXX11
- {
- CustomFixture fx;
- benchmark::RegisterBenchmark("custom_fixture", fx);
- AddCases({"custom_fixture"});
- }
-#endif
-#ifndef BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
- {
- const char* x = "42";
- auto capturing_lam = [=](benchmark::State& st) {
- for (auto _ : st) {
- }
- st.SetLabel(x);
- };
- benchmark::RegisterBenchmark("lambda_benchmark", capturing_lam);
- AddCases({{"lambda_benchmark", x}});
- }
-#endif
-}
-
-// Test that all benchmarks, whether registered during static init or at runtime,
-// are run and the results are passed to the reporter.
-void RunTestOne() {
- TestRegistrationAtRuntime();
-
- TestReporter test_reporter;
- benchmark::RunSpecifiedBenchmarks(&test_reporter);
-
- typedef benchmark::BenchmarkReporter::Run Run;
- auto EB = ExpectedResults.begin();
-
- for (Run const& run : test_reporter.all_runs_) {
- assert(EB != ExpectedResults.end());
- EB->CheckRun(run);
- ++EB;
- }
- assert(EB == ExpectedResults.end());
-}
-
-// Test that ClearRegisteredBenchmarks() clears all previously registered
-// benchmarks.
-// Also test that new benchmarks can be registered and run afterwards.
-void RunTestTwo() {
- assert(ExpectedResults.size() != 0 &&
- "must have at least one registered benchmark");
- ExpectedResults.clear();
- benchmark::ClearRegisteredBenchmarks();
-
- TestReporter test_reporter;
- size_t num_ran = benchmark::RunSpecifiedBenchmarks(&test_reporter);
- assert(num_ran == 0);
- assert(test_reporter.all_runs_.begin() == test_reporter.all_runs_.end());
-
- TestRegistrationAtRuntime();
- num_ran = benchmark::RunSpecifiedBenchmarks(&test_reporter);
- assert(num_ran == ExpectedResults.size());
-
- typedef benchmark::BenchmarkReporter::Run Run;
- auto EB = ExpectedResults.begin();
-
- for (Run const& run : test_reporter.all_runs_) {
- assert(EB != ExpectedResults.end());
- EB->CheckRun(run);
- ++EB;
- }
- assert(EB == ExpectedResults.end());
-}
-
-int main(int argc, char* argv[]) {
- benchmark::Initialize(&argc, argv);
-
- RunTestOne();
- RunTestTwo();
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/report_aggregates_only_test.cc b/lib/libcxx/utils/google-benchmark/test/report_aggregates_only_test.cc
deleted file mode 100644
index 9646b9be534..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/report_aggregates_only_test.cc
+++ /dev/null
@@ -1,39 +0,0 @@
-
-#undef NDEBUG
-#include <cstdio>
-#include <string>
-
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-// Ok this test is super ugly. We want to check what happens with the file
-// reporter in the presence of ReportAggregatesOnly().
-// We do not care about console output; the normal tests already check that.
-
-void BM_SummaryRepeat(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
-
-int main(int argc, char* argv[]) {
- const std::string output = GetFileReporterOutput(argc, argv);
-
- if (SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3") != 3 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_mean\"") != 1 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_median\"") !=
- 1 ||
- SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"") !=
- 1) {
- std::cout << "Precondition mismatch. Expected to only find three "
- "occurrences of \"BM_SummaryRepeat/repeats:3\" substring:\n"
- "\"name\": \"BM_SummaryRepeat/repeats:3_mean\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3_median\", "
- "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"\nThe entire "
- "output:\n";
- std::cout << output;
- return 1;
- }
-
- return 0;
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/reporter_output_test.cc b/lib/libcxx/utils/google-benchmark/test/reporter_output_test.cc
deleted file mode 100644
index ec6d51b3591..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/reporter_output_test.cc
+++ /dev/null
@@ -1,604 +0,0 @@
-
-#undef NDEBUG
-#include <utility>
-
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-// ========================================================================= //
-// ---------------------- Testing Prologue Output -------------------------- //
-// ========================================================================= //
-
-ADD_CASES(TC_ConsoleOut, {{"^[-]+$", MR_Next},
- {"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
- {"^[-]+$", MR_Next}});
-static int AddContextCases() {
- AddCases(TC_ConsoleErr,
- {
- {"%int[-/]%int[-/]%int %int:%int:%int$", MR_Default},
- {"Running .*/reporter_output_test(\\.exe)?$", MR_Next},
- {"Run on \\(%int X %float MHz CPU s?\\)", MR_Next},
- });
- AddCases(TC_JSONOut,
- {{"^\\{", MR_Default},
- {"\"context\":", MR_Next},
- {"\"date\": \"", MR_Next},
- {"\"host_name\":", MR_Next},
- {"\"executable\": \".*(/|\\\\)reporter_output_test(\\.exe)?\",",
- MR_Next},
- {"\"num_cpus\": %int,$", MR_Next},
- {"\"mhz_per_cpu\": %float,$", MR_Next},
- {"\"cpu_scaling_enabled\": ", MR_Next},
- {"\"caches\": \\[$", MR_Next}});
- auto const& Info = benchmark::CPUInfo::Get();
- auto const& Caches = Info.caches;
- if (!Caches.empty()) {
- AddCases(TC_ConsoleErr, {{"CPU Caches:$", MR_Next}});
- }
- for (size_t I = 0; I < Caches.size(); ++I) {
- std::string num_caches_str =
- Caches[I].num_sharing != 0 ? " \\(x%int\\)$" : "$";
- AddCases(
- TC_ConsoleErr,
- {{"L%int (Data|Instruction|Unified) %intK" + num_caches_str, MR_Next}});
- AddCases(TC_JSONOut, {{"\\{$", MR_Next},
- {"\"type\": \"", MR_Next},
- {"\"level\": %int,$", MR_Next},
- {"\"size\": %int,$", MR_Next},
- {"\"num_sharing\": %int$", MR_Next},
- {"}[,]{0,1}$", MR_Next}});
- }
- AddCases(TC_JSONOut, {{"],$"}});
- auto const& LoadAvg = Info.load_avg;
- if (!LoadAvg.empty()) {
- AddCases(TC_ConsoleErr,
- {{"Load Average: (%float, ){0,2}%float$", MR_Next}});
- }
- AddCases(TC_JSONOut, {{"\"load_avg\": \\[(%float,?){0,3}],$", MR_Next}});
- return 0;
-}
-int dummy_register = AddContextCases();
-ADD_CASES(TC_CSVOut, {{"%csv_header"}});
-
-// ========================================================================= //
-// ------------------------ Testing Basic Output --------------------------- //
-// ========================================================================= //
-
-void BM_basic(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_basic);
-
-ADD_CASES(TC_ConsoleOut, {{"^BM_basic %console_report$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_basic\",$"},
- {"\"run_name\": \"BM_basic\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\"$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_basic\",%csv_report$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Bytes per Second Output ---------------- //
-// ========================================================================= //
-
-void BM_bytes_per_second(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.SetBytesProcessed(1);
-}
-BENCHMARK(BM_bytes_per_second);
-
-ADD_CASES(TC_ConsoleOut, {{"^BM_bytes_per_second %console_report "
- "bytes_per_second=%float[kM]{0,1}/s$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_bytes_per_second\",$"},
- {"\"run_name\": \"BM_bytes_per_second\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bytes_per_second\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_bytes_per_second\",%csv_bytes_report$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Items per Second Output ---------------- //
-// ========================================================================= //
-
-void BM_items_per_second(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.SetItemsProcessed(1);
-}
-BENCHMARK(BM_items_per_second);
-
-ADD_CASES(TC_ConsoleOut, {{"^BM_items_per_second %console_report "
- "items_per_second=%float[kM]{0,1}/s$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_items_per_second\",$"},
- {"\"run_name\": \"BM_items_per_second\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"items_per_second\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_items_per_second\",%csv_items_report$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Label Output --------------------------- //
-// ========================================================================= //
-
-void BM_label(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.SetLabel("some label");
-}
-BENCHMARK(BM_label);
-
-ADD_CASES(TC_ConsoleOut, {{"^BM_label %console_report some label$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_label\",$"},
- {"\"run_name\": \"BM_label\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"label\": \"some label\"$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_label\",%csv_label_report_begin\"some "
- "label\"%csv_label_report_end$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Error Output --------------------------- //
-// ========================================================================= //
-
-void BM_error(benchmark::State& state) {
- state.SkipWithError("message");
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_error);
-ADD_CASES(TC_ConsoleOut, {{"^BM_error[ ]+ERROR OCCURRED: 'message'$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_error\",$"},
- {"\"run_name\": \"BM_error\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"error_occurred\": true,$", MR_Next},
- {"\"error_message\": \"message\",$", MR_Next}});
-
-ADD_CASES(TC_CSVOut, {{"^\"BM_error\",,,,,,,,true,\"message\"$"}});
-
-// ========================================================================= //
-// ----------------------- Testing No Arg Name Output ---------------------- //
-// ========================================================================= //
-
-void BM_no_arg_name(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_no_arg_name)->Arg(3);
-ADD_CASES(TC_ConsoleOut, {{"^BM_no_arg_name/3 %console_report$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_no_arg_name/3\",$"},
- {"\"run_name\": \"BM_no_arg_name/3\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_no_arg_name/3\",%csv_report$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Arg Name Output ----------------------- //
-// ========================================================================= //
-
-void BM_arg_name(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_arg_name)->ArgName("first")->Arg(3);
-ADD_CASES(TC_ConsoleOut, {{"^BM_arg_name/first:3 %console_report$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_arg_name/first:3\",$"},
- {"\"run_name\": \"BM_arg_name/first:3\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_arg_name/first:3\",%csv_report$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Arg Names Output ----------------------- //
-// ========================================================================= //
-
-void BM_arg_names(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_arg_names)->Args({2, 5, 4})->ArgNames({"first", "", "third"});
-ADD_CASES(TC_ConsoleOut,
- {{"^BM_arg_names/first:2/5/third:4 %console_report$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_arg_names/first:2/5/third:4\",$"},
- {"\"run_name\": \"BM_arg_names/first:2/5/third:4\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_arg_names/first:2/5/third:4\",%csv_report$"}});
-
-// ========================================================================= //
-// ------------------------ Testing Big Args Output ------------------------ //
-// ========================================================================= //
-
-void BM_BigArgs(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_BigArgs)->RangeMultiplier(2)->Range(1U << 30U, 1U << 31U);
-ADD_CASES(TC_ConsoleOut, {{"^BM_BigArgs/1073741824 %console_report$"},
- {"^BM_BigArgs/2147483648 %console_report$"}});
-
-// ========================================================================= //
-// ----------------------- Testing Complexity Output ----------------------- //
-// ========================================================================= //
-
-void BM_Complexity_O1(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.SetComplexityN(state.range(0));
-}
-BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
-SET_SUBSTITUTIONS({{"%bigOStr", "[ ]* %float \\([0-9]+\\)"},
- {"%RMS", "[ ]*[0-9]+ %"}});
-ADD_CASES(TC_ConsoleOut, {{"^BM_Complexity_O1_BigO %bigOStr %bigOStr[ ]*$"},
- {"^BM_Complexity_O1_RMS %RMS %RMS[ ]*$"}});
-
-// ========================================================================= //
-// ----------------------- Testing Aggregate Output ------------------------ //
-// ========================================================================= //
-
-// Test that non-aggregate data is printed by default
-void BM_Repeat(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-// need at least two repetitions to be able to output any aggregate output
-BENCHMARK(BM_Repeat)->Repetitions(2);
-ADD_CASES(TC_ConsoleOut,
- {{"^BM_Repeat/repeats:2 %console_report$"},
- {"^BM_Repeat/repeats:2 %console_report$"},
- {"^BM_Repeat/repeats:2_mean %console_time_only_report [ ]*2$"},
- {"^BM_Repeat/repeats:2_median %console_time_only_report [ ]*2$"},
- {"^BM_Repeat/repeats:2_stddev %console_time_only_report [ ]*2$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:2\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:2\"", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:2\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:2_mean\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:2_median\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:2_stddev\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:2\",%csv_report$"},
- {"^\"BM_Repeat/repeats:2\",%csv_report$"},
- {"^\"BM_Repeat/repeats:2_mean\",%csv_report$"},
- {"^\"BM_Repeat/repeats:2_median\",%csv_report$"},
- {"^\"BM_Repeat/repeats:2_stddev\",%csv_report$"}});
-// but for two repetitions, mean and median are the same, so let's repeat...
-BENCHMARK(BM_Repeat)->Repetitions(3);
-ADD_CASES(TC_ConsoleOut,
- {{"^BM_Repeat/repeats:3 %console_report$"},
- {"^BM_Repeat/repeats:3 %console_report$"},
- {"^BM_Repeat/repeats:3 %console_report$"},
- {"^BM_Repeat/repeats:3_mean %console_time_only_report [ ]*3$"},
- {"^BM_Repeat/repeats:3_median %console_time_only_report [ ]*3$"},
- {"^BM_Repeat/repeats:3_stddev %console_time_only_report [ ]*3$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:3\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:3\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:3\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:3_median\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:3\",%csv_report$"},
- {"^\"BM_Repeat/repeats:3\",%csv_report$"},
- {"^\"BM_Repeat/repeats:3\",%csv_report$"},
- {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
- {"^\"BM_Repeat/repeats:3_median\",%csv_report$"},
- {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}});
-// median differs between even/odd number of repetitions, so just to be sure
-BENCHMARK(BM_Repeat)->Repetitions(4);
-ADD_CASES(TC_ConsoleOut,
- {{"^BM_Repeat/repeats:4 %console_report$"},
- {"^BM_Repeat/repeats:4 %console_report$"},
- {"^BM_Repeat/repeats:4 %console_report$"},
- {"^BM_Repeat/repeats:4 %console_report$"},
- {"^BM_Repeat/repeats:4_mean %console_time_only_report [ ]*4$"},
- {"^BM_Repeat/repeats:4_median %console_time_only_report [ ]*4$"},
- {"^BM_Repeat/repeats:4_stddev %console_time_only_report [ ]*4$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:4\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:4\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:4\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:4\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:4_mean\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 4,$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:4_median\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 4,$", MR_Next},
- {"\"name\": \"BM_Repeat/repeats:4_stddev\",$"},
- {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 4,$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:4\",%csv_report$"},
- {"^\"BM_Repeat/repeats:4\",%csv_report$"},
- {"^\"BM_Repeat/repeats:4\",%csv_report$"},
- {"^\"BM_Repeat/repeats:4\",%csv_report$"},
- {"^\"BM_Repeat/repeats:4_mean\",%csv_report$"},
- {"^\"BM_Repeat/repeats:4_median\",%csv_report$"},
- {"^\"BM_Repeat/repeats:4_stddev\",%csv_report$"}});
-
-// Test that a non-repeated test still prints non-aggregate results even when
-// only-aggregate reports have been requested
-void BM_RepeatOnce(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
-ADD_CASES(TC_ConsoleOut, {{"^BM_RepeatOnce/repeats:1 %console_report$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_RepeatOnce/repeats:1\",$"},
- {"\"run_name\": \"BM_RepeatOnce/repeats:1\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}});
-
-// Test that non-aggregate data is not reported
-void BM_SummaryRepeat(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
-ADD_CASES(
- TC_ConsoleOut,
- {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
- {"^BM_SummaryRepeat/repeats:3_mean %console_time_only_report [ ]*3$"},
- {"^BM_SummaryRepeat/repeats:3_median %console_time_only_report [ ]*3$"},
- {"^BM_SummaryRepeat/repeats:3_stddev %console_time_only_report [ ]*3$"}});
-ADD_CASES(TC_JSONOut,
- {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
- {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
- {"\"run_name\": \"BM_SummaryRepeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"name\": \"BM_SummaryRepeat/repeats:3_median\",$"},
- {"\"run_name\": \"BM_SummaryRepeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"},
- {"\"run_name\": \"BM_SummaryRepeat/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
- {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
- {"^\"BM_SummaryRepeat/repeats:3_median\",%csv_report$"},
- {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}});
-
-// Test that non-aggregate data is not displayed.
-// NOTE: this test is kinda bad. we are only testing the display output.
-// But we don't check that the file output still contains everything...
-void BM_SummaryDisplay(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_SummaryDisplay)->Repetitions(2)->DisplayAggregatesOnly();
-ADD_CASES(
- TC_ConsoleOut,
- {{".*BM_SummaryDisplay/repeats:2 ", MR_Not},
- {"^BM_SummaryDisplay/repeats:2_mean %console_time_only_report [ ]*2$"},
- {"^BM_SummaryDisplay/repeats:2_median %console_time_only_report [ ]*2$"},
- {"^BM_SummaryDisplay/repeats:2_stddev %console_time_only_report [ ]*2$"}});
-ADD_CASES(TC_JSONOut,
- {{".*BM_SummaryDisplay/repeats:2 ", MR_Not},
- {"\"name\": \"BM_SummaryDisplay/repeats:2_mean\",$"},
- {"\"run_name\": \"BM_SummaryDisplay/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"name\": \"BM_SummaryDisplay/repeats:2_median\",$"},
- {"\"run_name\": \"BM_SummaryDisplay/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"name\": \"BM_SummaryDisplay/repeats:2_stddev\",$"},
- {"\"run_name\": \"BM_SummaryDisplay/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next}});
-ADD_CASES(TC_CSVOut,
- {{".*BM_SummaryDisplay/repeats:2 ", MR_Not},
- {"^\"BM_SummaryDisplay/repeats:2_mean\",%csv_report$"},
- {"^\"BM_SummaryDisplay/repeats:2_median\",%csv_report$"},
- {"^\"BM_SummaryDisplay/repeats:2_stddev\",%csv_report$"}});
-
-// Test repeats with custom time unit.
-void BM_RepeatTimeUnit(benchmark::State& state) {
- for (auto _ : state) {
- }
-}
-BENCHMARK(BM_RepeatTimeUnit)
- ->Repetitions(3)
- ->ReportAggregatesOnly()
- ->Unit(benchmark::kMicrosecond);
-ADD_CASES(
- TC_ConsoleOut,
- {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
- {"^BM_RepeatTimeUnit/repeats:3_mean %console_us_time_only_report [ ]*3$"},
- {"^BM_RepeatTimeUnit/repeats:3_median %console_us_time_only_report [ "
- "]*3$"},
- {"^BM_RepeatTimeUnit/repeats:3_stddev %console_us_time_only_report [ "
- "]*3$"}});
-ADD_CASES(TC_JSONOut,
- {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
- {"\"name\": \"BM_RepeatTimeUnit/repeats:3_mean\",$"},
- {"\"run_name\": \"BM_RepeatTimeUnit/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"time_unit\": \"us\",?$"},
- {"\"name\": \"BM_RepeatTimeUnit/repeats:3_median\",$"},
- {"\"run_name\": \"BM_RepeatTimeUnit/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"time_unit\": \"us\",?$"},
- {"\"name\": \"BM_RepeatTimeUnit/repeats:3_stddev\",$"},
- {"\"run_name\": \"BM_RepeatTimeUnit/repeats:3\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"time_unit\": \"us\",?$"}});
-ADD_CASES(TC_CSVOut,
- {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
- {"^\"BM_RepeatTimeUnit/repeats:3_mean\",%csv_us_report$"},
- {"^\"BM_RepeatTimeUnit/repeats:3_median\",%csv_us_report$"},
- {"^\"BM_RepeatTimeUnit/repeats:3_stddev\",%csv_us_report$"}});
-
-// ========================================================================= //
-// -------------------- Testing user-provided statistics ------------------- //
-// ========================================================================= //
-
-const auto UserStatistics = [](const std::vector<double>& v) {
- return v.back();
-};
-void BM_UserStats(benchmark::State& state) {
- for (auto _ : state) {
- state.SetIterationTime(150 / 10e8);
- }
-}
-// clang-format off
-BENCHMARK(BM_UserStats)
- ->Repetitions(3)
- ->Iterations(5)
- ->UseManualTime()
- ->ComputeStatistics("", UserStatistics);
-// clang-format on
-
-// Check that the user-provided statistic is calculated and reported after the
-// default ones; the empty name is intentional, as it sorts before anything else.
-ADD_CASES(TC_ConsoleOut, {{"^BM_UserStats/iterations:5/repeats:3/manual_time [ "
- "]* 150 ns %time [ ]*5$"},
- {"^BM_UserStats/iterations:5/repeats:3/manual_time [ "
- "]* 150 ns %time [ ]*5$"},
- {"^BM_UserStats/iterations:5/repeats:3/manual_time [ "
- "]* 150 ns %time [ ]*5$"},
- {"^BM_UserStats/iterations:5/repeats:3/"
- "manual_time_mean [ ]* 150 ns %time [ ]*3$"},
- {"^BM_UserStats/iterations:5/repeats:3/"
- "manual_time_median [ ]* 150 ns %time [ ]*3$"},
- {"^BM_UserStats/iterations:5/repeats:3/"
- "manual_time_stddev [ ]* 0.000 ns %time [ ]*3$"},
- {"^BM_UserStats/iterations:5/repeats:3/manual_time_ "
- "[ ]* 150 ns %time [ ]*3$"}});
-ADD_CASES(
- TC_JSONOut,
- {{"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": 5,$", MR_Next},
- {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
- {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": 5,$", MR_Next},
- {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
- {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": 5,$", MR_Next},
- {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
- {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_mean\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
- {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_median\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
- {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_stddev\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_\",$"},
- {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
- MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"\",$", MR_Next},
- {"\"iterations\": 3,$", MR_Next},
- {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next}});
-ADD_CASES(
- TC_CSVOut,
- {{"^\"BM_UserStats/iterations:5/repeats:3/manual_time\",%csv_report$"},
- {"^\"BM_UserStats/iterations:5/repeats:3/manual_time\",%csv_report$"},
- {"^\"BM_UserStats/iterations:5/repeats:3/manual_time\",%csv_report$"},
- {"^\"BM_UserStats/iterations:5/repeats:3/manual_time_mean\",%csv_report$"},
- {"^\"BM_UserStats/iterations:5/repeats:3/"
- "manual_time_median\",%csv_report$"},
- {"^\"BM_UserStats/iterations:5/repeats:3/"
- "manual_time_stddev\",%csv_report$"},
- {"^\"BM_UserStats/iterations:5/repeats:3/manual_time_\",%csv_report$"}});
-
-// ========================================================================= //
-// --------------------------- TEST CASES END ------------------------------ //
-// ========================================================================= //
-
-int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
diff --git a/lib/libcxx/utils/google-benchmark/test/skip_with_error_test.cc b/lib/libcxx/utils/google-benchmark/test/skip_with_error_test.cc
deleted file mode 100644
index 06579772ff7..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/skip_with_error_test.cc
+++ /dev/null
@@ -1,189 +0,0 @@
-
-#undef NDEBUG
-#include <cassert>
-#include <vector>
-
-#include "../src/check.h" // NOTE: check.h is for internal use only!
-#include "benchmark/benchmark.h"
-
-namespace {
-
-class TestReporter : public benchmark::ConsoleReporter {
- public:
- virtual bool ReportContext(const Context& context) {
- return ConsoleReporter::ReportContext(context);
- };
-
- virtual void ReportRuns(const std::vector<Run>& report) {
- all_runs_.insert(all_runs_.end(), begin(report), end(report));
- ConsoleReporter::ReportRuns(report);
- }
-
- TestReporter() {}
- virtual ~TestReporter() {}
-
- mutable std::vector<Run> all_runs_;
-};
-
-struct TestCase {
- std::string name;
- bool error_occurred;
- std::string error_message;
-
- typedef benchmark::BenchmarkReporter::Run Run;
-
- void CheckRun(Run const& run) const {
- CHECK(name == run.benchmark_name())
- << "expected " << name << " got " << run.benchmark_name();
- CHECK(error_occurred == run.error_occurred);
- CHECK(error_message == run.error_message);
- if (error_occurred) {
- // CHECK(run.iterations == 0);
- } else {
- CHECK(run.iterations != 0);
- }
- }
-};
-
-std::vector<TestCase> ExpectedResults;
-
-int AddCases(const char* base_name, std::initializer_list<TestCase> const& v) {
- for (auto TC : v) {
- TC.name = base_name + TC.name;
- ExpectedResults.push_back(std::move(TC));
- }
- return 0;
-}
-
-#define CONCAT(x, y) CONCAT2(x, y)
-#define CONCAT2(x, y) x##y
-#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
-
-} // end namespace
-
-void BM_error_before_running(benchmark::State& state) {
- state.SkipWithError("error message");
- while (state.KeepRunning()) {
- assert(false);
- }
-}
-BENCHMARK(BM_error_before_running);
-ADD_CASES("BM_error_before_running", {{"", true, "error message"}});
-
-void BM_error_before_running_batch(benchmark::State& state) {
- state.SkipWithError("error message");
- while (state.KeepRunningBatch(17)) {
- assert(false);
- }
-}
-BENCHMARK(BM_error_before_running_batch);
-ADD_CASES("BM_error_before_running_batch", {{"", true, "error message"}});
-
-void BM_error_before_running_range_for(benchmark::State& state) {
- state.SkipWithError("error message");
- for (auto _ : state) {
- assert(false);
- }
-}
-BENCHMARK(BM_error_before_running_range_for);
-ADD_CASES("BM_error_before_running_range_for", {{"", true, "error message"}});
-
-void BM_error_during_running(benchmark::State& state) {
- bool first_iter = true;
- while (state.KeepRunning()) {
- if (state.range(0) == 1 && state.thread_index <= (state.threads / 2)) {
- assert(first_iter);
- first_iter = false;
- state.SkipWithError("error message");
- } else {
- state.PauseTiming();
- state.ResumeTiming();
- }
- }
-}
-BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
-ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
- {"/1/threads:2", true, "error message"},
- {"/1/threads:4", true, "error message"},
- {"/1/threads:8", true, "error message"},
- {"/2/threads:1", false, ""},
- {"/2/threads:2", false, ""},
- {"/2/threads:4", false, ""},
- {"/2/threads:8", false, ""}});
-
-void BM_error_during_running_ranged_for(benchmark::State& state) {
- assert(state.max_iterations > 3 && "test requires at least a few iterations");
- bool first_iter = true;
- // NOTE: Users should not write the for loop explicitly.
- for (auto It = state.begin(), End = state.end(); It != End; ++It) {
- if (state.range(0) == 1) {
- assert(first_iter);
- first_iter = false;
- state.SkipWithError("error message");
- // Test the unfortunate but documented behavior that the ranged-for loop
- // doesn't automatically terminate when SkipWithError is set.
- assert(++It != End);
- break; // Required behavior
- }
- }
-}
-BENCHMARK(BM_error_during_running_ranged_for)->Arg(1)->Arg(2)->Iterations(5);
-ADD_CASES("BM_error_during_running_ranged_for",
- {{"/1/iterations:5", true, "error message"},
- {"/2/iterations:5", false, ""}});
-
-void BM_error_after_running(benchmark::State& state) {
- for (auto _ : state) {
- benchmark::DoNotOptimize(state.iterations());
- }
- if (state.thread_index <= (state.threads / 2))
- state.SkipWithError("error message");
-}
-BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
-ADD_CASES("BM_error_after_running", {{"/threads:1", true, "error message"},
- {"/threads:2", true, "error message"},
- {"/threads:4", true, "error message"},
- {"/threads:8", true, "error message"}});
-
-void BM_error_while_paused(benchmark::State& state) {
- bool first_iter = true;
- while (state.KeepRunning()) {
- if (state.range(0) == 1 && state.thread_index <= (state.threads / 2)) {
- assert(first_iter);
- first_iter = false;
- state.PauseTiming();
- state.SkipWithError("error message");
- } else {
- state.PauseTiming();
- state.ResumeTiming();
- }
- }
-}
-BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
-ADD_CASES("BM_error_while_paused", {{"/1/threads:1", true, "error message"},
- {"/1/threads:2", true, "error message"},
- {"/1/threads:4", true, "error message"},
- {"/1/threads:8", true, "error message"},
- {"/2/threads:1", false, ""},
- {"/2/threads:2", false, ""},
- {"/2/threads:4", false, ""},
- {"/2/threads:8", false, ""}});
-
-int main(int argc, char* argv[]) {
- benchmark::Initialize(&argc, argv);
-
- TestReporter test_reporter;
- benchmark::RunSpecifiedBenchmarks(&test_reporter);
-
- typedef benchmark::BenchmarkReporter::Run Run;
- auto EB = ExpectedResults.begin();
-
- for (Run const& run : test_reporter.all_runs_) {
- assert(EB != ExpectedResults.end());
- EB->CheckRun(run);
- ++EB;
- }
- assert(EB == ExpectedResults.end());
-
- return 0;
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/state_assembly_test.cc b/lib/libcxx/utils/google-benchmark/test/state_assembly_test.cc
deleted file mode 100644
index abe9a4ddb56..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/state_assembly_test.cc
+++ /dev/null
@@ -1,68 +0,0 @@
-#include <benchmark/benchmark.h>
-
-#ifdef __clang__
-#pragma clang diagnostic ignored "-Wreturn-type"
-#endif
-
-// clang-format off
-extern "C" {
- extern int ExternInt;
- benchmark::State& GetState();
- void Fn();
-}
-// clang-format on
-
-using benchmark::State;
-
-// CHECK-LABEL: test_for_auto_loop:
-extern "C" int test_for_auto_loop() {
- State& S = GetState();
- int x = 42;
- // CHECK: [[CALL:call(q)*]] _ZN9benchmark5State16StartKeepRunningEv
- // CHECK-NEXT: testq %rbx, %rbx
- // CHECK-NEXT: je [[LOOP_END:.*]]
-
- for (auto _ : S) {
- // CHECK: .L[[LOOP_HEAD:[a-zA-Z0-9_]+]]:
- // CHECK-GNU-NEXT: subq $1, %rbx
- // CHECK-CLANG-NEXT: {{(addq \$1,|incq)}} %rax
- // CHECK-NEXT: jne .L[[LOOP_HEAD]]
- benchmark::DoNotOptimize(x);
- }
- // CHECK: [[LOOP_END]]:
- // CHECK: [[CALL]] _ZN9benchmark5State17FinishKeepRunningEv
-
- // CHECK: movl $101, %eax
- // CHECK: ret
- return 101;
-}
-
-// CHECK-LABEL: test_while_loop:
-extern "C" int test_while_loop() {
- State& S = GetState();
- int x = 42;
-
- // CHECK: j{{(e|mp)}} .L[[LOOP_HEADER:[a-zA-Z0-9_]+]]
- // CHECK-NEXT: .L[[LOOP_BODY:[a-zA-Z0-9_]+]]:
- while (S.KeepRunning()) {
- // CHECK-GNU-NEXT: subq $1, %[[IREG:[a-z]+]]
- // CHECK-CLANG-NEXT: {{(addq \$-1,|decq)}} %[[IREG:[a-z]+]]
- // CHECK: movq %[[IREG]], [[DEST:.*]]
- benchmark::DoNotOptimize(x);
- }
- // CHECK-DAG: movq [[DEST]], %[[IREG]]
- // CHECK-DAG: testq %[[IREG]], %[[IREG]]
- // CHECK-DAG: jne .L[[LOOP_BODY]]
- // CHECK-DAG: .L[[LOOP_HEADER]]:
-
- // CHECK: cmpb $0
- // CHECK-NEXT: jne .L[[LOOP_END:[a-zA-Z0-9_]+]]
- // CHECK: [[CALL:call(q)*]] _ZN9benchmark5State16StartKeepRunningEv
-
- // CHECK: .L[[LOOP_END]]:
- // CHECK: [[CALL]] _ZN9benchmark5State17FinishKeepRunningEv
-
- // CHECK: movl $101, %eax
- // CHECK: ret
- return 101;
-}
diff --git a/lib/libcxx/utils/google-benchmark/test/statistics_gtest.cc b/lib/libcxx/utils/google-benchmark/test/statistics_gtest.cc
deleted file mode 100644
index 99e314920c5..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/statistics_gtest.cc
+++ /dev/null
@@ -1,28 +0,0 @@
-//===---------------------------------------------------------------------===//
-// statistics_test - Unit tests for src/statistics.cc
-//===---------------------------------------------------------------------===//
-
-#include "../src/statistics.h"
-#include "gtest/gtest.h"
-
-namespace {
-TEST(StatisticsTest, Mean) {
- EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({42, 42, 42, 42}), 42.0);
- EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1, 2, 3, 4}), 2.5);
- EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1, 2, 5, 10, 10, 14}), 7.0);
-}
-
-TEST(StatisticsTest, Median) {
- EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({42, 42, 42, 42}), 42.0);
- EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1, 2, 3, 4}), 2.5);
- EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1, 2, 5, 10, 10}), 5.0);
-}
-
-TEST(StatisticsTest, StdDev) {
- EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({101, 101, 101, 101}), 0.0);
- EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({1, 2, 3}), 1.0);
- EXPECT_FLOAT_EQ(benchmark::StatisticsStdDev({1.5, 2.4, 3.3, 4.2, 5.1}),
- 1.42302495);
-}
-
-} // end namespace
diff --git a/lib/libcxx/utils/google-benchmark/test/string_util_gtest.cc b/lib/libcxx/utils/google-benchmark/test/string_util_gtest.cc
deleted file mode 100644
index 2c5d073f613..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/string_util_gtest.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-//===---------------------------------------------------------------------===//
-// string_util_test - Unit tests for src/string_util.cc
-//===---------------------------------------------------------------------===//
-
-#include "../src/string_util.h"
-#include "gtest/gtest.h"
-
-namespace {
-TEST(StringUtilTest, stoul) {
- {
- size_t pos = 0;
- EXPECT_EQ(0ul, benchmark::stoul("0", &pos));
- EXPECT_EQ(1ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(7ul, benchmark::stoul("7", &pos));
- EXPECT_EQ(1ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(135ul, benchmark::stoul("135", &pos));
- EXPECT_EQ(3ul, pos);
- }
-#if ULONG_MAX == 0xFFFFFFFFul
- {
- size_t pos = 0;
- EXPECT_EQ(0xFFFFFFFFul, benchmark::stoul("4294967295", &pos));
- EXPECT_EQ(10ul, pos);
- }
-#elif ULONG_MAX == 0xFFFFFFFFFFFFFFFFul
- {
- size_t pos = 0;
- EXPECT_EQ(0xFFFFFFFFFFFFFFFFul, benchmark::stoul("18446744073709551615", &pos));
- EXPECT_EQ(20ul, pos);
- }
-#endif
- {
- size_t pos = 0;
- EXPECT_EQ(10ul, benchmark::stoul("1010", &pos, 2));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(520ul, benchmark::stoul("1010", &pos, 8));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(1010ul, benchmark::stoul("1010", &pos, 10));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(4112ul, benchmark::stoul("1010", &pos, 16));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(0xBEEFul, benchmark::stoul("BEEF", &pos, 16));
- EXPECT_EQ(4ul, pos);
- }
- {
- ASSERT_THROW(benchmark::stoul("this is a test"), std::invalid_argument);
- }
-}
-
-TEST(StringUtilTest, stoi) {
- {
- size_t pos = 0;
- EXPECT_EQ(0, benchmark::stoi("0", &pos));
- EXPECT_EQ(1ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(-17, benchmark::stoi("-17", &pos));
- EXPECT_EQ(3ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(1357, benchmark::stoi("1357", &pos));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(10, benchmark::stoi("1010", &pos, 2));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(520, benchmark::stoi("1010", &pos, 8));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(1010, benchmark::stoi("1010", &pos, 10));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(4112, benchmark::stoi("1010", &pos, 16));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(0xBEEF, benchmark::stoi("BEEF", &pos, 16));
- EXPECT_EQ(4ul, pos);
- }
- {
- ASSERT_THROW(benchmark::stoi("this is a test"), std::invalid_argument);
- }
-}
-
-TEST(StringUtilTest, stod) {
- {
- size_t pos = 0;
- EXPECT_EQ(0.0, benchmark::stod("0", &pos));
- EXPECT_EQ(1ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(-84.0, benchmark::stod("-84", &pos));
- EXPECT_EQ(3ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(1234.0, benchmark::stod("1234", &pos));
- EXPECT_EQ(4ul, pos);
- }
- {
- size_t pos = 0;
- EXPECT_EQ(1.5, benchmark::stod("1.5", &pos));
- EXPECT_EQ(3ul, pos);
- }
- {
- size_t pos = 0;
- /* Note: exactly representable as double */
- EXPECT_EQ(-1.25e+9, benchmark::stod("-1.25e+9", &pos));
- EXPECT_EQ(8ul, pos);
- }
- {
- ASSERT_THROW(benchmark::stod("this is a test"), std::invalid_argument);
- }
-}
-
-} // end namespace
diff --git a/lib/libcxx/utils/google-benchmark/test/templated_fixture_test.cc b/lib/libcxx/utils/google-benchmark/test/templated_fixture_test.cc
deleted file mode 100644
index fe9865cc776..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/templated_fixture_test.cc
+++ /dev/null
@@ -1,28 +0,0 @@
-
-#include "benchmark/benchmark.h"
-
-#include <cassert>
-#include <memory>
-
-template <typename T>
-class MyFixture : public ::benchmark::Fixture {
- public:
- MyFixture() : data(0) {}
-
- T data;
-};
-
-BENCHMARK_TEMPLATE_F(MyFixture, Foo, int)(benchmark::State& st) {
- for (auto _ : st) {
- data += 1;
- }
-}
-
-BENCHMARK_TEMPLATE_DEFINE_F(MyFixture, Bar, double)(benchmark::State& st) {
- for (auto _ : st) {
- data += 1.0;
- }
-}
-BENCHMARK_REGISTER_F(MyFixture, Bar);
-
-BENCHMARK_MAIN();
diff --git a/lib/libcxx/utils/google-benchmark/test/user_counters_tabular_test.cc b/lib/libcxx/utils/google-benchmark/test/user_counters_tabular_test.cc
deleted file mode 100644
index 030e98916c3..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/user_counters_tabular_test.cc
+++ /dev/null
@@ -1,268 +0,0 @@
-
-#undef NDEBUG
-
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-// @todo: <jpmag> this checks the full output at once; the rule for
-// CounterSet1 was failing because it was not matching "^[-]+$".
-// @todo: <jpmag> check that the counters are vertically aligned.
-ADD_CASES(
- TC_ConsoleOut,
- {
- // keeping these lines long improves readability, so:
- // clang-format off
- {"^[-]+$", MR_Next},
- {"^Benchmark %s Time %s CPU %s Iterations %s Bar %s Bat %s Baz %s Foo %s Frob %s Lob$", MR_Next},
- {"^[-]+$", MR_Next},
- {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
- {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
- {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
- {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
- {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
- {"^[-]+$", MR_Next},
- {"^Benchmark %s Time %s CPU %s Iterations %s Bar %s Baz %s Foo$", MR_Next},
- {"^[-]+$", MR_Next},
- {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^[-]+$", MR_Next},
- {"^Benchmark %s Time %s CPU %s Iterations %s Bat %s Baz %s Foo$", MR_Next},
- {"^[-]+$", MR_Next},
- {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
- {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$"},
- // clang-format on
- });
-ADD_CASES(TC_CSVOut, {{"%csv_header,"
- "\"Bar\",\"Bat\",\"Baz\",\"Foo\",\"Frob\",\"Lob\""}});
-
-// ========================================================================= //
-// ------------------------- Tabular Counters Output ----------------------- //
-// ========================================================================= //
-
-void BM_Counters_Tabular(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters.insert({
- {"Foo", {1, bm::Counter::kAvgThreads}},
- {"Bar", {2, bm::Counter::kAvgThreads}},
- {"Baz", {4, bm::Counter::kAvgThreads}},
- {"Bat", {8, bm::Counter::kAvgThreads}},
- {"Frob", {16, bm::Counter::kAvgThreads}},
- {"Lob", {32, bm::Counter::kAvgThreads}},
- });
-}
-BENCHMARK(BM_Counters_Tabular)->ThreadRange(1, 16);
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Tabular/threads:%int\",$"},
- {"\"run_name\": \"BM_Counters_Tabular/threads:%int\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"Bar\": %float,$", MR_Next},
- {"\"Bat\": %float,$", MR_Next},
- {"\"Baz\": %float,$", MR_Next},
- {"\"Foo\": %float,$", MR_Next},
- {"\"Frob\": %float,$", MR_Next},
- {"\"Lob\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Tabular/threads:%int\",%csv_report,"
- "%float,%float,%float,%float,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckTabular(Results const& e) {
- CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 1);
- CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 2);
- CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 4);
- CHECK_COUNTER_VALUE(e, int, "Bat", EQ, 8);
- CHECK_COUNTER_VALUE(e, int, "Frob", EQ, 16);
- CHECK_COUNTER_VALUE(e, int, "Lob", EQ, 32);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Tabular/threads:%int", &CheckTabular);
-
-// ========================================================================= //
-// -------------------- Tabular+Rate Counters Output ----------------------- //
-// ========================================================================= //
-
-void BM_CounterRates_Tabular(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters.insert({
- {"Foo", {1, bm::Counter::kAvgThreadsRate}},
- {"Bar", {2, bm::Counter::kAvgThreadsRate}},
- {"Baz", {4, bm::Counter::kAvgThreadsRate}},
- {"Bat", {8, bm::Counter::kAvgThreadsRate}},
- {"Frob", {16, bm::Counter::kAvgThreadsRate}},
- {"Lob", {32, bm::Counter::kAvgThreadsRate}},
- });
-}
-BENCHMARK(BM_CounterRates_Tabular)->ThreadRange(1, 16);
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_CounterRates_Tabular/threads:%int\",$"},
- {"\"run_name\": \"BM_CounterRates_Tabular/threads:%int\",$",
- MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"Bar\": %float,$", MR_Next},
- {"\"Bat\": %float,$", MR_Next},
- {"\"Baz\": %float,$", MR_Next},
- {"\"Foo\": %float,$", MR_Next},
- {"\"Frob\": %float,$", MR_Next},
- {"\"Lob\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterRates_Tabular/threads:%int\",%csv_report,"
- "%float,%float,%float,%float,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckTabularRate(Results const& e) {
- double t = e.DurationCPUTime();
- CHECK_FLOAT_COUNTER_VALUE(e, "Foo", EQ, 1. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "Bar", EQ, 2. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "Baz", EQ, 4. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "Bat", EQ, 8. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "Frob", EQ, 16. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "Lob", EQ, 32. / t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterRates_Tabular/threads:%int",
- &CheckTabularRate);
-
-// ========================================================================= //
-// ------------------------- Tabular Counters Output ----------------------- //
-// ========================================================================= //
-
-// set only some of the counters
-void BM_CounterSet0_Tabular(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters.insert({
- {"Foo", {10, bm::Counter::kAvgThreads}},
- {"Bar", {20, bm::Counter::kAvgThreads}},
- {"Baz", {40, bm::Counter::kAvgThreads}},
- });
-}
-BENCHMARK(BM_CounterSet0_Tabular)->ThreadRange(1, 16);
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_CounterSet0_Tabular/threads:%int\",$"},
- {"\"run_name\": \"BM_CounterSet0_Tabular/threads:%int\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"Bar\": %float,$", MR_Next},
- {"\"Baz\": %float,$", MR_Next},
- {"\"Foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet0_Tabular/threads:%int\",%csv_report,"
- "%float,,%float,%float,,"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSet0(Results const& e) {
- CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 10);
- CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 20);
- CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 40);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterSet0_Tabular", &CheckSet0);
-
-// again.
-void BM_CounterSet1_Tabular(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters.insert({
- {"Foo", {15, bm::Counter::kAvgThreads}},
- {"Bar", {25, bm::Counter::kAvgThreads}},
- {"Baz", {45, bm::Counter::kAvgThreads}},
- });
-}
-BENCHMARK(BM_CounterSet1_Tabular)->ThreadRange(1, 16);
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_CounterSet1_Tabular/threads:%int\",$"},
- {"\"run_name\": \"BM_CounterSet1_Tabular/threads:%int\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"Bar\": %float,$", MR_Next},
- {"\"Baz\": %float,$", MR_Next},
- {"\"Foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet1_Tabular/threads:%int\",%csv_report,"
- "%float,,%float,%float,,"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSet1(Results const& e) {
- CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 15);
- CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 25);
- CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 45);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterSet1_Tabular/threads:%int", &CheckSet1);
-
-// ========================================================================= //
-// ------------------------- Tabular Counters Output ----------------------- //
-// ========================================================================= //
-
-// set only some of the counters, different set now.
-void BM_CounterSet2_Tabular(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters.insert({
- {"Foo", {10, bm::Counter::kAvgThreads}},
- {"Bat", {30, bm::Counter::kAvgThreads}},
- {"Baz", {40, bm::Counter::kAvgThreads}},
- });
-}
-BENCHMARK(BM_CounterSet2_Tabular)->ThreadRange(1, 16);
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_CounterSet2_Tabular/threads:%int\",$"},
- {"\"run_name\": \"BM_CounterSet2_Tabular/threads:%int\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"Bat\": %float,$", MR_Next},
- {"\"Baz\": %float,$", MR_Next},
- {"\"Foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet2_Tabular/threads:%int\",%csv_report,"
- ",%float,%float,%float,,"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSet2(Results const& e) {
- CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 10);
- CHECK_COUNTER_VALUE(e, int, "Bat", EQ, 30);
- CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 40);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterSet2_Tabular", &CheckSet2);
-
-// ========================================================================= //
-// --------------------------- TEST CASES END ------------------------------ //
-// ========================================================================= //
-
-int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
diff --git a/lib/libcxx/utils/google-benchmark/test/user_counters_test.cc b/lib/libcxx/utils/google-benchmark/test/user_counters_test.cc
deleted file mode 100644
index bb0d6b4c5a9..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/user_counters_test.cc
+++ /dev/null
@@ -1,408 +0,0 @@
-
-#undef NDEBUG
-
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-// ========================================================================= //
-// ---------------------- Testing Prologue Output -------------------------- //
-// ========================================================================= //
-
-// clang-format off
-
-ADD_CASES(TC_ConsoleOut,
- {{"^[-]+$", MR_Next},
- {"^Benchmark %s Time %s CPU %s Iterations UserCounters...$", MR_Next},
- {"^[-]+$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"%csv_header,\"bar\",\"foo\""}});
-
-// clang-format on
-
-// ========================================================================= //
-// ------------------------- Simple Counters Output ------------------------ //
-// ========================================================================= //
-
-void BM_Counters_Simple(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.counters["foo"] = 1;
- state.counters["bar"] = 2 * (double)state.iterations();
-}
-BENCHMARK(BM_Counters_Simple);
-ADD_CASES(TC_ConsoleOut,
- {{"^BM_Counters_Simple %console_report bar=%hrfloat foo=%hrfloat$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Simple\",$"},
- {"\"run_name\": \"BM_Counters_Simple\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Simple\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSimple(Results const& e) {
- double its = e.NumIterations();
- CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
- // check that the value of bar is within 0.1% of the expected value
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. * its, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Simple", &CheckSimple);
-
-// ========================================================================= //
-// --------------------- Counters+Items+Bytes/s Output --------------------- //
-// ========================================================================= //
-
-namespace {
-int num_calls1 = 0;
-}
-void BM_Counters_WithBytesAndItemsPSec(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.counters["foo"] = 1;
- state.counters["bar"] = ++num_calls1;
- state.SetBytesProcessed(364);
- state.SetItemsProcessed(150);
-}
-BENCHMARK(BM_Counters_WithBytesAndItemsPSec);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_WithBytesAndItemsPSec %console_report "
- "bar=%hrfloat bytes_per_second=%hrfloat/s "
- "foo=%hrfloat items_per_second=%hrfloat/s$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_WithBytesAndItemsPSec\",$"},
- {"\"run_name\": \"BM_Counters_WithBytesAndItemsPSec\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"bytes_per_second\": %float,$", MR_Next},
- {"\"foo\": %float,$", MR_Next},
- {"\"items_per_second\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_WithBytesAndItemsPSec\","
- "%csv_bytes_items_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckBytesAndItemsPSec(Results const& e) {
- double t = e.DurationCPUTime(); // this (and not real time) is the time used
- CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
- CHECK_COUNTER_VALUE(e, int, "bar", EQ, num_calls1);
- // check that the values are within 0.1% of the expected values
- CHECK_FLOAT_RESULT_VALUE(e, "bytes_per_second", EQ, 364. / t, 0.001);
- CHECK_FLOAT_RESULT_VALUE(e, "items_per_second", EQ, 150. / t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_WithBytesAndItemsPSec",
- &CheckBytesAndItemsPSec);
-
-// ========================================================================= //
-// ------------------------- Rate Counters Output -------------------------- //
-// ========================================================================= //
-
-void BM_Counters_Rate(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] = bm::Counter{1, bm::Counter::kIsRate};
- state.counters["bar"] = bm::Counter{2, bm::Counter::kIsRate};
-}
-BENCHMARK(BM_Counters_Rate);
-ADD_CASES(
- TC_ConsoleOut,
- {{"^BM_Counters_Rate %console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Rate\",$"},
- {"\"run_name\": \"BM_Counters_Rate\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Rate\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckRate(Results const& e) {
- double t = e.DurationCPUTime(); // this (and not real time) is the time used
- // check that the values are within 0.1% of the expected values
- CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Rate", &CheckRate);
-
-// ========================================================================= //
-// ------------------------- Thread Counters Output ------------------------ //
-// ========================================================================= //
-
-void BM_Counters_Threads(benchmark::State& state) {
- for (auto _ : state) {
- }
- state.counters["foo"] = 1;
- state.counters["bar"] = 2;
-}
-BENCHMARK(BM_Counters_Threads)->ThreadRange(1, 8);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_Threads/threads:%int %console_report "
- "bar=%hrfloat foo=%hrfloat$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Threads/threads:%int\",$"},
- {"\"run_name\": \"BM_Counters_Threads/threads:%int\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(
- TC_CSVOut,
- {{"^\"BM_Counters_Threads/threads:%int\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckThreads(Results const& e) {
- CHECK_COUNTER_VALUE(e, int, "foo", EQ, e.NumThreads());
- CHECK_COUNTER_VALUE(e, int, "bar", EQ, 2 * e.NumThreads());
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Threads/threads:%int", &CheckThreads);
-
-// ========================================================================= //
-// ---------------------- ThreadAvg Counters Output ------------------------ //
-// ========================================================================= //
-
-void BM_Counters_AvgThreads(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgThreads};
- state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgThreads};
-}
-BENCHMARK(BM_Counters_AvgThreads)->ThreadRange(1, 8);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreads/threads:%int "
- "%console_report bar=%hrfloat foo=%hrfloat$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_AvgThreads/threads:%int\",$"},
- {"\"run_name\": \"BM_Counters_AvgThreads/threads:%int\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(
- TC_CSVOut,
- {{"^\"BM_Counters_AvgThreads/threads:%int\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckAvgThreads(Results const& e) {
- CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
- CHECK_COUNTER_VALUE(e, int, "bar", EQ, 2);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreads/threads:%int",
- &CheckAvgThreads);
-
-// ========================================================================= //
-// ---------------------- ThreadAvg Counters Output ------------------------ //
-// ========================================================================= //
-
-void BM_Counters_AvgThreadsRate(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgThreadsRate};
- state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgThreadsRate};
-}
-BENCHMARK(BM_Counters_AvgThreadsRate)->ThreadRange(1, 8);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreadsRate/threads:%int "
- "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$"},
- {"\"run_name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$",
- MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreadsRate/"
- "threads:%int\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckAvgThreadsRate(Results const& e) {
- CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / e.DurationCPUTime(), 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / e.DurationCPUTime(), 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreadsRate/threads:%int",
- &CheckAvgThreadsRate);
-
-// ========================================================================= //
-// ------------------- IterationInvariant Counters Output ------------------ //
-// ========================================================================= //
-
-void BM_Counters_IterationInvariant(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] = bm::Counter{1, bm::Counter::kIsIterationInvariant};
- state.counters["bar"] = bm::Counter{2, bm::Counter::kIsIterationInvariant};
-}
-BENCHMARK(BM_Counters_IterationInvariant);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_IterationInvariant %console_report "
- "bar=%hrfloat foo=%hrfloat$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_IterationInvariant\",$"},
- {"\"run_name\": \"BM_Counters_IterationInvariant\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut,
- {{"^\"BM_Counters_IterationInvariant\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckIterationInvariant(Results const& e) {
- double its = e.NumIterations();
- // check that the values are within 0.1% of the expected value
- CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, its, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. * its, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_IterationInvariant",
- &CheckIterationInvariant);
-
-// ========================================================================= //
-// ----------------- IterationInvariantRate Counters Output ---------------- //
-// ========================================================================= //
-
-void BM_Counters_kIsIterationInvariantRate(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] =
- bm::Counter{1, bm::Counter::kIsIterationInvariantRate};
- state.counters["bar"] =
- bm::Counter{2, bm::Counter::kIsRate | bm::Counter::kIsIterationInvariant};
-}
-BENCHMARK(BM_Counters_kIsIterationInvariantRate);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_kIsIterationInvariantRate "
- "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_kIsIterationInvariantRate\",$"},
- {"\"run_name\": \"BM_Counters_kIsIterationInvariantRate\",$",
- MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_kIsIterationInvariantRate\",%csv_report,"
- "%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckIsIterationInvariantRate(Results const& e) {
- double its = e.NumIterations();
- double t = e.DurationCPUTime(); // this (and not real time) is the time used
- // check that the values are within 0.1% of the expected values
- CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, its * 1. / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, its * 2. / t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_kIsIterationInvariantRate",
- &CheckIsIterationInvariantRate);
-
-// ========================================================================= //
-// ------------------- AvgIterations Counters Output ------------------ //
-// ========================================================================= //
-
-void BM_Counters_AvgIterations(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgIterations};
- state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgIterations};
-}
-BENCHMARK(BM_Counters_AvgIterations);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgIterations %console_report "
- "bar=%hrfloat foo=%hrfloat$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_AvgIterations\",$"},
- {"\"run_name\": \"BM_Counters_AvgIterations\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut,
- {{"^\"BM_Counters_AvgIterations\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckAvgIterations(Results const& e) {
- double its = e.NumIterations();
- // check that the values are within 0.1% of the expected value
- CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / its, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / its, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_AvgIterations", &CheckAvgIterations);
-
-// ========================================================================= //
-// ----------------- AvgIterationsRate Counters Output ---------------- //
-// ========================================================================= //
-
-void BM_Counters_kAvgIterationsRate(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgIterationsRate};
- state.counters["bar"] =
- bm::Counter{2, bm::Counter::kIsRate | bm::Counter::kAvgIterations};
-}
-BENCHMARK(BM_Counters_kAvgIterationsRate);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_kAvgIterationsRate "
- "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_kAvgIterationsRate\",$"},
- {"\"run_name\": \"BM_Counters_kAvgIterationsRate\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"bar\": %float,$", MR_Next},
- {"\"foo\": %float$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_kAvgIterationsRate\",%csv_report,"
- "%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckAvgIterationsRate(Results const& e) {
- double its = e.NumIterations();
- double t = e.DurationCPUTime(); // this (and not real time) is the time used
- // check that the values are within 0.1% of the expected values
- CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / its / t, 0.001);
- CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / its / t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_kAvgIterationsRate",
- &CheckAvgIterationsRate);
-
-// ========================================================================= //
-// --------------------------- TEST CASES END ------------------------------ //
-// ========================================================================= //
-
-int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
diff --git a/lib/libcxx/utils/google-benchmark/test/user_counters_thousands_test.cc b/lib/libcxx/utils/google-benchmark/test/user_counters_thousands_test.cc
deleted file mode 100644
index fa0ef972047..00000000000
--- a/lib/libcxx/utils/google-benchmark/test/user_counters_thousands_test.cc
+++ /dev/null
@@ -1,161 +0,0 @@
-
-#undef NDEBUG
-
-#include "benchmark/benchmark.h"
-#include "output_test.h"
-
-// ========================================================================= //
-// ------------------------ Thousands Customisation ------------------------ //
-// ========================================================================= //
-
-void BM_Counters_Thousands(benchmark::State& state) {
- for (auto _ : state) {
- }
- namespace bm = benchmark;
- state.counters.insert({
- {"t0_1000000DefaultBase",
- bm::Counter(1000 * 1000, bm::Counter::kDefaults)},
- {"t1_1000000Base1000", bm::Counter(1000 * 1000, bm::Counter::kDefaults,
- benchmark::Counter::OneK::kIs1000)},
- {"t2_1000000Base1024", bm::Counter(1000 * 1000, bm::Counter::kDefaults,
- benchmark::Counter::OneK::kIs1024)},
- {"t3_1048576Base1000", bm::Counter(1024 * 1024, bm::Counter::kDefaults,
- benchmark::Counter::OneK::kIs1000)},
- {"t4_1048576Base1024", bm::Counter(1024 * 1024, bm::Counter::kDefaults,
- benchmark::Counter::OneK::kIs1024)},
- });
-}
-BENCHMARK(BM_Counters_Thousands)->Repetitions(2);
-ADD_CASES(
- TC_ConsoleOut,
- {
- {"^BM_Counters_Thousands/repeats:2 %console_report "
- "t0_1000000DefaultBase=1000k "
- "t1_1000000Base1000=1000k t2_1000000Base1024=976.56[23]k "
- "t3_1048576Base1000=1048.58k t4_1048576Base1024=1024k$"},
- {"^BM_Counters_Thousands/repeats:2 %console_report "
- "t0_1000000DefaultBase=1000k "
- "t1_1000000Base1000=1000k t2_1000000Base1024=976.56[23]k "
- "t3_1048576Base1000=1048.58k t4_1048576Base1024=1024k$"},
- {"^BM_Counters_Thousands/repeats:2_mean %console_report "
- "t0_1000000DefaultBase=1000k t1_1000000Base1000=1000k "
- "t2_1000000Base1024=976.56[23]k t3_1048576Base1000=1048.58k "
- "t4_1048576Base1024=1024k$"},
- {"^BM_Counters_Thousands/repeats:2_median %console_report "
- "t0_1000000DefaultBase=1000k t1_1000000Base1000=1000k "
- "t2_1000000Base1024=976.56[23]k t3_1048576Base1000=1048.58k "
- "t4_1048576Base1024=1024k$"},
- {"^BM_Counters_Thousands/repeats:2_stddev %console_time_only_report [ "
- "]*2 t0_1000000DefaultBase=0 t1_1000000Base1000=0 "
- "t2_1000000Base1024=0 t3_1048576Base1000=0 t4_1048576Base1024=0$"},
- });
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Thousands/repeats:2\",$"},
- {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
- {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Thousands/repeats:2\",$"},
- {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
- {"\"run_type\": \"iteration\",$", MR_Next},
- {"\"iterations\": %int,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
- {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Thousands/repeats:2_mean\",$"},
- {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"mean\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
- {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Thousands/repeats:2_median\",$"},
- {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"median\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
- {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
- {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
- {"}", MR_Next}});
-ADD_CASES(TC_JSONOut,
- {{"\"name\": \"BM_Counters_Thousands/repeats:2_stddev\",$"},
- {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
- {"\"run_type\": \"aggregate\",$", MR_Next},
- {"\"aggregate_name\": \"stddev\",$", MR_Next},
- {"\"iterations\": 2,$", MR_Next},
- {"\"real_time\": %float,$", MR_Next},
- {"\"cpu_time\": %float,$", MR_Next},
- {"\"time_unit\": \"ns\",$", MR_Next},
- {"\"t0_1000000DefaultBase\": 0\\.(0)*e\\+(0)*,$", MR_Next},
- {"\"t1_1000000Base1000\": 0\\.(0)*e\\+(0)*,$", MR_Next},
- {"\"t2_1000000Base1024\": 0\\.(0)*e\\+(0)*,$", MR_Next},
- {"\"t3_1048576Base1000\": 0\\.(0)*e\\+(0)*,$", MR_Next},
- {"\"t4_1048576Base1024\": 0\\.(0)*e\\+(0)*$", MR_Next},
- {"}", MR_Next}});
-
-ADD_CASES(
- TC_CSVOut,
- {{"^\"BM_Counters_Thousands/"
- "repeats:2\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\.04858e\\+("
- "0)*6,1\\.04858e\\+(0)*6$"},
- {"^\"BM_Counters_Thousands/"
- "repeats:2\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\.04858e\\+("
- "0)*6,1\\.04858e\\+(0)*6$"},
- {"^\"BM_Counters_Thousands/"
- "repeats:2_mean\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\."
- "04858e\\+(0)*6,1\\.04858e\\+(0)*6$"},
- {"^\"BM_Counters_Thousands/"
- "repeats:2_median\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\."
- "04858e\\+(0)*6,1\\.04858e\\+(0)*6$"},
- {"^\"BM_Counters_Thousands/repeats:2_stddev\",%csv_report,0,0,0,0,0$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckThousands(Results const& e) {
- if (e.name != "BM_Counters_Thousands/repeats:2")
- return; // Do not check the aggregates!
-
- // check that the values are within 0.01% of the expected values
- CHECK_FLOAT_COUNTER_VALUE(e, "t0_1000000DefaultBase", EQ, 1000 * 1000,
- 0.0001);
- CHECK_FLOAT_COUNTER_VALUE(e, "t1_1000000Base1000", EQ, 1000 * 1000, 0.0001);
- CHECK_FLOAT_COUNTER_VALUE(e, "t2_1000000Base1024", EQ, 1000 * 1000, 0.0001);
- CHECK_FLOAT_COUNTER_VALUE(e, "t3_1048576Base1000", EQ, 1024 * 1024, 0.0001);
- CHECK_FLOAT_COUNTER_VALUE(e, "t4_1048576Base1024", EQ, 1024 * 1024, 0.0001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Thousands", &CheckThousands);
-
-// ========================================================================= //
-// --------------------------- TEST CASES END ------------------------------ //
-// ========================================================================= //
-
-int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }