using namespace std::chrono_literals;
void GenerateTemplateResults(const std::vector<ankerl::nanobench::Result>& benchmarkResults,
                             const fs::path& file, const char* tpl)
{
    if (benchmarkResults.empty() || file.empty()) {
        // nothing to write, bail out
        return;
    }
    std::ofstream fout{file};
    if (fout.is_open()) {
        // expand the mustache-like template with the collected results
        ankerl::nanobench::render(tpl, benchmarkResults, fout);
        std::cout << "Created " << file << std::endl;
    } else {
        std::cout << "Could not write to file " << file << std::endl;
    }
}
std::string ListPriorities()
{
    using item_t = std::pair<std::string, uint8_t>;
    auto sort_by_priority = [](item_t a, item_t b) { return a.second < b.second; };
    std::set<item_t, decltype(sort_by_priority)> sorted_priorities(map_label_priority.begin(),
                                                                   map_label_priority.end(), sort_by_priority);
    return Join(sorted_priorities, ',', [](const auto& entry) { return entry.first; });
}
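// Illustrative sketch (not part of this file): Join() applies the projection to every
// element and concatenates the results with the separator, so with the hypothetical
// labels below the result is "low,high".
std::string ExampleJoin()
{
    const std::vector<std::pair<std::string, uint8_t>> items{{"low", 1 << 0}, {"high", 1 << 2}};
    return Join(items, ',', [](const auto& entry) { return entry.first; }); // "low,high"
}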
BenchRunner::BenchmarkMap& BenchRunner::benchmarks()
{
    static BenchmarkMap benchmarks_map; // lazily constructed registry of all benchmarks
    return benchmarks_map;
}

BenchRunner::BenchRunner(std::string name, BenchFunction func, PriorityLevel level)
{
    benchmarks().insert(std::make_pair(name, std::make_pair(func, level)));
}
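// Illustrative sketch (not part of this file): a benchmark registers itself by defining
// a static BenchRunner at namespace scope, typically via a registration macro declared
// in the corresponding header. The names and the priority enumerator are hypothetical.
static void ExampleNop(benchmark::Bench& bench)
{
    bench.run([] { /* code under measurement goes here */ });
}
static benchmark::BenchRunner g_example_nop{"ExampleNop", &ExampleNop, benchmark::PriorityLevel::HIGH};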
void BenchRunner::RunAll(const Args& args)
{
    std::regex reFilter(args.regex_filter);
    std::smatch baseMatch;

    if (args.sanity_check) {
        std::cout << "Running with -sanity-check option, output is being suppressed as benchmark results will be useless." << std::endl;
    }

    std::vector<ankerl::nanobench::Result> benchmarkResults;
    for (const auto& [name, bench_func] : benchmarks()) {
        const auto& [func, priority_level] = bench_func;

        // skip benchmarks whose priority level was not requested
        if (!(priority_level & args.priority)) {
            continue;
        }
        // skip benchmarks whose name does not match the regex filter
        if (!std::regex_match(name, baseMatch, reFilter)) {
            continue;
        }
        // only list the names of the matching benchmarks, don't run them
        if (args.is_list_only) {
            std::cout << name << std::endl;
            continue;
        }

        Bench bench;
        if (args.sanity_check) {
            // a single epoch with a single iteration is enough to check that the
            // benchmark does not crash; its timings are meaningless, so suppress output
            bench.epochs(1).epochIterations(1);
            bench.output(nullptr);
        }
        bench.name(name);
        if (args.min_time > 0ms) {
            // convert to nanoseconds before dividing to reduce rounding errors
            std::chrono::nanoseconds min_time_ns = args.min_time;
            bench.minEpochTime(min_time_ns / bench.epochs());
        }

        if (args.asymptote.empty()) {
            func(bench);
        } else {
            // run once per requested problem size and report the Big-O fit
            for (auto n : args.asymptote) {
                bench.complexityN(n);
                func(bench);
            }
            std::cout << bench.complexityBigO() << std::endl;
        }

        if (!bench.results().empty()) {
            benchmarkResults.push_back(bench.results().back());
        }
    }

    GenerateTemplateResults(benchmarkResults, args.output_csv,
                            "# Benchmark, evals, iterations, total, min, max, median\n"
                            "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
                            "{{/result}}");
    GenerateTemplateResults(benchmarkResults, args.output_json, ankerl::nanobench::templates::json());
}
Referenced declarations:

const std::function<void(const std::string&)> G_TEST_LOG_FUN
    This is connected to the logger.
const std::function<std::vector<const char*>()> G_TEST_COMMAND_LINE_ARGUMENTS
    Retrieve the command line arguments.
ankerl::nanobench::Bench
    Main entry point to nanobench's benchmarking facility.
Bench& minEpochTime(std::chrono::nanoseconds t) noexcept
    Minimum time each epoch should take.
Bench& output(std::ostream* outstream) noexcept
    Set the output stream where the resulting markdown table will be printed to.
Bench& epochIterations(uint64_t numIters) noexcept
    Sets exactly the number of iterations for each epoch.
Bench& name(char const* benchmarkName)
    Sets the benchmark's name; it is shown in the result table.
std::vector<BigO> complexityBigO() const
    Calculates the Big-O complexity fit from the results collected for each complexityN() value.
Bench& epochs(size_t numEpochs) noexcept
    Controls the number of epochs, i.e. the number of measurements to perform.
Bench& complexityN(T n) noexcept
    Sets the problem size N used for asymptotic-complexity measurements.
std::map<std::string, std::pair<BenchFunction, PriorityLevel>> BenchmarkMap
    Map from benchmark name to its function and priority level.
fs::path
    Path class wrapper to block calls to the fs::path(std::string) implicit constructor and the fs::path:...
char const* json() noexcept
    Template to generate JSON data.
void render(char const* mustacheTemplate, Bench const& bench, std::ostream& out)
    Renders output from a mustache-like template and benchmark results.
std::string ListPriorities()
    Returns the comma-separated list of priority labels, ordered by level.
std::map<std::string, uint8_t> map_label_priority
    Maps each priority label to its numeric level.
std::function<void(Bench&)> BenchFunction
    Signature of a benchmark body.
uint8_t StringToPriority(const std::string& str)
    Converts a priority label to its numeric level.
auto Join(const C& container, const S& separator, UnaryOp unary_op)
    Join all container items.
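A minimal standalone sketch (assumptions: nanobench is available under its usual single-header name, and the benchmark body and numbers are purely illustrative) showing how the Bench calls listed above fit together:

#include <nanobench.h>

#include <chrono>
#include <iostream>
#include <vector>

int main()
{
    ankerl::nanobench::Bench bench;
    bench.name("vector push_back")
         .epochs(11)                                  // perform 11 measurements
         .minEpochTime(std::chrono::milliseconds{1})  // each lasting at least 1 ms
         .run([] {
             std::vector<int> v;
             for (int i = 0; i < 1000; ++i) v.push_back(i);
             ankerl::nanobench::doNotOptimizeAway(v.data());
         });
    // Expand the built-in JSON template over the collected results.
    ankerl::nanobench::render(ankerl::nanobench::templates::json(), bench, std::cout);
}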