Mirror of https://github.com/capstone-engine/llvm-capstone.git, synced 2024-11-24 06:10:12 +00:00
Commit 438f7fc068

Summary: To get the target order correct, the benchmarks directory has been moved one level up. It previously lived in the utils directory, but utils is a collection of utilities meant to be used by the tests and the implementations, whereas the benchmarks *use* the implementations. Moving benchmarks out of utils therefore lets us set up proper target-level dependencies.

Reviewers: gchatelet

Differential Revision: https://reviews.llvm.org/D81910
103 lines · 3.4 KiB · C++
//===-- Benchmark ---------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "LibcMemoryBenchmarkMain.h"
|
|
#include "JSON.h"
|
|
#include "LibcBenchmark.h"
|
|
#include "LibcMemoryBenchmark.h"
|
|
#include "llvm/Support/CommandLine.h"
|
|
#include "llvm/Support/ErrorHandling.h"
|
|
#include "llvm/Support/FileSystem.h"
|
|
#include "llvm/Support/JSON.h"
|
|
#include "llvm/Support/MemoryBuffer.h"
|
|
#include "llvm/Support/raw_ostream.h"
|
|
|
|
#include <string>
|
|
|
|
namespace llvm {
namespace libc_benchmarks {

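// Command-line interface: -conf names the JSON study configuration to load,
// and -o names the output file ("-", the default, writes to stdout).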
static cl::opt<std::string>
    Configuration("conf", cl::desc("Specify configuration filename"),
                  cl::value_desc("filename"), cl::init(""));

static cl::opt<std::string> Output("o", cl::desc("Specify output filename"),
                                   cl::value_desc("filename"), cl::init("-"));

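// Supplied by the function-specific benchmark translation unit linked into
// each executable (e.g. a memcpy or memset benchmark), which picks the
// runner for its function family.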
extern std::unique_ptr<BenchmarkRunner>
getRunner(const StudyConfiguration &Conf);

void Main() {
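  // Deliberately fail the build in debug mode: unoptimized binaries would
  // make the timings meaningless, so only NDEBUG builds may compile this.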
#ifndef NDEBUG
  static_assert(
      false,
      "For reproducibility benchmarks should not be compiled in DEBUG mode.");
#endif
  checkRequirements();
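  // Load and parse the JSON study configuration; getFileOrSTDIN also accepts
  // "-", so the configuration can be piped in on stdin.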
  ErrorOr<std::unique_ptr<MemoryBuffer>> MB =
      MemoryBuffer::getFileOrSTDIN(Configuration);
  if (!MB)
    report_fatal_error(
        Twine("Could not open configuration file: ").concat(Configuration));
  auto ErrorOrStudy = ParseJsonStudy((*MB)->getBuffer());
  if (!ErrorOrStudy)
    report_fatal_error(ErrorOrStudy.takeError());

  const auto StudyPrototype = *ErrorOrStudy;

  Study S;
  S.Host = HostState::get();
  S.Options = StudyPrototype.Options;
  S.Configuration = StudyPrototype.Configuration;

  const auto Runs = S.Configuration.Runs;
  const auto &SR = S.Configuration.Size;
  std::unique_ptr<BenchmarkRunner> Runner = getRunner(S.Configuration);
  const size_t TotalSteps =
      Runner->getFunctionNames().size() * Runs * ((SR.To - SR.From) / SR.Step);
  size_t Steps = 0;
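  // Sweep: for every function in the study, repeat `Runs` times over the
  // configured size range, recording one Measurement per (run, size) point.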
  for (auto FunctionName : Runner->getFunctionNames()) {
    FunctionMeasurements FM;
    FM.Name = std::string(FunctionName);
    for (size_t Run = 0; Run < Runs; ++Run) {
      for (uint32_t Size = SR.From; Size <= SR.To; Size += SR.Step) {
        const auto Result = Runner->benchmark(S.Options, FunctionName, Size);
        Measurement Measurement;
        Measurement.Runtime = Result.BestGuess;
        Measurement.Size = Size;
        FM.Measurements.push_back(Measurement);
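        // Single-line progress indicator: the trailing '\r' returns the
        // cursor so each update overwrites the previous one.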
        outs() << format("%3d%% run: %2d / %2d size: %5d ",
                         (Steps * 100 / TotalSteps), Run, Runs, Size)
               << FunctionName << " \r";
        ++Steps;
      }
    }
    S.Functions.push_back(std::move(FM));
  }

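  // Serialize the whole study (host info, options, measurements) as JSON to
  // the -o destination; raw_fd_ostream treats "-" as stdout.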
  std::error_code EC;
  raw_fd_ostream FOS(Output, EC);
  if (EC)
    report_fatal_error(Twine("Could not open file: ")
                           .concat(EC.message())
                           .concat(", ")
                           .concat(Output));
  json::OStream JOS(FOS);
  SerializeToJson(S, JOS);
}

} // namespace libc_benchmarks
} // namespace llvm

int main(int argc, char **argv) {
  llvm::cl::ParseCommandLineOptions(argc, argv);
  llvm::libc_benchmarks::Main();
  return EXIT_SUCCESS;
}
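Usage sketch: the flags come from the cl::opt declarations above, while the binary name depends on which function-specific benchmark target is built (memcpy-benchmark below is illustrative, not an actual target name):

  memcpy-benchmark --conf=study.json -o results.json

Omitting -o streams the resulting JSON study to stdout.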