path: root/library/cpp/testing/benchmark
author     yazevnul <yazevnul@yandex-team.ru>            2022-02-10 16:46:48 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:46:48 +0300
commit     9abfb1a53b7f7b791444d1378e645d8fad9b06ed (patch)
tree       49e222ea1c5804306084bb3ae065bb702625360f /library/cpp/testing/benchmark
parent     8cbc307de0221f84c80c42dcbe07d40727537e2c (diff)
download   ydb-9abfb1a53b7f7b791444d1378e645d8fad9b06ed.tar.gz
Restoring authorship annotation for <yazevnul@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'library/cpp/testing/benchmark')
-rw-r--r--  library/cpp/testing/benchmark/bench.cpp                 368
-rw-r--r--  library/cpp/testing/benchmark/bench.h                    66
-rw-r--r--  library/cpp/testing/benchmark/examples/main.cpp          82
-rw-r--r--  library/cpp/testing/benchmark/examples/metrics/main.py   12
-rw-r--r--  library/cpp/testing/benchmark/examples/metrics/ya.make   28
-rw-r--r--  library/cpp/testing/benchmark/examples/ya.make           10
-rw-r--r--  library/cpp/testing/benchmark/main/main.cpp              24
-rw-r--r--  library/cpp/testing/benchmark/main/ya.make               20
-rw-r--r--  library/cpp/testing/benchmark/ya.make                    18
9 files changed, 314 insertions, 314 deletions
diff --git a/library/cpp/testing/benchmark/bench.cpp b/library/cpp/testing/benchmark/bench.cpp
index dc7f8b7856..08d8708005 100644
--- a/library/cpp/testing/benchmark/bench.cpp
+++ b/library/cpp/testing/benchmark/bench.cpp
@@ -1,7 +1,7 @@
#include "bench.h"
-#include <contrib/libs/re2/re2/re2.h>
-
+#include <contrib/libs/re2/re2/re2.h>
+
#include <library/cpp/colorizer/output.h>
#include <library/cpp/getopt/small/last_getopt.h>
#include <library/cpp/json/json_value.h>
@@ -9,7 +9,7 @@
#include <library/cpp/threading/poor_man_openmp/thread_helper.h>
#include <util/system/hp_timer.h>
-#include <util/system/info.h>
+#include <util/system/info.h>
#include <util/stream/output.h>
#include <util/datetime/base.h>
#include <util/random/random.h>
@@ -26,7 +26,7 @@
#include <util/system/yield.h>
using re2::RE2;
-
+
using namespace NBench;
using namespace NColorizer;
using namespace NLastGetopt;
@@ -47,7 +47,7 @@ namespace {
};
struct ITestRunner: public TIntrusiveListItem<ITestRunner> {
- virtual ~ITestRunner() = default;
+ virtual ~ITestRunner() = default;
void Register();
virtual TStringBuf Name() const noexcept = 0;
@@ -278,115 +278,115 @@ namespace {
F(params);
}, opts.TimeBudget, *this);
}
-
- enum EOutFormat {
- F_CONSOLE = 0 /* "console" */,
+
+ enum EOutFormat {
+ F_CONSOLE = 0 /* "console" */,
F_CSV /* "csv" */,
F_JSON /* "json" */
- };
-
+ };
+
TAdaptiveLock STDOUT_LOCK;
-
+
struct IReporter {
- virtual void Report(TResult&& result) = 0;
-
- virtual void Finish() {
- }
-
- virtual ~IReporter() {
- }
- };
-
+ virtual void Report(TResult&& result) = 0;
+
+ virtual void Finish() {
+ }
+
+ virtual ~IReporter() {
+ }
+ };
+
class TConsoleReporter: public IReporter {
- public:
+ public:
~TConsoleReporter() override {
- }
-
- void Report(TResult&& r) override {
- with_lock (STDOUT_LOCK) {
- Cout << r;
- }
- }
- };
-
+ }
+
+ void Report(TResult&& r) override {
+ with_lock (STDOUT_LOCK) {
+ Cout << r;
+ }
+ }
+ };
+
class TCSVReporter: public IReporter {
- public:
- TCSVReporter() {
+ public:
+ TCSVReporter() {
Cout << "Name\tSamples\tIterations\tRun_time\tPer_iteration_sec\tPer_iteration_cycles" << Endl;
- }
-
+ }
+
~TCSVReporter() override {
- }
-
- void Report(TResult&& r) override {
- with_lock (STDOUT_LOCK) {
- Cout << r.TestName
- << '\t' << r.Samples
- << '\t' << r.Iterations
- << '\t' << r.RunTime;
-
- Cout << '\t';
- if (r.CyclesPerIteration) {
- Cout << TCycleTimer::FmtTime(*r.CyclesPerIteration);
- } else {
- Cout << '-';
- }
-
- Cout << '\t';
- if (r.SecondsPerIteration) {
- Cout << DoFmtTime(*r.SecondsPerIteration);
- } else {
- Cout << '-';
- }
-
- Cout << Endl;
- }
- }
- };
-
+ }
+
+ void Report(TResult&& r) override {
+ with_lock (STDOUT_LOCK) {
+ Cout << r.TestName
+ << '\t' << r.Samples
+ << '\t' << r.Iterations
+ << '\t' << r.RunTime;
+
+ Cout << '\t';
+ if (r.CyclesPerIteration) {
+ Cout << TCycleTimer::FmtTime(*r.CyclesPerIteration);
+ } else {
+ Cout << '-';
+ }
+
+ Cout << '\t';
+ if (r.SecondsPerIteration) {
+ Cout << DoFmtTime(*r.SecondsPerIteration);
+ } else {
+ Cout << '-';
+ }
+
+ Cout << Endl;
+ }
+ }
+ };
+
class TJSONReporter: public IReporter {
- public:
+ public:
~TJSONReporter() override {
- }
-
- void Report(TResult&& r) override {
- with_lock (ResultsLock_) {
- Results_.emplace_back(std::move(r));
- }
- }
-
- void Finish() override {
- NJson::TJsonValue report;
- auto& bench = report["benchmark"];
- bench.SetType(NJson::JSON_ARRAY);
-
- NJson::TJsonValue benchReport;
-
- for (const auto& result : Results_) {
- NJson::TJsonValue{}.Swap(benchReport);
- benchReport["name"] = result.TestName;
- benchReport["samples"] = result.Samples;
- benchReport["run_time"] = result.RunTime;
-
- if (result.CyclesPerIteration) {
- benchReport["per_iteration_cycles"] = *result.CyclesPerIteration;
- }
-
- if (result.SecondsPerIteration) {
- benchReport["per_iteration_secons"] = *result.SecondsPerIteration;
- }
-
- bench.AppendValue(benchReport);
- }
-
- Cout << report << Endl;
- }
-
- private:
- TAdaptiveLock ResultsLock_;
+ }
+
+ void Report(TResult&& r) override {
+ with_lock (ResultsLock_) {
+ Results_.emplace_back(std::move(r));
+ }
+ }
+
+ void Finish() override {
+ NJson::TJsonValue report;
+ auto& bench = report["benchmark"];
+ bench.SetType(NJson::JSON_ARRAY);
+
+ NJson::TJsonValue benchReport;
+
+ for (const auto& result : Results_) {
+ NJson::TJsonValue{}.Swap(benchReport);
+ benchReport["name"] = result.TestName;
+ benchReport["samples"] = result.Samples;
+ benchReport["run_time"] = result.RunTime;
+
+ if (result.CyclesPerIteration) {
+ benchReport["per_iteration_cycles"] = *result.CyclesPerIteration;
+ }
+
+ if (result.SecondsPerIteration) {
+ benchReport["per_iteration_secons"] = *result.SecondsPerIteration;
+ }
+
+ bench.AppendValue(benchReport);
+ }
+
+ Cout << report << Endl;
+ }
+
+ private:
+ TAdaptiveLock ResultsLock_;
TVector<TResult> Results_;
- };
-
+ };
+
class TOrderedReporter: public IReporter {
public:
TOrderedReporter(THolder<IReporter> slave)
@@ -421,22 +421,22 @@ namespace {
};
THolder<IReporter> MakeReporter(const EOutFormat type) {
- switch (type) {
- case F_CONSOLE:
- return MakeHolder<TConsoleReporter>();
-
- case F_CSV:
- return MakeHolder<TCSVReporter>();
+ switch (type) {
+ case F_CONSOLE:
+ return MakeHolder<TConsoleReporter>();
+
+ case F_CSV:
+ return MakeHolder<TCSVReporter>();
- case F_JSON:
- return MakeHolder<TJSONReporter>();
+ case F_JSON:
+ return MakeHolder<TJSONReporter>();
default:
break;
- }
+ }
return MakeHolder<TConsoleReporter>(); // make compiler happy
- }
+ }
THolder<IReporter> MakeOrderedReporter(const EOutFormat type) {
return MakeHolder<TOrderedReporter>(MakeReporter(type));
@@ -448,24 +448,24 @@ namespace {
}
}
}
-
-template <>
-EOutFormat FromStringImpl<EOutFormat>(const char* data, size_t len) {
- const auto s = TStringBuf{data, len};
+
+template <>
+EOutFormat FromStringImpl<EOutFormat>(const char* data, size_t len) {
+ const auto s = TStringBuf{data, len};
if (TStringBuf("console") == s) {
- return F_CONSOLE;
+ return F_CONSOLE;
} else if (TStringBuf("csv") == s) {
- return F_CSV;
+ return F_CSV;
} else if (TStringBuf("json") == s) {
- return F_JSON;
- }
-
- ythrow TFromStringException{} << "failed to convert '" << s << '\'';
+ return F_JSON;
+ }
+
+ ythrow TFromStringException{} << "failed to convert '" << s << '\'';
}
template <>
-void Out<TResult>(IOutputStream& out, const TResult& r) {
+void Out<TResult>(IOutputStream& out, const TResult& r) {
out << "----------- " << LightRed() << r.TestName << Old() << " ---------------" << Endl
<< " samples: " << White() << r.Samples << Old() << Endl
<< " iterations: " << White() << r.Iterations << Old() << Endl
@@ -482,9 +482,9 @@ void Out<TResult>(IOutputStream& out, const TResult& r) {
}
NCpu::TRegistar::TRegistar(const char* name, TUserFunc func) {
- static_assert(sizeof(TCpuBenchmark) + alignof(TCpuBenchmark) < sizeof(Buf), "fix Buf size");
+ static_assert(sizeof(TCpuBenchmark) + alignof(TCpuBenchmark) < sizeof(Buf), "fix Buf size");
- new (AlignUp(Buf, alignof(TCpuBenchmark))) TCpuBenchmark(name, func);
+ new (AlignUp(Buf, alignof(TCpuBenchmark))) TCpuBenchmark(name, func);
}
namespace {
@@ -496,36 +496,36 @@ namespace {
opts.AddLongOption('b', "budget")
.StoreResult(&TimeBudget)
- .RequiredArgument("SEC")
+ .RequiredArgument("SEC")
.Optional()
.Help("overall time budget");
opts.AddLongOption('l', "list")
- .NoArgument()
- .StoreValue(&ListTests, true)
+ .NoArgument()
+ .StoreValue(&ListTests, true)
.Help("list all tests");
opts.AddLongOption('t', "threads")
.StoreResult(&Threads)
- .OptionalValue(ToString((NSystemInfo::CachedNumberOfCpus() + 1) / 2), "JOBS")
- .DefaultValue("1")
+ .OptionalValue(ToString((NSystemInfo::CachedNumberOfCpus() + 1) / 2), "JOBS")
+ .DefaultValue("1")
.Help("run benchmarks in parallel");
- opts.AddLongOption('f', "format")
+ opts.AddLongOption('f', "format")
.AddLongName("benchmark_format")
- .StoreResult(&OutFormat)
- .RequiredArgument("FORMAT")
- .DefaultValue("console")
- .Help("output format (console|csv|json)");
-
- opts.SetFreeArgDefaultTitle("REGEXP", "RE2 regular expression to filter tests");
-
- const TOptsParseResult parseResult{&opts, argc, argv};
-
- for (const auto& regexp : parseResult.GetFreeArgs()) {
+ .StoreResult(&OutFormat)
+ .RequiredArgument("FORMAT")
+ .DefaultValue("console")
+ .Help("output format (console|csv|json)");
+
+ opts.SetFreeArgDefaultTitle("REGEXP", "RE2 regular expression to filter tests");
+
+ const TOptsParseResult parseResult{&opts, argc, argv};
+
+ for (const auto& regexp : parseResult.GetFreeArgs()) {
Filters.push_back(MakeHolder<RE2>(regexp.data(), RE2::Quiet));
- Y_ENSURE(Filters.back()->ok(), "incorrect RE2 expression '" << regexp << "'");
- }
+ Y_ENSURE(Filters.back()->ok(), "incorrect RE2 expression '" << regexp << "'");
+ }
}
bool MatchFilters(const TStringBuf& name) const {
@@ -533,72 +533,72 @@ namespace {
return true;
}
- for (auto&& re : Filters) {
+ for (auto&& re : Filters) {
if (RE2::FullMatchN({name.data(), name.size()}, *re, nullptr, 0)) {
- return true;
- }
- }
-
- return false;
- }
-
+ return true;
+ }
+ }
+
+ return false;
+ }
+
bool ListTests = false;
double TimeBudget = -1.0;
TVector<THolder<RE2>> Filters;
size_t Threads = 0;
- EOutFormat OutFormat;
+ EOutFormat OutFormat;
};
}
-int NBench::Main(int argc, char** argv) {
- const TProgOpts opts(argc, argv);
+int NBench::Main(int argc, char** argv) {
+ const TProgOpts opts(argc, argv);
TVector<ITestRunner*> tests;
-
- for (auto&& it : Tests()) {
- if (opts.MatchFilters(it.Name())) {
- tests.push_back(&it);
+
+ for (auto&& it : Tests()) {
+ if (opts.MatchFilters(it.Name())) {
+ tests.push_back(&it);
}
- }
+ }
EnumerateTests(tests);
- if (opts.ListTests) {
- for (const auto* const it : tests) {
- Cout << it->Name() << Endl;
+ if (opts.ListTests) {
+ for (const auto* const it : tests) {
+ Cout << it->Name() << Endl;
}
- return 0;
- }
+ return 0;
+ }
- if (!tests) {
- return 0;
- }
+ if (!tests) {
+ return 0;
+ }
- double timeBudget = opts.TimeBudget;
+ double timeBudget = opts.TimeBudget;
- if (timeBudget < 0) {
+ if (timeBudget < 0) {
timeBudget = 5.0 * tests.size();
- }
+ }
const TOptions testOpts = {timeBudget / tests.size()};
const auto reporter = MakeOrderedReporter(opts.OutFormat);
std::function<void(ITestRunner**)> func = [&](ITestRunner** it) {
- auto&& res = (*it)->Run(testOpts);
+ auto&& res = (*it)->Run(testOpts);
- reporter->Report(std::move(res));
- };
-
- if (opts.Threads > 1) {
- NYmp::SetThreadCount(opts.Threads);
+ reporter->Report(std::move(res));
+ };
+
+ if (opts.Threads > 1) {
+ NYmp::SetThreadCount(opts.Threads);
NYmp::ParallelForStaticChunk(tests.data(), tests.data() + tests.size(), 1, func);
- } else {
- for (auto it : tests) {
- func(&it);
- }
+ } else {
+ for (auto it : tests) {
+ func(&it);
+ }
}
-
- reporter->Finish();
-
- return 0;
+
+ reporter->Finish();
+
+ return 0;
}
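
The FromStringImpl<EOutFormat> specialization above is what lets NLastGetopt's
StoreResult() parse the --format value straight into the enum. A minimal
self-contained sketch of the same pattern, assuming util/string/cast.h declares
FromStringImpl and TFromStringException (the EColor enum is illustrative, not
part of this commit):

    #include <util/generic/strbuf.h>
    #include <util/generic/yexception.h>
    #include <util/string/cast.h>

    enum EColor {
        C_RED = 0,
        C_GREEN
    };

    template <>
    EColor FromStringImpl<EColor>(const char* data, size_t len) {
        const auto s = TStringBuf{data, len};
        if (TStringBuf("red") == s) {
            return C_RED;
        } else if (TStringBuf("green") == s) {
            return C_GREEN;
        }
        ythrow TFromStringException{} << "failed to convert '" << s << '\'';
    }

    // FromString<EColor>("green") now works anywhere, which is why
    // opts.AddLongOption(...).StoreResult(&OutFormat) above can accept
    // console|csv|json with no extra glue.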
diff --git a/library/cpp/testing/benchmark/bench.h b/library/cpp/testing/benchmark/bench.h
index 5773fc1534..21551ad0dd 100644
--- a/library/cpp/testing/benchmark/bench.h
+++ b/library/cpp/testing/benchmark/bench.h
@@ -26,62 +26,62 @@ namespace NBench {
/**
* Functions that states "I can read and write everywhere in memory".
- *
- * Use it to prevent optimizer from reordering or discarding memory writes prior to it's call,
- * and force memory reads after it's call.
- */
- void Clobber();
-
+ *
+ * Use it to prevent optimizer from reordering or discarding memory writes prior to it's call,
+ * and force memory reads after it's call.
+ */
+ void Clobber();
+
/**
* Forces whatever `p` points to be in memory and not in register.
- *
- * @param Pointer to data.
- */
- template <typename T>
- void Escape(T* p);
-
-#if defined(__GNUC__)
- Y_FORCE_INLINE void Clobber() {
+ *
+ * @param Pointer to data.
+ */
+ template <typename T>
+ void Escape(T* p);
+
+#if defined(__GNUC__)
+ Y_FORCE_INLINE void Clobber() {
asm volatile(""
:
:
: "memory");
- }
+ }
#elif defined(_MSC_VER)
Y_FORCE_INLINE void Clobber() {
_ReadWriteBarrier();
}
-#else
- Y_FORCE_INLINE void Clobber() {
- }
-#endif
-
-#if defined(__GNUC__)
- template <typename T>
- Y_FORCE_INLINE void Escape(T* p) {
+#else
+ Y_FORCE_INLINE void Clobber() {
+ }
+#endif
+
+#if defined(__GNUC__)
+ template <typename T>
+ Y_FORCE_INLINE void Escape(T* p) {
asm volatile(""
:
: "g"(p)
: "memory");
- }
-#else
- template <typename T>
- Y_FORCE_INLINE void Escape(T*) {
- }
-#endif
-
+ }
+#else
+ template <typename T>
+ Y_FORCE_INLINE void Escape(T*) {
+ }
+#endif
+
/**
* Use this function to prevent unused variables elimination.
*
* @param Unused variable (e.g. return value of benchmarked function).
*/
- template <typename T>
+ template <typename T>
Y_FORCE_INLINE void DoNotOptimize(T&& datum) {
::DoNotOptimizeAway(std::forward<T>(datum));
}
-
- int Main(int argc, char** argv);
+
+ int Main(int argc, char** argv);
}
#define Y_CPU_BENCHMARK(name, cnt) \
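
Clobber() and Escape() above are the optimizer barriers the example benchmarks
below rely on: Escape(p) pins what p points to in memory, and Clobber() tells
the compiler that all of memory may have been read and written. A minimal usage
sketch, assuming util/generic/vector.h for TVector (the VectorPushBack name is
illustrative, not part of this commit):

    #include <library/cpp/testing/benchmark/bench.h>

    #include <util/generic/vector.h>

    Y_CPU_BENCHMARK(VectorPushBack, iface) {
        for (size_t i = 0; i < iface.Iterations(); ++i) {
            TVector<int> v;
            NBench::Escape(&v);  // v must live in memory, not just registers
            v.push_back(42);     // the work actually being measured
            NBench::Clobber();   // writes to v are observable, so not dead code
        }
    }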
diff --git a/library/cpp/testing/benchmark/examples/main.cpp b/library/cpp/testing/benchmark/examples/main.cpp
index 745e636d4c..ddd8b05ffc 100644
--- a/library/cpp/testing/benchmark/examples/main.cpp
+++ b/library/cpp/testing/benchmark/examples/main.cpp
@@ -120,9 +120,9 @@ Y_CPU_BENCHMARK(FunctionCallCost_StringBufVal1, iface) {
for (auto i : xrange<size_t>(0, iface.Iterations())) {
(void)i;
- NBench::Escape(&x);
+ NBench::Escape(&x);
Y_DO_NOT_OPTIMIZE_AWAY(FS1(x));
- NBench::Clobber();
+ NBench::Clobber();
}
}
@@ -131,9 +131,9 @@ Y_CPU_BENCHMARK(FunctionCallCost_StringBufRef1, iface) {
for (auto i : xrange<size_t>(0, iface.Iterations())) {
(void)i;
- NBench::Escape(&x);
+ NBench::Escape(&x);
Y_DO_NOT_OPTIMIZE_AWAY(FS2(x));
- NBench::Clobber();
+ NBench::Clobber();
}
}
@@ -143,10 +143,10 @@ Y_CPU_BENCHMARK(FunctionCallCost_StringBufVal2, iface) {
for (auto i : xrange<size_t>(0, iface.Iterations())) {
(void)i;
- NBench::Escape(&x);
- NBench::Escape(&y);
+ NBench::Escape(&x);
+ NBench::Escape(&y);
Y_DO_NOT_OPTIMIZE_AWAY(FS1_2(x, y));
- NBench::Clobber();
+ NBench::Clobber();
}
}
@@ -156,10 +156,10 @@ Y_CPU_BENCHMARK(FunctionCallCost_StringBufRef2, iface) {
for (auto i : xrange<size_t>(0, iface.Iterations())) {
(void)i;
- NBench::Escape(&x);
- NBench::Escape(&y);
+ NBench::Escape(&x);
+ NBench::Escape(&y);
Y_DO_NOT_OPTIMIZE_AWAY(FS2_2(x, y));
- NBench::Clobber();
+ NBench::Clobber();
}
}
@@ -181,35 +181,35 @@ Y_CPU_BENCHMARK(FunctionCallCost_TwoArg, iface) {
Y_DO_NOT_OPTIMIZE_AWAY(FFF(i, i));
}
}
-
-/* An example of incorrect benchmark. As of r2581591 Clang 3.7 produced following assembly:
- * @code
- * │ push %rbp
- * │ mov %rsp,%rbp
- * │ push %rbx
- * │ push %rax
- * │ mov (%rdi),%rbx
- * │ test %rbx,%rbx
- * │ ↓ je 25
- * │ xor %edi,%edi
- * │ xor %esi,%esi
+
+/* An example of incorrect benchmark. As of r2581591 Clang 3.7 produced following assembly:
+ * @code
+ * │ push %rbp
+ * │ mov %rsp,%rbp
+ * │ push %rbx
+ * │ push %rax
+ * │ mov (%rdi),%rbx
+ * │ test %rbx,%rbx
+ * │ ↓ je 25
+ * │ xor %edi,%edi
+ * │ xor %esi,%esi
* │ → callq FS1(TBasicStringBuf<char, std::char_traits<char
- * │ nop
- * 100.00 │20:┌─→dec %rbx
- * │ └──jne 20
- * │25: add $0x8,%rsp
- * │ pop %rbx
- * │ pop %rbp
- * │ ← retq
- * @endcode
- *
- * So, this benchmark is measuring empty loop!
- */
-Y_CPU_BENCHMARK(Incorrect_FunctionCallCost_StringBufVal1, iface) {
- TStringBuf x;
-
- for (auto i : xrange<size_t>(0, iface.Iterations())) {
- (void)i;
- Y_DO_NOT_OPTIMIZE_AWAY(FS1(x));
- }
-}
+ * │ nop
+ * 100.00 │20:┌─→dec %rbx
+ * │ └──jne 20
+ * │25: add $0x8,%rsp
+ * │ pop %rbx
+ * │ pop %rbp
+ * │ ← retq
+ * @endcode
+ *
+ * So, this benchmark is measuring empty loop!
+ */
+Y_CPU_BENCHMARK(Incorrect_FunctionCallCost_StringBufVal1, iface) {
+ TStringBuf x;
+
+ for (auto i : xrange<size_t>(0, iface.Iterations())) {
+ (void)i;
+ Y_DO_NOT_OPTIMIZE_AWAY(FS1(x));
+ }
+}
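
For contrast, the corrected form of this benchmark is the
FunctionCallCost_StringBufVal1 hunk at the top of this file: pinning x with
NBench::Escape() and fencing each call with NBench::Clobber() keeps FS1(x)
inside the measured loop. A sketch of the fixed variant (the Fixed_ name is
illustrative):

    Y_CPU_BENCHMARK(Fixed_FunctionCallCost_StringBufVal1, iface) {
        TStringBuf x;

        for (auto i : xrange<size_t>(0, iface.Iterations())) {
            (void)i;
            NBench::Escape(&x);              // x is forced into memory
            Y_DO_NOT_OPTIMIZE_AWAY(FS1(x));  // the call's result counts as used
            NBench::Clobber();               // memory may have changed, no hoisting
        }
    }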
diff --git a/library/cpp/testing/benchmark/examples/metrics/main.py b/library/cpp/testing/benchmark/examples/metrics/main.py
index d826450400..8f9d9d06ae 100644
--- a/library/cpp/testing/benchmark/examples/metrics/main.py
+++ b/library/cpp/testing/benchmark/examples/metrics/main.py
@@ -1,7 +1,7 @@
-import yatest.common as yc
-
-
-def test_export_metrics(metrics):
- metrics.set_benchmark(yc.execute_benchmark(
+import yatest.common as yc
+
+
+def test_export_metrics(metrics):
+ metrics.set_benchmark(yc.execute_benchmark(
'library/cpp/testing/benchmark/examples/examples',
- threads=8))
+ threads=8))
diff --git a/library/cpp/testing/benchmark/examples/metrics/ya.make b/library/cpp/testing/benchmark/examples/metrics/ya.make
index a2c773a2d0..a9dbdca9fa 100644
--- a/library/cpp/testing/benchmark/examples/metrics/ya.make
+++ b/library/cpp/testing/benchmark/examples/metrics/ya.make
@@ -1,20 +1,20 @@
-OWNER(
- pg
- yazevnul
-)
-
+OWNER(
+ pg
+ yazevnul
+)
+
PY2TEST()
-
+
SIZE(LARGE)
-
-TAG(
+
+TAG(
ya:force_sandbox
- sb:intel_e5_2660v1
+ sb:intel_e5_2660v1
ya:fat
-)
-
+)
+
TEST_SRCS(main.py)
-
+
DEPENDS(library/cpp/testing/benchmark/examples)
-
-END()
+
+END()
diff --git a/library/cpp/testing/benchmark/examples/ya.make b/library/cpp/testing/benchmark/examples/ya.make
index d0d5bdca2c..7e696e127a 100644
--- a/library/cpp/testing/benchmark/examples/ya.make
+++ b/library/cpp/testing/benchmark/examples/ya.make
@@ -1,8 +1,8 @@
-OWNER(
- pg
- yazevnul
-)
-
+OWNER(
+ pg
+ yazevnul
+)
+
Y_BENCHMARK()
SRCS(
diff --git a/library/cpp/testing/benchmark/main/main.cpp b/library/cpp/testing/benchmark/main/main.cpp
index b464c79023..aabcb89c43 100644
--- a/library/cpp/testing/benchmark/main/main.cpp
+++ b/library/cpp/testing/benchmark/main/main.cpp
@@ -1,16 +1,16 @@
#include <library/cpp/testing/benchmark/bench.h>
-
-#include <util/generic/yexception.h>
-#include <util/stream/output.h>
-
-#include <cstdlib>
-
-int main(int argc, char** argv) {
- try {
- return NBench::Main(argc, argv);
+
+#include <util/generic/yexception.h>
+#include <util/stream/output.h>
+
+#include <cstdlib>
+
+int main(int argc, char** argv) {
+ try {
+ return NBench::Main(argc, argv);
} catch (...) {
- Cerr << CurrentExceptionMessage() << Endl;
- }
+ Cerr << CurrentExceptionMessage() << Endl;
+ }
return EXIT_FAILURE;
-}
+}
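
With this GLOBAL main.cpp linked in, the resulting binary accepts the options
parsed in bench.cpp above; an illustrative invocation (the binary name is
assumed) would be:

    ./examples -f json -t 4 'FunctionCallCost.*'

which runs every test whose name fully matches the RE2 filter, on 4 threads,
and reports the results as JSON.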
diff --git a/library/cpp/testing/benchmark/main/ya.make b/library/cpp/testing/benchmark/main/ya.make
index a434d42675..d00cdcf9fc 100644
--- a/library/cpp/testing/benchmark/main/ya.make
+++ b/library/cpp/testing/benchmark/main/ya.make
@@ -1,16 +1,16 @@
-LIBRARY()
-
+LIBRARY()
+
OWNER(
pg
yazevnul
)
-
-SRCS(
+
+SRCS(
GLOBAL main.cpp
-)
-
-PEERDIR(
+)
+
+PEERDIR(
library/cpp/testing/benchmark
-)
-
-END()
+)
+
+END()
diff --git a/library/cpp/testing/benchmark/ya.make b/library/cpp/testing/benchmark/ya.make
index 661e160238..f42be80698 100644
--- a/library/cpp/testing/benchmark/ya.make
+++ b/library/cpp/testing/benchmark/ya.make
@@ -4,19 +4,19 @@ OWNER(
pg
yazevnul
)
-
-SRCS(
+
+SRCS(
bench.cpp
dummy.cpp
-)
-
-PEERDIR(
- contrib/libs/re2
+)
+
+PEERDIR(
+ contrib/libs/re2
library/cpp/colorizer
library/cpp/getopt/small
library/cpp/json
library/cpp/linear_regression
library/cpp/threading/poor_man_openmp
-)
-
-END()
+)
+
+END()
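
Taken together: a consumer project declares a benchmark target with the
Y_BENCHMARK() macro, as examples/ya.make above does; the macro is presumed to
pull in library/cpp/testing/benchmark/main, whose GLOBAL main.cpp supplies the
entry point. A minimal consumer sketch (my_bench.cpp is a hypothetical source
file):

    Y_BENCHMARK()

    SRCS(
        my_bench.cpp
    )

    END()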