path: root/library
author      Aleksandr <ivansduck@gmail.com>  2022-02-10 16:47:52 +0300
committer   Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:47:52 +0300
commit      ea6c5b7f172becca389cacaff7d5f45f6adccbe6 (patch)
tree        d16cef493ac1e092b4a03ab9437ec06ffe3d188f /library
parent      37de222addabbef336dcaaea5f7c7645a629fc6d (diff)
download    ydb-ea6c5b7f172becca389cacaff7d5f45f6adccbe6.tar.gz
Restoring authorship annotation for Aleksandr <ivansduck@gmail.com>. Commit 1 of 2.
Diffstat (limited to 'library')
-rw-r--r--  library/cpp/accurate_accumulate/benchmark/metrics/ya.make  4
-rw-r--r--  library/cpp/actors/interconnect/ut_fat/ya.make  2
-rw-r--r--  library/cpp/digest/argonish/ut_fat/ya.make  2
-rw-r--r--  library/cpp/http/io/fuzz/ya.make  4
-rw-r--r--  library/cpp/http/io/stream_ut.cpp  12
-rw-r--r--  library/cpp/http/server/http_ut.cpp  8
-rw-r--r--  library/cpp/json/yson/ut/ya.make  6
-rw-r--r--  library/cpp/scheme/tests/fuzz_json/ya.make  4
-rw-r--r--  library/cpp/string_utils/base64/bench/metrics/ya.make  4
-rw-r--r--  library/cpp/testing/benchmark/examples/metrics/ya.make  4
-rw-r--r--  library/cpp/testing/common/env.cpp  198
-rw-r--r--  library/cpp/testing/common/env.h  54
-rw-r--r--  library/cpp/testing/common/ut/env_ut.cpp  128
-rw-r--r--  library/cpp/testing/unittest/fat/test_port_manager.cpp  66
-rw-r--r--  library/cpp/testing/unittest/fat/ya.make  28
-rw-r--r--  library/cpp/testing/unittest/registar.cpp  30
-rw-r--r--  library/cpp/testing/unittest/tests_data.cpp  76
-rw-r--r--  library/cpp/testing/unittest/tests_data.h  20
-rw-r--r--  library/cpp/testing/unittest/ut/main.cpp  12
-rw-r--r--  library/cpp/testing/unittest/utmain.cpp  148
-rw-r--r--  library/cpp/testing/ya.make  2
-rw-r--r--  library/cpp/ya.make  20
-rw-r--r--  library/python/cores/__init__.py  2
-rw-r--r--  library/python/cores/ya.make  2
-rw-r--r--  library/python/filelock/__init__.py  56
-rw-r--r--  library/python/filelock/ut/lib/test_filelock.py  56
-rw-r--r--  library/python/fs/__init__.py  96
-rw-r--r--  library/python/fs/test/test_fs.py  62
-rw-r--r--  library/python/func/__init__.py  194
-rw-r--r--  library/python/func/ut/test_func.py  162
-rw-r--r--  library/python/func/ut/ya.make  22
-rw-r--r--  library/python/func/ya.make  8
-rw-r--r--  library/python/pytest/main.py  96
-rw-r--r--  library/python/pytest/plugins/collection.py  2
-rw-r--r--  library/python/pytest/plugins/fakeid_py2.py  4
-rw-r--r--  library/python/pytest/plugins/fakeid_py3.py  4
-rw-r--r--  library/python/pytest/plugins/ya.make  32
-rw-r--r--  library/python/pytest/plugins/ya.py  478
-rw-r--r--  library/python/pytest/ya.make  22
-rw-r--r--  library/python/pytest/yatest_tools.py  196
-rw-r--r--  library/python/reservoir_sampling/README.md  22
-rw-r--r--  library/python/resource/__init__.py  16
-rw-r--r--  library/python/resource/ut/lib/test_simple.py  22
-rw-r--r--  library/python/runtime_py3/entry_points.py  16
-rw-r--r--  library/python/runtime_py3/main/main.c  30
-rw-r--r--  library/python/strings/__init__.py  34
-rw-r--r--  library/python/strings/strings.py  48
-rw-r--r--  library/python/strings/ut/test_strings.py  14
-rw-r--r--  library/python/strings/ya.make  10
-rw-r--r--  library/python/testing/filter/ya.make  2
-rw-r--r--  library/python/testing/import_test/import_test.py  30
-rw-r--r--  library/python/testing/import_test/ya.make  2
-rw-r--r--  library/python/testing/recipe/__init__.py  8
-rw-r--r--  library/python/testing/recipe/ya.make  10
-rw-r--r--  library/python/testing/ya.make  8
-rw-r--r--  library/python/testing/yatest_common/ya.make  14
-rw-r--r--  library/python/testing/yatest_common/yatest/common/canonical.py  10
-rw-r--r--  library/python/testing/yatest_common/yatest/common/environment.py  8
-rw-r--r--  library/python/testing/yatest_common/yatest/common/errors.py  6
-rw-r--r--  library/python/testing/yatest_common/yatest/common/misc.py  38
-rw-r--r--  library/python/testing/yatest_common/yatest/common/network.py  436
-rw-r--r--  library/python/testing/yatest_common/yatest/common/path.py  56
-rw-r--r--  library/python/testing/yatest_common/yatest/common/process.py  634
-rw-r--r--  library/python/testing/yatest_common/yatest/common/runtime.py  196
-rw-r--r--  library/python/testing/yatest_common/yatest/common/runtime_java.py  36
-rw-r--r--  library/python/testing/yatest_common/yatest/common/ya.make  2
-rw-r--r--  library/python/testing/yatest_lib/external.py  36
-rw-r--r--  library/python/testing/yatest_lib/test_splitter.py  8
-rw-r--r--  library/python/testing/yatest_lib/tests/test_external.py  40
-rw-r--r--  library/python/testing/yatest_lib/tests/ya.make  6
-rw-r--r--  library/python/testing/yatest_lib/ya.make  14
-rw-r--r--  library/python/windows/__init__.py  78
-rw-r--r--  library/python/ya.make  2
73 files changed, 2109 insertions(+), 2109 deletions(-)
diff --git a/library/cpp/accurate_accumulate/benchmark/metrics/ya.make b/library/cpp/accurate_accumulate/benchmark/metrics/ya.make
index 5d532e1479..9c62aad212 100644
--- a/library/cpp/accurate_accumulate/benchmark/metrics/ya.make
+++ b/library/cpp/accurate_accumulate/benchmark/metrics/ya.make
@@ -2,12 +2,12 @@ OWNER(yazevnul)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/library/cpp/actors/interconnect/ut_fat/ya.make b/library/cpp/actors/interconnect/ut_fat/ya.make
index 6e58d08154..5218401045 100644
--- a/library/cpp/actors/interconnect/ut_fat/ya.make
+++ b/library/cpp/actors/interconnect/ut_fat/ya.make
@@ -7,7 +7,7 @@ OWNER(
SIZE(LARGE)
-TAG(ya:fat)
+TAG(ya:fat)
SRCS(
main.cpp
diff --git a/library/cpp/digest/argonish/ut_fat/ya.make b/library/cpp/digest/argonish/ut_fat/ya.make
index 94ebda9225..605524bedb 100644
--- a/library/cpp/digest/argonish/ut_fat/ya.make
+++ b/library/cpp/digest/argonish/ut_fat/ya.make
@@ -16,6 +16,6 @@ TAG(
ya:force_sandbox
)
-SIZE(LARGE)
+SIZE(LARGE)
END()
diff --git a/library/cpp/http/io/fuzz/ya.make b/library/cpp/http/io/fuzz/ya.make
index 8b3ccb1969..d4d4b8cdbf 100644
--- a/library/cpp/http/io/fuzz/ya.make
+++ b/library/cpp/http/io/fuzz/ya.make
@@ -9,8 +9,8 @@ PEERDIR(
library/cpp/http/io
)
-SIZE(MEDIUM)
-
+SIZE(MEDIUM)
+
SRCS(
main.cpp
)
diff --git a/library/cpp/http/io/stream_ut.cpp b/library/cpp/http/io/stream_ut.cpp
index 1ea35df675..48c9307dd4 100644
--- a/library/cpp/http/io/stream_ut.cpp
+++ b/library/cpp/http/io/stream_ut.cpp
@@ -84,8 +84,8 @@ Y_UNIT_TEST_SUITE(THttpStreamTest) {
Y_UNIT_TEST(TestHttpInput) {
TString res = "I'm a teapot";
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
TTestHttpServer serverImpl(res);
THttpServer server(&serverImpl, THttpServer::TOptions(port).EnableKeepAlive(true).EnableCompression(true));
@@ -238,8 +238,8 @@ Y_UNIT_TEST_SUITE(THttpStreamTest) {
Y_UNIT_TEST(TestMinRequest) {
TString res = "qqqqqq";
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
TTestHttpServer serverImpl(res);
THttpServer server(&serverImpl, THttpServer::TOptions(port).EnableKeepAlive(true).EnableCompression(true));
@@ -264,8 +264,8 @@ Y_UNIT_TEST_SUITE(THttpStreamTest) {
Y_UNIT_TEST(TestResponseWithBlanks) {
TString res = "qqqqqq\r\n\r\nsdasdsad\r\n";
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
TTestHttpServer serverImpl(res);
THttpServer server(&serverImpl, THttpServer::TOptions(port).EnableKeepAlive(true).EnableCompression(true));
diff --git a/library/cpp/http/server/http_ut.cpp b/library/cpp/http/server/http_ut.cpp
index cc62bb988e..cf6f9c2c63 100644
--- a/library/cpp/http/server/http_ut.cpp
+++ b/library/cpp/http/server/http_ut.cpp
@@ -322,8 +322,8 @@ Y_UNIT_TEST_SUITE(THttpServerTest) {
Y_UNIT_TEST(TestEchoServer) {
TString res = TestData();
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
const bool trueFalse[] = {true, false};
TEchoServer serverImpl(res);
@@ -428,8 +428,8 @@ Y_UNIT_TEST_SUITE(THttpServerTest) {
* Data should be large enough not to fit into socket buffer
**/
TString res = TestData(10 * 1024 * 1024);
- TPortManager portManager;
- const ui16 port = portManager.GetPort();
+ TPortManager portManager;
+ const ui16 port = portManager.GetPort();
TEchoServer serverImpl(res);
THttpServer::TOptions options(port);
options.EnableKeepAlive(true);
diff --git a/library/cpp/json/yson/ut/ya.make b/library/cpp/json/yson/ut/ya.make
index 4ceb65b279..1a4a193b55 100644
--- a/library/cpp/json/yson/ut/ya.make
+++ b/library/cpp/json/yson/ut/ya.make
@@ -15,10 +15,10 @@ PEERDIR(
library/cpp/testing/unittest
)
-SIZE(LARGE)
-
-TAG(ya:fat)
+SIZE(LARGE)
+TAG(ya:fat)
+
TIMEOUT(600)
SRCS(
diff --git a/library/cpp/scheme/tests/fuzz_json/ya.make b/library/cpp/scheme/tests/fuzz_json/ya.make
index 0d91c70585..e398cfc5dd 100644
--- a/library/cpp/scheme/tests/fuzz_json/ya.make
+++ b/library/cpp/scheme/tests/fuzz_json/ya.make
@@ -7,8 +7,8 @@ OWNER(
velavokr
)
-SIZE(MEDIUM)
-
+SIZE(MEDIUM)
+
SRCS(
fuzz_json.cpp
)
diff --git a/library/cpp/string_utils/base64/bench/metrics/ya.make b/library/cpp/string_utils/base64/bench/metrics/ya.make
index b0406516c3..7185c725b6 100644
--- a/library/cpp/string_utils/base64/bench/metrics/ya.make
+++ b/library/cpp/string_utils/base64/bench/metrics/ya.make
@@ -5,12 +5,12 @@ OWNER(
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/library/cpp/testing/benchmark/examples/metrics/ya.make b/library/cpp/testing/benchmark/examples/metrics/ya.make
index a9dbdca9fa..0ba17e4545 100644
--- a/library/cpp/testing/benchmark/examples/metrics/ya.make
+++ b/library/cpp/testing/benchmark/examples/metrics/ya.make
@@ -5,12 +5,12 @@ OWNER(
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/library/cpp/testing/common/env.cpp b/library/cpp/testing/common/env.cpp
index fa3a47fe16..436af91845 100644
--- a/library/cpp/testing/common/env.cpp
+++ b/library/cpp/testing/common/env.cpp
@@ -3,19 +3,19 @@
#include <build/scripts/c_templates/svnversion.h>
#include <util/folder/dirut.h>
-#include <util/folder/path.h>
-#include <util/generic/singleton.h>
+#include <util/folder/path.h>
+#include <util/generic/singleton.h>
#include <util/stream/file.h>
-#include <util/stream/fwd.h>
-#include <util/system/env.h>
-#include <util/system/file.h>
-#include <util/system/file_lock.h>
-#include <util/system/guard.h>
-
+#include <util/stream/fwd.h>
+#include <util/system/env.h>
+#include <util/system/file.h>
+#include <util/system/file_lock.h>
+#include <util/system/guard.h>
+
#include <library/cpp/json/json_reader.h>
#include <library/cpp/json/json_value.h>
-#include <library/cpp/json/json_writer.h>
-
+#include <library/cpp/json/json_writer.h>
+
TString ArcadiaSourceRoot() {
if (const auto& sourceRoot = NPrivate::GetTestEnv().SourceRoot) {
return sourceRoot;
@@ -63,89 +63,89 @@ TString GetArcadiaTestsData() {
return {};
}
-
+
TString GetWorkPath() {
TString workPath = NPrivate::GetTestEnv().WorkPath;
if (workPath) {
return workPath;
- }
+ }
return NPrivate::GetCwd();
-}
-
+}
+
TFsPath GetOutputPath() {
return GetWorkPath() + "/testing_out_stuff";
}
-const TString& GetRamDrivePath() {
+const TString& GetRamDrivePath() {
return NPrivate::GetTestEnv().RamDrivePath;
-}
-
+}
+
const TString& GetYtHddPath() {
return NPrivate::GetTestEnv().YtHddPath;
}
-const TString& GetOutputRamDrivePath() {
+const TString& GetOutputRamDrivePath() {
return NPrivate::GetTestEnv().TestOutputRamDrivePath;
-}
-
-const TString& GdbPath() {
- return NPrivate::GetTestEnv().GdbPath;
-}
-
-const TString& GetTestParam(TStringBuf name) {
- const static TString def = "";
- return GetTestParam(name, def);
-}
-
-const TString& GetTestParam(TStringBuf name, const TString& def) {
- auto& testParameters = NPrivate::GetTestEnv().TestParameters;
- auto it = testParameters.find(name.data());
- if (it != testParameters.end()) {
- return it->second;
- }
- return def;
-}
-
-void AddEntryToCoreSearchFile(const TString& filename, TStringBuf cmd, int pid, const TFsPath& binaryPath = TFsPath(), const TFsPath& cwd = TFsPath()) {
- auto lock = TFileLock(filename);
- TGuard<TFileLock> guard(lock);
-
- TOFStream output(TFile(filename, WrOnly | ForAppend | OpenAlways));
-
- NJson::TJsonWriter writer(&output, false);
- writer.OpenMap();
- writer.Write("cmd", cmd);
- writer.Write("pid", pid);
- if (binaryPath) {
- writer.Write("binary_path", binaryPath);
- }
- if (cwd) {
- writer.Write("cwd", cwd);
- }
- writer.CloseMap();
- writer.Flush();
-
- output.Write("\n");
-}
-
-void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd) {
- auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
- if (filename) {
- AddEntryToCoreSearchFile(filename, "add", pid, binaryPath, cwd);
- }
-}
-
-void StopProcessCoreWatching(int pid) {
- auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
- if (filename) {
- AddEntryToCoreSearchFile(filename, "drop", pid);
- }
-}
-
+}
+
+const TString& GdbPath() {
+ return NPrivate::GetTestEnv().GdbPath;
+}
+
+const TString& GetTestParam(TStringBuf name) {
+ const static TString def = "";
+ return GetTestParam(name, def);
+}
+
+const TString& GetTestParam(TStringBuf name, const TString& def) {
+ auto& testParameters = NPrivate::GetTestEnv().TestParameters;
+ auto it = testParameters.find(name.data());
+ if (it != testParameters.end()) {
+ return it->second;
+ }
+ return def;
+}
+
+void AddEntryToCoreSearchFile(const TString& filename, TStringBuf cmd, int pid, const TFsPath& binaryPath = TFsPath(), const TFsPath& cwd = TFsPath()) {
+ auto lock = TFileLock(filename);
+ TGuard<TFileLock> guard(lock);
+
+ TOFStream output(TFile(filename, WrOnly | ForAppend | OpenAlways));
+
+ NJson::TJsonWriter writer(&output, false);
+ writer.OpenMap();
+ writer.Write("cmd", cmd);
+ writer.Write("pid", pid);
+ if (binaryPath) {
+ writer.Write("binary_path", binaryPath);
+ }
+ if (cwd) {
+ writer.Write("cwd", cwd);
+ }
+ writer.CloseMap();
+ writer.Flush();
+
+ output.Write("\n");
+}
+
+void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd) {
+ auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
+ if (filename) {
+ AddEntryToCoreSearchFile(filename, "add", pid, binaryPath, cwd);
+ }
+}
+
+void StopProcessCoreWatching(int pid) {
+ auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
+ if (filename) {
+ AddEntryToCoreSearchFile(filename, "drop", pid);
+ }
+}
+
bool FromYaTest() {
return NPrivate::GetTestEnv().IsRunningFromTest;
-}
+}
namespace NPrivate {
TTestEnv::TTestEnv() {
@@ -161,9 +161,9 @@ namespace NPrivate {
RamDrivePath = "";
YtHddPath = "";
TestOutputRamDrivePath = "";
- GdbPath = "";
- CoreSearchFile = "";
- TestParameters.clear();
+ GdbPath = "";
+ CoreSearchFile = "";
+ TestParameters.clear();
const TString contextFilename = GetEnv("YA_TEST_CONTEXT_FILE");
if (contextFilename) {
@@ -206,23 +206,23 @@ namespace NPrivate {
if (value) {
TestOutputRamDrivePath = value->GetStringSafe("");
}
-
- value = context.GetValueByPath("runtime.gdb_bin");
- if (value) {
- GdbPath = value->GetStringSafe("");
- }
-
- value = context.GetValueByPath("runtime.test_params");
- if (value) {
- for (const auto& entry : context.GetValueByPath("runtime.test_params")->GetMap()) {
- TestParameters[entry.first] = entry.second.GetStringSafe("");
- }
- }
-
- value = context.GetValueByPath("internal.core_search_file");
- if (value) {
- CoreSearchFile = value->GetStringSafe("");
- }
+
+ value = context.GetValueByPath("runtime.gdb_bin");
+ if (value) {
+ GdbPath = value->GetStringSafe("");
+ }
+
+ value = context.GetValueByPath("runtime.test_params");
+ if (value) {
+ for (const auto& entry : context.GetValueByPath("runtime.test_params")->GetMap()) {
+ TestParameters[entry.first] = entry.second.GetStringSafe("");
+ }
+ }
+
+ value = context.GetValueByPath("internal.core_search_file");
+ if (value) {
+ CoreSearchFile = value->GetStringSafe("");
+ }
}
if (!YtHddPath) {
@@ -257,10 +257,10 @@ namespace NPrivate {
IsRunningFromTest = (fromEnv == "1");
}
- void TTestEnv::AddTestParam(TStringBuf name, TStringBuf value) {
- TestParameters[TString{name}] = value;
- }
-
+ void TTestEnv::AddTestParam(TStringBuf name, TStringBuf value) {
+ TestParameters[TString{name}] = value;
+ }
+
TString GetCwd() {
try {
return NFs::CurrentWorkingDirectory();
diff --git a/library/cpp/testing/common/env.h b/library/cpp/testing/common/env.h
index 7b89aa1bed..9f405145f1 100644
--- a/library/cpp/testing/common/env.h
+++ b/library/cpp/testing/common/env.h
@@ -1,7 +1,7 @@
#pragma once
-#include <unordered_map>
-
+#include <unordered_map>
+
#include <util/folder/path.h>
#include <util/generic/string.h>
#include <util/generic/strbuf.h>
@@ -14,7 +14,7 @@ TString ArcadiaSourceRoot();
// for the instance: there is 2 files in folder test example_ut.cpp and example.data, so full path to test/example.data can be obtained
// from example_ut.cpp as ArcadiaFromCurrentLocation(__SOURCE_FILE__, "example.data")
TString ArcadiaFromCurrentLocation(TStringBuf where, TStringBuf path);
-
+
// @brief return build folder path
TString BuildRoot();
@@ -34,26 +34,26 @@ TString GetWorkPath();
TFsPath GetOutputPath();
// @brief return path from env:YA_TEST_RAM_DRIVE_PATH
-const TString& GetRamDrivePath();
-
+const TString& GetRamDrivePath();
+
// @brief return path from env:YA_TEST_OUTPUT_RAM_DRIVE_PATH
-const TString& GetOutputRamDrivePath();
-
-// @brief return test parameter by name. If not exists, return an empty string
-const TString& GetTestParam(TStringBuf name);
-
-// @brief return test parameter by name. If not exists, return specified default value
-const TString& GetTestParam(TStringBuf name, const TString& def);
-
-// @brief return path to the gdb
-const TString& GdbPath();
-
-// @brief register the process. Test suite will be marked as failed if the process is terminated with a core dump file after testing
-void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd = TFsPath());
-
-// @brief mark the process as successfully completed - a test machinery won't try to recover core dump file for the process
-void StopProcessCoreWatching(int pid);
-
+const TString& GetOutputRamDrivePath();
+
+// @brief return test parameter by name. If not exists, return an empty string
+const TString& GetTestParam(TStringBuf name);
+
+// @brief return test parameter by name. If not exists, return specified default value
+const TString& GetTestParam(TStringBuf name, const TString& def);
+
+// @brief return path to the gdb
+const TString& GdbPath();
+
+// @brief register the process. Test suite will be marked as failed if the process is terminated with a core dump file after testing
+void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd = TFsPath());
+
+// @brief mark the process as successfully completed - a test machinery won't try to recover core dump file for the process
+void StopProcessCoreWatching(int pid);
+
#define SRC_(path) ArcadiaFromCurrentLocation(__SOURCE_FILE__, path)
namespace NPrivate {
@@ -63,8 +63,8 @@ namespace NPrivate {
void ReInitialize();
- void AddTestParam(TStringBuf name, TStringBuf value);
-
+ void AddTestParam(TStringBuf name, TStringBuf value);
+
bool IsRunningFromTest;
TString ArcadiaTestsDataDir;
TString SourceRoot;
@@ -73,9 +73,9 @@ namespace NPrivate {
TString RamDrivePath;
TString YtHddPath;
TString TestOutputRamDrivePath;
- TString GdbPath;
- TString CoreSearchFile;
- std::unordered_map<TString, TString> TestParameters;
+ TString GdbPath;
+ TString CoreSearchFile;
+ std::unordered_map<TString, TString> TestParameters;
};
TString GetCwd();
diff --git a/library/cpp/testing/common/ut/env_ut.cpp b/library/cpp/testing/common/ut/env_ut.cpp
index 2aed1e4a25..408661d84d 100644
--- a/library/cpp/testing/common/ut/env_ut.cpp
+++ b/library/cpp/testing/common/ut/env_ut.cpp
@@ -1,12 +1,12 @@
#include <library/cpp/testing/common/env.h>
#include <library/cpp/testing/common/scope.h>
-#include <library/cpp/testing/gtest/gtest.h>
+#include <library/cpp/testing/gtest/gtest.h>
#include <util/folder/dirut.h>
-#include <util/stream/file.h>
+#include <util/stream/file.h>
#include <util/system/env.h>
#include <util/system/execpath.h>
-#include <util/system/fs.h>
+#include <util/system/fs.h>
TEST(Runtime, ArcadiaSourceRoot) {
@@ -99,64 +99,64 @@ TEST(Runtime, GetOutputRamDrivePath) {
Singleton<NPrivate::TTestEnv>()->ReInitialize();
EXPECT_EQ(tmpDir, GetOutputRamDrivePath());
}
-
-#ifdef _linux_
-TEST(Runtime, GdbPath) {
- Singleton<NPrivate::TTestEnv>()->ReInitialize();
- EXPECT_TRUE(NFs::Exists(::GdbPath()));
-}
-#endif
-
-TString ReInitializeContext(TStringBuf data) {
- auto tmpDir = ::GetSystemTempDir();
- auto filename = tmpDir + "/context.json";
- TOFStream stream(filename);
- stream.Write(data.data(), data.size());
- stream.Finish();
-
- NTesting::TScopedEnvironment contextGuard("YA_TEST_CONTEXT_FILE", filename);
- Singleton<NPrivate::TTestEnv>()->ReInitialize();
-
- return filename;
-}
-
-TEST(Runtime, GetTestParam) {
- TString context = R"json({
- "runtime": {
- "test_params": {
- "a": "b",
- "c": "d"
- }
- }
- })json";
- auto filename = ReInitializeContext(context);
-
- EXPECT_EQ("b", GetTestParam("a"));
- EXPECT_EQ("d", GetTestParam("c"));
- EXPECT_EQ("", GetTestParam("e"));
- EXPECT_EQ("w", GetTestParam("e", "w"));
-
- Singleton<NPrivate::TTestEnv>()->AddTestParam("e", "e");
- EXPECT_EQ("e", GetTestParam("e"));
-}
-
-TEST(Runtime, WatchProcessCore) {
- TString context = R"json({
- "internal": {
- "core_search_file": "watch_core.txt"
- }
- })json";
- auto filename = ReInitializeContext(context);
-
- WatchProcessCore(1, "bin1", "pwd");
- WatchProcessCore(2, "bin1");
- StopProcessCoreWatching(2);
-
- TIFStream file("watch_core.txt");
- auto data = file.ReadAll();
- TString expected = R"json({"cmd":"add","pid":1,"binary_path":"bin1","cwd":"pwd"}
-{"cmd":"add","pid":2,"binary_path":"bin1"}
-{"cmd":"drop","pid":2}
-)json";
- EXPECT_EQ(expected, data);
-}
+
+#ifdef _linux_
+TEST(Runtime, GdbPath) {
+ Singleton<NPrivate::TTestEnv>()->ReInitialize();
+ EXPECT_TRUE(NFs::Exists(::GdbPath()));
+}
+#endif
+
+TString ReInitializeContext(TStringBuf data) {
+ auto tmpDir = ::GetSystemTempDir();
+ auto filename = tmpDir + "/context.json";
+ TOFStream stream(filename);
+ stream.Write(data.data(), data.size());
+ stream.Finish();
+
+ NTesting::TScopedEnvironment contextGuard("YA_TEST_CONTEXT_FILE", filename);
+ Singleton<NPrivate::TTestEnv>()->ReInitialize();
+
+ return filename;
+}
+
+TEST(Runtime, GetTestParam) {
+ TString context = R"json({
+ "runtime": {
+ "test_params": {
+ "a": "b",
+ "c": "d"
+ }
+ }
+ })json";
+ auto filename = ReInitializeContext(context);
+
+ EXPECT_EQ("b", GetTestParam("a"));
+ EXPECT_EQ("d", GetTestParam("c"));
+ EXPECT_EQ("", GetTestParam("e"));
+ EXPECT_EQ("w", GetTestParam("e", "w"));
+
+ Singleton<NPrivate::TTestEnv>()->AddTestParam("e", "e");
+ EXPECT_EQ("e", GetTestParam("e"));
+}
+
+TEST(Runtime, WatchProcessCore) {
+ TString context = R"json({
+ "internal": {
+ "core_search_file": "watch_core.txt"
+ }
+ })json";
+ auto filename = ReInitializeContext(context);
+
+ WatchProcessCore(1, "bin1", "pwd");
+ WatchProcessCore(2, "bin1");
+ StopProcessCoreWatching(2);
+
+ TIFStream file("watch_core.txt");
+ auto data = file.ReadAll();
+ TString expected = R"json({"cmd":"add","pid":1,"binary_path":"bin1","cwd":"pwd"}
+{"cmd":"add","pid":2,"binary_path":"bin1"}
+{"cmd":"drop","pid":2}
+)json";
+ EXPECT_EQ(expected, data);
+}
diff --git a/library/cpp/testing/unittest/fat/test_port_manager.cpp b/library/cpp/testing/unittest/fat/test_port_manager.cpp
index f77d2e3a25..472a66249e 100644
--- a/library/cpp/testing/unittest/fat/test_port_manager.cpp
+++ b/library/cpp/testing/unittest/fat/test_port_manager.cpp
@@ -1,36 +1,36 @@
#include <library/cpp/testing/unittest/registar.h>
#include <library/cpp/testing/unittest/tests_data.h>
-
-bool IsFreePort(ui16 port) {
- TInet6StreamSocket sock;
- TSockAddrInet6 addr("::", port);
- Y_ENSURE(SetSockOpt(sock, SOL_SOCKET, SO_REUSEADDR, 1) == 0);
- SetReuseAddressAndPort(sock);
- if (sock.Bind(&addr) == 0) {
- return true;
- }
- return false;
-}
-
-void get_port_ranges() {
+
+bool IsFreePort(ui16 port) {
+ TInet6StreamSocket sock;
+ TSockAddrInet6 addr("::", port);
+ Y_ENSURE(SetSockOpt(sock, SOL_SOCKET, SO_REUSEADDR, 1) == 0);
+ SetReuseAddressAndPort(sock);
+ if (sock.Bind(&addr) == 0) {
+ return true;
+ }
+ return false;
+}
+
+void get_port_ranges() {
for (int i = 1; i < 10; ++i) {
- TPortManager pm;
- ui16 port = pm.GetPortsRange(1024, i);
- for (int p = port; p < port + i; ++p) {
- UNIT_ASSERT(IsFreePort(p));
- }
- }
-}
-
-Y_UNIT_TEST_SUITE(TestTPortManager) {
- Y_UNIT_TEST(ParallelRun0) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun1) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun2) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun3) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun4) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun5) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun6) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun7) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun8) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun9) {get_port_ranges();}
-}
+ TPortManager pm;
+ ui16 port = pm.GetPortsRange(1024, i);
+ for (int p = port; p < port + i; ++p) {
+ UNIT_ASSERT(IsFreePort(p));
+ }
+ }
+}
+
+Y_UNIT_TEST_SUITE(TestTPortManager) {
+ Y_UNIT_TEST(ParallelRun0) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun1) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun2) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun3) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun4) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun5) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun6) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun7) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun8) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun9) {get_port_ranges();}
+}
diff --git a/library/cpp/testing/unittest/fat/ya.make b/library/cpp/testing/unittest/fat/ya.make
index d405e599ee..6eb68767b4 100644
--- a/library/cpp/testing/unittest/fat/ya.make
+++ b/library/cpp/testing/unittest/fat/ya.make
@@ -1,19 +1,19 @@
-UNITTEST()
-
-OWNER(g:yatool)
-
-SRCS(
- test_port_manager.cpp
-)
-
-SIZE(LARGE)
-
-# We need to run tests at the same time on the single machine
-FORK_SUBTESTS()
-
+UNITTEST()
+
+OWNER(g:yatool)
+
+SRCS(
+ test_port_manager.cpp
+)
+
+SIZE(LARGE)
+
+# We need to run tests at the same time on the single machine
+FORK_SUBTESTS()
+
TAG(
ya:fat
ya:force_sandbox
)
-END()
+END()
diff --git a/library/cpp/testing/unittest/registar.cpp b/library/cpp/testing/unittest/registar.cpp
index 3679b768ed..06882dd1ae 100644
--- a/library/cpp/testing/unittest/registar.cpp
+++ b/library/cpp/testing/unittest/registar.cpp
@@ -106,10 +106,10 @@ struct TDiffColorizer {
}
};
-struct TTraceDiffFormatter {
+struct TTraceDiffFormatter {
bool Reverse = false;
- explicit TTraceDiffFormatter(bool reverse = false)
+ explicit TTraceDiffFormatter(bool reverse = false)
: Reverse(reverse)
{
}
@@ -123,26 +123,26 @@ struct TTraceDiffFormatter {
}
TString Left(TArrayRef<const char> str) const {
- return NUnitTest::GetFormatTag("good") +
+ return NUnitTest::GetFormatTag("good") +
TString(str.begin(), str.end()) +
NUnitTest::GetResetTag();
}
TString Right(TArrayRef<const char> str) const {
- return NUnitTest::GetFormatTag("bad") +
+ return NUnitTest::GetFormatTag("bad") +
TString(str.begin(), str.end()) +
NUnitTest::GetResetTag();
}
};
TString NUnitTest::GetFormatTag(const char* name) {
- return Sprintf("[[%s]]", name);
-}
-
+ return Sprintf("[[%s]]", name);
+}
+
TString NUnitTest::GetResetTag() {
return TString("[[rst]]");
-}
-
+}
+
TString NUnitTest::ColoredDiff(TStringBuf s1, TStringBuf s2, const TString& delims, bool reverse) {
TStringStream res;
TVector<NDiff::TChunk<char>> chunks;
@@ -150,8 +150,8 @@ TString NUnitTest::ColoredDiff(TStringBuf s1, TStringBuf s2, const TString& deli
if (NUnitTest::ShouldColorizeDiff) {
NDiff::PrintChunks(res, TDiffColorizer(reverse), chunks);
} else {
- res << NUnitTest::GetResetTag();
- NDiff::PrintChunks(res, TTraceDiffFormatter(reverse), chunks);
+ res << NUnitTest::GetResetTag();
+ NDiff::PrintChunks(res, TTraceDiffFormatter(reverse), chunks);
}
return res.Str();
}
@@ -478,18 +478,18 @@ unsigned NUnitTest::TTestFactory::Execute() {
#ifdef _unix_ // on Windows RTTI causes memory leaks
TString type = test->TypeId();
if (types.insert(type).second == false) {
- warnx("Duplicate suite found: %s (%s). Probably you have copy-pasted suite without changing it name", factory->Name().c_str(), type.c_str());
+ warnx("Duplicate suite found: %s (%s). Probably you have copy-pasted suite without changing it name", factory->Name().c_str(), type.c_str());
return 1;
}
#endif // _unix_
test->Parent_ = this;
-#ifdef UT_SKIP_EXCEPTIONS
+#ifdef UT_SKIP_EXCEPTIONS
try {
#endif
test->Execute();
-#ifdef UT_SKIP_EXCEPTIONS
+#ifdef UT_SKIP_EXCEPTIONS
} catch (...) {
}
#endif
@@ -497,7 +497,7 @@ unsigned NUnitTest::TTestFactory::Execute() {
Processor_->End();
- return bool(Processor_->FailTests());
+ return bool(Processor_->FailTests());
}
void NUnitTest::TTestFactory::SetProcessor(ITestSuiteProcessor* processor) {
diff --git a/library/cpp/testing/unittest/tests_data.cpp b/library/cpp/testing/unittest/tests_data.cpp
index b51cbc4b87..d1edd84196 100644
--- a/library/cpp/testing/unittest/tests_data.cpp
+++ b/library/cpp/testing/unittest/tests_data.cpp
@@ -6,52 +6,52 @@
#include <util/system/env.h>
#include <util/system/mutex.h>
-class TPortManager::TPortManagerImpl {
-public:
+class TPortManager::TPortManagerImpl {
+public:
TPortManagerImpl(bool reservePortsForCurrentTest)
: EnableReservePortsForCurrentTest(reservePortsForCurrentTest)
, DisableRandomPorts(!GetEnv("NO_RANDOM_PORTS").empty())
{
- }
-
- ui16 GetPort(ui16 port) {
+ }
+
+ ui16 GetPort(ui16 port) {
if (port && DisableRandomPorts) {
- return port;
- }
-
+ return port;
+ }
+
TAtomicSharedPtr<NTesting::IPort> holder(NTesting::GetFreePort().Release());
ReservePortForCurrentTest(holder);
-
+
TGuard<TMutex> g(Lock);
ReservedPorts.push_back(holder);
return holder->Get();
}
-
+
ui16 GetUdpPort(ui16 port) {
return GetPort(port);
}
-
+
ui16 GetTcpPort(ui16 port) {
return GetPort(port);
- }
-
+ }
+
ui16 GetTcpAndUdpPort(ui16 port) {
return GetPort(port);
}
- ui16 GetPortsRange(const ui16 startPort, const ui16 range) {
+ ui16 GetPortsRange(const ui16 startPort, const ui16 range) {
Y_UNUSED(startPort);
auto ports = NTesting::NLegacy::GetFreePortsRange(range);
ui16 first = ports[0];
- TGuard<TMutex> g(Lock);
+ TGuard<TMutex> g(Lock);
for (auto& port : ports) {
ReservedPorts.emplace_back(port.Release());
ReservePortForCurrentTest(ReservedPorts.back());
- }
+ }
return first;
- }
-
-private:
+ }
+
+private:
void ReservePortForCurrentTest(const TAtomicSharedPtr<NTesting::IPort>& portGuard) {
if (EnableReservePortsForCurrentTest) {
TTestBase* currentTest = NUnitTest::NPrivate::GetCurrentTest();
@@ -64,40 +64,40 @@ private:
}
private:
- TMutex Lock;
+ TMutex Lock;
TVector<TAtomicSharedPtr<NTesting::IPort>> ReservedPorts;
const bool EnableReservePortsForCurrentTest;
const bool DisableRandomPorts;
-};
-
+};
+
TPortManager::TPortManager(bool reservePortsForCurrentTest)
: Impl_(new TPortManagerImpl(reservePortsForCurrentTest))
-{
-}
-
-TPortManager::~TPortManager() {
-}
-
-ui16 TPortManager::GetPort(ui16 port) {
- return Impl_->GetTcpPort(port);
-}
+{
+}
+
+TPortManager::~TPortManager() {
+}
+
+ui16 TPortManager::GetPort(ui16 port) {
+ return Impl_->GetTcpPort(port);
+}
ui16 TPortManager::GetTcpPort(ui16 port) {
- return Impl_->GetTcpPort(port);
+ return Impl_->GetTcpPort(port);
}
ui16 TPortManager::GetUdpPort(ui16 port) {
- return Impl_->GetUdpPort(port);
+ return Impl_->GetUdpPort(port);
}
ui16 TPortManager::GetTcpAndUdpPort(ui16 port) {
- return Impl_->GetTcpAndUdpPort(port);
-}
-
-ui16 TPortManager::GetPortsRange(const ui16 startPort, const ui16 range) {
- return Impl_->GetPortsRange(startPort, range);
+ return Impl_->GetTcpAndUdpPort(port);
}
+ui16 TPortManager::GetPortsRange(const ui16 startPort, const ui16 range) {
+ return Impl_->GetPortsRange(startPort, range);
+}
+
ui16 GetRandomPort() {
TPortManager* pm = Singleton<TPortManager>(false);
return pm->GetPort();
diff --git a/library/cpp/testing/unittest/tests_data.h b/library/cpp/testing/unittest/tests_data.h
index 6536bc1ae6..dac65dfc72 100644
--- a/library/cpp/testing/unittest/tests_data.h
+++ b/library/cpp/testing/unittest/tests_data.h
@@ -2,8 +2,8 @@
#include <library/cpp/testing/common/env.h>
-#include <util/generic/noncopyable.h>
-#include <util/generic/ptr.h>
+#include <util/generic/noncopyable.h>
+#include <util/generic/ptr.h>
#include <util/generic/string.h>
#include <util/network/sock.h>
@@ -28,12 +28,12 @@ void SetReuseAddressAndPort(const TSocketType& sock) {
}
class TPortManager: public TNonCopyable {
-public:
+public:
TPortManager(bool reservePortsForCurrentTest = true);
- ~TPortManager();
+ ~TPortManager();
// Gets free TCP port
- ui16 GetPort(ui16 port = 0);
+ ui16 GetPort(ui16 port = 0);
// Gets free TCP port
ui16 GetTcpPort(ui16 port = 0);
@@ -44,11 +44,11 @@ public:
// Gets one free port for use in both TCP and UDP protocols
ui16 GetTcpAndUdpPort(ui16 port = 0);
- ui16 GetPortsRange(const ui16 startPort, const ui16 range);
-
-private:
- class TPortManagerImpl;
- THolder<TPortManagerImpl> Impl_;
+ ui16 GetPortsRange(const ui16 startPort, const ui16 range);
+
+private:
+ class TPortManagerImpl;
+ THolder<TPortManagerImpl> Impl_;
};
ui16 GetRandomPort();
diff --git a/library/cpp/testing/unittest/ut/main.cpp b/library/cpp/testing/unittest/ut/main.cpp
index e303e21e30..0614e77fe2 100644
--- a/library/cpp/testing/unittest/ut/main.cpp
+++ b/library/cpp/testing/unittest/ut/main.cpp
@@ -1,11 +1,11 @@
#include <library/cpp/testing/unittest/gtest.h>
#include <library/cpp/testing/unittest/registar.h>
#include <library/cpp/testing/unittest/tests_data.h>
-
-#include <util/generic/set.h>
-#include <util/network/sock.h>
-#include <util/system/env.h>
-#include <util/system/fs.h>
+
+#include <util/generic/set.h>
+#include <util/network/sock.h>
+#include <util/system/env.h>
+#include <util/system/fs.h>
TEST(GTest, Test1) {
UNIT_ASSERT_EQUAL(1, 1);
@@ -60,7 +60,7 @@ TEST(ETest, Test1) {
UNIT_CHECK_GENERATED_EXCEPTION(ythrow yexception(), yexception);
UNIT_CHECK_GENERATED_NO_EXCEPTION(true, yexception);
}
-
+
Y_UNIT_TEST_SUITE(TestSingleTestFixture)
{
Y_UNIT_TEST_F(Test3, TSimpleFixture) {
diff --git a/library/cpp/testing/unittest/utmain.cpp b/library/cpp/testing/unittest/utmain.cpp
index 305bc6b40f..e5c4185001 100644
--- a/library/cpp/testing/unittest/utmain.cpp
+++ b/library/cpp/testing/unittest/utmain.cpp
@@ -6,7 +6,7 @@
#include <library/cpp/json/writer/json.h>
#include <library/cpp/json/writer/json_value.h>
-#include <library/cpp/testing/common/env.h>
+#include <library/cpp/testing/common/env.h>
#include <library/cpp/testing/hook/hook.h>
#include <util/datetime/base.h>
@@ -19,9 +19,9 @@
#include <util/network/init.h>
-#include <util/stream/file.h>
+#include <util/stream/file.h>
#include <util/stream/output.h>
-#include <util/string/join.h>
+#include <util/string/join.h>
#include <util/string/util.h>
#include <util/system/defaults.h>
@@ -46,8 +46,8 @@
#define NOTE_IN_VALGRIND(test)
#endif
-const size_t MAX_COMMENT_MESSAGE_LENGTH = 1024 * 1024; // 1 MB
-
+const size_t MAX_COMMENT_MESSAGE_LENGTH = 1024 * 1024; // 1 MB
+
using namespace NUnitTest;
class TNullTraceWriterProcessor: public ITestSuiteProcessor {
@@ -56,8 +56,8 @@ class TNullTraceWriterProcessor: public ITestSuiteProcessor {
class TTraceWriterProcessor: public ITestSuiteProcessor {
public:
inline TTraceWriterProcessor(const char* traceFilePath, EOpenMode mode)
- : PrevTime(TInstant::Now())
- {
+ : PrevTime(TInstant::Now())
+ {
TraceFile = new TUnbufferedFileOutput(TFile(traceFilePath, mode | WrOnly | Seq));
}
@@ -68,17 +68,17 @@ private:
TVector<TString> ErrorMessages;
inline void Trace(const TString eventName, const NJson::TJsonValue eventValue) {
- NJsonWriter::TBuf json(NJsonWriter::HEM_UNSAFE);
- json.BeginObject();
-
- json.WriteKey("name").WriteString(eventName);
- json.WriteKey("value").WriteJsonValue(&eventValue);
- json.WriteKey("timestamp").WriteDouble(TInstant::Now().SecondsFloat(), PREC_NDIGITS, 14);
-
- json.EndObject();
-
- json.FlushTo(TraceFile.Get());
- *TraceFile << "\n";
+ NJsonWriter::TBuf json(NJsonWriter::HEM_UNSAFE);
+ json.BeginObject();
+
+ json.WriteKey("name").WriteString(eventName);
+ json.WriteKey("value").WriteJsonValue(&eventValue);
+ json.WriteKey("timestamp").WriteDouble(TInstant::Now().SecondsFloat(), PREC_NDIGITS, 14);
+
+ json.EndObject();
+
+ json.FlushTo(TraceFile.Get());
+ *TraceFile << "\n";
}
inline void TraceSubtestFinished(const char* className, const char* subtestName, const char* status, const TString comment, const TTestContext* context) {
@@ -88,43 +88,43 @@ private:
event.InsertValue("subtest", subtestName);
event.InsertValue("status", status);
event.InsertValue("comment", comment.data());
- event.InsertValue("time", (now - PrevTime).SecondsFloat());
+ event.InsertValue("time", (now - PrevTime).SecondsFloat());
if (context) {
for (const auto& metric : context->Metrics) {
event["metrics"].InsertValue(metric.first, metric.second);
}
}
Trace("subtest-finished", event);
-
+
PrevTime = now;
TString marker = Join("", "\n###subtest-finished:", className, "::", subtestName, "\n");
- Cout << marker;
- Cout.Flush();
- Cerr << comment;
- Cerr << marker;
- Cerr.Flush();
+ Cout << marker;
+ Cout.Flush();
+ Cerr << comment;
+ Cerr << marker;
+ Cerr.Flush();
}
virtual TString BuildComment(const char* message, const char* backTrace) {
- return NUnitTest::GetFormatTag("bad") +
+ return NUnitTest::GetFormatTag("bad") +
TString(message).substr(0, MAX_COMMENT_MESSAGE_LENGTH) +
NUnitTest::GetResetTag() +
TString("\n") +
NUnitTest::GetFormatTag("alt1") +
TString(backTrace).substr(0, MAX_COMMENT_MESSAGE_LENGTH) +
NUnitTest::GetResetTag();
- }
-
+ }
+
void OnBeforeTest(const TTest* test) override {
NJson::TJsonValue event;
event.InsertValue("class", test->unit->name);
event.InsertValue("subtest", test->name);
Trace("subtest-started", event);
TString marker = Join("", "\n###subtest-started:", test->unit->name, "::", test->name, "\n");
- Cout << marker;
- Cout.Flush();
- Cerr << marker;
- Cerr.Flush();
+ Cout << marker;
+ Cout.Flush();
+ Cerr << marker;
+ Cerr.Flush();
}
void OnUnitStart(const TUnit* unit) override {
@@ -552,13 +552,13 @@ public:
if (Verbose_) {
return true;
} else {
- Stream_ << name << "\n";
+ Stream_ << name << "\n";
return false;
}
}
bool CheckAccessTest(TString suite, const char* name) override {
- Stream_ << suite << "::" << name << "\n";
+ Stream_ << suite << "::" << name << "\n";
return false;
}
@@ -601,7 +601,7 @@ static const TWinEnvironment Instance;
#endif // _win_
static int DoList(bool verbose, IOutputStream& stream) {
- TEnumeratingProcessor eproc(verbose, stream);
+ TEnumeratingProcessor eproc(verbose, stream);
TTestFactory::Instance().SetProcessor(&eproc);
TTestFactory::Instance().Execute();
return 0;
@@ -625,28 +625,28 @@ static int DoUsage(const char* progname) {
return 0;
}
-#if defined(_linux_) && defined(CLANG_COVERAGE)
-extern "C" int __llvm_profile_write_file(void);
-
-static void GracefulShutdownHandler(int) {
- try {
- __llvm_profile_write_file();
- } catch (...) {
- }
- abort();
-}
-#endif
-
+#if defined(_linux_) && defined(CLANG_COVERAGE)
+extern "C" int __llvm_profile_write_file(void);
+
+static void GracefulShutdownHandler(int) {
+ try {
+ __llvm_profile_write_file();
+ } catch (...) {
+ }
+ abort();
+}
+#endif
+
int NUnitTest::RunMain(int argc, char** argv) {
-#if defined(_linux_) && defined(CLANG_COVERAGE)
- {
- struct sigaction sa;
- memset(&sa, 0, sizeof(sa));
- sa.sa_handler = GracefulShutdownHandler;
- sa.sa_flags = SA_SIGINFO | SA_RESTART;
- Y_VERIFY(!sigaction(SIGUSR2, &sa, nullptr));
- }
-#endif
+#if defined(_linux_) && defined(CLANG_COVERAGE)
+ {
+ struct sigaction sa;
+ memset(&sa, 0, sizeof(sa));
+ sa.sa_handler = GracefulShutdownHandler;
+ sa.sa_flags = SA_SIGINFO | SA_RESTART;
+ Y_VERIFY(!sigaction(SIGUSR2, &sa, nullptr));
+ }
+#endif
NTesting::THook::CallBeforeInit();
InitNetworkSubSystem();
@@ -668,13 +668,13 @@ int NUnitTest::RunMain(int argc, char** argv) {
IOutputStream* listStream = &Cout;
THolder<IOutputStream> listFile;
- enum EListType {
- DONT_LIST,
- LIST,
- LIST_VERBOSE
- };
- EListType listTests = DONT_LIST;
-
+ enum EListType {
+ DONT_LIST,
+ LIST,
+ LIST_VERBOSE
+ };
+ EListType listTests = DONT_LIST;
+
for (size_t i = 1; i < (size_t)argc; ++i) {
const char* name = argv[i];
@@ -682,9 +682,9 @@ int NUnitTest::RunMain(int argc, char** argv) {
if (strcmp(name, "--help") == 0 || strcmp(name, "-h") == 0) {
return DoUsage(argv[0]);
} else if (strcmp(name, "--list") == 0 || strcmp(name, "-l") == 0) {
- listTests = LIST;
+ listTests = LIST;
} else if (strcmp(name, "--list-verbose") == 0 || strcmp(name, "-A") == 0) {
- listTests = LIST_VERBOSE;
+ listTests = LIST_VERBOSE;
} else if (strcmp(name, "--print-before-suite=false") == 0) {
processor.SetPrintBeforeSuite(false);
} else if (strcmp(name, "--print-before-test=false") == 0) {
@@ -718,20 +718,20 @@ int NUnitTest::RunMain(int argc, char** argv) {
processor.BeQuiet();
NUnitTest::ShouldColorizeDiff = false;
processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], CreateAlways));
- } else if (strcmp(name, "--trace-path-append") == 0) {
+ } else if (strcmp(name, "--trace-path-append") == 0) {
++i;
processor.BeQuiet();
NUnitTest::ShouldColorizeDiff = false;
- processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], OpenAlways | ForAppend));
- } else if (strcmp(name, "--list-path") == 0) {
- ++i;
+ processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], OpenAlways | ForAppend));
+ } else if (strcmp(name, "--list-path") == 0) {
+ ++i;
listFile = MakeHolder<TFixedBufferFileOutput>(argv[i]);
- listStream = listFile.Get();
+ listStream = listFile.Get();
} else if (strcmp(name, "--test-param") == 0) {
++i;
TString param(argv[i]);
size_t assign = param.find('=');
- Singleton<::NPrivate::TTestEnv>()->AddTestParam(param.substr(0, assign), param.substr(assign + 1));
+ Singleton<::NPrivate::TTestEnv>()->AddTestParam(param.substr(0, assign), param.substr(assign + 1));
} else if (TString(name).StartsWith("--")) {
return DoUsage(argv[0]), 1;
} else if (*name == '-') {
@@ -743,9 +743,9 @@ int NUnitTest::RunMain(int argc, char** argv) {
}
}
}
- if (listTests != DONT_LIST) {
- return DoList(listTests == LIST_VERBOSE, *listStream);
- }
+ if (listTests != DONT_LIST) {
+ return DoList(listTests == LIST_VERBOSE, *listStream);
+ }
TTestFactory::Instance().SetProcessor(&processor);
diff --git a/library/cpp/testing/ya.make b/library/cpp/testing/ya.make
index 6a57ac2ee6..bff91ef52d 100644
--- a/library/cpp/testing/ya.make
+++ b/library/cpp/testing/ya.make
@@ -6,7 +6,7 @@ RECURSE(
benchmark/main
boost_test
boost_test_main
- dump_clang_coverage
+ dump_clang_coverage
gbenchmark_main
gmock
gmock_in_unittest
diff --git a/library/cpp/ya.make b/library/cpp/ya.make
index 8c1193b007..48e44aea55 100644
--- a/library/cpp/ya.make
+++ b/library/cpp/ya.make
@@ -29,14 +29,14 @@ RECURSE(
bucket_quoter
build_info
cache
- case_insensitive_string
+ case_insensitive_string
cgiparam
cgiparam/fuzz
cgiparam/ut
charset
charset/ut
chromium_trace
- clang_tidy
+ clang_tidy
clickhouse
clustered_hnsw
clustered_hnsw/ut
@@ -60,8 +60,8 @@ RECURSE(
config
config/extra
config/ut
- consistent_hash_ring
- consistent_hash_ring/ut
+ consistent_hash_ring
+ consistent_hash_ring/ut
consistent_hashing
consistent_hashing/ut
containers
@@ -142,7 +142,7 @@ RECURSE(
getopt/last_getopt_demo
getopt/small
getopt/ut
- getoptpb
+ getoptpb
gettimeofday
gradient_optimize
gradient_optimize/ut
@@ -290,7 +290,7 @@ RECURSE(
proto_config/plugin
proto_config/protos
proto_config/ut
- protobuf
+ protobuf
pybind
pybind/example
pybind/example/dynamic
@@ -342,7 +342,7 @@ RECURSE(
sqlite3/ut
sse
ssh
- ssh/ut
+ ssh/ut
ssh_sign
ssh_sign/ut
stat-handle
@@ -372,9 +372,9 @@ RECURSE(
token/serialization
token/serialization/ut
token/ut
- tokenclassifiers
- tokenizer
- tokenizer/ut
+ tokenclassifiers
+ tokenizer
+ tokenizer/ut
trace_usage
trace_usage/benchmark
trace_usage/ut
diff --git a/library/python/cores/__init__.py b/library/python/cores/__init__.py
index fdb1f82a46..137056a915 100644
--- a/library/python/cores/__init__.py
+++ b/library/python/cores/__init__.py
@@ -28,7 +28,7 @@ def recover_core_dump_file(binary_path, cwd, pid):
self.path = path
self.mask = mask
- cwd = cwd or os.getcwd()
+ cwd = cwd or os.getcwd()
system = platform.system().lower()
if system.startswith("linux"):
import stat
diff --git a/library/python/cores/ya.make b/library/python/cores/ya.make
index 76264e9cce..f99c4c1da6 100644
--- a/library/python/cores/ya.make
+++ b/library/python/cores/ya.make
@@ -1,6 +1,6 @@
OWNER(
prettyboy
- g:yatest
+ g:yatest
)
PY23_LIBRARY()
diff --git a/library/python/filelock/__init__.py b/library/python/filelock/__init__.py
index f81ff67f37..f62d08b66c 100644
--- a/library/python/filelock/__init__.py
+++ b/library/python/filelock/__init__.py
@@ -21,7 +21,7 @@ class AbstractFileLock(object):
def __init__(self, path):
self.path = path
- def acquire(self, blocking=True):
+ def acquire(self, blocking=True):
raise NotImplementedError
def release(self):
@@ -39,24 +39,24 @@ class _NixFileLock(AbstractFileLock):
def __init__(self, path):
super(_NixFileLock, self).__init__(path)
- from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
- self._locker = lambda lock, blocking: flock(lock, LOCK_EX if blocking else LOCK_EX | LOCK_NB)
+ from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
+ self._locker = lambda lock, blocking: flock(lock, LOCK_EX if blocking else LOCK_EX | LOCK_NB)
self._unlocker = lambda lock: flock(lock, LOCK_UN)
- self._lock = open(self.path, 'a')
- set_close_on_exec(self._lock)
-
- def acquire(self, blocking=True):
- import errno
- try:
- self._locker(self._lock, blocking)
- except IOError as e:
- if e.errno in (errno.EAGAIN, errno.EACCES) and not blocking:
- return False
- raise
- return True
+ self._lock = open(self.path, 'a')
+ set_close_on_exec(self._lock)
+
+ def acquire(self, blocking=True):
+ import errno
+ try:
+ self._locker(self._lock, blocking)
+ except IOError as e:
+ if e.errno in (errno.EAGAIN, errno.EACCES) and not blocking:
+ return False
+ raise
+ return True
def release(self):
- self._unlocker(self._lock)
+ self._unlocker(self._lock)
def __del__(self):
if hasattr(self, "_lock"):
@@ -81,26 +81,26 @@ class _WinFileLock(AbstractFileLock):
if e.errno != errno.EACCES or not os.path.isfile(path):
raise
- def acquire(self, blocking=True):
+ def acquire(self, blocking=True):
self._lock = open(self.path)
set_close_on_exec(self._lock)
-
+
import time
locked = False
while not locked:
locked = library.python.windows.lock_file(self._lock, 0, self._LOCKED_BYTES_NUM, raises=False)
- if locked:
- return True
- if blocking:
+ if locked:
+ return True
+ if blocking:
time.sleep(.5)
- else:
- return False
+ else:
+ return False
def release(self):
if self._lock:
library.python.windows.unlock_file(self._lock, 0, self._LOCKED_BYTES_NUM, raises=False)
- self._lock.close()
- self._lock = None
+ self._lock.close()
+ self._lock = None
class FileLock(AbstractFileLock):
@@ -113,9 +113,9 @@ class FileLock(AbstractFileLock):
else:
self._lock = _NixFileLock(path)
- def acquire(self, blocking=True):
- logger.debug('Acquiring filelock (blocking=%s): %s', blocking, self.path)
- return self._lock.acquire(blocking)
+ def acquire(self, blocking=True):
+ logger.debug('Acquiring filelock (blocking=%s): %s', blocking, self.path)
+ return self._lock.acquire(blocking)
def release(self):
logger.debug('Ensuring filelock released: %s', self.path)
diff --git a/library/python/filelock/ut/lib/test_filelock.py b/library/python/filelock/ut/lib/test_filelock.py
index 1b11d89123..a5624f827c 100644
--- a/library/python/filelock/ut/lib/test_filelock.py
+++ b/library/python/filelock/ut/lib/test_filelock.py
@@ -3,7 +3,7 @@ import time
import logging
import multiprocessing
import tempfile
-import threading
+import threading
import library.python.filelock
@@ -48,36 +48,36 @@ def test_filelock():
time1 = time2
-def test_filelock_init_acquired():
+def test_filelock_init_acquired():
temp_dir = tempfile.mkdtemp()
lock_path = os.path.join(temp_dir, "file.lock")
with library.python.filelock.FileLock(lock_path):
sublock = library.python.filelock.FileLock(lock_path)
del sublock
-
-
-def test_concurrent_lock():
- filename = 'con.lock'
-
- def lock():
- l = library.python.filelock.FileLock(filename)
- time.sleep(1)
- l.acquire()
- l.release()
- try:
- os.unlink(filename)
- except OSError:
- pass
-
- threads = []
- for i in range(100):
- t = threading.Thread(target=lock)
- t.daemon = True
- threads.append(t)
-
- for t in threads:
- t.start()
-
- for t in threads:
- t.join()
+
+
+def test_concurrent_lock():
+ filename = 'con.lock'
+
+ def lock():
+ l = library.python.filelock.FileLock(filename)
+ time.sleep(1)
+ l.acquire()
+ l.release()
+ try:
+ os.unlink(filename)
+ except OSError:
+ pass
+
+ threads = []
+ for i in range(100):
+ t = threading.Thread(target=lock)
+ t.daemon = True
+ threads.append(t)
+
+ for t in threads:
+ t.start()
+
+ for t in threads:
+ t.join()
diff --git a/library/python/fs/__init__.py b/library/python/fs/__init__.py
index b1b7cde079..c9651b03ae 100644
--- a/library/python/fs/__init__.py
+++ b/library/python/fs/__init__.py
@@ -4,11 +4,11 @@ import codecs
import errno
import logging
import os
-import random
+import random
import shutil
import six
import stat
-import sys
+import sys
import library.python.func
import library.python.strings
@@ -202,13 +202,13 @@ def hardlink_or_copy(src, lnk):
if WindowsError is not None and isinstance(exc, WindowsError) and exc.winerror == 1142: # too many hardlinks
return True
# cross-device hardlink or too many hardlinks, or some known WSL error
- if isinstance(exc, OSError) and exc.errno in (
- errno.EXDEV,
- errno.EMLINK,
- errno.EINVAL,
- errno.EACCES,
- errno.EPERM,
- ):
+ if isinstance(exc, OSError) and exc.errno in (
+ errno.EXDEV,
+ errno.EMLINK,
+ errno.EINVAL,
+ errno.EACCES,
+ errno.EPERM,
+ ):
return True
return False
@@ -312,9 +312,9 @@ def read_file_unicode(path, binary=True, enc='utf-8'):
@errorfix_win
def open_file(*args, **kwargs):
- return (
- library.python.windows.open_file(*args, **kwargs) if library.python.windows.on_win() else open(*args, **kwargs)
- )
+ return (
+ library.python.windows.open_file(*args, **kwargs) if library.python.windows.on_win() else open(*args, **kwargs)
+ )
# Atomic file write
@@ -363,15 +363,15 @@ def get_tree_size(path, recursive=False, raise_all_errors=False):
# Directory copy ported from Python 3
-def copytree3(
- src,
- dst,
- symlinks=False,
- ignore=None,
- copy_function=shutil.copy2,
- ignore_dangling_symlinks=False,
- dirs_exist_ok=False,
-):
+def copytree3(
+ src,
+ dst,
+ symlinks=False,
+ ignore=None,
+ copy_function=shutil.copy2,
+ ignore_dangling_symlinks=False,
+ dirs_exist_ok=False,
+):
"""Recursively copy a directory tree.
The copytree3 is a port of shutil.copytree function from python-3.2.
@@ -467,35 +467,35 @@ def walk_relative(path, topdown=True, onerror=None, followlinks=False):
for dirpath, dirnames, filenames in os.walk(path, topdown=topdown, onerror=onerror, followlinks=followlinks):
yield os.path.relpath(dirpath, path), dirnames, filenames
-
+
def supports_clone():
if 'darwin' in sys.platform:
import platform
-
+
return list(map(int, platform.mac_ver()[0].split('.'))) >= [10, 13]
return False
-
-
-def commonpath(paths):
- assert paths
- if len(paths) == 1:
- return next(iter(paths))
-
- split_paths = [path.split(os.sep) for path in paths]
- smin = min(split_paths)
- smax = max(split_paths)
-
- common = smin
- for i, c in enumerate(smin):
- if c != smax[i]:
- common = smin[:i]
- break
-
- return os.path.sep.join(common)
-
-
-def set_execute_bits(filename):
- stm = os.stat(filename).st_mode
- exe = stm | 0o111
- if stm != exe:
- os.chmod(filename, exe)
+
+
+def commonpath(paths):
+ assert paths
+ if len(paths) == 1:
+ return next(iter(paths))
+
+ split_paths = [path.split(os.sep) for path in paths]
+ smin = min(split_paths)
+ smax = max(split_paths)
+
+ common = smin
+ for i, c in enumerate(smin):
+ if c != smax[i]:
+ common = smin[:i]
+ break
+
+ return os.path.sep.join(common)
+
+
+def set_execute_bits(filename):
+ stm = os.stat(filename).st_mode
+ exe = stm | 0o111
+ if stm != exe:
+ os.chmod(filename, exe)
diff --git a/library/python/fs/test/test_fs.py b/library/python/fs/test/test_fs.py
index 9e2c70c069..5c1c6030b5 100644
--- a/library/python/fs/test/test_fs.py
+++ b/library/python/fs/test/test_fs.py
@@ -18,16 +18,16 @@ def in_env(case):
def wrapped_case(*args, **kwargs):
with library.python.tmp.temp_dir() as temp_dir:
case(lambda path: os.path.join(temp_dir, path))
-
+
return wrapped_case
def mkfile(path, data=''):
with open(path, 'wb') as f:
if data:
- f.write(data) if isinstance(data, six.binary_type) else f.write(
- data.encode(library.python.strings.fs_encoding())
- )
+ f.write(data) if isinstance(data, six.binary_type) else f.write(
+ data.encode(library.python.strings.fs_encoding())
+ )
def mktree_example(path, name):
@@ -820,20 +820,20 @@ def test_read_file_empty(path):
@in_env
def test_read_file_multiline(path):
mkfile(path('src'), 'SRC line 1\nSRC line 2\n')
- assert (
- library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
- == 'SRC line 1\nSRC line 2\n'
- )
+ assert (
+ library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
+ == 'SRC line 1\nSRC line 2\n'
+ )
assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
@in_env
def test_read_file_multiline_crlf(path):
mkfile(path('src'), 'SRC line 1\r\nSRC line 2\r\n')
- assert (
- library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
- == 'SRC line 1\r\nSRC line 2\r\n'
- )
+ assert (
+ library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
+ == 'SRC line 1\r\nSRC line 2\r\n'
+ )
if library.python.windows.on_win() or six.PY3: # universal newlines are by default in text mode in python3
assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
else:
@@ -1005,9 +1005,9 @@ def test_copy_tree_custom_copy_function():
shutil.copy2(src, dst)
copied.append(dst)
- library.python.fs.copy_tree(
- "test_copy_tree_src", yatest.common.work_path("test_copy_tree_dst"), copy_function=copy_function
- )
+ library.python.fs.copy_tree(
+ "test_copy_tree_src", yatest.common.work_path("test_copy_tree_dst"), copy_function=copy_function
+ )
assert len(copied) == 2
assert yatest.common.work_path("test_copy_tree_dst/deepper/deepper.txt") in copied
assert yatest.common.work_path("test_copy_tree_dst/deepper/inner/inner.txt") in copied
@@ -1019,19 +1019,19 @@ def test_copy2():
assert os.path.islink("link2")
assert os.readlink("link2") == "non-existent"
-
-
-def test_commonpath():
- pj = os.path.join
- pja = lambda *x: os.path.abspath(pj(*x))
-
- assert library.python.fs.commonpath(['a', 'b']) == ''
- assert library.python.fs.commonpath([pj('t', '1')]) == pj('t', '1')
- assert library.python.fs.commonpath([pj('t', '1'), pj('t', '2')]) == pj('t')
- assert library.python.fs.commonpath([pj('t', '1', '2'), pj('t', '1', '2')]) == pj('t', '1', '2')
- assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2')]) == pj('t', '1')
- assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2'), pj('t', '1', '3')]) == pj('t', '1')
-
- assert library.python.fs.commonpath([pja('t', '1', '1'), pja('t', '1', '2')]) == pja('t', '1')
-
- assert library.python.fs.commonpath({pj('t', '1'), pj('t', '2')}) == pj('t')
+
+
+def test_commonpath():
+ pj = os.path.join
+ pja = lambda *x: os.path.abspath(pj(*x))
+
+ assert library.python.fs.commonpath(['a', 'b']) == ''
+ assert library.python.fs.commonpath([pj('t', '1')]) == pj('t', '1')
+ assert library.python.fs.commonpath([pj('t', '1'), pj('t', '2')]) == pj('t')
+ assert library.python.fs.commonpath([pj('t', '1', '2'), pj('t', '1', '2')]) == pj('t', '1', '2')
+ assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2')]) == pj('t', '1')
+ assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2'), pj('t', '1', '3')]) == pj('t', '1')
+
+ assert library.python.fs.commonpath([pja('t', '1', '1'), pja('t', '1', '2')]) == pja('t', '1')
+
+ assert library.python.fs.commonpath({pj('t', '1'), pj('t', '2')}) == pj('t')
diff --git a/library/python/func/__init__.py b/library/python/func/__init__.py
index 7424361635..e37ea95c7c 100644
--- a/library/python/func/__init__.py
+++ b/library/python/func/__init__.py
@@ -1,6 +1,6 @@
import functools
-import threading
-import collections
+import threading
+import collections
def map0(func, value):
@@ -20,12 +20,12 @@ class _Result(object):
def lazy(func):
result = _Result()
- @functools.wraps(func)
- def wrapper(*args):
+ @functools.wraps(func)
+ def wrapper(*args):
try:
return result.result
except AttributeError:
- result.result = func(*args)
+ result.result = func(*args)
return result.result
@@ -64,54 +64,54 @@ class lazy_classproperty(object):
return getattr(owner, attr_name)
-def memoize(limit=0, thread_local=False):
- assert limit >= 0
-
+def memoize(limit=0, thread_local=False):
+ assert limit >= 0
+
def decorator(func):
- memory = {}
- lock = threading.Lock()
-
- if limit:
- keys = collections.deque()
-
- def get(args):
- try:
- return memory[args]
- except KeyError:
- with lock:
- if args not in memory:
- fargs = args[-1]
- memory[args] = func(*fargs)
- keys.append(args)
- if len(keys) > limit:
- del memory[keys.popleft()]
- return memory[args]
-
- else:
-
- def get(args):
- if args not in memory:
- with lock:
- if args not in memory:
- fargs = args[-1]
- memory[args] = func(*fargs)
- return memory[args]
-
- if thread_local:
-
- @functools.wraps(func)
- def wrapper(*args):
- th = threading.current_thread()
- return get((th.ident, th.name, args))
-
- else:
-
- @functools.wraps(func)
- def wrapper(*args):
- return get(('', '', args))
-
- return wrapper
-
+ memory = {}
+ lock = threading.Lock()
+
+ if limit:
+ keys = collections.deque()
+
+ def get(args):
+ try:
+ return memory[args]
+ except KeyError:
+ with lock:
+ if args not in memory:
+ fargs = args[-1]
+ memory[args] = func(*fargs)
+ keys.append(args)
+ if len(keys) > limit:
+ del memory[keys.popleft()]
+ return memory[args]
+
+ else:
+
+ def get(args):
+ if args not in memory:
+ with lock:
+ if args not in memory:
+ fargs = args[-1]
+ memory[args] = func(*fargs)
+ return memory[args]
+
+ if thread_local:
+
+ @functools.wraps(func)
+ def wrapper(*args):
+ th = threading.current_thread()
+ return get((th.ident, th.name, args))
+
+ else:
+
+ @functools.wraps(func)
+ def wrapper(*args):
+ return get(('', '', args))
+
+ return wrapper
+
return decorator
@@ -119,52 +119,52 @@ def memoize(limit=0, thread_local=False):
def compose(*functions):
def compose2(f, g):
return lambda x: f(g(x))
-
+
return functools.reduce(compose2, functions, lambda x: x)
-
-
-class Singleton(type):
- _instances = {}
-
- def __call__(cls, *args, **kwargs):
- if cls not in cls._instances:
- cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
- return cls._instances[cls]
-
-
-def stable_uniq(it):
- seen = set()
- res = []
- for e in it:
- if e not in seen:
- res.append(e)
- seen.add(e)
- return res
-
-
-def first(it):
- for d in it:
- if d:
- return d
-
-
-def split(data, func):
- l, r = [], []
- for e in data:
- if func(e):
- l.append(e)
- else:
- r.append(e)
- return l, r
+
+
+class Singleton(type):
+ _instances = {}
+
+ def __call__(cls, *args, **kwargs):
+ if cls not in cls._instances:
+ cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+ return cls._instances[cls]
+
+
+def stable_uniq(it):
+ seen = set()
+ res = []
+ for e in it:
+ if e not in seen:
+ res.append(e)
+ seen.add(e)
+ return res
+
+
+def first(it):
+ for d in it:
+ if d:
+ return d
+
+
+def split(data, func):
+ l, r = [], []
+ for e in data:
+ if func(e):
+ l.append(e)
+ else:
+ r.append(e)
+ return l, r
def flatten_dict(dd, separator='.', prefix=''):
- return (
- {
- prefix + separator + k if prefix else k: v
- for kk, vv in dd.items()
- for k, v in flatten_dict(vv, separator, kk).items()
- }
- if isinstance(dd, dict)
- else {prefix: dd}
- )
+ return (
+ {
+ prefix + separator + k if prefix else k: v
+ for kk, vv in dd.items()
+ for k, v in flatten_dict(vv, separator, kk).items()
+ }
+ if isinstance(dd, dict)
+ else {prefix: dd}
+ )
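Note: the helpers above are exercised by the tests in the next file; as a quick reference, here is a minimal usage sketch of `memoize` and `flatten_dict` (a simplified illustration, not part of the diff):

```python
# Minimal usage sketch for the helpers diffed above (illustrative, not part of the diff).
import library.python.func as func

@func.memoize(limit=2)            # keep at most two cached results, oldest evicted first
def square(x):
    return x * x

square(2)   # computed and cached
square(2)   # served from the cache
square(3)   # cached
square(4)   # cached; the entry for x=2 is evicted because limit=2

# flatten_dict collapses nested dicts into dotted keys
assert func.flatten_dict({'a': 1, 'b': {'c': {'d': 2}}}) == {'a': 1, 'b.c.d': 2}
```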
diff --git a/library/python/func/ut/test_func.py b/library/python/func/ut/test_func.py
index 3c4fad1a07..d283402374 100644
--- a/library/python/func/ut/test_func.py
+++ b/library/python/func/ut/test_func.py
@@ -1,13 +1,13 @@
import pytest
-import threading
+import threading
import library.python.func as func
def test_map0():
- assert None is func.map0(lambda x: x + 1, None)
+ assert None is func.map0(lambda x: x + 1, None)
assert 3 == func.map0(lambda x: x + 1, 2)
- assert None is func.map0(len, None)
+ assert None is func.map0(len, None)
assert 2 == func.map0(len, [1, 2])
@@ -26,34 +26,34 @@ def test_memoize():
Counter._qty = getattr(Counter, '_qty', 0) + 1
return Counter._qty
- @func.memoize()
+ @func.memoize()
def t1(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t2(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t3(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t4(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t5(a, b, c):
return a + b + c, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t6():
return Counter.inc()
- @func.memoize(limit=2)
- def t7(a, _b):
- return a, Counter.inc()
-
+ @func.memoize(limit=2)
+ def t7(a, _b):
+ return a, Counter.inc()
+
assert (1, 1) == t1(1)
assert (1, 1) == t1(1)
assert (2, 2) == t1(2)
@@ -82,48 +82,48 @@ def test_memoize():
assert 11 == t6()
assert 11 == t6()
- assert (1, 12) == t7(1, None)
- assert (2, 13) == t7(2, None)
- assert (1, 12) == t7(1, None)
- assert (2, 13) == t7(2, None)
- # removed result for (1, None)
- assert (3, 14) == t7(3, None)
- assert (1, 15) == t7(1, None)
-
- class ClassWithMemoizedMethod(object):
- def __init__(self):
- self.a = 0
-
- @func.memoize(True)
- def t(self, i):
- self.a += i
- return i
-
- obj = ClassWithMemoizedMethod()
- assert 10 == obj.t(10)
- assert 10 == obj.a
- assert 10 == obj.t(10)
- assert 10 == obj.a
-
- assert 20 == obj.t(20)
- assert 30 == obj.a
- assert 20 == obj.t(20)
- assert 30 == obj.a
-
-
-def test_first():
- assert func.first([0, [], (), None, False, {}, 0.0, '1', 0]) == '1'
- assert func.first([]) is None
- assert func.first([0]) is None
-
-
-def test_split():
- assert func.split([1, 1], lambda x: x) == ([1, 1], [])
- assert func.split([0, 0], lambda x: x) == ([], [0, 0])
- assert func.split([], lambda x: x) == ([], [])
- assert func.split([1, 0, 1], lambda x: x) == ([1, 1], [0])
-
-
+ assert (1, 12) == t7(1, None)
+ assert (2, 13) == t7(2, None)
+ assert (1, 12) == t7(1, None)
+ assert (2, 13) == t7(2, None)
+ # removed result for (1, None)
+ assert (3, 14) == t7(3, None)
+ assert (1, 15) == t7(1, None)
+
+ class ClassWithMemoizedMethod(object):
+ def __init__(self):
+ self.a = 0
+
+ @func.memoize(True)
+ def t(self, i):
+ self.a += i
+ return i
+
+ obj = ClassWithMemoizedMethod()
+ assert 10 == obj.t(10)
+ assert 10 == obj.a
+ assert 10 == obj.t(10)
+ assert 10 == obj.a
+
+ assert 20 == obj.t(20)
+ assert 30 == obj.a
+ assert 20 == obj.t(20)
+ assert 30 == obj.a
+
+
+def test_first():
+ assert func.first([0, [], (), None, False, {}, 0.0, '1', 0]) == '1'
+ assert func.first([]) is None
+ assert func.first([0]) is None
+
+
+def test_split():
+ assert func.split([1, 1], lambda x: x) == ([1, 1], [])
+ assert func.split([0, 0], lambda x: x) == ([], [0, 0])
+ assert func.split([], lambda x: x) == ([], [])
+ assert func.split([1, 0, 1], lambda x: x) == ([1, 1], [0])
+
+
def test_flatten_dict():
assert func.flatten_dict({"a": 1, "b": 2}) == {"a": 1, "b": 2}
assert func.flatten_dict({"a": 1}) == {"a": 1}
@@ -132,31 +132,31 @@ def test_flatten_dict():
assert func.flatten_dict({"a": 1, "b": {"c": {"d": 2}}}, separator="/") == {"a": 1, "b/c/d": 2}
-def test_memoize_thread_local():
- class Counter(object):
- def __init__(self, s):
- self.val = s
-
- def inc(self):
- self.val += 1
- return self.val
-
- @func.memoize(thread_local=True)
- def get_counter(start):
- return Counter(start)
-
- def th_inc():
- assert get_counter(0).inc() == 1
- assert get_counter(0).inc() == 2
- assert get_counter(10).inc() == 11
- assert get_counter(10).inc() == 12
-
- th_inc()
-
- th = threading.Thread(target=th_inc)
- th.start()
- th.join()
-
-
+def test_memoize_thread_local():
+ class Counter(object):
+ def __init__(self, s):
+ self.val = s
+
+ def inc(self):
+ self.val += 1
+ return self.val
+
+ @func.memoize(thread_local=True)
+ def get_counter(start):
+ return Counter(start)
+
+ def th_inc():
+ assert get_counter(0).inc() == 1
+ assert get_counter(0).inc() == 2
+ assert get_counter(10).inc() == 11
+ assert get_counter(10).inc() == 12
+
+ th_inc()
+
+ th = threading.Thread(target=th_inc)
+ th.start()
+ th.join()
+
+
if __name__ == '__main__':
pytest.main([__file__])
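Note: a condensed illustration of the `memoize(thread_local=True)` behaviour exercised by `test_memoize_thread_local` above (the thread usage is illustrative):

```python
# Each thread sees its own cached value when thread_local=True (illustrative sketch).
import threading
import library.python.func as func

@func.memoize(thread_local=True)
def get_list():
    return []                      # a fresh list per thread, then cached for that thread

get_list().append('main')

worker = threading.Thread(target=lambda: get_list().append('worker'))
worker.start()
worker.join()

assert get_list() == ['main']      # the worker's append went to a different cached list
```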
diff --git a/library/python/func/ut/ya.make b/library/python/func/ut/ya.make
index 5ec6c1225e..4d7e8b8f5b 100644
--- a/library/python/func/ut/ya.make
+++ b/library/python/func/ut/ya.make
@@ -1,11 +1,11 @@
-OWNER(g:yatool)
-
-PY23_TEST()
-
-TEST_SRCS(test_func.py)
-
-PEERDIR(
- library/python/func
-)
-
-END()
+OWNER(g:yatool)
+
+PY23_TEST()
+
+TEST_SRCS(test_func.py)
+
+PEERDIR(
+ library/python/func
+)
+
+END()
diff --git a/library/python/func/ya.make b/library/python/func/ya.make
index 9d414a976e..2f7b4890db 100644
--- a/library/python/func/ya.make
+++ b/library/python/func/ya.make
@@ -5,7 +5,7 @@ PY23_LIBRARY()
PY_SRCS(__init__.py)
END()
-
-RECURSE_FOR_TESTS(
- ut
-)
+
+RECURSE_FOR_TESTS(
+ ut
+)
diff --git a/library/python/pytest/main.py b/library/python/pytest/main.py
index 6296bd6f0f..aa08f846b1 100644
--- a/library/python/pytest/main.py
+++ b/library/python/pytest/main.py
@@ -1,47 +1,47 @@
-import os
+import os
import sys
import time
-
+
import __res
-FORCE_EXIT_TESTSFAILED_ENV = 'FORCE_EXIT_TESTSFAILED'
-
+FORCE_EXIT_TESTSFAILED_ENV = 'FORCE_EXIT_TESTSFAILED'
+
def main():
import library.python.pytest.context as context
context.Ctx["YA_PYTEST_START_TIMESTAMP"] = time.time()
- profile = None
- if '--profile-pytest' in sys.argv:
- sys.argv.remove('--profile-pytest')
-
- import pstats
- import cProfile
- profile = cProfile.Profile()
- profile.enable()
+ profile = None
+ if '--profile-pytest' in sys.argv:
+ sys.argv.remove('--profile-pytest')
- # Reset influencing env. vars
- # For more info see library/python/testing/yatest_common/yatest/common/errors.py
- if FORCE_EXIT_TESTSFAILED_ENV in os.environ:
- del os.environ[FORCE_EXIT_TESTSFAILED_ENV]
+ import pstats
+ import cProfile
+ profile = cProfile.Profile()
+ profile.enable()
+ # Reset influencing env. vars
+ # For more info see library/python/testing/yatest_common/yatest/common/errors.py
+ if FORCE_EXIT_TESTSFAILED_ENV in os.environ:
+ del os.environ[FORCE_EXIT_TESTSFAILED_ENV]
+
if "Y_PYTHON_CLEAR_ENTRY_POINT" in os.environ:
if "Y_PYTHON_ENTRY_POINT" in os.environ:
del os.environ["Y_PYTHON_ENTRY_POINT"]
del os.environ["Y_PYTHON_CLEAR_ENTRY_POINT"]
- listing_mode = '--collect-only' in sys.argv
- yatest_runner = os.environ.get('YA_TEST_RUNNER') == '1'
-
- import pytest
-
- import library.python.pytest.plugins.collection as collection
- import library.python.pytest.plugins.ya as ya
- import library.python.pytest.plugins.conftests as conftests
-
+ listing_mode = '--collect-only' in sys.argv
+ yatest_runner = os.environ.get('YA_TEST_RUNNER') == '1'
+
+ import pytest
+
+ import library.python.pytest.plugins.collection as collection
+ import library.python.pytest.plugins.ya as ya
+ import library.python.pytest.plugins.conftests as conftests
+
import _pytest.assertion
from _pytest.monkeypatch import MonkeyPatch
- from . import rewrite
+ from . import rewrite
m = MonkeyPatch()
m.setattr(_pytest.assertion.rewrite, "AssertionRewritingHook", rewrite.AssertionRewritingHook)
@@ -52,10 +52,10 @@ def main():
if name.startswith(prefix) and not name.endswith('.conftest')
]
- doctest_packages = __res.find("PY_DOCTEST_PACKAGES") or ""
- if isinstance(doctest_packages, bytes):
- doctest_packages = doctest_packages.decode('utf-8')
- doctest_packages = doctest_packages.split()
+ doctest_packages = __res.find("PY_DOCTEST_PACKAGES") or ""
+ if isinstance(doctest_packages, bytes):
+ doctest_packages = doctest_packages.decode('utf-8')
+ doctest_packages = doctest_packages.split()
def is_doctest_module(name):
for package in doctest_packages:
@@ -85,31 +85,31 @@ def main():
return new_paths
sys.path = remove_user_site(sys.path)
- rc = pytest.main(plugins=[
+ rc = pytest.main(plugins=[
collection.CollectionPlugin(test_modules, doctest_modules),
ya,
conftests,
- ])
-
+ ])
+
if rc == 5:
# don't care about EXIT_NOTESTSCOLLECTED
rc = 0
- if rc == 1 and yatest_runner and not listing_mode and not os.environ.get(FORCE_EXIT_TESTSFAILED_ENV) == '1':
- # XXX it's a place for future improvements
- # Test wrapper should terminate with 0 exit code if there are common test failures
- # and report it with trace-file machinery.
- # However, there are several cases when we don't want to suppress exit_code:
- # - listing machinery doesn't use trace-file currently and relies on stdout and exit_code
- # - RestartTestException and InfrastructureException require a non-zero exit_code to be processed correctly
- rc = 0
-
- if profile:
- profile.disable()
- ps = pstats.Stats(profile, stream=sys.stderr).sort_stats('cumulative')
- ps.print_stats()
-
- sys.exit(rc)
+ if rc == 1 and yatest_runner and not listing_mode and not os.environ.get(FORCE_EXIT_TESTSFAILED_ENV) == '1':
+ # XXX it's a place for future improvements
+ # Test wrapper should terminate with 0 exit code if there are common test failures
+ # and report it with trace-file machinery.
+ # However, there are several cases when we don't want to suppress exit_code:
+ # - listing machinery doesn't use trace-file currently and relies on stdout and exit_code
+ # - RestartTestException and InfrastructureException require a non-zero exit_code to be processed correctly
+ rc = 0
+
+ if profile:
+ profile.disable()
+ ps = pstats.Stats(profile, stream=sys.stderr).sort_stats('cumulative')
+ ps.print_stats()
+
+ sys.exit(rc)
if __name__ == '__main__':
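Note: the `--profile-pytest` handling above follows a common wrap-the-runner pattern; a reduced standalone sketch (the `run_tests` callable is a placeholder, not part of the library):

```python
# Standalone sketch of the cProfile wrapping used by main() above (placeholder runner).
import cProfile
import pstats
import sys

def run_with_optional_profile(run_tests):
    profile = None
    if '--profile-pytest' in sys.argv:
        sys.argv.remove('--profile-pytest')
        profile = cProfile.Profile()
        profile.enable()

    rc = run_tests()                     # e.g. pytest.main(...)

    if profile:
        profile.disable()
        stats = pstats.Stats(profile, stream=sys.stderr).sort_stats('cumulative')
        stats.print_stats()
    return rc
```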
diff --git a/library/python/pytest/plugins/collection.py b/library/python/pytest/plugins/collection.py
index e36f47a78f..93932e4b02 100644
--- a/library/python/pytest/plugins/collection.py
+++ b/library/python/pytest/plugins/collection.py
@@ -89,7 +89,7 @@ def pytest_ignore_collect(module, session, filenames_from_full_filters, accept_f
if test_file_filter is None:
return False
if module.name != test_file_filter.replace('/', '.'):
- return True
+ return True
return False
diff --git a/library/python/pytest/plugins/fakeid_py2.py b/library/python/pytest/plugins/fakeid_py2.py
index 8b26148e2e..8efc368629 100644
--- a/library/python/pytest/plugins/fakeid_py2.py
+++ b/library/python/pytest/plugins/fakeid_py2.py
@@ -1,2 +1,2 @@
-# Inc this number to change uid for every PYTEST() target
-fake_id = 0
+# Inc this number to change uid for every PYTEST() target
+fake_id = 0
diff --git a/library/python/pytest/plugins/fakeid_py3.py b/library/python/pytest/plugins/fakeid_py3.py
index 247cc8b29d..d6812eadba 100644
--- a/library/python/pytest/plugins/fakeid_py3.py
+++ b/library/python/pytest/plugins/fakeid_py3.py
@@ -1,2 +1,2 @@
-# Inc this number to change uid for every PY3TEST() target
-fake_id = 10
+# Inc this number to change uid for every PY3TEST() target
+fake_id = 10
diff --git a/library/python/pytest/plugins/ya.make b/library/python/pytest/plugins/ya.make
index c15d6f759d..638c532e86 100644
--- a/library/python/pytest/plugins/ya.make
+++ b/library/python/pytest/plugins/ya.make
@@ -1,4 +1,4 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
PY23_LIBRARY()
@@ -10,23 +10,23 @@ PY_SRCS(
)
PEERDIR(
- library/python/filelock
+ library/python/filelock
library/python/find_root
library/python/testing/filter
)
-IF (PYTHON2)
- PY_SRCS(
- fakeid_py2.py
- )
-
- PEERDIR(
- contrib/python/faulthandler
- )
-ELSE()
- PY_SRCS(
- fakeid_py3.py
- )
-ENDIF()
-
+IF (PYTHON2)
+ PY_SRCS(
+ fakeid_py2.py
+ )
+
+ PEERDIR(
+ contrib/python/faulthandler
+ )
+ELSE()
+ PY_SRCS(
+ fakeid_py3.py
+ )
+ENDIF()
+
END()
diff --git a/library/python/pytest/plugins/ya.py b/library/python/pytest/plugins/ya.py
index 1bde03042d..d7398ae90d 100644
--- a/library/python/pytest/plugins/ya.py
+++ b/library/python/pytest/plugins/ya.py
@@ -1,15 +1,15 @@
-# coding: utf-8
-
-import base64
-import errno
-import re
+# coding: utf-8
+
+import base64
+import errno
+import re
import sys
import os
import logging
import fnmatch
import json
import time
-import traceback
+import traceback
import collections
import signal
import inspect
@@ -28,7 +28,7 @@ import _pytest.outcomes
import _pytest.skipping
from _pytest.warning_types import PytestUnhandledCoroutineWarning
-
+
from yatest_lib import test_splitter
try:
@@ -42,12 +42,12 @@ except ImportError:
# fallback for pytest script mode
import yatest_tools as tools
-try:
- from library.python import filelock
-except ImportError:
- filelock = None
-
-
+try:
+ from library.python import filelock
+except ImportError:
+ filelock = None
+
+
import yatest_lib.tools
import yatest_lib.external as canon
@@ -61,7 +61,7 @@ yatest_logger = logging.getLogger("ya.test")
_pytest.main.EXIT_NOTESTSCOLLECTED = 0
-SHUTDOWN_REQUESTED = False
+SHUTDOWN_REQUESTED = False
pytest_config = None
@@ -71,8 +71,8 @@ def configure_pdb_on_demand():
if hasattr(signal, "SIGUSR1"):
def on_signal(*args):
- import ipdb
- ipdb.set_trace()
+ import ipdb
+ ipdb.set_trace()
signal.signal(signal.SIGUSR1, on_signal)
@@ -147,40 +147,40 @@ def pytest_addoption(parser):
parser.addoption("--python-path", action="store", dest="python_path", default="", help="path the canonical python binary")
parser.addoption("--valgrind-path", action="store", dest="valgrind_path", default="", help="path the canonical valgring binary")
parser.addoption("--test-filter", action="append", dest="test_filter", default=None, help="test filter")
- parser.addoption("--test-file-filter", action="store", dest="test_file_filter", default=None, help="test file filter")
+ parser.addoption("--test-file-filter", action="store", dest="test_file_filter", default=None, help="test file filter")
parser.addoption("--test-param", action="append", dest="test_params", default=None, help="test parameters")
parser.addoption("--test-log-level", action="store", dest="test_log_level", choices=["critical", "error", "warning", "info", "debug"], default="debug", help="test log level")
parser.addoption("--mode", action="store", choices=[yatest_lib.ya.RunMode.List, yatest_lib.ya.RunMode.Run], dest="mode", default=yatest_lib.ya.RunMode.Run, help="testing mode")
- parser.addoption("--test-list-file", action="store", dest="test_list_file")
+ parser.addoption("--test-list-file", action="store", dest="test_list_file")
parser.addoption("--modulo", default=1, type=int)
parser.addoption("--modulo-index", default=0, type=int)
parser.addoption("--partition-mode", default='SEQUENTIAL', help="Split tests according to partitoin mode")
parser.addoption("--split-by-tests", action='store_true', help="Split test execution by tests instead of suites", default=False)
parser.addoption("--project-path", action="store", default="", help="path to CMakeList where test is declared")
parser.addoption("--build-type", action="store", default="", help="build type")
- parser.addoption("--flags", action="append", dest="flags", default=[], help="build flags (-D)")
+ parser.addoption("--flags", action="append", dest="flags", default=[], help="build flags (-D)")
parser.addoption("--sanitize", action="store", default="", help="sanitize mode")
parser.addoption("--test-stderr", action="store_true", default=False, help="test stderr")
parser.addoption("--test-debug", action="store_true", default=False, help="test debug mode")
parser.addoption("--root-dir", action="store", default=None)
parser.addoption("--ya-trace", action="store", dest="ya_trace_path", default=None, help="path to ya trace report")
- parser.addoption("--ya-version", action="store", dest="ya_version", default=0, type=int, help="allows to be compatible with ya and the new changes in ya-dev")
+ parser.addoption("--ya-version", action="store", dest="ya_version", default=0, type=int, help="allows to be compatible with ya and the new changes in ya-dev")
parser.addoption(
"--test-suffix", action="store", dest="test_suffix", default=None, help="add suffix to every test name"
)
parser.addoption("--gdb-path", action="store", dest="gdb_path", default="", help="path the canonical gdb binary")
parser.addoption("--collect-cores", action="store_true", dest="collect_cores", default=False, help="allows core dump file recovering during test")
- parser.addoption("--sanitizer-extra-checks", action="store_true", dest="sanitizer_extra_checks", default=False, help="enables extra checks for tests built with sanitizers")
+ parser.addoption("--sanitizer-extra-checks", action="store_true", dest="sanitizer_extra_checks", default=False, help="enables extra checks for tests built with sanitizers")
parser.addoption("--report-deselected", action="store_true", dest="report_deselected", default=False, help="report deselected tests to the trace file")
parser.addoption("--pdb-on-sigusr1", action="store_true", default=False, help="setup pdb.set_trace on SIGUSR1")
- parser.addoption("--test-tool-bin", help="Path to test_tool")
+ parser.addoption("--test-tool-bin", help="Path to test_tool")
parser.addoption("--test-list-path", dest="test_list_path", action="store", help="path to test list", default="")
-def from_ya_test():
- return "YA_TEST_RUNNER" in os.environ
-
-
+def from_ya_test():
+ return "YA_TEST_RUNNER" in os.environ
+
+
def pytest_configure(config):
global pytest_config
pytest_config = config
@@ -189,7 +189,7 @@ def pytest_configure(config):
config.addinivalue_line("markers", "ya:external")
- config.from_ya_test = from_ya_test()
+ config.from_ya_test = from_ya_test()
config.test_logs = collections.defaultdict(dict)
config.test_metrics = {}
config.suite_metrics = {}
@@ -234,65 +234,65 @@ def pytest_configure(config):
config.current_test_name = None
config.test_cores_count = 0
config.collect_cores = config.option.collect_cores
- config.sanitizer_extra_checks = config.option.sanitizer_extra_checks
+ config.sanitizer_extra_checks = config.option.sanitizer_extra_checks
try:
config.test_tool_bin = config.option.test_tool_bin
except AttributeError:
logging.info("test_tool_bin not specified")
if config.sanitizer_extra_checks:
- for envvar in ['LSAN_OPTIONS', 'ASAN_OPTIONS']:
- if envvar in os.environ:
- os.environ.pop(envvar)
- if envvar + '_ORIGINAL' in os.environ:
- os.environ[envvar] = os.environ[envvar + '_ORIGINAL']
+ for envvar in ['LSAN_OPTIONS', 'ASAN_OPTIONS']:
+ if envvar in os.environ:
+ os.environ.pop(envvar)
+ if envvar + '_ORIGINAL' in os.environ:
+ os.environ[envvar] = os.environ[envvar + '_ORIGINAL']
if config.option.root_dir:
config.rootdir = py.path.local(config.option.root_dir)
config.invocation_params = attr.evolve(config.invocation_params, dir=config.rootdir)
- extra_sys_path = []
- # Arcadia paths from the test DEPENDS section of ya.make
- extra_sys_path.append(os.path.join(config.option.source_root, config.option.project_path))
+ extra_sys_path = []
+ # Arcadia paths from the test DEPENDS section of ya.make
+ extra_sys_path.append(os.path.join(config.option.source_root, config.option.project_path))
# Build root is required for correct import of protobufs, because imports are related to the root
# (like import devtools.dummy_arcadia.protos.lib.my_proto_pb2)
- extra_sys_path.append(config.option.build_root)
-
- for path in config.option.dep_roots:
- if os.path.isabs(path):
- extra_sys_path.append(path)
- else:
- extra_sys_path.append(os.path.join(config.option.source_root, path))
-
- sys_path_set = set(sys.path)
- for path in extra_sys_path:
- if path not in sys_path_set:
- sys.path.append(path)
- sys_path_set.add(path)
-
- os.environ["PYTHONPATH"] = os.pathsep.join(sys.path)
-
+ extra_sys_path.append(config.option.build_root)
+
+ for path in config.option.dep_roots:
+ if os.path.isabs(path):
+ extra_sys_path.append(path)
+ else:
+ extra_sys_path.append(os.path.join(config.option.source_root, path))
+
+ sys_path_set = set(sys.path)
+ for path in extra_sys_path:
+ if path not in sys_path_set:
+ sys.path.append(path)
+ sys_path_set.add(path)
+
+ os.environ["PYTHONPATH"] = os.pathsep.join(sys.path)
+
if not config.option.collectonly:
if config.option.ya_trace_path:
config.ya_trace_reporter = TraceReportGenerator(config.option.ya_trace_path)
else:
config.ya_trace_reporter = DryTraceReportGenerator(config.option.ya_trace_path)
- config.ya_version = config.option.ya_version
+ config.ya_version = config.option.ya_version
sys.meta_path.append(CustomImporter([config.option.build_root] + [os.path.join(config.option.build_root, dep) for dep in config.option.dep_roots]))
if config.option.pdb_on_sigusr1:
configure_pdb_on_demand()
- # Dump python backtrace in case of any errors
- faulthandler.enable()
- if hasattr(signal, "SIGQUIT"):
- # SIGQUIT is used by test_tool to tear down tests which overrun the timeout
- faulthandler.register(signal.SIGQUIT, chain=True)
-
- if hasattr(signal, "SIGUSR2"):
- signal.signal(signal.SIGUSR2, _graceful_shutdown)
-
+ # Dump python backtrace in case of any errors
+ faulthandler.enable()
+ if hasattr(signal, "SIGQUIT"):
+ # SIGQUIT is used by test_tool to tear down tests which overrun the timeout
+ faulthandler.register(signal.SIGQUIT, chain=True)
+
+ if hasattr(signal, "SIGUSR2"):
+ signal.signal(signal.SIGUSR2, _graceful_shutdown)
+
session_should_exit = False
@@ -313,20 +313,20 @@ def pytest_runtest_logfinish(nodeid, location):
_graceful_shutdown_on_log(session_should_exit)
-def _graceful_shutdown(*args):
+def _graceful_shutdown(*args):
global session_should_exit
session_should_exit = True
- try:
- import library.python.coverage
- library.python.coverage.stop_coverage_tracing()
- except ImportError:
- pass
- traceback.print_stack(file=sys.stderr)
+ try:
+ import library.python.coverage
+ library.python.coverage.stop_coverage_tracing()
+ except ImportError:
+ pass
+ traceback.print_stack(file=sys.stderr)
capman = pytest_config.pluginmanager.getplugin("capturemanager")
capman.suspend(in_=True)
_graceful_shutdown_on_log(not capman.is_globally_capturing())
-
-
+
+
def _get_rusage():
return resource and resource.getrusage(resource.RUSAGE_SELF)
@@ -342,7 +342,7 @@ def _collect_test_rusage(item):
if not modifier:
modifier = lambda x: x
if hasattr(item.rusage, attr_name):
- ya_inst.set_metric_value(metric_name, modifier(getattr(finish_rusage, attr_name) - getattr(item.rusage, attr_name)))
+ ya_inst.set_metric_value(metric_name, modifier(getattr(finish_rusage, attr_name) - getattr(item.rusage, attr_name)))
for args in [
("ru_maxrss", "ru_rss", lambda x: x*1024), # to be the same as in util/system/rusage.cpp
@@ -431,7 +431,7 @@ def pytest_collection_modifyitems(items, config):
canonical_node_id = str(CustomTestItem(item.nodeid, pytest_config.option.test_suffix))
matched = False
for flt in filters:
- if "::" not in flt and "*" not in flt:
+ if "::" not in flt and "*" not in flt:
flt += "*" # add support for filtering by module name
if canonical_node_id.endswith(flt) or fnmatch.fnmatch(tools.escape_for_fnmatch(canonical_node_id), tools.escape_for_fnmatch(flt)):
matched = True
@@ -507,10 +507,10 @@ def pytest_collection_modifyitems(items, config):
"tags": _get_item_tags(item),
}
tests.append(record)
- if config.option.test_list_file:
- with open(config.option.test_list_file, 'w') as afile:
- json.dump(tests, afile)
- # TODO prettyboy remove after test_tool release - currently it's required for backward compatibility
+ if config.option.test_list_file:
+ with open(config.option.test_list_file, 'w') as afile:
+ json.dump(tests, afile)
+ # TODO prettyboy remove after test_tool release - currently it's required for backward compatibility
sys.stderr.write(json.dumps(tests))
@@ -548,7 +548,7 @@ def pytest_runtest_makereport(item, call):
if not pytest_config.suite_metrics and context.Ctx.get("YA_PYTEST_START_TIMESTAMP"):
pytest_config.suite_metrics["pytest_startup_duration"] = call.start - context.Ctx["YA_PYTEST_START_TIMESTAMP"]
pytest_config.ya_trace_reporter.dump_suite_metrics()
-
+
pytest_config.ya_trace_reporter.on_log_report(test_item)
if report.outcome == "failed":
@@ -591,48 +591,48 @@ def pytest_make_parametrize_id(config, val, argname):
return None
-def get_formatted_error(report):
- if isinstance(report.longrepr, tuple):
- text = ""
- for entry in report.longrepr:
- text += colorize(entry)
- else:
- text = colorize(report.longrepr)
+def get_formatted_error(report):
+ if isinstance(report.longrepr, tuple):
+ text = ""
+ for entry in report.longrepr:
+ text += colorize(entry)
+ else:
+ text = colorize(report.longrepr)
text = yatest_lib.tools.to_utf8(text)
- return text
-
-
-def colorize(longrepr):
- # use default pytest colorization
+ return text
+
+
+def colorize(longrepr):
+ # use default pytest colorization
if pytest_config.option.tbstyle != "short":
- io = py.io.TextIO()
+ io = py.io.TextIO()
if six.PY2:
writer = py.io.TerminalWriter(file=io)
else:
writer = _pytest._io.TerminalWriter(file=io)
- # enable colorization
- writer.hasmarkup = True
-
- if hasattr(longrepr, 'reprtraceback') and hasattr(longrepr.reprtraceback, 'toterminal'):
- longrepr.reprtraceback.toterminal(writer)
- return io.getvalue().strip()
+ # enable colorization
+ writer.hasmarkup = True
+
+ if hasattr(longrepr, 'reprtraceback') and hasattr(longrepr.reprtraceback, 'toterminal'):
+ longrepr.reprtraceback.toterminal(writer)
+ return io.getvalue().strip()
return yatest_lib.tools.to_utf8(longrepr)
-
+
text = yatest_lib.tools.to_utf8(longrepr)
- pos = text.find("E ")
- if pos == -1:
- return text
-
- bt, error = text[:pos], text[pos:]
- filters = [
- # File path, line number and function name
- (re.compile(r"^(.*?):(\d+): in (\S+)", flags=re.MULTILINE), r"[[unimp]]\1[[rst]]:[[alt2]]\2[[rst]]: in [[alt1]]\3[[rst]]"),
- ]
- for regex, substitution in filters:
- bt = regex.sub(substitution, bt)
- return "{}[[bad]]{}".format(bt, error)
-
-
+ pos = text.find("E ")
+ if pos == -1:
+ return text
+
+ bt, error = text[:pos], text[pos:]
+ filters = [
+ # File path, line number and function name
+ (re.compile(r"^(.*?):(\d+): in (\S+)", flags=re.MULTILINE), r"[[unimp]]\1[[rst]]:[[alt2]]\2[[rst]]: in [[alt1]]\3[[rst]]"),
+ ]
+ for regex, substitution in filters:
+ bt = regex.sub(substitution, bt)
+ return "{}[[bad]]{}".format(bt, error)
+
+
class TestItem(object):
def __init__(self, report, result, test_suffix):
@@ -691,7 +691,7 @@ class TestItem(object):
def error(self):
return self._error
- def set_error(self, entry, marker='bad'):
+ def set_error(self, entry, marker='bad'):
if isinstance(entry, _pytest.reports.BaseReport):
self._error = get_formatted_error(entry)
else:
@@ -750,80 +750,80 @@ class DeselectedTestItem(CustomTestItem):
class TraceReportGenerator(object):
def __init__(self, out_file_path):
- self._filename = out_file_path
- self._file = open(out_file_path, 'w')
- self._wreckage_filename = out_file_path + '.wreckage'
+ self._filename = out_file_path
+ self._file = open(out_file_path, 'w')
+ self._wreckage_filename = out_file_path + '.wreckage'
self._test_messages = {}
self._test_duration = {}
- # Some machinery to avoid data corruption due to sloppy fork()
- self._current_test = (None, None)
- self._pid = os.getpid()
- self._check_intricate_respawn()
-
- def _check_intricate_respawn(self):
- pid_file = self._filename + '.pid'
- try:
- # python2 doesn't support open(f, 'x')
- afile = os.fdopen(os.open(pid_file, os.O_WRONLY | os.O_EXCL | os.O_CREAT), 'w')
- afile.write(str(self._pid))
- afile.close()
- return
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- # Looks like the test binary was respawned
- if from_ya_test():
- try:
- with open(pid_file) as afile:
- prev_pid = afile.read()
- except Exception as e:
- prev_pid = '(failed to obtain previous pid: {})'.format(e)
-
- parts = [
- "Aborting test run: test machinery found that the test binary {} has already been run before.".format(sys.executable),
- "Looks like test has incorrect respawn/relaunch logic within test binary.",
- "Test should not try to restart itself - this is a poorly designed test case that leads to errors and could corrupt internal test machinery files.",
- "Debug info: previous pid:{} current:{}".format(prev_pid, self._pid),
- ]
- msg = '\n'.join(parts)
- yatest_logger.error(msg)
-
- if filelock:
- lock = filelock.FileLock(self._wreckage_filename + '.lock')
- lock.acquire()
-
- with open(self._wreckage_filename, 'a') as afile:
- self._file = afile
-
- self._dump_trace('chunk_event', {"errors": [('fail', '[[bad]]' + msg)]})
-
- raise Exception(msg)
- else:
- # Test binary is launched without `ya make -t`'s testing machinery - don't rely on clean environment
- pass
-
+ # Some machinery to avoid data corruption due to sloppy fork()
+ self._current_test = (None, None)
+ self._pid = os.getpid()
+ self._check_intricate_respawn()
+
+ def _check_intricate_respawn(self):
+ pid_file = self._filename + '.pid'
+ try:
+ # python2 doesn't support open(f, 'x')
+ afile = os.fdopen(os.open(pid_file, os.O_WRONLY | os.O_EXCL | os.O_CREAT), 'w')
+ afile.write(str(self._pid))
+ afile.close()
+ return
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ # Looks like the test binary was respawned
+ if from_ya_test():
+ try:
+ with open(pid_file) as afile:
+ prev_pid = afile.read()
+ except Exception as e:
+ prev_pid = '(failed to obtain previous pid: {})'.format(e)
+
+ parts = [
+ "Aborting test run: test machinery found that the test binary {} has already been run before.".format(sys.executable),
+ "Looks like test has incorrect respawn/relaunch logic within test binary.",
+ "Test should not try to restart itself - this is a poorly designed test case that leads to errors and could corrupt internal test machinery files.",
+ "Debug info: previous pid:{} current:{}".format(prev_pid, self._pid),
+ ]
+ msg = '\n'.join(parts)
+ yatest_logger.error(msg)
+
+ if filelock:
+ lock = filelock.FileLock(self._wreckage_filename + '.lock')
+ lock.acquire()
+
+ with open(self._wreckage_filename, 'a') as afile:
+ self._file = afile
+
+ self._dump_trace('chunk_event', {"errors": [('fail', '[[bad]]' + msg)]})
+
+ raise Exception(msg)
+ else:
+ # Test binary is launched without `ya make -t`'s testing machinery - don't rely on clean environment
+ pass
+
def on_start_test_class(self, test_item):
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
- class_name = test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name
- self._current_test = (class_name, None)
- self.trace('test-started', {'class': class_name})
+ class_name = test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name
+ self._current_test = (class_name, None)
+ self.trace('test-started', {'class': class_name})
def on_finish_test_class(self, test_item):
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
self.trace('test-finished', {'class': test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name})
def on_start_test_case(self, test_item):
- class_name = yatest_lib.tools.to_utf8(test_item.class_name)
- subtest_name = yatest_lib.tools.to_utf8(test_item.test_name)
+ class_name = yatest_lib.tools.to_utf8(test_item.class_name)
+ subtest_name = yatest_lib.tools.to_utf8(test_item.test_name)
message = {
- 'class': class_name,
- 'subtest': subtest_name,
+ 'class': class_name,
+ 'subtest': subtest_name,
}
if test_item.nodeid in pytest_config.test_logs:
message['logs'] = pytest_config.test_logs[test_item.nodeid]
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
- self._current_test = (class_name, subtest_name)
+ self._current_test = (class_name, subtest_name)
self.trace('subtest-started', message)
def on_finish_test_case(self, test_item, duration_only=False):
@@ -865,9 +865,9 @@ class TraceReportGenerator(object):
message = {"metrics": pytest_config.suite_metrics}
self.trace("suite-event", message)
- def on_error(self, test_item):
- self.trace('chunk_event', {"errors": [(test_item.status, self._get_comment(test_item))]})
-
+ def on_error(self, test_item):
+ self.trace('chunk_event', {"errors": [(test_item.status, self._get_comment(test_item))]})
+
def on_log_report(self, test_item):
if test_item.nodeid in self._test_duration:
self._test_duration[test_item.nodeid] += test_item._duration
@@ -879,77 +879,77 @@ class TraceReportGenerator(object):
msg = yatest_lib.tools.to_utf8(test_item.error)
if not msg:
return ""
- return msg + "[[rst]]"
+ return msg + "[[rst]]"
- def _dump_trace(self, name, value):
+ def _dump_trace(self, name, value):
event = {
'timestamp': time.time(),
'value': value,
'name': name
}
-
+
data = yatest_lib.tools.to_str(json.dumps(event, ensure_ascii=False))
- self._file.write(data + '\n')
- self._file.flush()
-
- def _check_sloppy_fork(self, name, value):
- if self._pid == os.getpid():
- return
-
- yatest_logger.error("Skip tracing to avoid data corruption, name = %s, value = %s", name, value)
-
- try:
- # Lock wreckage tracefile to avoid race if multiple tests use fork sloppily
- if filelock:
- lock = filelock.FileLock(self._wreckage_filename + '.lock')
- lock.acquire()
-
- with open(self._wreckage_filename, 'a') as afile:
- self._file = afile
-
- parts = [
- "It looks like you have leaked process - it could corrupt internal test machinery files.",
- "Usually it happens when you casually use fork() without os._exit(),",
- "which results in two pytest processes running at the same time.",
- "Pid of the original pytest's process is {}, however current process has {} pid.".format(self._pid, os.getpid()),
- ]
- if self._current_test[1]:
- parts.append("Most likely the problem is in '{}' test.".format(self._current_test))
- else:
- parts.append("Most likely new process was created before any test was launched (during the import stage?).")
-
- if value.get('comment'):
- comment = value.get('comment', '').strip()
- # multiline comment
- newline_required = '\n' if '\n' in comment else ''
- parts.append("Debug info: name = '{}' comment:{}{}".format(name, newline_required, comment))
- else:
- val_str = json.dumps(value, ensure_ascii=False).encode('utf-8')
- parts.append("Debug info: name = '{}' value = '{}'".format(name, base64.b64encode(val_str)))
-
- msg = "[[bad]]{}".format('\n'.join(parts))
- class_name, subtest_name = self._current_test
- if subtest_name:
- data = {
- 'class': class_name,
- 'subtest': subtest_name,
- 'status': 'fail',
- 'comment': msg,
- }
- # overwrite original status
- self._dump_trace('subtest-finished', data)
- else:
- self._dump_trace('chunk_event', {"errors": [('fail', msg)]})
- except Exception as e:
- yatest_logger.exception(e)
- finally:
- os._exit(38)
-
- def trace(self, name, value):
- self._check_sloppy_fork(name, value)
- self._dump_trace(name, value)
-
-
+ self._file.write(data + '\n')
+ self._file.flush()
+
+ def _check_sloppy_fork(self, name, value):
+ if self._pid == os.getpid():
+ return
+
+ yatest_logger.error("Skip tracing to avoid data corruption, name = %s, value = %s", name, value)
+
+ try:
+ # Lock wreckage tracefile to avoid race if multiple tests use fork sloppily
+ if filelock:
+ lock = filelock.FileLock(self._wreckage_filename + '.lock')
+ lock.acquire()
+
+ with open(self._wreckage_filename, 'a') as afile:
+ self._file = afile
+
+ parts = [
+ "It looks like you have leaked process - it could corrupt internal test machinery files.",
+ "Usually it happens when you casually use fork() without os._exit(),",
+ "which results in two pytest processes running at the same time.",
+ "Pid of the original pytest's process is {}, however current process has {} pid.".format(self._pid, os.getpid()),
+ ]
+ if self._current_test[1]:
+ parts.append("Most likely the problem is in '{}' test.".format(self._current_test))
+ else:
+ parts.append("Most likely new process was created before any test was launched (during the import stage?).")
+
+ if value.get('comment'):
+ comment = value.get('comment', '').strip()
+ # multiline comment
+ newline_required = '\n' if '\n' in comment else ''
+ parts.append("Debug info: name = '{}' comment:{}{}".format(name, newline_required, comment))
+ else:
+ val_str = json.dumps(value, ensure_ascii=False).encode('utf-8')
+ parts.append("Debug info: name = '{}' value = '{}'".format(name, base64.b64encode(val_str)))
+
+ msg = "[[bad]]{}".format('\n'.join(parts))
+ class_name, subtest_name = self._current_test
+ if subtest_name:
+ data = {
+ 'class': class_name,
+ 'subtest': subtest_name,
+ 'status': 'fail',
+ 'comment': msg,
+ }
+ # overwrite original status
+ self._dump_trace('subtest-finished', data)
+ else:
+ self._dump_trace('chunk_event', {"errors": [('fail', msg)]})
+ except Exception as e:
+ yatest_logger.exception(e)
+ finally:
+ os._exit(38)
+
+ def trace(self, name, value):
+ self._check_sloppy_fork(name, value)
+ self._dump_trace(name, value)
+
+
class DryTraceReportGenerator(TraceReportGenerator):
"""
Generator does not write any information.
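Note: the pid-file trick in `_check_intricate_respawn` above can be summarized with a short standalone sketch (the path handling and exception type are illustrative):

```python
# Simplified sketch of the O_EXCL pid-file guard used by _check_intricate_respawn above.
import errno
import os

def claim_single_run(trace_path):
    pid_file = trace_path + '.pid'
    try:
        # O_EXCL makes creation fail if an earlier run of the same binary left the file behind
        fd = os.open(pid_file, os.O_WRONLY | os.O_EXCL | os.O_CREAT)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        # a previous process already claimed this trace file: looks like a respawn
        raise RuntimeError('test binary appears to have been respawned')
    with os.fdopen(fd, 'w') as afile:
        afile.write(str(os.getpid()))
```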
diff --git a/library/python/pytest/ya.make b/library/python/pytest/ya.make
index 060c92c313..662c7787b3 100644
--- a/library/python/pytest/ya.make
+++ b/library/python/pytest/ya.make
@@ -6,7 +6,7 @@ OWNER(
)
PY_SRCS(
- __init__.py
+ __init__.py
main.py
rewrite.py
yatest_tools.py
@@ -14,19 +14,19 @@ PY_SRCS(
)
PEERDIR(
- contrib/python/dateutil
- contrib/python/ipdb
- contrib/python/py
- contrib/python/pytest
- contrib/python/requests
+ contrib/python/dateutil
+ contrib/python/ipdb
+ contrib/python/py
+ contrib/python/pytest
+ contrib/python/requests
library/python/pytest/plugins
library/python/testing/yatest_common
library/python/testing/yatest_lib
)
-RESOURCE_FILES(
- PREFIX library/python/pytest/
- pytest.yatest.ini
-)
-
+RESOURCE_FILES(
+ PREFIX library/python/pytest/
+ pytest.yatest.ini
+)
+
END()
diff --git a/library/python/pytest/yatest_tools.py b/library/python/pytest/yatest_tools.py
index 6b8b896394..0c336250c6 100644
--- a/library/python/pytest/yatest_tools.py
+++ b/library/python/pytest/yatest_tools.py
@@ -1,25 +1,25 @@
-# coding: utf-8
-
-import collections
-import functools
-import math
+# coding: utf-8
+
+import collections
+import functools
+import math
import os
import re
-import sys
+import sys
import yatest_lib.tools
class Subtest(object):
- def __init__(self, name, test_name, status, comment, elapsed, result=None, test_type=None, logs=None, cwd=None, metrics=None):
- self._name = name
- self._test_name = test_name
+ def __init__(self, name, test_name, status, comment, elapsed, result=None, test_type=None, logs=None, cwd=None, metrics=None):
+ self._name = name
+ self._test_name = test_name
self.status = status
self.elapsed = elapsed
self.comment = comment
self.result = result
self.test_type = test_type
- self.logs = logs or {}
+ self.logs = logs or {}
self.cwd = cwd
self.metrics = metrics
@@ -31,17 +31,17 @@ class Subtest(object):
def __str__(self):
return yatest_lib.tools.to_utf8(unicode(self))
- def __unicode__(self):
- return u"{}::{}".format(self.test_name, self.test_name)
-
- @property
- def name(self):
+ def __unicode__(self):
+ return u"{}::{}".format(self.test_name, self.test_name)
+
+ @property
+ def name(self):
return yatest_lib.tools.to_utf8(self._name)
-
- @property
- def test_name(self):
+
+ @property
+ def test_name(self):
return yatest_lib.tools.to_utf8(self._test_name)
-
+
def __repr__(self):
return "Subtest [{}::{} - {}[{}]: {}]".format(self.name, self.test_name, self.status, self.elapsed, self.comment)
@@ -84,7 +84,7 @@ class SubtestInfo(object):
class Status(object):
- GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(7)
+ GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(7)
SKIPPED = -100
NOT_LAUNCHED = -200
CANON_DIFF = -300
@@ -152,76 +152,76 @@ ya_ctx = YaCtx()
TRACE_FILE_NAME = "ytest.report.trace"
-def lazy(func):
- mem = {}
-
- @functools.wraps(func)
- def wrapper():
- if "results" not in mem:
- mem["results"] = func()
- return mem["results"]
-
- return wrapper
-
-
-@lazy
-def _get_mtab():
- if os.path.exists("/etc/mtab"):
- with open("/etc/mtab") as afile:
- data = afile.read()
- return [line.split(" ") for line in data.split("\n") if line]
- return []
-
-
-def get_max_filename_length(dirname):
+def lazy(func):
+ mem = {}
+
+ @functools.wraps(func)
+ def wrapper():
+ if "results" not in mem:
+ mem["results"] = func()
+ return mem["results"]
+
+ return wrapper
+
+
+@lazy
+def _get_mtab():
+ if os.path.exists("/etc/mtab"):
+ with open("/etc/mtab") as afile:
+ data = afile.read()
+ return [line.split(" ") for line in data.split("\n") if line]
+ return []
+
+
+def get_max_filename_length(dirname):
"""
- Return maximum filename length for the filesystem
- :return:
- """
- if sys.platform.startswith("linux"):
- # Linux users may work on a mounted ecryptfs filesystem
- # which has filename length limitations
- for entry in _get_mtab():
- mounted_dir, filesystem = entry[1], entry[2]
- # http://unix.stackexchange.com/questions/32795/what-is-the-maximum-allowed-filename-and-folder-size-with-ecryptfs
- if filesystem == "ecryptfs" and dirname and dirname.startswith(mounted_dir):
- return 140
- # default maximum filename length for most filesystems
- return 255
-
-
-def get_unique_file_path(dir_path, filename, cache=collections.defaultdict(set)):
- """
- Get unique filename in dir with proper filename length, using given filename/dir.
- File/dir won't be created (thread nonsafe)
+ Return maximum filename length for the filesystem
+ :return:
+ """
+ if sys.platform.startswith("linux"):
+ # Linux users may work on a mounted ecryptfs filesystem
+ # which has filename length limitations
+ for entry in _get_mtab():
+ mounted_dir, filesystem = entry[1], entry[2]
+ # http://unix.stackexchange.com/questions/32795/what-is-the-maximum-allowed-filename-and-folder-size-with-ecryptfs
+ if filesystem == "ecryptfs" and dirname and dirname.startswith(mounted_dir):
+ return 140
+ # default maximum filename length for most filesystems
+ return 255
+
+
+def get_unique_file_path(dir_path, filename, cache=collections.defaultdict(set)):
+ """
+ Get unique filename in dir with proper filename length, using given filename/dir.
+ File/dir won't be created (thread nonsafe)
:param dir_path: path to dir
- :param filename: original filename
- :return: unique filename
+ :param filename: original filename
+ :return: unique filename
"""
- max_suffix = 10000
- # + 1 symbol for dot before suffix
- tail_length = int(round(math.log(max_suffix, 10))) + 1
- # truncate filename length in accordance with filesystem limitations
- filename, extension = os.path.splitext(filename)
- # XXX
- if sys.platform.startswith("win"):
- # Trying to fit into MAX_PATH if it's possible.
- # Remove after DEVTOOLS-1646
- max_path = 260
- filename_len = len(dir_path) + len(extension) + tail_length + len(os.sep)
- if filename_len < max_path:
+ max_suffix = 10000
+ # + 1 symbol for dot before suffix
+ tail_length = int(round(math.log(max_suffix, 10))) + 1
+ # truncate filename length in accordance with filesystem limitations
+ filename, extension = os.path.splitext(filename)
+ # XXX
+ if sys.platform.startswith("win"):
+ # Trying to fit into MAX_PATH if it's possible.
+ # Remove after DEVTOOLS-1646
+ max_path = 260
+ filename_len = len(dir_path) + len(extension) + tail_length + len(os.sep)
+ if filename_len < max_path:
filename = yatest_lib.tools.trim_string(filename, max_path - filename_len)
filename = yatest_lib.tools.trim_string(filename, get_max_filename_length(dir_path) - tail_length - len(extension)) + extension
- candidate = os.path.join(dir_path, filename)
-
- key = dir_path + filename
- counter = sorted(cache.get(key, {0, }))[-1]
- while os.path.exists(candidate):
- cache[key].add(counter)
- counter += 1
- assert counter < max_suffix
- candidate = os.path.join(dir_path, filename + ".{}".format(counter))
- return candidate
+ candidate = os.path.join(dir_path, filename)
+
+ key = dir_path + filename
+ counter = sorted(cache.get(key, {0, }))[-1]
+ while os.path.exists(candidate):
+ cache[key].add(counter)
+ counter += 1
+ assert counter < max_suffix
+ candidate = os.path.join(dir_path, filename + ".{}".format(counter))
+ return candidate
def escape_for_fnmatch(s):
@@ -251,18 +251,18 @@ def normalize_name(name):
return name
-def normalize_filename(filename):
+def normalize_filename(filename):
"""
Replace invalid for file names characters with string equivalents
:param filename: string to be converted to a valid file name
:return: valid file name
"""
not_allowed_pattern = r"[\[\]\/:*?\"\'<>|+\0\\\s\x0b\x0c]"
- filename = re.sub(not_allowed_pattern, ".", filename)
- return re.sub(r"\.{2,}", ".", filename)
+ filename = re.sub(not_allowed_pattern, ".", filename)
+ return re.sub(r"\.{2,}", ".", filename)
-def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
+def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
"""
get test log file path, platform dependent
:param output_dir: dir where log file should be placed
@@ -271,16 +271,16 @@ def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
:return: test log file name
"""
if os.name == "nt":
- # don't add class name to the log's filename
- # to reduce it's length on windows
- filename = test_name
+ # don't add class name to the log's filename
+ # to reduce it's length on windows
+ filename = test_name
else:
- filename = "{}.{}".format(class_name, test_name)
- if not filename:
- filename = "test"
- filename += "." + extension
- filename = normalize_filename(filename)
- return get_unique_file_path(output_dir, filename)
+ filename = "{}.{}".format(class_name, test_name)
+ if not filename:
+ filename = "test"
+ filename += "." + extension
+ filename = normalize_filename(filename)
+ return get_unique_file_path(output_dir, filename)
def split_node_id(nodeid, test_suffix=None):
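Note: a small usage sketch for the filename helpers above (the import path and the paths used are assumptions for illustration):

```python
# Usage sketch for normalize_filename / get_unique_file_path (paths are illustrative).
from library.python.pytest import yatest_tools as tools   # import path assumed

name = tools.normalize_filename("TestSuite::test[param/1]")
# characters invalid in file names are replaced with dots, e.g. "TestSuite.test.param.1."

log_path = tools.get_unique_file_path("/tmp/test_logs", name + ".log")
# returns "/tmp/test_logs/<name>.log", or "<name>.log.N" if that path already exists
```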
diff --git a/library/python/reservoir_sampling/README.md b/library/python/reservoir_sampling/README.md
index 27674ba4f0..30ff3fcfcb 100644
--- a/library/python/reservoir_sampling/README.md
+++ b/library/python/reservoir_sampling/README.md
@@ -1,11 +1,11 @@
-### Overview
-Reservoir sampling is a family of randomized algorithms for choosing a simple random sample, without replacement, of k items from a population of unknown size n in a single pass over the items.
-
-### Example
-
-```jupyter
-In [1]: from library.python import reservoir_sampling
-
-In [2]: reservoir_sampling.reservoir_sampling(data=range(100), nsamples=10)
-Out[2]: [27, 19, 81, 45, 89, 78, 13, 36, 29, 9]
-```
+### Overview
+Reservoir sampling is a family of randomized algorithms for choosing a simple random sample, without replacement, of k items from a population of unknown size n in a single pass over the items.
+
+### Example
+
+```jupyter
+In [1]: from library.python import reservoir_sampling
+
+In [2]: reservoir_sampling.reservoir_sampling(data=range(100), nsamples=10)
+Out[2]: [27, 19, 81, 45, 89, 78, 13, 36, 29, 9]
+```
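Note: for reference, the classic "Algorithm R" that this kind of single-pass sampling is based on can be sketched in a few lines (a simplified illustration, not the library's implementation):

```python
# Minimal Algorithm R sketch (illustrative only, not library.python.reservoir_sampling).
import random

def reservoir_sample(data, nsamples):
    sample = []
    for i, item in enumerate(data):
        if i < nsamples:
            sample.append(item)            # fill the reservoir first
        else:
            j = random.randint(0, i)       # inclusive bounds
            if j < nsamples:
                sample[j] = item           # replace with decreasing probability
    return sample
```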
diff --git a/library/python/resource/__init__.py b/library/python/resource/__init__.py
index 26503ef7fc..1e4f0526b7 100644
--- a/library/python/resource/__init__.py
+++ b/library/python/resource/__init__.py
@@ -30,8 +30,8 @@ def iteritems(prefix='', strip_prefix=False):
if strip_prefix:
key = key[len(prefix):]
yield key, value
-
-
+
+
def resfs_file_exists(path):
return resfs_src(path, resfs_file=True) is not None
@@ -41,9 +41,9 @@ def resfs_files(prefix=''):
if isinstance(prefix, six.text_type):
decode = lambda s: s.decode('utf-8')
return [decode(s) for s in __resfs_files(prefix=prefix)]
-
-
-def find(path):
- if isinstance(path, six.text_type):
- path = path.encode('utf-8')
- return __find(path)
+
+
+def find(path):
+ if isinstance(path, six.text_type):
+ path = path.encode('utf-8')
+ return __find(path)
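Note: a small usage sketch for the resource helpers above (the resource keys are illustrative):

```python
# Usage sketch for library.python.resource; the keys below are illustrative.
import library.python.resource as rs

data = rs.find('/config.json')                 # bytes, or None if the key is absent
for key, value in rs.iteritems(prefix='/configs/', strip_prefix=True):
    print(key, len(value))                     # keys are yielded without the prefix
```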
diff --git a/library/python/resource/ut/lib/test_simple.py b/library/python/resource/ut/lib/test_simple.py
index 52f006ff91..f76b656cb9 100644
--- a/library/python/resource/ut/lib/test_simple.py
+++ b/library/python/resource/ut/lib/test_simple.py
@@ -1,11 +1,11 @@
-import six # noqa
-
+import six # noqa
+
import library.python.resource as rs
text = b'na gorshke sidel korol\n'
-def test_find():
+def test_find():
assert rs.find('/qw.txt') == text
@@ -21,11 +21,11 @@ def test_iter():
('/1.txt', text),
('/2.txt', text),
}
-
-
-def test_resfs_files():
- assert 'contrib/python/py/.dist-info/METADATA' in set(rs.resfs_files())
-
-
-def test_resfs_read():
- assert 'Metadata-Version' in rs.resfs_read('contrib/python/py/.dist-info/METADATA').decode('utf-8')
+
+
+def test_resfs_files():
+ assert 'contrib/python/py/.dist-info/METADATA' in set(rs.resfs_files())
+
+
+def test_resfs_read():
+ assert 'Metadata-Version' in rs.resfs_read('contrib/python/py/.dist-info/METADATA').decode('utf-8')
diff --git a/library/python/runtime_py3/entry_points.py b/library/python/runtime_py3/entry_points.py
index 05098723cb..fe2dff2c77 100644
--- a/library/python/runtime_py3/entry_points.py
+++ b/library/python/runtime_py3/entry_points.py
@@ -42,11 +42,11 @@ def repl():
def resource_files():
sys.stdout.buffer.write(b'\n'.join(sorted(__res.resfs_files()) + [b'']))
-
-
-def run_constructors():
- for key, module_name in __res.iter_keys(b'py/constructors/'):
- import importlib
- module = importlib.import_module(module_name.decode())
- init_func = getattr(module, __res.find(key).decode())
- init_func()
+
+
+def run_constructors():
+ for key, module_name in __res.iter_keys(b'py/constructors/'):
+ import importlib
+ module = importlib.import_module(module_name.decode())
+ init_func = getattr(module, __res.find(key).decode())
+ init_func()
diff --git a/library/python/runtime_py3/main/main.c b/library/python/runtime_py3/main/main.c
index 3159800615..d135e00e99 100644
--- a/library/python/runtime_py3/main/main.c
+++ b/library/python/runtime_py3/main/main.c
@@ -160,21 +160,21 @@ static int pymain(int argc, char** argv) {
Py_InitArgcArgv(argc, argv_copy);
PySys_SetArgv(argc, argv_copy);
- {
- PyObject* module = PyImport_ImportModule("library.python.runtime_py3.entry_points");
- if (module == NULL) {
- PyErr_Print();
- } else {
- PyObject* res = PyObject_CallMethod(module, "run_constructors", NULL);
- if (res == NULL) {
- PyErr_Print();
- } else {
- Py_DECREF(res);
- }
- Py_DECREF(module);
- }
- }
-
+ {
+ PyObject* module = PyImport_ImportModule("library.python.runtime_py3.entry_points");
+ if (module == NULL) {
+ PyErr_Print();
+ } else {
+ PyObject* res = PyObject_CallMethod(module, "run_constructors", NULL);
+ if (res == NULL) {
+ PyErr_Print();
+ } else {
+ Py_DECREF(res);
+ }
+ Py_DECREF(module);
+ }
+ }
+
const char* module_name = entry_point_copy;
const char* func_name = NULL;
diff --git a/library/python/strings/__init__.py b/library/python/strings/__init__.py
index bd6bf6e7ce..47a731b1de 100644
--- a/library/python/strings/__init__.py
+++ b/library/python/strings/__init__.py
@@ -1,17 +1,17 @@
-# flake8 noqa: F401
-
-from .strings import (
- DEFAULT_ENCODING,
- ENCODING_ERRORS_POLICY,
- encode,
- fs_encoding,
- get_stream_encoding,
- guess_default_encoding,
- left_strip,
- locale_encoding,
- stringize_deep,
- to_basestring,
- to_str,
- to_unicode,
- unicodize_deep,
-)
+# flake8 noqa: F401
+
+from .strings import (
+ DEFAULT_ENCODING,
+ ENCODING_ERRORS_POLICY,
+ encode,
+ fs_encoding,
+ get_stream_encoding,
+ guess_default_encoding,
+ left_strip,
+ locale_encoding,
+ stringize_deep,
+ to_basestring,
+ to_str,
+ to_unicode,
+ unicodize_deep,
+)
diff --git a/library/python/strings/strings.py b/library/python/strings/strings.py
index 5bfddfe78a..476a797117 100644
--- a/library/python/strings/strings.py
+++ b/library/python/strings/strings.py
@@ -9,7 +9,7 @@ import library.python.func
logger = logging.getLogger(__name__)
-DEFAULT_ENCODING = 'utf-8'
+DEFAULT_ENCODING = 'utf-8'
ENCODING_ERRORS_POLICY = 'replace'
@@ -40,7 +40,7 @@ def to_basestring(value):
to_text = to_basestring
-def to_unicode(value, from_enc=DEFAULT_ENCODING):
+def to_unicode(value, from_enc=DEFAULT_ENCODING):
if isinstance(value, six.text_type):
return value
if isinstance(value, six.binary_type):
@@ -52,7 +52,7 @@ def to_unicode(value, from_enc=DEFAULT_ENCODING):
# Optional from_enc enables transcoding
-def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
+def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
if isinstance(value, six.binary_type):
if from_enc is None or to_enc == from_enc:
# Unknown input encoding or input and output encoding are the same
@@ -63,32 +63,32 @@ def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
return six.binary_type(value)
-def _convert_deep(x, enc, convert, relaxed=True):
+def _convert_deep(x, enc, convert, relaxed=True):
if x is None:
return None
if isinstance(x, (six.text_type, six.binary_type)):
return convert(x, enc)
if isinstance(x, dict):
return {convert(k, enc): _convert_deep(v, enc, convert, relaxed) for k, v in six.iteritems(x)}
- if isinstance(x, list):
- return [_convert_deep(e, enc, convert, relaxed) for e in x]
- if isinstance(x, tuple):
- return tuple([_convert_deep(e, enc, convert, relaxed) for e in x])
-
- if relaxed:
- return x
+ if isinstance(x, list):
+ return [_convert_deep(e, enc, convert, relaxed) for e in x]
+ if isinstance(x, tuple):
+ return tuple([_convert_deep(e, enc, convert, relaxed) for e in x])
+
+ if relaxed:
+ return x
raise TypeError('unsupported type')
-def unicodize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
- return _convert_deep(x, enc, to_unicode, relaxed)
+def unicodize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
+ return _convert_deep(x, enc, to_unicode, relaxed)
-def stringize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
- return _convert_deep(x, enc, to_str, relaxed)
+def stringize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
+ return _convert_deep(x, enc, to_str, relaxed)
-@library.python.func.memoize()
+@library.python.func.memoize()
def locale_encoding():
try:
loc = locale.getdefaultlocale()[1]
@@ -109,10 +109,10 @@ def fs_encoding():
def guess_default_encoding():
enc = locale_encoding()
- return enc if enc else DEFAULT_ENCODING
+ return enc if enc else DEFAULT_ENCODING
-@library.python.func.memoize()
+@library.python.func.memoize()
def get_stream_encoding(stream):
if stream.encoding:
try:
@@ -120,10 +120,10 @@ def get_stream_encoding(stream):
return stream.encoding
except LookupError:
pass
- return DEFAULT_ENCODING
-
-
-def encode(value, encoding=DEFAULT_ENCODING):
+ return DEFAULT_ENCODING
+
+
+def encode(value, encoding=DEFAULT_ENCODING):
if isinstance(value, six.binary_type):
- value = value.decode(encoding, errors='ignore')
- return value.encode(encoding)
+ value = value.decode(encoding, errors='ignore')
+ return value.encode(encoding)
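Note: a minimal usage sketch for the helpers touched above (not part of the diff; assumes the package is importable as library.python.strings):

    import library.python.strings as strings

    text = strings.to_unicode(b'value')    # bytes -> text, decoded with DEFAULT_ENCODING ('utf-8')
    data = strings.to_str(u'value')        # text -> bytes
    # Recursively convert keys and values of nested dicts, lists and tuples
    encoded = strings.stringize_deep({'key': [u'item', (u'pair',)]})
    # With relaxed=False, unconvertible objects raise TypeError instead of passing through
    decoded = strings.unicodize_deep(encoded, relaxed=False)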
diff --git a/library/python/strings/ut/test_strings.py b/library/python/strings/ut/test_strings.py
index dd0c694ee1..663ff24b14 100644
--- a/library/python/strings/ut/test_strings.py
+++ b/library/python/strings/ut/test_strings.py
@@ -115,7 +115,7 @@ def test_stringize_deep():
assert library.python.strings.stringize_deep({
'key 1': 'value 1',
u'ключ 2': u'значение 2',
- 'list': [u'ключ 2', 'key 1', (u'к', 2)]
+ 'list': [u'ключ 2', 'key 1', (u'к', 2)]
}) == {
'key 1' if six.PY2 else b'key 1': 'value 1' if six.PY2 else b'value 1',
u'ключ 2'.encode('utf-8'): u'значение 2'.encode('utf-8'),
@@ -157,9 +157,9 @@ def test_stringize_deep_plain():
def test_stringize_deep_nonstr():
with pytest.raises(TypeError):
- library.python.strings.stringize_deep(Convertible(), relaxed=False)
- x = Convertible()
- assert x == library.python.strings.stringize_deep(x)
+ library.python.strings.stringize_deep(Convertible(), relaxed=False)
+ x = Convertible()
+ assert x == library.python.strings.stringize_deep(x)
def test_unicodize_deep():
@@ -200,6 +200,6 @@ def test_unicodize_deep_plain():
def test_unicodize_deep_nonstr():
with pytest.raises(TypeError):
- library.python.strings.unicodize_deep(Convertible(), relaxed=False)
- x = Convertible()
- assert x == library.python.strings.stringize_deep(x)
+ library.python.strings.unicodize_deep(Convertible(), relaxed=False)
+ x = Convertible()
+ assert x == library.python.strings.stringize_deep(x)
diff --git a/library/python/strings/ya.make b/library/python/strings/ya.make
index 7e0b033717..5285c13774 100644
--- a/library/python/strings/ya.make
+++ b/library/python/strings/ya.make
@@ -2,11 +2,11 @@ OWNER(g:yatool)
PY23_LIBRARY()
-PY_SRCS(
- __init__.py
- CYTHONIZE_PY
- strings.py
-)
+PY_SRCS(
+ __init__.py
+ CYTHONIZE_PY
+ strings.py
+)
PEERDIR(
library/python/func
diff --git a/library/python/testing/filter/ya.make b/library/python/testing/filter/ya.make
index 22c485d258..7944b713a5 100644
--- a/library/python/testing/filter/ya.make
+++ b/library/python/testing/filter/ya.make
@@ -1,5 +1,5 @@
PY23_LIBRARY()
-OWNER(g:yatest)
+OWNER(g:yatest)
PY_SRCS(filter.py)
END()
diff --git a/library/python/testing/import_test/import_test.py b/library/python/testing/import_test/import_test.py
index 3e3b7234ef..440690af59 100644
--- a/library/python/testing/import_test/import_test.py
+++ b/library/python/testing/import_test/import_test.py
@@ -3,8 +3,8 @@ from __future__ import print_function
import os
import re
import sys
-import time
-import traceback
+import time
+import traceback
import __res
from __res import importer
@@ -32,7 +32,7 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
rx = re.compile('^({})$'.format('|'.join(patterns)))
failed = []
- import_times = {}
+ import_times = {}
norm = lambda s: s[:-9] if s.endswith('.__init__') else s
@@ -59,10 +59,10 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
try:
print('TRY', module)
- # XXX waiting for py3 to use print(..., flush=True)
- sys.stdout.flush()
-
- s = time.time()
+ # XXX waiting for py3 to use print(..., flush=True)
+ sys.stdout.flush()
+
+ s = time.time()
if module == '__main__':
importer.load_module('__main__', '__main__py')
elif module.endswith('.__init__'):
@@ -70,10 +70,10 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
else:
__import__(module)
- delay = time.time() - s
- import_times[str(module)] = delay
- print('OK ', module, '{:.3f}s'.format(delay))
-
+ delay = time.time() - s
+ import_times[str(module)] = delay
+ print('OK ', module, '{:.3f}s'.format(delay))
+
except Exception as e:
print('FAIL:', module, e, file=sys.stderr)
print_backtrace_marked(sys.exc_info())
@@ -86,10 +86,10 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
failed.append('{}: {}'.format(module, e))
raise
- print("Slowest imports:")
- for m, t in sorted(import_times.items(), key=lambda x: x[1], reverse=True)[:30]:
- print(' ', '{:.3f}s'.format(t), m)
-
+ print("Slowest imports:")
+ for m, t in sorted(import_times.items(), key=lambda x: x[1], reverse=True)[:30]:
+ print(' ', '{:.3f}s'.format(t), m)
+
if failed:
raise ImportError('modules not imported:\n' + '\n'.join(failed))
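Note: a hedged sketch of driving check_imports directly; the exact pattern syntax expected by no_check is assumed here and is not shown in the hunks above:

    from library.python.testing.import_test.import_test import check_imports

    # Imports every bundled module, timing each one and printing the slowest;
    # raises ImportError listing any modules that failed to import.
    check_imports(no_check=('contrib\\..*',))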
diff --git a/library/python/testing/import_test/ya.make b/library/python/testing/import_test/ya.make
index fae36ffe8f..b996336159 100644
--- a/library/python/testing/import_test/ya.make
+++ b/library/python/testing/import_test/ya.make
@@ -1,5 +1,5 @@
OWNER(
- g:yatest
+ g:yatest
exprmntr
)
diff --git a/library/python/testing/recipe/__init__.py b/library/python/testing/recipe/__init__.py
index 5ef9c5c189..3ea95ebb1d 100644
--- a/library/python/testing/recipe/__init__.py
+++ b/library/python/testing/recipe/__init__.py
@@ -16,7 +16,7 @@ collect_cores = None
sanitizer_extra_checks = None
-def _setup_logging(level=logging.DEBUG):
+def _setup_logging(level=logging.DEBUG):
root_logger = logging.getLogger()
root_logger.setLevel(level)
@@ -41,7 +41,7 @@ def get_options():
args, opts = parser.parse_known_args()
global ya, sanitizer_extra_checks, collect_cores
- _setup_logging()
+ _setup_logging()
context = {
"test_stderr": args.test_stderr,
@@ -96,7 +96,7 @@ def declare_recipe(start, stop):
except Exception:
if parsed_args.pdb:
tty()
- import ipdb
- ipdb.post_mortem()
+ import ipdb
+ ipdb.post_mortem()
else:
raise
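Note: a minimal recipe skeleton built on declare_recipe (a sketch; the argv parameter of the start/stop hooks is assumed from common usage and is not visible in the hunks above):

    from library.python.testing.recipe import declare_recipe

    def start(argv):
        # bring up whatever the tests need, e.g. spawn a service
        pass

    def stop(argv):
        # tear everything down after the tests finish
        pass

    if __name__ == "__main__":
        declare_recipe(start, stop)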
diff --git a/library/python/testing/recipe/ya.make b/library/python/testing/recipe/ya.make
index dd323aa245..239223c789 100644
--- a/library/python/testing/recipe/ya.make
+++ b/library/python/testing/recipe/ya.make
@@ -1,6 +1,6 @@
OWNER(
exprmntr
- g:yatest
+ g:yatest
)
PY23_LIBRARY()
@@ -10,10 +10,10 @@ PY_SRCS(
ports.py
)
-PEERDIR(
- contrib/python/ipdb
+PEERDIR(
+ contrib/python/ipdb
library/python/testing/yatest_common
library/python/testing/yatest_lib
-)
-
+)
+
END()
diff --git a/library/python/testing/ya.make b/library/python/testing/ya.make
index 883bc8d7ab..c232bc663e 100644
--- a/library/python/testing/ya.make
+++ b/library/python/testing/ya.make
@@ -1,8 +1,8 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
RECURSE(
behave
- deprecated
+ deprecated
fake_ya_package
filter
gtest
@@ -15,8 +15,8 @@ RECURSE(
recipe
system_info
types_test
- yapackage
- yapackage/test
+ yapackage
+ yapackage/test
yatest_common
yatest_lib
)
diff --git a/library/python/testing/yatest_common/ya.make b/library/python/testing/yatest_common/ya.make
index 5662db4c5d..0098e1be85 100644
--- a/library/python/testing/yatest_common/ya.make
+++ b/library/python/testing/yatest_common/ya.make
@@ -1,31 +1,31 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
PY23_LIBRARY()
-OWNER(g:yatest)
+OWNER(g:yatest)
NO_EXTENDED_SOURCE_SEARCH()
PY_SRCS(
TOP_LEVEL
- yatest/__init__.py
+ yatest/__init__.py
yatest/common/__init__.py
yatest/common/benchmark.py
yatest/common/canonical.py
yatest/common/environment.py
yatest/common/errors.py
yatest/common/legacy.py
- yatest/common/misc.py
+ yatest/common/misc.py
yatest/common/network.py
yatest/common/path.py
yatest/common/process.py
yatest/common/runtime.py
yatest/common/runtime_java.py
yatest/common/tags.py
-)
+)
-PEERDIR(
- contrib/python/six
+PEERDIR(
+ contrib/python/six
library/python/cores
library/python/filelock
library/python/fs
diff --git a/library/python/testing/yatest_common/yatest/common/canonical.py b/library/python/testing/yatest_common/yatest/common/canonical.py
index b6a136d3e9..7a258d070b 100644
--- a/library/python/testing/yatest_common/yatest/common/canonical.py
+++ b/library/python/testing/yatest_common/yatest/common/canonical.py
@@ -68,7 +68,7 @@ def canonical_execute(
):
"""
Shortcut to execute a binary and canonize its stdout
- :param binary: absolute path to the binary
+ :param binary: absolute path to the binary
:param args: binary arguments
:param check_exit_code: will raise ExecutionError if the command exits with non zero code
:param shell: use shell to run the command
@@ -78,7 +78,7 @@ def canonical_execute(
:param stdin: command stdin
:param stderr: command stderr
:param creationflags: command creation flags
-    :param file_name: output file name; if not specified, the program name will be used
+    :param file_name: output file name; if not specified, the program name will be used
     :param diff_tool: path to custom diff tool
:param diff_file_name: custom diff file name to create when diff is found
:param diff_tool_timeout: timeout for running diff tool
@@ -94,7 +94,7 @@ def canonical_execute(
execute_args = locals()
del execute_args["binary"]
del execute_args["args"]
- del execute_args["file_name"]
+ del execute_args["file_name"]
del execute_args["save_locally"]
del execute_args["diff_tool"]
del execute_args["diff_file_name"]
@@ -123,7 +123,7 @@ def canonical_py_execute(
:param stdin: command stdin
:param stderr: command stderr
:param creationflags: command creation flags
-    :param file_name: output file name; if not specified, the program name will be used
+    :param file_name: output file name; if not specified, the program name will be used
     :param diff_tool: path to custom diff tool
:param diff_file_name: custom diff file name to create when diff is found
:param diff_tool_timeout: timeout for running diff tool
@@ -135,7 +135,7 @@ def canonical_py_execute(
execute_args = locals()
del execute_args["script_path"]
del execute_args["args"]
- del execute_args["file_name"]
+ del execute_args["file_name"]
del execute_args["save_locally"]
del execute_args["diff_tool"]
del execute_args["diff_file_name"]
diff --git a/library/python/testing/yatest_common/yatest/common/environment.py b/library/python/testing/yatest_common/yatest/common/environment.py
index 43f48d0958..62fc501d52 100644
--- a/library/python/testing/yatest_common/yatest/common/environment.py
+++ b/library/python/testing/yatest_common/yatest/common/environment.py
@@ -1,5 +1,5 @@
-# coding: utf-8
+# coding: utf-8
+
-
-def extend_env_var(env, name, value, sep=":"):
- return sep.join(filter(None, [env.get(name), value]))
+def extend_env_var(env, name, value, sep=":"):
+ return sep.join(filter(None, [env.get(name), value]))
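Note: extend_env_var joins the existing value and the new one with the separator, skipping empty parts, for example:

    import os
    from yatest.common.environment import extend_env_var

    env = os.environ.copy()
    # Appends '/opt/tools/bin' to PATH, or uses it alone if PATH is unset or empty
    env["PATH"] = extend_env_var(env, "PATH", "/opt/tools/bin")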
diff --git a/library/python/testing/yatest_common/yatest/common/errors.py b/library/python/testing/yatest_common/yatest/common/errors.py
index 8c038fc381..8d5abc9b96 100644
--- a/library/python/testing/yatest_common/yatest/common/errors.py
+++ b/library/python/testing/yatest_common/yatest/common/errors.py
@@ -1,4 +1,4 @@
-import os
+import os
import sys
@@ -8,7 +8,7 @@ class RestartTestException(Exception):
super(RestartTestException, self).__init__(*args, **kwargs)
sys.stderr.write("##restart-test##\n")
sys.stderr.flush()
- os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
+ os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
class InfrastructureException(Exception):
@@ -17,4 +17,4 @@ class InfrastructureException(Exception):
super(InfrastructureException, self).__init__(*args, **kwargs)
sys.stderr.write("##infrastructure-error##\n")
sys.stderr.flush()
- os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
+ os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
diff --git a/library/python/testing/yatest_common/yatest/common/misc.py b/library/python/testing/yatest_common/yatest/common/misc.py
index 20d3725ac9..3c8d239a40 100644
--- a/library/python/testing/yatest_common/yatest/common/misc.py
+++ b/library/python/testing/yatest_common/yatest/common/misc.py
@@ -1,19 +1,19 @@
-import functools
-
-
-def first(it):
- for d in it:
- if d:
- return d
-
-
-def lazy(func):
- res = []
-
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- if not res:
- res.append(func(*args, **kwargs))
- return res[0]
-
- return wrapper
+import functools
+
+
+def first(it):
+ for d in it:
+ if d:
+ return d
+
+
+def lazy(func):
+ res = []
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not res:
+ res.append(func(*args, **kwargs))
+ return res[0]
+
+ return wrapper
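Note: the two helpers above are small enough to show in use (sketch only):

    from yatest.common.misc import first, lazy

    assert first([None, "", 0, "x"]) == "x"  # first truthy element, or None if there is none

    @lazy
    def expensive():
        # the wrapped function runs once; later calls return the cached result
        return 42

    assert expensive() == 42
    assert expensive() == 42  # second call reuses the cached value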
diff --git a/library/python/testing/yatest_common/yatest/common/network.py b/library/python/testing/yatest_common/yatest/common/network.py
index 37bcb1b8e0..e3e3b0abc2 100644
--- a/library/python/testing/yatest_common/yatest/common/network.py
+++ b/library/python/testing/yatest_common/yatest/common/network.py
@@ -1,47 +1,47 @@
# coding=utf-8
-
-import os
-import errno
-import socket
-import random
-import logging
-import platform
-import threading
-
+
+import os
+import errno
+import socket
+import random
+import logging
+import platform
+import threading
+
import six
-UI16MAXVAL = (1 << 16) - 1
-logger = logging.getLogger(__name__)
-
-
-class PortManagerException(Exception):
- pass
-
-
-class PortManager(object):
+UI16MAXVAL = (1 << 16) - 1
+logger = logging.getLogger(__name__)
+
+
+class PortManagerException(Exception):
+ pass
+
+
+class PortManager(object):
"""
See documentation here
-
+
https://wiki.yandex-team.ru/yatool/test/#python-acquire-ports
"""
- def __init__(self, sync_dir=None):
- self._sync_dir = sync_dir or os.environ.get('PORT_SYNC_PATH')
- if self._sync_dir:
- _makedirs(self._sync_dir)
-
- self._valid_range = get_valid_port_range()
- self._valid_port_count = self._count_valid_ports()
- self._filelocks = {}
- self._lock = threading.Lock()
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self.release()
-
- def get_port(self, port=0):
+ def __init__(self, sync_dir=None):
+ self._sync_dir = sync_dir or os.environ.get('PORT_SYNC_PATH')
+ if self._sync_dir:
+ _makedirs(self._sync_dir)
+
+ self._valid_range = get_valid_port_range()
+ self._valid_port_count = self._count_valid_ports()
+ self._filelocks = {}
+ self._lock = threading.Lock()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.release()
+
+ def get_port(self, port=0):
'''
Gets free TCP port
'''
@@ -64,8 +64,8 @@ class PortManager(object):
Gets one free port for use in both TCP and UDP protocols
'''
if port and self._no_random_ports():
- return port
-
+ return port
+
retries = 20
while retries > 0:
retries -= 1
@@ -73,199 +73,199 @@ class PortManager(object):
result_port = self.get_tcp_port()
if not self.is_port_free(result_port, socket.SOCK_DGRAM):
self.release_port(result_port)
- # Don't try to _capture_port(), it's already captured in the get_tcp_port()
+ # Don't try to _capture_port(), it's already captured in the get_tcp_port()
return result_port
raise Exception('Failed to find port')
def release_port(self, port):
- with self._lock:
- self._release_port_no_lock(port)
-
- def _release_port_no_lock(self, port):
- filelock = self._filelocks.pop(port, None)
- if filelock:
- filelock.release()
-
+ with self._lock:
+ self._release_port_no_lock(port)
+
+ def _release_port_no_lock(self, port):
+ filelock = self._filelocks.pop(port, None)
+ if filelock:
+ filelock.release()
+
def release(self):
- with self._lock:
- while self._filelocks:
- _, filelock = self._filelocks.popitem()
- if filelock:
- filelock.release()
-
- def get_port_range(self, start_port, count, random_start=True):
- assert count > 0
- if start_port and self._no_random_ports():
- return start_port
-
- candidates = []
-
- def drop_candidates():
- for port in candidates:
- self._release_port_no_lock(port)
- candidates[:] = []
-
- with self._lock:
- for attempts in six.moves.range(128):
- for left, right in self._valid_range:
- if right - left < count:
- continue
-
- if random_start:
- start = random.randint(left, right - ((right - left) // 2))
- else:
- start = left
- for probe_port in six.moves.range(start, right):
- if self._capture_port_no_lock(probe_port, socket.SOCK_STREAM):
- candidates.append(probe_port)
- else:
- drop_candidates()
-
- if len(candidates) == count:
- return candidates[0]
- # Can't find required number of ports without gap in the current range
- drop_candidates()
-
- raise PortManagerException("Failed to find valid port range (start_port: {} count: {}) (range: {} used: {})".format(
- start_port, count, self._valid_range, self._filelocks))
-
- def _count_valid_ports(self):
- res = 0
- for left, right in self._valid_range:
- res += right - left
- assert res, ('There are no available valid ports', self._valid_range)
- return res
-
+ with self._lock:
+ while self._filelocks:
+ _, filelock = self._filelocks.popitem()
+ if filelock:
+ filelock.release()
+
+ def get_port_range(self, start_port, count, random_start=True):
+ assert count > 0
+ if start_port and self._no_random_ports():
+ return start_port
+
+ candidates = []
+
+ def drop_candidates():
+ for port in candidates:
+ self._release_port_no_lock(port)
+ candidates[:] = []
+
+ with self._lock:
+ for attempts in six.moves.range(128):
+ for left, right in self._valid_range:
+ if right - left < count:
+ continue
+
+ if random_start:
+ start = random.randint(left, right - ((right - left) // 2))
+ else:
+ start = left
+ for probe_port in six.moves.range(start, right):
+ if self._capture_port_no_lock(probe_port, socket.SOCK_STREAM):
+ candidates.append(probe_port)
+ else:
+ drop_candidates()
+
+ if len(candidates) == count:
+ return candidates[0]
+ # Can't find required number of ports without gap in the current range
+ drop_candidates()
+
+ raise PortManagerException("Failed to find valid port range (start_port: {} count: {}) (range: {} used: {})".format(
+ start_port, count, self._valid_range, self._filelocks))
+
+ def _count_valid_ports(self):
+ res = 0
+ for left, right in self._valid_range:
+ res += right - left
+ assert res, ('There are no available valid ports', self._valid_range)
+ return res
+
def _get_port(self, port, sock_type):
if port and self._no_random_ports():
return port
- if len(self._filelocks) >= self._valid_port_count:
- raise PortManagerException("All valid ports are taken ({}): {}".format(self._valid_range, self._filelocks))
-
- salt = random.randint(0, UI16MAXVAL)
+ if len(self._filelocks) >= self._valid_port_count:
+ raise PortManagerException("All valid ports are taken ({}): {}".format(self._valid_range, self._filelocks))
+
+ salt = random.randint(0, UI16MAXVAL)
for attempt in six.moves.range(self._valid_port_count):
- probe_port = (salt + attempt) % self._valid_port_count
-
- for left, right in self._valid_range:
- if probe_port >= (right - left):
- probe_port -= right - left
- else:
- probe_port += left
- break
- if not self._capture_port(probe_port, sock_type):
- continue
- return probe_port
-
- raise PortManagerException("Failed to find valid port (range: {} used: {})".format(self._valid_range, self._filelocks))
-
- def _capture_port(self, port, sock_type):
- with self._lock:
- return self._capture_port_no_lock(port, sock_type)
-
+ probe_port = (salt + attempt) % self._valid_port_count
+
+ for left, right in self._valid_range:
+ if probe_port >= (right - left):
+ probe_port -= right - left
+ else:
+ probe_port += left
+ break
+ if not self._capture_port(probe_port, sock_type):
+ continue
+ return probe_port
+
+ raise PortManagerException("Failed to find valid port (range: {} used: {})".format(self._valid_range, self._filelocks))
+
+ def _capture_port(self, port, sock_type):
+ with self._lock:
+ return self._capture_port_no_lock(port, sock_type)
+
def is_port_free(self, port, sock_type=socket.SOCK_STREAM):
- sock = socket.socket(socket.AF_INET6, sock_type)
- try:
- sock.bind(('::', port))
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- except socket.error as e:
- if e.errno == errno.EADDRINUSE:
- return False
- raise
- finally:
- sock.close()
- return True
-
- def _capture_port_no_lock(self, port, sock_type):
- if port in self._filelocks:
- return False
-
- filelock = None
- if self._sync_dir:
-            # yatest.common should try to be hermetic and not have peerdirs
- # otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work
- import library.python.filelock
-
- filelock = library.python.filelock.FileLock(os.path.join(self._sync_dir, str(port)))
- if not filelock.acquire(blocking=False):
- return False
+ sock = socket.socket(socket.AF_INET6, sock_type)
+ try:
+ sock.bind(('::', port))
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ except socket.error as e:
+ if e.errno == errno.EADDRINUSE:
+ return False
+ raise
+ finally:
+ sock.close()
+ return True
+
+ def _capture_port_no_lock(self, port, sock_type):
+ if port in self._filelocks:
+ return False
+
+ filelock = None
+ if self._sync_dir:
+            # yatest.common should try to be hermetic and not have peerdirs
+ # otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work
+ import library.python.filelock
+
+ filelock = library.python.filelock.FileLock(os.path.join(self._sync_dir, str(port)))
+ if not filelock.acquire(blocking=False):
+ return False
if self.is_port_free(port, sock_type):
- self._filelocks[port] = filelock
- return True
- else:
- filelock.release()
- return False
-
+ self._filelocks[port] = filelock
+ return True
+ else:
+ filelock.release()
+ return False
+
if self.is_port_free(port, sock_type):
- self._filelocks[port] = filelock
- return True
- if filelock:
- filelock.release()
- return False
-
+ self._filelocks[port] = filelock
+ return True
+ if filelock:
+ filelock.release()
+ return False
+
def _no_random_ports(self):
return os.environ.get("NO_RANDOM_PORTS")
-
-
-def get_valid_port_range():
- first_valid = 1025
- last_valid = UI16MAXVAL
-
- given_range = os.environ.get('VALID_PORT_RANGE')
- if given_range and ':' in given_range:
- return [list(int(x) for x in given_range.split(':', 2))]
-
- first_eph, last_eph = get_ephemeral_range()
- first_invalid = max(first_eph, first_valid)
- last_invalid = min(last_eph, last_valid)
-
- ranges = []
- if first_invalid > first_valid:
- ranges.append((first_valid, first_invalid - 1))
- if last_invalid < last_valid:
- ranges.append((last_invalid + 1, last_valid))
- return ranges
-
-
-def get_ephemeral_range():
- if platform.system() == 'Linux':
- filename = "/proc/sys/net/ipv4/ip_local_port_range"
- if os.path.exists(filename):
- with open(filename) as afile:
+
+
+def get_valid_port_range():
+ first_valid = 1025
+ last_valid = UI16MAXVAL
+
+ given_range = os.environ.get('VALID_PORT_RANGE')
+ if given_range and ':' in given_range:
+ return [list(int(x) for x in given_range.split(':', 2))]
+
+ first_eph, last_eph = get_ephemeral_range()
+ first_invalid = max(first_eph, first_valid)
+ last_invalid = min(last_eph, last_valid)
+
+ ranges = []
+ if first_invalid > first_valid:
+ ranges.append((first_valid, first_invalid - 1))
+ if last_invalid < last_valid:
+ ranges.append((last_invalid + 1, last_valid))
+ return ranges
+
+
+def get_ephemeral_range():
+ if platform.system() == 'Linux':
+ filename = "/proc/sys/net/ipv4/ip_local_port_range"
+ if os.path.exists(filename):
+ with open(filename) as afile:
data = afile.read(1024) # fix for musl
- port_range = tuple(map(int, data.strip().split()))
- if len(port_range) == 2:
- return port_range
- else:
- logger.warning("Bad ip_local_port_range format: '%s'. Going to use IANA suggestion", data)
- elif platform.system() == 'Darwin':
- first = _sysctlbyname_uint("net.inet.ip.portrange.first")
- last = _sysctlbyname_uint("net.inet.ip.portrange.last")
- if first and last:
- return first, last
- # IANA suggestion
- return (1 << 15) + (1 << 14), UI16MAXVAL
-
-
-def _sysctlbyname_uint(name):
- try:
- from ctypes import CDLL, c_uint, byref
- from ctypes.util import find_library
- except ImportError:
- return
-
- libc = CDLL(find_library("c"))
- size = c_uint(0)
- res = c_uint(0)
- libc.sysctlbyname(name, None, byref(size), None, 0)
- libc.sysctlbyname(name, byref(res), byref(size), None, 0)
- return res.value
-
-
-def _makedirs(path):
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno == errno.EEXIST:
- return
- raise
+ port_range = tuple(map(int, data.strip().split()))
+ if len(port_range) == 2:
+ return port_range
+ else:
+ logger.warning("Bad ip_local_port_range format: '%s'. Going to use IANA suggestion", data)
+ elif platform.system() == 'Darwin':
+ first = _sysctlbyname_uint("net.inet.ip.portrange.first")
+ last = _sysctlbyname_uint("net.inet.ip.portrange.last")
+ if first and last:
+ return first, last
+ # IANA suggestion
+ return (1 << 15) + (1 << 14), UI16MAXVAL
+
+
+def _sysctlbyname_uint(name):
+ try:
+ from ctypes import CDLL, c_uint, byref
+ from ctypes.util import find_library
+ except ImportError:
+ return
+
+ libc = CDLL(find_library("c"))
+ size = c_uint(0)
+ res = c_uint(0)
+ libc.sysctlbyname(name, None, byref(size), None, 0)
+ libc.sysctlbyname(name, byref(res), byref(size), None, 0)
+ return res.value
+
+
+def _makedirs(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ return
+ raise
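Note: a usage sketch for the PortManager shown above (not part of the diff):

    from yatest.common.network import PortManager

    # The context manager releases every captured port (and its file lock) on exit
    with PortManager() as pm:
        port = pm.get_port()   # a free TCP port outside the ephemeral range
        # ... start a server on `port` ...
        pm.release_port(port)  # ports can also be released individually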
diff --git a/library/python/testing/yatest_common/yatest/common/path.py b/library/python/testing/yatest_common/yatest/common/path.py
index 6fed7dda8a..d4f52f6333 100644
--- a/library/python/testing/yatest_common/yatest/common/path.py
+++ b/library/python/testing/yatest_common/yatest/common/path.py
@@ -1,12 +1,12 @@
# coding=utf-8
import errno
-import os
-import shutil
-import contextlib
-
+import os
+import shutil
+import contextlib
+
import library.python.fs as lpf
-
+
def replace_in_file(path, old, new):
"""
@@ -21,32 +21,32 @@ def replace_in_file(path, old, new):
lpf.ensure_removed(path)
with open(path, 'w') as fp:
fp.write(content.replace(old, new))
-
-
-@contextlib.contextmanager
-def change_dir(path):
- old = os.getcwd()
- try:
- os.chdir(path)
- yield path
- finally:
- os.chdir(old)
-
-
+
+
+@contextlib.contextmanager
+def change_dir(path):
+ old = os.getcwd()
+ try:
+ os.chdir(path)
+ yield path
+ finally:
+ os.chdir(old)
+
+
def copytree(src, dst, symlinks=False, ignore=None, postprocessing=None):
- '''
- Copy an entire directory of files into an existing directory
-    instead of raising an exception as shutil.copytree does
- '''
+ '''
+ Copy an entire directory of files into an existing directory
+    instead of raising an exception as shutil.copytree does
+ '''
if not os.path.exists(dst) and os.path.isdir(src):
os.makedirs(dst)
- for item in os.listdir(src):
- s = os.path.join(src, item)
- d = os.path.join(dst, item)
- if os.path.isdir(s):
- shutil.copytree(s, d, symlinks, ignore)
- else:
- shutil.copy2(s, d)
+ for item in os.listdir(src):
+ s = os.path.join(src, item)
+ d = os.path.join(dst, item)
+ if os.path.isdir(s):
+ shutil.copytree(s, d, symlinks, ignore)
+ else:
+ shutil.copy2(s, d)
if postprocessing:
postprocessing(dst, False)
for root, dirs, files in os.walk(dst):
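Note: usage sketch for the path helpers above:

    from yatest.common.path import change_dir, copytree

    # Temporarily switch the working directory and restore it afterwards
    with change_dir("/tmp"):
        pass

    # Unlike shutil.copytree, the destination directory may already exist
    copytree("src_dir", "dst_dir")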
diff --git a/library/python/testing/yatest_common/yatest/common/process.py b/library/python/testing/yatest_common/yatest/common/process.py
index a8bcc21f51..f9eeb16525 100644
--- a/library/python/testing/yatest_common/yatest/common/process.py
+++ b/library/python/testing/yatest_common/yatest/common/process.py
@@ -1,10 +1,10 @@
-# coding: utf-8
-
+# coding: utf-8
+
import os
-import re
+import re
import time
import signal
-import shutil
+import shutil
import logging
import tempfile
import subprocess
@@ -13,19 +13,19 @@ import distutils.version
import six
-try:
- # yatest.common should try to be hermetic, otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work.
- import library.python.cores as cores
-except ImportError:
- cores = None
-
+try:
+ # yatest.common should try to be hermetic, otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work.
+ import library.python.cores as cores
+except ImportError:
+ cores = None
+
from . import runtime
from . import path
from . import environment
MAX_OUT_LEN = 1000 * 1000 # 1 mb
-MAX_MESSAGE_LEN = 1500
+MAX_MESSAGE_LEN = 1500
SANITIZER_ERROR_PATTERN = br": ([A-Z][\w]+Sanitizer)"
GLIBC_PATTERN = re.compile(r"\S+@GLIBC_([0-9.]+)")
yatest_logger = logging.getLogger("ya.test")
@@ -55,12 +55,12 @@ class ExecutionError(Exception):
command=command,
code=execution_result.exit_code,
err=_format_error(execution_result.std_err))
- if cores:
- if execution_result.backtrace:
- message += "Backtrace:\n[[rst]]{}[[bad]]\n".format(cores.colorize_backtrace(execution_result._backtrace))
- else:
- message += "Backtrace is not available: module cores isn't available"
-
+ if cores:
+ if execution_result.backtrace:
+ message += "Backtrace:\n[[rst]]{}[[bad]]\n".format(cores.colorize_backtrace(execution_result._backtrace))
+ else:
+ message += "Backtrace is not available: module cores isn't available"
+
super(ExecutionError, self).__init__(message)
self.execution_result = execution_result
@@ -85,31 +85,31 @@ class SignalInterruptionError(Exception):
self.res = None
-class InvalidCommandError(Exception):
- pass
-
-
+class InvalidCommandError(Exception):
+ pass
+
+
class _Execution(object):
- def __init__(self, command, process, out_file, err_file, process_progress_listener=None, cwd=None, collect_cores=True, check_sanitizer=True, started=0, user_stdout=False, user_stderr=False):
+ def __init__(self, command, process, out_file, err_file, process_progress_listener=None, cwd=None, collect_cores=True, check_sanitizer=True, started=0, user_stdout=False, user_stderr=False):
self._command = command
self._process = process
self._out_file = out_file
self._err_file = err_file
self._std_out = None
self._std_err = None
- self._elapsed = None
- self._start = time.time()
+ self._elapsed = None
+ self._start = time.time()
self._process_progress_listener = process_progress_listener
- self._cwd = cwd or os.getcwd()
- self._collect_cores = collect_cores
- self._backtrace = ''
- self._check_sanitizer = check_sanitizer
+ self._cwd = cwd or os.getcwd()
+ self._collect_cores = collect_cores
+ self._backtrace = ''
+ self._check_sanitizer = check_sanitizer
self._metrics = {}
- self._started = started
- self._user_stdout = bool(user_stdout)
- self._user_stderr = bool(user_stderr)
- self._exit_code = None
+ self._started = started
+ self._user_stdout = bool(user_stdout)
+ self._user_stderr = bool(user_stderr)
+ self._exit_code = None
if process_progress_listener:
process_progress_listener.open(command, process, out_file, err_file)
@@ -122,21 +122,21 @@ class _Execution(object):
self._save_outputs(False)
_kill_process_tree(self._process.pid)
self._clean_files()
- # DEVTOOLS-2347
- yatest_logger.debug("Process status before wait_for: %s", self.running)
- try:
- wait_for(lambda: not self.running, timeout=5, fail_message="Could not kill process {}".format(self._process.pid), sleep_time=.1)
- except TimeoutError:
- yatest_logger.debug("Process status after wait_for: %s", self.running)
- yatest_logger.debug("Process %d info: %s", self._process.pid, _get_proc_tree_info([self._process.pid]))
- raise
+ # DEVTOOLS-2347
+ yatest_logger.debug("Process status before wait_for: %s", self.running)
+ try:
+ wait_for(lambda: not self.running, timeout=5, fail_message="Could not kill process {}".format(self._process.pid), sleep_time=.1)
+ except TimeoutError:
+ yatest_logger.debug("Process status after wait_for: %s", self.running)
+ yatest_logger.debug("Process %d info: %s", self._process.pid, _get_proc_tree_info([self._process.pid]))
+ raise
else:
raise InvalidExecutionStateError("Cannot kill a stopped process")
- def terminate(self):
- if self.running:
- self._process.terminate()
-
+ def terminate(self):
+ if self.running:
+ self._process.terminate()
+
@property
def process(self):
return self._process
@@ -146,56 +146,56 @@ class _Execution(object):
return self._command
@property
- def returncode(self):
- return self.exit_code
-
- @property
+ def returncode(self):
+ return self.exit_code
+
+ @property
def exit_code(self):
- """
- Deprecated, use returncode
- """
- if self._exit_code is None:
- self._exit_code = self._process.returncode
- return self._exit_code
-
- @property
- def stdout(self):
- return self.std_out
+ """
+ Deprecated, use returncode
+ """
+ if self._exit_code is None:
+ self._exit_code = self._process.returncode
+ return self._exit_code
@property
+ def stdout(self):
+ return self.std_out
+
+ @property
def std_out(self):
- """
- Deprecated, use stdout
- """
+ """
+ Deprecated, use stdout
+ """
if self._std_out is not None:
return self._std_out
- if self._process.stdout and not self._user_stdout:
- self._std_out = self._process.stdout.read()
- return self._std_out
-
- @property
- def stderr(self):
- return self.std_err
+ if self._process.stdout and not self._user_stdout:
+ self._std_out = self._process.stdout.read()
+ return self._std_out
@property
+ def stderr(self):
+ return self.std_err
+
+ @property
def std_err(self):
- """
- Deprecated, use stderr
- """
+ """
+ Deprecated, use stderr
+ """
if self._std_err is not None:
return self._std_err
- if self._process.stderr and not self._user_stderr:
- self._std_err = self._process.stderr.read()
- return self._std_err
-
- @property
- def elapsed(self):
- return self._elapsed
-
- @property
- def backtrace(self):
- return self._backtrace
-
+ if self._process.stderr and not self._user_stderr:
+ self._std_err = self._process.stderr.read()
+ return self._std_err
+
+ @property
+ def elapsed(self):
+ return self._elapsed
+
+ @property
+ def backtrace(self):
+ return self._backtrace
+
@property
def metrics(self):
return self._metrics
@@ -204,25 +204,25 @@ class _Execution(object):
if self._process_progress_listener:
self._process_progress_listener()
self._process_progress_listener.close()
- if not self._user_stdout:
- if self._out_file is None:
- pass
- elif self._out_file != subprocess.PIPE:
- self._out_file.flush()
- self._out_file.seek(0, os.SEEK_SET)
- self._std_out = self._out_file.read()
- else:
- self._std_out = self._process.stdout.read()
- if not self._user_stderr:
- if self._err_file is None:
- pass
- elif self._err_file != subprocess.PIPE:
- self._err_file.flush()
- self._err_file.seek(0, os.SEEK_SET)
- self._std_err = self._err_file.read()
- else:
- self._std_err = self._process.stderr.read()
-
+ if not self._user_stdout:
+ if self._out_file is None:
+ pass
+ elif self._out_file != subprocess.PIPE:
+ self._out_file.flush()
+ self._out_file.seek(0, os.SEEK_SET)
+ self._std_out = self._out_file.read()
+ else:
+ self._std_out = self._process.stdout.read()
+ if not self._user_stderr:
+ if self._err_file is None:
+ pass
+ elif self._err_file != subprocess.PIPE:
+ self._err_file.flush()
+ self._err_file.seek(0, os.SEEK_SET)
+ self._std_err = self._err_file.read()
+ else:
+ self._std_err = self._process.stderr.read()
+
if clean_files:
self._clean_files()
yatest_logger.debug("Command (pid %s) rc: %s", self._process.pid, self.exit_code)
@@ -245,45 +245,45 @@ class _Execution(object):
yatest_logger.debug("Command (pid %s) errors:\n%s", self._process.pid, truncate(printable_std_err, MAX_OUT_LEN))
def _clean_files(self):
- if self._err_file and not self._user_stderr and self._err_file != subprocess.PIPE:
- self._err_file.close()
- self._err_file = None
- if self._out_file and not self._user_stdout and self._out_file != subprocess.PIPE:
- self._out_file.close()
- self._out_file = None
-
- def _recover_core(self):
- core_path = cores.recover_core_dump_file(self.command[0], self._cwd, self.process.pid)
- if core_path:
- # Core dump file recovering may be disabled (for distbuild for example) - produce only bt
- store_cores = runtime._get_ya_config().collect_cores
- if store_cores:
- new_core_path = path.get_unique_file_path(runtime.output_path(), "{}.{}.core".format(os.path.basename(self.command[0]), self._process.pid))
- # Copy core dump file, because it may be overwritten
+ if self._err_file and not self._user_stderr and self._err_file != subprocess.PIPE:
+ self._err_file.close()
+ self._err_file = None
+ if self._out_file and not self._user_stdout and self._out_file != subprocess.PIPE:
+ self._out_file.close()
+ self._out_file = None
+
+ def _recover_core(self):
+ core_path = cores.recover_core_dump_file(self.command[0], self._cwd, self.process.pid)
+ if core_path:
+ # Core dump file recovering may be disabled (for distbuild for example) - produce only bt
+ store_cores = runtime._get_ya_config().collect_cores
+ if store_cores:
+ new_core_path = path.get_unique_file_path(runtime.output_path(), "{}.{}.core".format(os.path.basename(self.command[0]), self._process.pid))
+ # Copy core dump file, because it may be overwritten
yatest_logger.debug("Coping core dump file from '%s' to the '%s'", core_path, new_core_path)
- shutil.copyfile(core_path, new_core_path)
- core_path = new_core_path
-
- bt_filename = None
- pbt_filename = None
-
- if os.path.exists(runtime.gdb_path()):
- self._backtrace = cores.get_gdb_full_backtrace(self.command[0], core_path, runtime.gdb_path())
- bt_filename = path.get_unique_file_path(runtime.output_path(), "{}.{}.backtrace".format(os.path.basename(self.command[0]), self._process.pid))
+ shutil.copyfile(core_path, new_core_path)
+ core_path = new_core_path
+
+ bt_filename = None
+ pbt_filename = None
+
+ if os.path.exists(runtime.gdb_path()):
+ self._backtrace = cores.get_gdb_full_backtrace(self.command[0], core_path, runtime.gdb_path())
+ bt_filename = path.get_unique_file_path(runtime.output_path(), "{}.{}.backtrace".format(os.path.basename(self.command[0]), self._process.pid))
with open(bt_filename, "wb") as afile:
afile.write(six.ensure_binary(self._backtrace))
- # generate pretty html version of backtrace aka Tri Korochki
- pbt_filename = bt_filename + ".html"
+ # generate pretty html version of backtrace aka Tri Korochki
+ pbt_filename = bt_filename + ".html"
backtrace_to_html(bt_filename, pbt_filename)
-
- if store_cores:
- runtime._register_core(os.path.basename(self.command[0]), self.command[0], core_path, bt_filename, pbt_filename)
- else:
- runtime._register_core(os.path.basename(self.command[0]), None, None, bt_filename, pbt_filename)
-
+
+ if store_cores:
+ runtime._register_core(os.path.basename(self.command[0]), self.command[0], core_path, bt_filename, pbt_filename)
+ else:
+ runtime._register_core(os.path.basename(self.command[0]), None, None, bt_filename, pbt_filename)
+
def wait(self, check_exit_code=True, timeout=None, on_timeout=None):
def _wait():
- finished = None
+ finished = None
interrupted = False
try:
if hasattr(os, "wait4"):
@@ -293,7 +293,7 @@ class _Execution(object):
else:
# PEP 475
pid, sts, rusage = os.wait4(self._process.pid, 0)
- finished = time.time()
+ finished = time.time()
self._process._handle_exitstatus(sts)
for field in [
"ru_idrss",
@@ -328,46 +328,46 @@ class _Execution(object):
if not interrupted:
self._process.wait() # this has to be here unconditionally, so that all process properties are set
- if not finished:
- finished = time.time()
- self._metrics["wtime"] = round(finished - self._started, 3)
-
+ if not finished:
+ finished = time.time()
+ self._metrics["wtime"] = round(finished - self._started, 3)
+
try:
- if timeout:
- process_is_finished = lambda: not self.running
- fail_message = "Command '%s' stopped by %d seconds timeout" % (self._command, timeout)
+ if timeout:
+ process_is_finished = lambda: not self.running
+ fail_message = "Command '%s' stopped by %d seconds timeout" % (self._command, timeout)
try:
wait_for(process_is_finished, timeout, fail_message, sleep_time=0.1, on_check_condition=self._process_progress_listener)
except TimeoutError as e:
if on_timeout:
- yatest_logger.debug("Calling user specified on_timeout function")
+ yatest_logger.debug("Calling user specified on_timeout function")
try:
on_timeout(self, timeout)
except Exception:
yatest_logger.exception("Exception while calling on_timeout")
raise ExecutionTimeoutError(self, str(e))
- # Wait should be always called here, it finalizes internal states of its process and sets up return code
+ # Wait should be always called here, it finalizes internal states of its process and sets up return code
_wait()
- except BaseException as e:
+ except BaseException as e:
_kill_process_tree(self._process.pid)
_wait()
yatest_logger.debug("Command exception: %s", e)
- raise
+ raise
finally:
- self._elapsed = time.time() - self._start
- self._save_outputs()
+ self._elapsed = time.time() - self._start
+ self._save_outputs()
self.verify_no_coredumps()
- self._finalise(check_exit_code)
-
- def _finalise(self, check_exit_code):
+ self._finalise(check_exit_code)
+
+ def _finalise(self, check_exit_code):
# Set the signal (negative number) which caused the process to exit
if check_exit_code and self.exit_code != 0:
yatest_logger.error("Execution failed with exit code: %s\n\t,std_out:%s\n\tstd_err:%s\n",
self.exit_code, truncate(self.std_out, MAX_OUT_LEN), truncate(self.std_err, MAX_OUT_LEN))
raise ExecutionError(self)
- # Don't search for sanitize errors if stderr was redirected
+ # Don't search for sanitize errors if stderr was redirected
self.verify_sanitize_errors()
def verify_no_coredumps(self):
@@ -375,44 +375,44 @@ class _Execution(object):
Verify there is no coredump from this binary. If there is then report backtrace.
"""
if self.exit_code < 0 and self._collect_cores:
- if cores:
- try:
- self._recover_core()
- except Exception:
- yatest_logger.exception("Exception while recovering core")
- else:
- yatest_logger.warning("Core dump file recovering is skipped: module cores isn't available")
+ if cores:
+ try:
+ self._recover_core()
+ except Exception:
+ yatest_logger.exception("Exception while recovering core")
+ else:
+ yatest_logger.warning("Core dump file recovering is skipped: module cores isn't available")
def verify_sanitize_errors(self):
"""
Verify there are no sanitizer (ASAN, MSAN, TSAN, etc) errors for this binary. If there are any report them.
"""
- if self._std_err and self._check_sanitizer and runtime._get_ya_config().sanitizer_extra_checks:
- build_path = runtime.build_path()
- if self.command[0].startswith(build_path):
- match = re.search(SANITIZER_ERROR_PATTERN, self._std_err)
- if match:
- yatest_logger.error("%s sanitizer found errors:\n\tstd_err:%s\n", match.group(1), truncate(self.std_err, MAX_OUT_LEN))
- raise ExecutionError(self)
+ if self._std_err and self._check_sanitizer and runtime._get_ya_config().sanitizer_extra_checks:
+ build_path = runtime.build_path()
+ if self.command[0].startswith(build_path):
+ match = re.search(SANITIZER_ERROR_PATTERN, self._std_err)
+ if match:
+ yatest_logger.error("%s sanitizer found errors:\n\tstd_err:%s\n", match.group(1), truncate(self.std_err, MAX_OUT_LEN))
+ raise ExecutionError(self)
else:
yatest_logger.debug("No sanitizer errors found")
- else:
+ else:
yatest_logger.debug("'%s' doesn't belong to '%s' - no check for sanitize errors", self.command[0], build_path)
-
-def on_timeout_gen_coredump(exec_obj, _):
- """
- Function can be passed to the execute(..., timeout=X, on_timeout=on_timeout_gen_coredump)
-    to generate core dump file, backtrace and html-version of the backtrace in case of timeout.
- All files will be available in the testing_out_stuff and via links.
- """
- try:
- os.kill(exec_obj.process.pid, signal.SIGQUIT)
- except OSError:
- # process might be already terminated
- pass
-
-
+
+def on_timeout_gen_coredump(exec_obj, _):
+ """
+ Function can be passed to the execute(..., timeout=X, on_timeout=on_timeout_gen_coredump)
+    to generate core dump file, backtrace and html-version of the backtrace in case of timeout.
+ All files will be available in the testing_out_stuff and via links.
+ """
+ try:
+ os.kill(exec_obj.process.pid, signal.SIGQUIT)
+ except OSError:
+ # process might be already terminated
+ pass
+
+
def execute(
command, check_exit_code=True,
shell=False, timeout=None,
@@ -421,7 +421,7 @@ def execute(
creationflags=0, wait=True,
process_progress_listener=None, close_fds=False,
collect_cores=True, check_sanitizer=True, preexec_fn=None, on_timeout=None,
- executor=_Execution,
+ executor=_Execution,
):
"""
Executes a command
@@ -438,26 +438,26 @@ def execute(
:param wait: should wait until the command finishes
:param process_progress_listener=object that is polled while execution is in progress
     :param close_fds: subprocess.Popen close_fds arg
- :param collect_cores: recover core dump files if shell == False
- :param check_sanitizer: raise ExecutionError if stderr contains sanitize errors
+ :param collect_cores: recover core dump files if shell == False
+ :param check_sanitizer: raise ExecutionError if stderr contains sanitize errors
     :param preexec_fn: subprocess.Popen preexec_fn arg
:param on_timeout: on_timeout(<execution object>, <timeout value>) callback
:return _Execution: Execution object
"""
- if env is None:
- env = os.environ.copy()
+ if env is None:
+ env = os.environ.copy()
else:
- # Certain environment variables must be present for programs to work properly.
- # For more info see DEVTOOLSSUPPORT-4907
- mandatory_env_name = 'YA_MANDATORY_ENV_VARS'
- mandatory_vars = env.get(mandatory_env_name, os.environ.get(mandatory_env_name)) or ''
- if mandatory_vars:
- env[mandatory_env_name] = mandatory_vars
- mandatory_system_vars = filter(None, mandatory_vars.split(':'))
- else:
- mandatory_system_vars = ['TMPDIR']
-
+ # Certain environment variables must be present for programs to work properly.
+ # For more info see DEVTOOLSSUPPORT-4907
+ mandatory_env_name = 'YA_MANDATORY_ENV_VARS'
+ mandatory_vars = env.get(mandatory_env_name, os.environ.get(mandatory_env_name)) or ''
+ if mandatory_vars:
+ env[mandatory_env_name] = mandatory_vars
+ mandatory_system_vars = filter(None, mandatory_vars.split(':'))
+ else:
+ mandatory_system_vars = ['TMPDIR']
+
for var in mandatory_system_vars:
if var not in env and var in os.environ:
env[var] = os.environ[var]
@@ -465,13 +465,13 @@ def execute(
if not wait and timeout is not None:
raise ValueError("Incompatible arguments 'timeout' and wait=False")
- # if subprocess.PIPE in [stdout, stderr]:
-    #     raise ValueError("Don't use pipe to obtain stream data - it may lead to a deadlock")
-
+ # if subprocess.PIPE in [stdout, stderr]:
+    #     raise ValueError("Don't use pipe to obtain stream data - it may lead to a deadlock")
+
def get_out_stream(stream, default_name):
if stream is None:
# No stream is supplied: open new temp file
- return _get_command_output_file(command, default_name), False
+ return _get_command_output_file(command, default_name), False
if isinstance(stream, six.string_types):
# User filename is supplied: open file for writing
@@ -490,86 +490,86 @@ def execute(
if shell and type(command) == list:
command = " ".join(command)
- if shell:
- collect_cores = False
- check_sanitizer = False
- else:
- if isinstance(command, (list, tuple)):
- executable = command[0]
- else:
- executable = command
- if os.path.isabs(executable):
- if not os.path.isfile(executable) and not os.path.isfile(executable + ".exe"):
- exists = os.path.exists(executable)
- if exists:
- stat = os.stat(executable)
- else:
- stat = None
- raise InvalidCommandError("Target program is not a file: {} (exists: {} stat: {})".format(executable, exists, stat))
- if not os.access(executable, os.X_OK) and not os.access(executable + ".exe", os.X_OK):
- raise InvalidCommandError("Target program is not executable: {}".format(executable))
-
- if check_sanitizer:
- env["LSAN_OPTIONS"] = environment.extend_env_var(os.environ, "LSAN_OPTIONS", "exitcode=100")
-
- if stdin:
- name = "PIPE" if stdin == subprocess.PIPE else stdin.name
- yatest_logger.debug("Executing '%s' with input '%s' in '%s'", command, name, cwd)
- else:
+ if shell:
+ collect_cores = False
+ check_sanitizer = False
+ else:
+ if isinstance(command, (list, tuple)):
+ executable = command[0]
+ else:
+ executable = command
+ if os.path.isabs(executable):
+ if not os.path.isfile(executable) and not os.path.isfile(executable + ".exe"):
+ exists = os.path.exists(executable)
+ if exists:
+ stat = os.stat(executable)
+ else:
+ stat = None
+ raise InvalidCommandError("Target program is not a file: {} (exists: {} stat: {})".format(executable, exists, stat))
+ if not os.access(executable, os.X_OK) and not os.access(executable + ".exe", os.X_OK):
+ raise InvalidCommandError("Target program is not executable: {}".format(executable))
+
+ if check_sanitizer:
+ env["LSAN_OPTIONS"] = environment.extend_env_var(os.environ, "LSAN_OPTIONS", "exitcode=100")
+
+ if stdin:
+ name = "PIPE" if stdin == subprocess.PIPE else stdin.name
+ yatest_logger.debug("Executing '%s' with input '%s' in '%s'", command, name, cwd)
+ else:
yatest_logger.debug("Executing '%s' in '%s'", command, cwd)
- # XXX
-
- started = time.time()
- process = subprocess.Popen(
- command, shell=shell, universal_newlines=True,
- stdout=out_file, stderr=err_file, stdin=in_file,
- cwd=cwd, env=env, creationflags=creationflags, close_fds=close_fds, preexec_fn=preexec_fn,
- )
- yatest_logger.debug("Command pid: %s", process.pid)
-
- res = executor(command, process, out_file, err_file, process_progress_listener, cwd, collect_cores, check_sanitizer, started, user_stdout=user_stdout, user_stderr=user_stderr)
+ # XXX
+
+ started = time.time()
+ process = subprocess.Popen(
+ command, shell=shell, universal_newlines=True,
+ stdout=out_file, stderr=err_file, stdin=in_file,
+ cwd=cwd, env=env, creationflags=creationflags, close_fds=close_fds, preexec_fn=preexec_fn,
+ )
+ yatest_logger.debug("Command pid: %s", process.pid)
+
+ res = executor(command, process, out_file, err_file, process_progress_listener, cwd, collect_cores, check_sanitizer, started, user_stdout=user_stdout, user_stderr=user_stderr)
if wait:
res.wait(check_exit_code, timeout, on_timeout)
return res
-def _get_command_output_file(cmd, ext):
- parts = [get_command_name(cmd)]
- if 'YA_RETRY_INDEX' in os.environ:
- parts.append('retry{}'.format(os.environ.get('YA_RETRY_INDEX')))
- if int(os.environ.get('YA_SPLIT_COUNT', '0')) > 1:
- parts.append('chunk{}'.format(os.environ.get('YA_SPLIT_INDEX', '0')))
-
- filename = '.'.join(parts + [ext])
- try:
- # if execution is performed from test, save out / err to the test logs dir
- import yatest.common
+def _get_command_output_file(cmd, ext):
+ parts = [get_command_name(cmd)]
+ if 'YA_RETRY_INDEX' in os.environ:
+ parts.append('retry{}'.format(os.environ.get('YA_RETRY_INDEX')))
+ if int(os.environ.get('YA_SPLIT_COUNT', '0')) > 1:
+ parts.append('chunk{}'.format(os.environ.get('YA_SPLIT_INDEX', '0')))
+
+ filename = '.'.join(parts + [ext])
+ try:
+ # if execution is performed from test, save out / err to the test logs dir
+ import yatest.common
import library.python.pytest.plugins.ya
if getattr(library.python.pytest.plugins.ya, 'pytest_config', None) is None:
- raise ImportError("not in test")
- filename = path.get_unique_file_path(yatest.common.output_path(), filename)
- yatest_logger.debug("Command %s will be placed to %s", ext, os.path.basename(filename))
- return open(filename, "wb+")
- except ImportError:
- return tempfile.NamedTemporaryFile(delete=False, suffix=filename)
-
-
-def _get_proc_tree_info(pids):
- if os.name == 'nt':
- return 'Not supported'
- else:
- stdout, _ = subprocess.Popen(["/bin/ps", "-wufp"] + [str(p) for p in pids], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- return stdout
-
-
-def py_execute(
- command, check_exit_code=True,
- shell=False, timeout=None,
- cwd=None, env=None,
- stdin=None, stdout=None, stderr=None,
- creationflags=0, wait=True,
- process_progress_listener=None, close_fds=False
-):
+ raise ImportError("not in test")
+ filename = path.get_unique_file_path(yatest.common.output_path(), filename)
+ yatest_logger.debug("Command %s will be placed to %s", ext, os.path.basename(filename))
+ return open(filename, "wb+")
+ except ImportError:
+ return tempfile.NamedTemporaryFile(delete=False, suffix=filename)
+
+
+def _get_proc_tree_info(pids):
+ if os.name == 'nt':
+ return 'Not supported'
+ else:
+ stdout, _ = subprocess.Popen(["/bin/ps", "-wufp"] + [str(p) for p in pids], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+ return stdout
+
+
+def py_execute(
+ command, check_exit_code=True,
+ shell=False, timeout=None,
+ cwd=None, env=None,
+ stdin=None, stdout=None, stderr=None,
+ creationflags=0, wait=True,
+ process_progress_listener=None, close_fds=False
+):
"""
Executes a command with the arcadia python
:param command: command to pass to python
@@ -635,50 +635,50 @@ def _kill_process_tree(process_pid, target_pid_signal=None):
if os.name == 'nt':
_win_kill_process_tree(process_pid)
else:
- _nix_kill_process_tree(process_pid, target_pid_signal)
-
-
-def _nix_get_proc_children(pid):
- try:
- cmd = ["pgrep", "-P", str(pid)]
- return [int(p) for p in subprocess.check_output(cmd).split()]
- except Exception:
- return []
-
-
-def _get_binname(pid):
- try:
- return os.path.basename(os.readlink('/proc/{}/exe'.format(pid)))
- except Exception as e:
- return "error({})".format(e)
-
-
+ _nix_kill_process_tree(process_pid, target_pid_signal)
+
+
+def _nix_get_proc_children(pid):
+ try:
+ cmd = ["pgrep", "-P", str(pid)]
+ return [int(p) for p in subprocess.check_output(cmd).split()]
+ except Exception:
+ return []
+
+
+def _get_binname(pid):
+ try:
+ return os.path.basename(os.readlink('/proc/{}/exe'.format(pid)))
+ except Exception as e:
+ return "error({})".format(e)
+
+
def _nix_kill_process_tree(pid, target_pid_signal=None):
"""
Kills the process tree.
"""
- yatest_logger.debug("Killing process tree for pid {} (bin:'{}')".format(pid, _get_binname(pid)))
+ yatest_logger.debug("Killing process tree for pid {} (bin:'{}')".format(pid, _get_binname(pid)))
def try_to_send_signal(pid, sig):
try:
os.kill(pid, sig)
- yatest_logger.debug("Sent signal %d to the pid %d", sig, pid)
+ yatest_logger.debug("Sent signal %d to the pid %d", sig, pid)
except Exception as exc:
- yatest_logger.debug("Error while sending signal {sig} to pid {pid}: {error}".format(sig=sig, pid=pid, error=str(exc)))
+ yatest_logger.debug("Error while sending signal {sig} to pid {pid}: {error}".format(sig=sig, pid=pid, error=str(exc)))
try_to_send_signal(pid, signal.SIGSTOP) # Stop the process to prevent it from starting any child processes.
# Get the child process PID list.
- child_pids = _nix_get_proc_children(pid)
- # Stop the child processes.
- for child_pid in child_pids:
- try:
- # Kill the child recursively.
- _kill_process_tree(int(child_pid))
- except Exception as e:
- # Skip the error and continue killing.
- yatest_logger.debug("Killing child pid {pid} failed: {error}".format(pid=child_pid, error=e))
- continue
+ child_pids = _nix_get_proc_children(pid)
+ # Stop the child processes.
+ for child_pid in child_pids:
+ try:
+ # Kill the child recursively.
+ _kill_process_tree(int(child_pid))
+ except Exception as e:
+ # Skip the error and continue killing.
+ yatest_logger.debug("Killing child pid {pid} failed: {error}".format(pid=child_pid, error=e))
+ continue
try_to_send_signal(pid, target_pid_signal or signal.SIGKILL) # Kill the root process.
@@ -689,16 +689,16 @@ def _nix_kill_process_tree(pid, target_pid_signal=None):
def _win_kill_process_tree(pid):
subprocess.call(['taskkill', '/F', '/T', '/PID', str(pid)])
-
-
-def _run_readelf(binary_path):
+
+
+def _run_readelf(binary_path):
return str(subprocess.check_output([runtime.binary_path('contrib/python/pyelftools/readelf/readelf'), '-s', runtime.binary_path(binary_path)]))
-
-
-def check_glibc_version(binary_path):
+
+
+def check_glibc_version(binary_path):
lucid_glibc_version = distutils.version.LooseVersion("2.11")
- for l in _run_readelf(binary_path).split('\n'):
+ for l in _run_readelf(binary_path).split('\n'):
match = GLIBC_PATTERN.search(l)
if not match:
continue
@@ -706,14 +706,14 @@ def check_glibc_version(binary_path):
def backtrace_to_html(bt_filename, output):
- try:
+ try:
from library.python import coredump_filter
- with open(output, "wb") as afile:
+ with open(output, "wb") as afile:
coredump_filter.filter_stackdump(bt_filename, stream=afile)
- except ImportError as e:
- yatest_logger.debug("Failed to import coredump_filter: %s", e)
- with open(output, "wb") as afile:
- afile.write("<html>Failed to import coredump_filter in USE_ARCADIA_PYTHON=no mode</html>")
+ except ImportError as e:
+ yatest_logger.debug("Failed to import coredump_filter: %s", e)
+ with open(output, "wb") as afile:
+ afile.write("<html>Failed to import coredump_filter in USE_ARCADIA_PYTHON=no mode</html>")
def _try_convert_bytes_to_string(source):
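check_glibc_version above walks readelf -s output and compares every GLIBC_x.y symbol version against 2.11 (the lucid baseline). A self-contained sketch of that scan; the regex stands in for GLIBC_PATTERN, which is defined outside this hunk, and the system readelf replaces the bundled pyelftools one:

import re
import subprocess
from distutils.version import LooseVersion

GLIBC_RE = re.compile(r"GLIBC_([0-9]+(?:\.[0-9]+)+)")  # assumed shape of GLIBC_PATTERN

def max_required_glibc(binary_path, readelf="readelf"):
    # Highest GLIBC_x.y version referenced by the binary's dynamic symbols,
    # or None when no versioned glibc symbol is found.
    out = subprocess.check_output([readelf, "-s", binary_path]).decode("utf-8", "replace")
    versions = [LooseVersion(m.group(1)) for m in GLIBC_RE.finditer(out)]
    return max(versions) if versions else None

# A binary is lucid-compatible when max_required_glibc(path) <= LooseVersion("2.11").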
diff --git a/library/python/testing/yatest_common/yatest/common/runtime.py b/library/python/testing/yatest_common/yatest/common/runtime.py
index e55e193446..07a41ebbbc 100644
--- a/library/python/testing/yatest_common/yatest/common/runtime.py
+++ b/library/python/testing/yatest_common/yatest/common/runtime.py
@@ -1,7 +1,7 @@
-import errno
-import functools
+import errno
+import functools
import json
-import os
+import os
import threading
import six
@@ -10,13 +10,13 @@ import six
_lock = threading.Lock()
-def _get_ya_config():
+def _get_ya_config():
try:
import library.python.pytest.plugins.ya as ya_plugin
if ya_plugin.pytest_config is not None:
return ya_plugin.pytest_config
import pytest
- return pytest.config
+ return pytest.config
except (ImportError, AttributeError):
try:
import library.python.testing.recipe
@@ -27,10 +27,10 @@ def _get_ya_config():
raise NotImplementedError("yatest.common.* is only available from the testing runtime")
-def _get_ya_plugin_instance():
- return _get_ya_config().ya
-
-
+def _get_ya_plugin_instance():
+ return _get_ya_config().ya
+
+
def _norm_path(path):
if path is None:
return None
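_get_ya_config above tries the pytest plugin's config first, then pytest.config, then the recipe context, and only raises when none of them is available. A condensed, hypothetical sketch of that fallback chain (the provider callables are placeholders, not the module's real names):

def first_available(*providers):
    # Try each config provider in order; skip the ones whose imports or
    # attributes are missing, and give up with the runtime's own error.
    for provider in providers:
        try:
            value = provider()
        except (ImportError, AttributeError):
            continue
        if value is not None:
            return value
    raise NotImplementedError("yatest.common.* is only available from the testing runtime")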
@@ -46,35 +46,35 @@ def _join_path(main_path, path):
return os.path.join(main_path, _norm_path(path))
-def not_test(func):
- """
- Mark any function as not a test for py.test
- :param func:
- :return:
- """
- @functools.wraps(func)
- def wrapper(*args, **kwds):
- return func(*args, **kwds)
- setattr(wrapper, '__test__', False)
- return wrapper
-
-
-def source_path(path=None):
+def not_test(func):
+ """
+ Mark any function as not a test for py.test
+ :param func:
+ :return:
+ """
+ @functools.wraps(func)
+ def wrapper(*args, **kwds):
+ return func(*args, **kwds)
+ setattr(wrapper, '__test__', False)
+ return wrapper
+
+
+def source_path(path=None):
"""
Get source path inside arcadia
- :param path: path arcadia relative, e.g. yatest.common.source_path('devtools/ya')
+ :param path: path arcadia relative, e.g. yatest.common.source_path('devtools/ya')
:return: absolute path to the source folder
"""
- return _join_path(_get_ya_plugin_instance().source_root, path)
+ return _join_path(_get_ya_plugin_instance().source_root, path)
-def build_path(path=None):
+def build_path(path=None):
"""
Get path inside build directory
- :param path: path relative to the build directory, e.g. yatest.common.build_path('devtools/ya/bin')
+ :param path: path relative to the build directory, e.g. yatest.common.build_path('devtools/ya/bin')
:return: absolute path inside build directory
"""
- return _join_path(_get_ya_plugin_instance().build_root, path)
+ return _join_path(_get_ya_plugin_instance().build_root, path)
def java_path():
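The hunk above restores not_test, source_path and build_path. A usage sketch for the two path helpers; it only works when the test runs under the ya test runtime, and the relative paths are the illustrative ones from the docstrings:

import os
import yatest.common

def test_roots_are_absolute():
    # Both helpers join a normalized relative path onto the plugin-provided roots.
    src = yatest.common.source_path("devtools/ya")
    bld = yatest.common.build_path("devtools/ya/bin")
    assert os.path.isabs(src)
    assert os.path.isabs(bld)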
@@ -82,7 +82,7 @@ def java_path():
[DEPRECATED] Get path to java
:return: absolute path to java
"""
- from . import runtime_java
+ from . import runtime_java
return runtime_java.get_java_path(binary_path(os.path.join('contrib', 'tools', 'jdk')))
@@ -90,7 +90,7 @@ def java_home():
"""
Get jdk directory path
"""
- from . import runtime_java
+ from . import runtime_java
jdk_dir = runtime_java.get_build_java_dir(binary_path('jdk'))
if not jdk_dir:
raise Exception("Cannot find jdk - make sure 'jdk' is added to the DEPENDS section and exists for the current platform")
@@ -100,60 +100,60 @@ def java_home():
def java_bin():
"""
Get path to the java binary
- Requires DEPENDS(jdk)
+ Requires DEPENDS(jdk)
"""
return os.path.join(java_home(), "bin", "java")
-def data_path(path=None):
+def data_path(path=None):
"""
Get path inside arcadia_tests_data directory
- :param path: path relative to the arcadia_tests_data directory, e.g. yatest.common.data_path("pers/rerank_service")
+ :param path: path relative to the arcadia_tests_data directory, e.g. yatest.common.data_path("pers/rerank_service")
:return: absolute path inside arcadia_tests_data
"""
- return _join_path(_get_ya_plugin_instance().data_root, path)
+ return _join_path(_get_ya_plugin_instance().data_root, path)
-def output_path(path=None):
+def output_path(path=None):
"""
Get path inside the current test suite output dir.
Placing files to this dir guarantees that files will be accessible after the test suite execution.
:param path: path relative to the test suite output dir
:return: absolute path inside the test suite output dir
"""
- return _join_path(_get_ya_plugin_instance().output_dir, path)
-
-
-def ram_drive_path(path=None):
- """
- :param path: path relative to the ram drive.
- :return: absolute path inside the ram drive directory or None if no ram drive was provided by environment.
- """
- if 'YA_TEST_RAM_DRIVE_PATH' in os.environ:
- return _join_path(os.environ['YA_TEST_RAM_DRIVE_PATH'], path)
-
-
-def output_ram_drive_path(path=None):
- """
- Returns path inside ram drive directory which will be saved in the testing_out_stuff directory after testing.
- Returns None if no ram drive was provided by environment.
- :param path: path relative to the output ram drive directory
- """
- if 'YA_TEST_OUTPUT_RAM_DRIVE_PATH' in os.environ:
- return _join_path(os.environ['YA_TEST_OUTPUT_RAM_DRIVE_PATH'], path)
-
-
-def binary_path(path=None):
+ return _join_path(_get_ya_plugin_instance().output_dir, path)
+
+
+def ram_drive_path(path=None):
+ """
+ :param path: path relative to the ram drive.
+ :return: absolute path inside the ram drive directory or None if no ram drive was provided by environment.
+ """
+ if 'YA_TEST_RAM_DRIVE_PATH' in os.environ:
+ return _join_path(os.environ['YA_TEST_RAM_DRIVE_PATH'], path)
+
+
+def output_ram_drive_path(path=None):
+ """
+ Returns path inside ram drive directory which will be saved in the testing_out_stuff directory after testing.
+ Returns None if no ram drive was provided by environment.
+ :param path: path relative to the output ram drive directory
+ """
+ if 'YA_TEST_OUTPUT_RAM_DRIVE_PATH' in os.environ:
+ return _join_path(os.environ['YA_TEST_OUTPUT_RAM_DRIVE_PATH'], path)
+
+
+def binary_path(path=None):
"""
Get path to the built binary
- :param path: path to the binary relative to the build directory e.g. yatest.common.binary_path('devtools/ya/bin/ya-bin')
+ :param path: path to the binary relative to the build directory e.g. yatest.common.binary_path('devtools/ya/bin/ya-bin')
:return: absolute path to the binary
"""
- path = _norm_path(path)
- return _get_ya_plugin_instance().get_binary(path)
+ path = _norm_path(path)
+ return _get_ya_plugin_instance().get_binary(path)
-def work_path(path=None):
+def work_path(path=None):
"""
Get path inside the current test suite working directory. Creating files in the work directory does not guarantee
that files will be accessible after the test suite execution
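The same hunk restores data_path, output_path, the two ram-drive helpers and binary_path. A small sketch of how a test might combine them; the "scratch" subdirectory and the tool path are illustrative only, and ram_drive_path() returning None when YA_TEST_RAM_DRIVE_PATH is unset is the behaviour shown above:

import yatest.common

def scratch_dir():
    # Prefer the ram drive when the environment provides one; otherwise keep the
    # files in the suite output directory so they survive the run.
    return yatest.common.ram_drive_path("scratch") or yatest.common.output_path("scratch")

def tool_path():
    # binary_path() expects a path relative to the build root.
    return yatest.common.binary_path("devtools/ya/bin/ya-bin")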
@@ -169,12 +169,12 @@ def work_path(path=None):
def python_path():
"""
- Get path to the arcadia python.
-
- Warn: if you are using build with system python (-DUSE_SYSTEM_PYTHON=X) beware that some python bundles
- are built in a stripped-down form that is needed for building, not running tests.
- See comments in the file below to find out which version of python is compatible with tests.
- https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/resources.inc
+ Get path to the arcadia python.
+
+ Warn: if you are using build with system python (-DUSE_SYSTEM_PYTHON=X) beware that some python bundles
+ are built in a stripped-down form that is needed for building, not running tests.
+ See comments in the file below to find out which version of python is compatible with tests.
+ https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/resources.inc
:return: absolute path to python
"""
return _get_ya_plugin_instance().python_path
@@ -207,18 +207,18 @@ def get_param_dict_copy():
return _get_ya_plugin_instance().get_param_dict_copy()
-@not_test
-def test_output_path(path=None):
+@not_test
+def test_output_path(path=None):
"""
Get dir in the suite output_path for the current test case
"""
test_out_dir = os.path.splitext(_get_ya_config().current_test_log_path)[0]
- try:
+ try:
os.makedirs(test_out_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- return _join_path(test_out_dir, path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ return _join_path(test_out_dir, path)
def project_path(path=None):
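test_output_path above tolerates a pre-existing directory by re-raising every OSError except EEXIST. The same idiom in isolation (on Python 3 alone os.makedirs(path, exist_ok=True) would do, but the errno check keeps the code shared between py2 and py3):

import errno
import os

def ensure_dir(path):
    # Create the directory tree; an already-existing directory is fine,
    # any other failure (permissions, read-only fs, ...) still raises.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    return path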
@@ -228,13 +228,13 @@ def project_path(path=None):
return _join_path(os.path.join(build_path(), context.project_path), path)
-def gdb_path():
- """
- Get path to the gdb
- """
- return _get_ya_plugin_instance().gdb_path
-
-
+def gdb_path():
+ """
+ Get path to the gdb
+ """
+ return _get_ya_plugin_instance().gdb_path
+
+
def c_compiler_path():
"""
     Get path to the C compiler
@@ -261,27 +261,27 @@ def global_resources():
return {}
-def _register_core(name, binary_path, core_path, bt_path, pbt_path):
- config = _get_ya_config()
+def _register_core(name, binary_path, core_path, bt_path, pbt_path):
+ config = _get_ya_config()
with _lock:
- if not hasattr(config, 'test_cores_count'):
- config.test_cores_count = 0
+ if not hasattr(config, 'test_cores_count'):
+ config.test_cores_count = 0
config.test_cores_count += 1
count_str = '' if config.test_cores_count == 1 else str(config.test_cores_count)
- log_entry = config.test_logs[config.current_item_nodeid]
- if binary_path:
- log_entry['{} binary{}'.format(name, count_str)] = binary_path
- if core_path:
- log_entry['{} core{}'.format(name, count_str)] = core_path
- if bt_path:
- log_entry['{} backtrace{}'.format(name, count_str)] = bt_path
- if pbt_path:
- log_entry['{} backtrace html{}'.format(name, count_str)] = pbt_path
-
-
-@not_test
+ log_entry = config.test_logs[config.current_item_nodeid]
+ if binary_path:
+ log_entry['{} binary{}'.format(name, count_str)] = binary_path
+ if core_path:
+ log_entry['{} core{}'.format(name, count_str)] = core_path
+ if bt_path:
+ log_entry['{} backtrace{}'.format(name, count_str)] = bt_path
+ if pbt_path:
+ log_entry['{} backtrace html{}'.format(name, count_str)] = pbt_path
+
+
+@not_test
def test_source_path(path=None):
return _join_path(os.path.join(source_path(), context.project_path), path)
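_register_core above keeps a lock-protected per-session counter so that repeated core dumps get distinct labels in the test's log entry. A condensed sketch of that bookkeeping with hypothetical names (the real function receives the binary, core and backtrace paths as separate arguments):

import threading

_lock = threading.Lock()

def register_artifacts(config, nodeid, name, artifacts):
    # The first core gets plain labels ("<name> core"); later ones get a numeric suffix.
    with _lock:
        config.test_cores_count = getattr(config, "test_cores_count", 0) + 1
        suffix = "" if config.test_cores_count == 1 else str(config.test_cores_count)
    entry = config.test_logs[nodeid]
    for label, path in artifacts.items():
        if path:
            entry["{} {}{}".format(name, label, suffix)] = path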
diff --git a/library/python/testing/yatest_common/yatest/common/runtime_java.py b/library/python/testing/yatest_common/yatest/common/runtime_java.py
index 39bbb45570..5ed678a457 100644
--- a/library/python/testing/yatest_common/yatest/common/runtime_java.py
+++ b/library/python/testing/yatest_common/yatest/common/runtime_java.py
@@ -2,11 +2,11 @@ import os
import tarfile
import contextlib
-from . import runtime
-
-_JAVA_DIR = []
-
+from . import runtime
+_JAVA_DIR = []
+
+
def get_java_path(jdk_dir):
# deprecated - to be deleted
java_paths = (os.path.join(jdk_dir, 'bin', 'java'), os.path.join(jdk_dir, 'bin', 'java.exe'))
@@ -30,17 +30,17 @@ def get_java_path(jdk_dir):
def get_build_java_dir(jdk_dir):
versions = [8, 10, 11, 12, 13, 14, 15]
- if not _JAVA_DIR:
- for version in versions:
- jdk_tar_path = os.path.join(jdk_dir, "jdk{}.tar".format(version))
- if os.path.exists(jdk_tar_path):
- jdk_dir = runtime.build_path('jdk4test')
- with contextlib.closing(tarfile.open(jdk_tar_path)) as tf:
- tf.extractall(jdk_dir)
- assert os.path.exists(os.path.join(jdk_dir, "bin", "java"))
- _JAVA_DIR.append(jdk_dir)
- break
- else:
- _JAVA_DIR.append(None)
-
- return _JAVA_DIR[0]
+ if not _JAVA_DIR:
+ for version in versions:
+ jdk_tar_path = os.path.join(jdk_dir, "jdk{}.tar".format(version))
+ if os.path.exists(jdk_tar_path):
+ jdk_dir = runtime.build_path('jdk4test')
+ with contextlib.closing(tarfile.open(jdk_tar_path)) as tf:
+ tf.extractall(jdk_dir)
+ assert os.path.exists(os.path.join(jdk_dir, "bin", "java"))
+ _JAVA_DIR.append(jdk_dir)
+ break
+ else:
+ _JAVA_DIR.append(None)
+
+ return _JAVA_DIR[0]
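get_build_java_dir above memoizes its answer in the module-level _JAVA_DIR list, so the jdk tarball is located (and extracted) at most once per process, including the negative result. The caching idiom in isolation, with the extraction step left out:

import os

_CACHE = []  # one-slot cache: empty means "not looked up yet"

def find_jdk_tar(jdk_dir, versions=(8, 10, 11, 12, 13, 14, 15)):
    if not _CACHE:
        for version in versions:
            tar_path = os.path.join(jdk_dir, "jdk{}.tar".format(version))
            if os.path.exists(tar_path):
                _CACHE.append(tar_path)  # first matching version wins
                break
        else:
            _CACHE.append(None)          # remember the miss so we never rescan
    return _CACHE[0]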
diff --git a/library/python/testing/yatest_common/yatest/common/ya.make b/library/python/testing/yatest_common/yatest/common/ya.make
index f7c50dfe64..fe4ea86d2d 100644
--- a/library/python/testing/yatest_common/yatest/common/ya.make
+++ b/library/python/testing/yatest_common/yatest/common/ya.make
@@ -1 +1 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
diff --git a/library/python/testing/yatest_lib/external.py b/library/python/testing/yatest_lib/external.py
index 39113230d9..69874dece4 100644
--- a/library/python/testing/yatest_lib/external.py
+++ b/library/python/testing/yatest_lib/external.py
@@ -1,20 +1,20 @@
from __future__ import absolute_import
-import re
+import re
import sys
import copy
-import logging
+import logging
from . import tools
from datetime import date, datetime
-import enum
+import enum
import six
-logger = logging.getLogger(__name__)
-MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
-
+logger = logging.getLogger(__name__)
+MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+
def apply(func, value, apply_to_keys=False):
"""
Applies func to every possible member of value
@@ -67,8 +67,8 @@ def serialize(value):
return val
if isinstance(val, six.string_types) or isinstance(val, bytes):
return tools.to_utf8(val)
- if isinstance(val, enum.Enum):
- return str(val)
+ if isinstance(val, enum.Enum):
+ return str(val)
if isinstance(val, six.integer_types) or type(val) in [float, bool]:
return val
if is_external(val):
@@ -136,19 +136,19 @@ class ExternalDataInfo(object):
@property
def path(self):
- if self.uri.count("://") != 1:
- logger.error("Invalid external data uri: '%s'", self.uri)
- return self.uri
+ if self.uri.count("://") != 1:
+ logger.error("Invalid external data uri: '%s'", self.uri)
+ return self.uri
_, path = self.uri.split("://")
return path
- def get_mds_key(self):
- assert self.is_http
- m = re.match(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)', self.uri)
- if m:
- return m.group(1)
- raise AssertionError("Failed to extract mds key properly from '{}'".format(self.uri))
-
+ def get_mds_key(self):
+ assert self.is_http
+ m = re.match(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)', self.uri)
+ if m:
+ return m.group(1)
+ raise AssertionError("Failed to extract mds key properly from '{}'".format(self.uri))
+
@property
def size(self):
return self._data.get("size")
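get_mds_key above anchors the match at MDS_URI_PREFIX and stops at the first '#' or at the end of the uri. A standalone version of the same extraction, using the prefix restored in this hunk:

import re

MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'

def mds_key(uri):
    # The key is everything between the MDS prefix and an optional '#fragment'.
    m = re.match(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)', uri)
    if not m:
        raise AssertionError("Failed to extract mds key properly from '{}'".format(uri))
    return m.group(1)

# mds_key(MDS_URI_PREFIX + '12345/file.tar.gz#renamed')  ->  '12345/file.tar.gz'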
diff --git a/library/python/testing/yatest_lib/test_splitter.py b/library/python/testing/yatest_lib/test_splitter.py
index acbcd4300e..bc7beba568 100644
--- a/library/python/testing/yatest_lib/test_splitter.py
+++ b/library/python/testing/yatest_lib/test_splitter.py
@@ -1,15 +1,15 @@
-# coding: utf-8
+# coding: utf-8
import collections
-def flatten_tests(test_classes):
+def flatten_tests(test_classes):
"""
>>> test_classes = {x: [x] for x in range(5)}
- >>> flatten_tests(test_classes)
+ >>> flatten_tests(test_classes)
[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
>>> test_classes = {x: [x + 1, x + 2] for x in range(2)}
- >>> flatten_tests(test_classes)
+ >>> flatten_tests(test_classes)
[(0, 1), (0, 2), (1, 2), (1, 3)]
"""
tests = []
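The hunk shows flatten_tests only down to its doctest; a minimal body consistent with those examples would be the following (an assumption, since the real implementation lies outside the diff context):

def flatten_tests(test_classes):
    # {class: [test, ...]} -> sorted list of (class, test) pairs,
    # which is exactly what the doctest above expects.
    tests = []
    for class_name, test_names in test_classes.items():
        tests.extend((class_name, test_name) for test_name in test_names)
    return sorted(tests)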
diff --git a/library/python/testing/yatest_lib/tests/test_external.py b/library/python/testing/yatest_lib/tests/test_external.py
index 18cb560b17..ea5ebf97a3 100644
--- a/library/python/testing/yatest_lib/tests/test_external.py
+++ b/library/python/testing/yatest_lib/tests/test_external.py
@@ -1,20 +1,20 @@
-import enum
-import pytest
-
-from yatest_lib import external
-
-
-class MyEnum(enum.Enum):
- VAL1 = 1
- VAL2 = 2
-
-
-@pytest.mark.parametrize("data, expected_val, expected_type", [
- ({}, {}, dict),
- (MyEnum.VAL1, "MyEnum.VAL1", str),
- ({MyEnum.VAL1: MyEnum.VAL2}, {"MyEnum.VAL1": "MyEnum.VAL2"}, dict),
-])
-def test_serialize(data, expected_val, expected_type):
- data = external.serialize(data)
- assert expected_type == type(data), data
- assert expected_val == data
+import enum
+import pytest
+
+from yatest_lib import external
+
+
+class MyEnum(enum.Enum):
+ VAL1 = 1
+ VAL2 = 2
+
+
+@pytest.mark.parametrize("data, expected_val, expected_type", [
+ ({}, {}, dict),
+ (MyEnum.VAL1, "MyEnum.VAL1", str),
+ ({MyEnum.VAL1: MyEnum.VAL2}, {"MyEnum.VAL1": "MyEnum.VAL2"}, dict),
+])
+def test_serialize(data, expected_val, expected_type):
+ data = external.serialize(data)
+ assert expected_type == type(data), data
+ assert expected_val == data
diff --git a/library/python/testing/yatest_lib/tests/ya.make b/library/python/testing/yatest_lib/tests/ya.make
index 8586c6ef7d..89396b733e 100644
--- a/library/python/testing/yatest_lib/tests/ya.make
+++ b/library/python/testing/yatest_lib/tests/ya.make
@@ -1,13 +1,13 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
-PY23_TEST()
+PY23_TEST()
PEERDIR(
library/python/testing/yatest_lib
)
TEST_SRCS(
- test_external.py
+ test_external.py
test_testsplitter.py
)
diff --git a/library/python/testing/yatest_lib/ya.make b/library/python/testing/yatest_lib/ya.make
index 342bae82ba..1b9d7aa8c2 100644
--- a/library/python/testing/yatest_lib/ya.make
+++ b/library/python/testing/yatest_lib/ya.make
@@ -1,4 +1,4 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
PY23_LIBRARY()
@@ -15,12 +15,12 @@ PEERDIR(
contrib/python/six
)
-IF(PYTHON2)
- PEERDIR(
- contrib/python/enum34
- )
-ENDIF()
-
+IF(PYTHON2)
+ PEERDIR(
+ contrib/python/enum34
+ )
+ENDIF()
+
END()
RECURSE_FOR_TESTS(tests)
diff --git a/library/python/windows/__init__.py b/library/python/windows/__init__.py
index 62861b3309..ab19b95144 100644
--- a/library/python/windows/__init__.py
+++ b/library/python/windows/__init__.py
@@ -1,10 +1,10 @@
-# coding: utf-8
-
+# coding: utf-8
+
import os
import stat
-import sys
+import sys
import shutil
-import logging
+import logging
from six import reraise
@@ -75,9 +75,9 @@ def errorfix(f):
def f_wrapped(*args, **kwargs):
try:
return f(*args, **kwargs)
- except WindowsError:
- tp, value, tb = sys.exc_info()
- fix_error(value)
+ except WindowsError:
+ tp, value, tb = sys.exc_info()
+ fix_error(value)
reraise(tp, value, tb)
return f_wrapped
@@ -105,7 +105,7 @@ if on_win():
_has_ctypes = True
try:
import ctypes
- from ctypes import wintypes
+ from ctypes import wintypes
except ImportError:
_has_ctypes = False
@@ -141,8 +141,8 @@ if on_win():
def run_diehard(f, winerrors, tries, delay, *args, **kwargs):
if isinstance(winerrors, int):
winerrors = (winerrors,)
-
- ei = None
+
+ ei = None
for t in xrange(tries):
if t:
logger.debug('Diehard [errs %s]: try #%d in %s', ','.join(str(x) for x in winerrors), t, f)
@@ -151,7 +151,7 @@ if on_win():
except WindowsError as e:
if e.winerror not in winerrors:
raise
- ei = sys.exc_info()
+ ei = sys.exc_info()
time.sleep(delay)
reraise(ei[0], ei[1], ei[2])
@@ -323,34 +323,34 @@ if on_win():
@require_ctypes
def _high_dword(x):
return ctypes.c_ulong((x >> 32) & ((1 << 32) - 1))
-
- @win_only
- @require_ctypes
- def get_current_process():
- handle = ctypes.windll.kernel32.GetCurrentProcess()
- if not handle:
- raise ctypes.WinError()
- return wintypes.HANDLE(handle)
-
- @win_only
- @require_ctypes
- def get_process_handle_count(proc_handle):
- assert isinstance(proc_handle, wintypes.HANDLE)
-
- GetProcessHandleCount = ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.HANDLE, wintypes.POINTER(wintypes.DWORD))(("GetProcessHandleCount", ctypes.windll.kernel32))
- hndcnt = wintypes.DWORD()
- if not GetProcessHandleCount(proc_handle, ctypes.byref(hndcnt)):
- raise ctypes.WinError()
- return hndcnt.value
-
- @win_only
- @require_ctypes
- def set_handle_information(file, inherit=None, protect_from_close=None):
- for flag, value in [(inherit, 1), (protect_from_close, 2)]:
- if flag is not None:
- assert isinstance(flag, bool)
- if not ctypes.windll.kernel32.SetHandleInformation(file_handle(file), _low_dword(value), _low_dword(int(flag))):
- raise ctypes.WinError()
+
+ @win_only
+ @require_ctypes
+ def get_current_process():
+ handle = ctypes.windll.kernel32.GetCurrentProcess()
+ if not handle:
+ raise ctypes.WinError()
+ return wintypes.HANDLE(handle)
+
+ @win_only
+ @require_ctypes
+ def get_process_handle_count(proc_handle):
+ assert isinstance(proc_handle, wintypes.HANDLE)
+
+ GetProcessHandleCount = ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.HANDLE, wintypes.POINTER(wintypes.DWORD))(("GetProcessHandleCount", ctypes.windll.kernel32))
+ hndcnt = wintypes.DWORD()
+ if not GetProcessHandleCount(proc_handle, ctypes.byref(hndcnt)):
+ raise ctypes.WinError()
+ return hndcnt.value
+
+ @win_only
+ @require_ctypes
+ def set_handle_information(file, inherit=None, protect_from_close=None):
+ for flag, value in [(inherit, 1), (protect_from_close, 2)]:
+ if flag is not None:
+ assert isinstance(flag, bool)
+ if not ctypes.windll.kernel32.SetHandleInformation(file_handle(file), _low_dword(value), _low_dword(int(flag))):
+ raise ctypes.WinError()
@win_only
@require_ctypes
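run_diehard above retries a call while it keeps failing with one of the whitelisted winerror codes and finally re-raises the last error with its original traceback. A platform-neutral sketch of the same retry idiom, keyed on errno instead of winerror (names hypothetical):

import sys
import time

from six import reraise

def retry_on_errnos(func, errnos, tries, delay, *args, **kwargs):
    if isinstance(errnos, int):
        errnos = (errnos,)
    exc_info = None
    for attempt in range(tries):
        if attempt:
            time.sleep(delay)  # back off between attempts, like run_diehard does
        try:
            return func(*args, **kwargs)
        except OSError as e:
            if e.errno not in errnos:
                raise           # unexpected error: fail immediately
            exc_info = sys.exc_info()
    reraise(*exc_info)          # attempts exhausted: re-raise the last whitelisted error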
diff --git a/library/python/ya.make b/library/python/ya.make
index 2e1eb6e0e1..813ca4ef0d 100644
--- a/library/python/ya.make
+++ b/library/python/ya.make
@@ -42,7 +42,7 @@ RECURSE(
cookiemy
coredump_filter
cores
- coverage
+ coverage
cpp_test
cppdemangle
cqueue