author      tobo <tobo@yandex-team.ru>                     2022-02-10 16:47:27 +0300
committer   Daniil Cherednik <dcherednik@yandex-team.ru>   2022-02-10 16:47:27 +0300
commit      55a7f90e4cd31e9481cace8ee5dfd682c27e810e (patch)
tree        9814fbd1c3effac9b8377c5d604b367b14e2db55 /util/stream/tokenizer_ut.cpp
parent      7fe839092527589b38f014d854c51565b3c1adfa (diff)
download    ydb-55a7f90e4cd31e9481cace8ee5dfd682c27e810e.tar.gz
Restoring authorship annotation for <tobo@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'util/stream/tokenizer_ut.cpp')
-rw-r--r--  util/stream/tokenizer_ut.cpp  18
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/util/stream/tokenizer_ut.cpp b/util/stream/tokenizer_ut.cpp
index 37b30d8e5f..afc566da86 100644
--- a/util/stream/tokenizer_ut.cpp
+++ b/util/stream/tokenizer_ut.cpp
@@ -40,7 +40,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
Y_UNIT_TEST(LastTokenendDoesntSatisfyPredicateTest) {
const char data[] = "abc\ndef\nxxxxxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
+ const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TEol>{&input};
@@ -58,7 +58,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
Y_UNIT_TEST(FirstTokenIsEmptyTest) {
const char data[] = "\ndef\nxxxxxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf(), TStringBuf("def"), TStringBuf("xxxxxx")};
+ const TStringBuf tokens[] = {TStringBuf(), TStringBuf("def"), TStringBuf("xxxxxx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TEol>{&input};
@@ -91,7 +91,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
Y_UNIT_TEST(SimpleTest) {
const char data[] = "qwerty\n1234567890\n";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("qwerty"), TStringBuf("1234567890")};
+ const TStringBuf tokens[] = {TStringBuf("qwerty"), TStringBuf("1234567890")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TEol>{&input};
@@ -115,7 +115,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
const char data[] = "abc|def|xxxxxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
+ const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TIsVerticalBar>{&input};
@@ -139,8 +139,8 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
const char data[] = "abc|def|xxxxxx,abc|def|xxxxxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx"),
- TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
+ const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx"),
+ TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TIsVerticalBar>{&input};
@@ -199,7 +199,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
Y_UNIT_TEST(FirstTokenHasSizeOfTheBufferTest) {
const char data[] = "xxxxx\nxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("xxxxx"), TStringBuf("xx")};
+ const TStringBuf tokens[] = {TStringBuf("xxxxx"), TStringBuf("xx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TEol>{&input, TEol{}, tokens[0].size()};
@@ -231,7 +231,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
Y_UNIT_TEST(BufferSizeInitialSizeSmallerThanTokenTest) {
const char data[] = "xxxxx\nxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("xxxxx"), TStringBuf("xx")};
+ const TStringBuf tokens[] = {TStringBuf("xxxxx"), TStringBuf("xx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TEol>{&input, TEol{}, 1};
@@ -248,7 +248,7 @@ Y_UNIT_TEST_SUITE(TStreamTokenizerTests) {
Y_UNIT_TEST(RangeBasedForTest) {
const char data[] = "abc\ndef\nxxxxxx";
const auto dataSize = Y_ARRAY_SIZE(data) - 1;
- const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
+ const TStringBuf tokens[] = {TStringBuf("abc"), TStringBuf("def"), TStringBuf("xxxxxx")};
const auto tokensSize = Y_ARRAY_SIZE(tokens);
auto&& input = TMemoryInput{data, dataSize};
auto&& tokenizer = TStreamTokenizer<TEol>{&input};
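
For reference, the usage pattern these tests exercise, pulled out into a minimal standalone sketch. It uses only the calls visible in the diff above (TMemoryInput, TStreamTokenizer<TEol>, and the range-based iteration from RangeBasedForTest) and assumes iteration yields TStringBuf values, as the tests' UNIT_ASSERT_VALUES_EQUAL comparisons suggest:

#include <util/generic/array_size.h>
#include <util/generic/strbuf.h>
#include <util/stream/mem.h>
#include <util/stream/output.h>
#include <util/stream/tokenizer.h>

int main() {
    // Same fixture as RangeBasedForTest: three tokens separated by '\n'.
    const char data[] = "abc\ndef\nxxxxxx";
    const auto dataSize = Y_ARRAY_SIZE(data) - 1; // exclude the '\0' terminator

    auto&& input = TMemoryInput{data, dataSize};
    auto&& tokenizer = TStreamTokenizer<TEol>{&input};

    // TEol splits on end-of-line; a custom predicate such as the
    // TIsVerticalBar used in the '|'-delimited tests splits on any
    // character the predicate accepts.
    for (const auto& token : tokenizer) {
        Cout << token << Endl; // prints abc, def, xxxxxx
    }
    return 0;
}

The three-argument constructor seen in FirstTokenHasSizeOfTheBufferTest and BufferSizeInitialSizeSmallerThanTokenTest additionally passes an initial internal buffer size; the latter test starts it at 1 byte, so the tokenizer evidently grows its buffer on demand and a too-small initial size still yields correct tokens.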