author     aneporada <aneporada@yandex-team.ru>  2022-04-19 18:30:57 +0300
committer  aneporada <aneporada@yandex-team.ru>  2022-04-19 18:30:57 +0300
commit     eb3f8b2c051dfb1532102448c491e0a0015956f8 (patch)
tree       59bfaebfaf1510ba184a63658bfdae6a77e7f583
parent     f5742bbe057a315ae890d66ee7f50ab5f3af84f6 (diff)
download   ydb-eb3f8b2c051dfb1532102448c491e0a0015956f8.tar.gz

[YQL-14290] [refactoring] Check lexer ptr for nulls, improve ILexer::Tokenize()

ref: 1652120fa70339fce5ecbdaafafe7cc226ce5744
-rw-r--r--  ydb/library/yql/parser/lexer_common/lexer.h     2
-rw-r--r--  ydb/library/yql/parser/lexer_common/tokens.cpp  4
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/ydb/library/yql/parser/lexer_common/lexer.h b/ydb/library/yql/parser/lexer_common/lexer.h
index bc59c37e56c..560ea204829 100644
--- a/ydb/library/yql/parser/lexer_common/lexer.h
+++ b/ydb/library/yql/parser/lexer_common/lexer.h
@@ -35,7 +35,7 @@ public:
 using TParsedTokenList = TVector<TParsedToken>;
 
 IOutputStream& OutputTokens(IOutputStream& out, TParsedTokenList::const_iterator begin, TParsedTokenList::const_iterator end);
 
-bool Tokenize(const ILexer::TPtr& lexer, const TString& query, const TString& queryName, TParsedTokenList& tokens, NYql::TIssues& issues, size_t maxErrors);
+bool Tokenize(ILexer& lexer, const TString& query, const TString& queryName, TParsedTokenList& tokens, NYql::TIssues& issues, size_t maxErrors);
 
 }
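
The header change moves null-handling out of Tokenize(): the helper now takes an ILexer& and can assume a live lexer, so a call site holding an ILexer::TPtr validates the pointer once before dereferencing. A minimal sketch of such a call site, assuming the NSQLTranslation namespace from this header and Arcadia's Y_ENSURE macro (the wrapper name TokenizeChecked and the maxErrors value are illustrative, not part of this commit):

#include <ydb/library/yql/parser/lexer_common/lexer.h>
#include <util/generic/yexception.h>

using namespace NSQLTranslation;

// Hypothetical wrapper: validate the smart pointer once at the boundary,
// then hand a guaranteed-non-null reference to Tokenize().
bool TokenizeChecked(const ILexer::TPtr& lexer, const TString& query,
                     TParsedTokenList& tokens, NYql::TIssues& issues) {
    Y_ENSURE(lexer, "null lexer passed to TokenizeChecked");
    return Tokenize(*lexer, query, "Query", tokens, issues, /* maxErrors = */ 10);
}
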
diff --git a/ydb/library/yql/parser/lexer_common/tokens.cpp b/ydb/library/yql/parser/lexer_common/tokens.cpp
index db3553b2abc..b37b0139ef7 100644
--- a/ydb/library/yql/parser/lexer_common/tokens.cpp
+++ b/ydb/library/yql/parser/lexer_common/tokens.cpp
@@ -10,12 +10,12 @@ IOutputStream& OutputTokens(IOutputStream& out, TParsedTokenList::const_iterator
     return out;
 }
 
-bool Tokenize(const ILexer::TPtr& lexer, const TString& query, const TString& queryName, TParsedTokenList& tokens, NYql::TIssues& issues, size_t maxErrors) {
+bool Tokenize(ILexer& lexer, const TString& query, const TString& queryName, TParsedTokenList& tokens, NYql::TIssues& issues, size_t maxErrors) {
     auto onNextToken = [&tokens](TParsedToken&& token) {
         tokens.push_back(std::move(token));
     };
 
-    return lexer->Tokenize(query, queryName, onNextToken, issues, maxErrors);
+    return lexer.Tokenize(query, queryName, onNextToken, issues, maxErrors);
 }
 
 }
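
The behavior of the adapter itself is unchanged: it still collects tokens into a TParsedTokenList through the onNextToken lambda and forwards the lexer's result. A hedged usage sketch that pairs it with the OutputTokens() helper declared in the same header (the query text and maxErrors value are arbitrary; the lexer is assumed to be constructed elsewhere and already checked for null, per the commit's intent):

#include <ydb/library/yql/parser/lexer_common/lexer.h>
#include <util/stream/output.h>

using namespace NSQLTranslation;

void DumpTokens(ILexer& lexer) {
    TParsedTokenList tokens;
    NYql::TIssues issues;
    // Collect all tokens; on success, print them with the helper from lexer.h.
    if (Tokenize(lexer, "SELECT 1;", "Query", tokens, issues, /* maxErrors = */ 1)) {
        OutputTokens(Cout, tokens.begin(), tokens.end());
    } else {
        // Tokenization failed; issues holds the collected lexer errors.
        Cerr << issues.ToString();
    }
}
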