1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
|
#pragma once
#include <yql/essentials/parser/proto_ast/common.h>
#include <contrib/libs/antlr3_cpp_runtime/include/antlr3.hpp>
namespace NProtoAST {
using namespace NAST;
template <typename TParser, typename TLexer>
class TProtoASTBuilder3 {
    using TChar = ANTLR_UINT8;

public:
    // Wires up the full antlr3 pipeline over `data`:
    // raw bytes -> input stream -> lexer -> token stream -> parser.
    // `queryName` is used by antlr as the stream name in diagnostics.
    // The optional protobuf `arena` is forwarded to the generated parser so the
    // resulting AST messages can be arena-allocated.
    TProtoASTBuilder3(TStringBuf data, const TString& queryName = "query", google::protobuf::Arena* arena = nullptr)
        : QueryName_(queryName)
        // antlr3 takes the stream name as a non-const pointer even though it does
        // not modify it, hence the cast via the owned copy in QueryName_.
        , InputStream_((const TChar*)data.data(), antlr3::ENC_8BIT, data.length(), (TChar*)QueryName_.begin())
        , Lexer_(&InputStream_, static_cast<google::protobuf::Arena*>(nullptr))
        , TokenStream_(ANTLR_SIZE_HINT, Lexer_.get_tokSource())
        , Parser_(&TokenStream_, arena)
    {
    }

    // Runs the parse and returns the protobuf AST, or nullptr on failure.
    // All lexer/parser diagnostics are routed into `errors`; hitting the error
    // limit (TTooManyErrors) aborts quietly, any other exception is reported
    // as a position-less error.
    google::protobuf::Message* BuildAST(IErrorCollector& errors) {
        // TODO: find a better way to break on lexer errors
        try {
            Lexer_.ReportErrors(&errors);
            return Parser_.Parse(Lexer_, &errors);
        } catch (const TTooManyErrors&) {
            return nullptr;
        } catch (...) {
            errors.Error(0, 0, CurrentExceptionMessage());
            return nullptr;
        }
    }

private:
    // NOTE: declaration order is also construction order — the input stream must
    // outlive the lexer, the lexer the token stream, and so on. Do not reorder.
    TString QueryName_;
    typename TLexer::InputStreamType InputStream_;
    TLexer Lexer_;
    typename TParser::TokenStreamType TokenStream_;
    TParser Parser_;
};
template <typename TLexer>
class TLexerTokensCollector3 {
    using TChar = ANTLR_UINT8;

public:
    // Prepares a standalone lexer over `data`. `tokenNames` maps antlr token
    // type ids to human-readable names and must outlive this collector;
    // `queryName` is the stream name antlr shows in diagnostics.
    TLexerTokensCollector3(TStringBuf data, const char** tokenNames, const TString& queryName = "query")
        : TokenNames_(tokenNames)
        , QueryName_(queryName)
        // antlr3 wants a mutable name pointer, hence the cast via our owned copy.
        , InputStream_((const TChar*)data.data(), antlr3::ENC_8BIT, data.length(), (TChar*)QueryName_.begin())
        , Lexer_(&InputStream_, static_cast<google::protobuf::Arena*>(nullptr))
    {
    }

    // Streams every token (including the final EOF token, reported under the
    // name "EOF") into `onNextToken`. Lexer diagnostics go to `errors`;
    // TTooManyErrors stops the scan quietly, any other exception is reported
    // as a position-less error.
    void CollectTokens(IErrorCollector& errors, const NSQLTranslation::ILexer::TTokenCallback& onNextToken) {
        try {
            Lexer_.ReportErrors(&errors);
            auto tokenSource = Lexer_.get_tokSource();
            bool reachedEof = false;
            while (!reachedEof) {
                auto tok = tokenSource->nextToken();
                const auto tokType = tok->getType();
                reachedEof = (tokType == TLexer::CommonTokenType::TOKEN_EOF);

                NSQLTranslation::TParsedToken parsed;
                parsed.Name = reachedEof ? "EOF" : TokenNames_[tokType];
                parsed.Content = tok->getText();
                parsed.Line = tok->get_line();
                parsed.LinePos = tok->get_charPositionInLine();
                onNextToken(std::move(parsed));
            }
        } catch (const TTooManyErrors&) {
            // Error limit hit: everything up to the limit is already in `errors`.
        } catch (...) {
            errors.Error(0, 0, CurrentExceptionMessage());
        }
    }

private:
    // Declaration order is construction order: the input stream must be built
    // before (and outlive) the lexer.
    const char** TokenNames_;
    TString QueryName_;
    typename TLexer::InputStreamType InputStream_;
    TLexer Lexer_;
};
} // namespace NProtoAST
|