// yql/essentials/parser/proto_ast/antlr3/proto_ast_antlr3.h
#pragma once

#include <yql/essentials/parser/proto_ast/common.h>

#include <contrib/libs/antlr3_cpp_runtime/include/antlr3.hpp>

namespace NProtoAST {

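    // Drives an ANTLR3-generated lexer/parser pair over the input text and builds
    // the resulting parse tree as a protobuf message. The optional arena is
    // forwarded to the generated parser.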
    template <typename TParser, typename TLexer>
    class TProtoASTBuilder3 {
        typedef ANTLR_UINT8 TChar;

    public:
        TProtoASTBuilder3(TStringBuf data, const TString& queryName = "query", google::protobuf::Arena* arena = nullptr)
            : QueryName(queryName)
            , InputStream((const TChar*)data.data(), antlr3::ENC_8BIT, data.length(), (TChar*)QueryName.begin())  // ANTLR3 takes a non-const pointer for the stream name, hence the cast
            , Lexer(&InputStream, static_cast<google::protobuf::Arena*>(nullptr))
            , TokenStream(ANTLR_SIZE_HINT, Lexer.get_tokSource())
            , Parser(&TokenStream, arena)
        {
        }

        google::protobuf::Message* BuildAST(IErrorCollector& errors) {
            // TODO: find a better way to break on lexer errors
            try {
                Lexer.ReportErrors(&errors);
                return Parser.Parse(Lexer, &errors);
            } catch (const TTooManyErrors&) {
                return nullptr;
            } catch (...) {
                errors.Error(0, 0, CurrentExceptionMessage());
                return nullptr;
            }
        }

    private:
        TString QueryName;

        typename TLexer::InputStreamType InputStream;
        TLexer Lexer;

        typename TParser::TokenStreamType TokenStream;
        TParser Parser;
    };
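
    // Usage sketch (TGeneratedParser/TGeneratedLexer stand for an ANTLR3-generated
    // parser/lexer pair and `errors` for any IErrorCollector implementation; these
    // names are illustrative, not part of this header):
    //
    //     TProtoASTBuilder3<TGeneratedParser, TGeneratedLexer> builder(query, "query", arena);
    //     if (google::protobuf::Message* ast = builder.BuildAST(errors)) {
    //         // traverse the protobuf parse tree
    //     } else {
    //         // lexer/parser issues have been reported through `errors`
    //     }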

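    // Runs only the ANTLR3-generated lexer and reports every produced token,
    // including the final EOF token, to a caller-supplied callback.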
    template <typename TLexer>
    class TLexerTokensCollector3 {
        typedef ANTLR_UINT8 TChar;

    public:
        TLexerTokensCollector3(TStringBuf data, const char** tokenNames, const TString& queryName = "query")
            : TokenNames(tokenNames)
            , QueryName(queryName)
            , InputStream((const TChar*)data.data(), antlr3::ENC_8BIT, data.length(), (TChar*)QueryName.begin())
            , Lexer(&InputStream, static_cast<google::protobuf::Arena*>(nullptr))
        {
        }

        void CollectTokens(IErrorCollector& errors, const NSQLTranslation::ILexer::TTokenCallback& onNextToken) {
            try {
                Lexer.ReportErrors(&errors);
                auto src = Lexer.get_tokSource();

                for (;;) {
                    auto token = src->nextToken();
                    auto type = token->getType();
                    const bool isEOF = type == TLexer::CommonTokenType::TOKEN_EOF;
                    NSQLTranslation::TParsedToken last;
                    last.Name = isEOF ? "EOF" : TokenNames[type];
                    last.Content = token->getText();
                    last.Line = token->get_line();
                    last.LinePos = token->get_charPositionInLine();

                    onNextToken(std::move(last));
                    if (isEOF) {
                        break;
                    }
                }
            } catch (const TTooManyErrors&) {
            } catch (...) {
                errors.Error(0, 0, CurrentExceptionMessage());
            }
        }

    private:
        const char** TokenNames;
        TString QueryName;
        typename TLexer::InputStreamType InputStream;
        TLexer Lexer;
    };
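
    // Usage sketch (TGeneratedLexer and `tokenNames` come from the ANTLR3-generated
    // lexer; the names are illustrative, not part of this header):
    //
    //     TLexerTokensCollector3<TGeneratedLexer> collector(query, tokenNames);
    //     collector.CollectTokens(errors, [](NSQLTranslation::TParsedToken&& token) {
    //         Cout << token.Name << ": " << token.Content << Endl;
    //     });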
} // namespace NProtoAST