| author | nkozlovskiy <nmk@ydb.tech> | 2023-09-29 12:24:06 +0300 |
|---|---|---|
| committer | nkozlovskiy <nmk@ydb.tech> | 2023-09-29 12:41:34 +0300 |
| commit | e0e3e1717e3d33762ce61950504f9637a6e669ed (patch) | |
| tree | bca3ff6939b10ed60c3d5c12439963a1146b9711 /contrib/python/prompt-toolkit/py3/prompt_toolkit/lexers/base.py | |
| parent | 38f2c5852db84c7b4d83adfcb009eb61541d1ccd (diff) | |
| download | ydb-e0e3e1717e3d33762ce61950504f9637a6e669ed.tar.gz | |
add ydb deps
Diffstat (limited to 'contrib/python/prompt-toolkit/py3/prompt_toolkit/lexers/base.py')
| mode | file | lines added |
|---|---|---|
| -rw-r--r-- | contrib/python/prompt-toolkit/py3/prompt_toolkit/lexers/base.py | 84 |

1 file changed, 84 insertions, 0 deletions
```diff
diff --git a/contrib/python/prompt-toolkit/py3/prompt_toolkit/lexers/base.py b/contrib/python/prompt-toolkit/py3/prompt_toolkit/lexers/base.py
new file mode 100644
index 00000000000..3f65f8e7c1d
--- /dev/null
+++ b/contrib/python/prompt-toolkit/py3/prompt_toolkit/lexers/base.py
@@ -0,0 +1,84 @@
+"""
+Base classes for prompt_toolkit lexers.
+"""
+from __future__ import annotations
+
+from abc import ABCMeta, abstractmethod
+from typing import Callable, Hashable
+
+from prompt_toolkit.document import Document
+from prompt_toolkit.formatted_text.base import StyleAndTextTuples
+
+__all__ = [
+    "Lexer",
+    "SimpleLexer",
+    "DynamicLexer",
+]
+
+
+class Lexer(metaclass=ABCMeta):
+    """
+    Base class for all lexers.
+    """
+
+    @abstractmethod
+    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
+        """
+        Takes a :class:`~prompt_toolkit.document.Document` and returns a
+        callable that takes a line number and returns a list of
+        ``(style_str, text)`` tuples for that line.
+
+        XXX: Note that in the past, this was supposed to return a list
+             of ``(Token, text)`` tuples, just like a Pygments lexer.
+        """
+
+    def invalidation_hash(self) -> Hashable:
+        """
+        When this changes, `lex_document` could give a different output.
+        (Only used for `DynamicLexer`.)
+        """
+        return id(self)
+
+
+class SimpleLexer(Lexer):
+    """
+    Lexer that doesn't do any tokenizing and returns the whole input as one
+    token.
+
+    :param style: The style string for this lexer.
+    """
+
+    def __init__(self, style: str = "") -> None:
+        self.style = style
+
+    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
+        lines = document.lines
+
+        def get_line(lineno: int) -> StyleAndTextTuples:
+            "Return the tokens for the given line."
+            try:
+                return [(self.style, lines[lineno])]
+            except IndexError:
+                return []
+
+        return get_line
+
+
+class DynamicLexer(Lexer):
+    """
+    Lexer class that can dynamically returns any Lexer.
+
+    :param get_lexer: Callable that returns a :class:`.Lexer` instance.
+    """
+
+    def __init__(self, get_lexer: Callable[[], Lexer | None]) -> None:
+        self.get_lexer = get_lexer
+        self._dummy = SimpleLexer()
+
+    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
+        lexer = self.get_lexer() or self._dummy
+        return lexer.lex_document(document)
+
+    def invalidation_hash(self) -> Hashable:
+        lexer = self.get_lexer() or self._dummy
+        return id(lexer)
```
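For orientation, here is a minimal usage sketch (not part of the commit) showing how the three classes added above fit together. It assumes the vendored package is importable as `prompt_toolkit`; the class and method names are taken directly from the diff.

```python
from prompt_toolkit.document import Document
from prompt_toolkit.lexers.base import DynamicLexer, SimpleLexer

doc = Document(text="line one\nline two")

# SimpleLexer applies a single style string to every line of the document.
simple = SimpleLexer(style="class:my-text")
get_line = simple.lex_document(doc)
print(get_line(0))  # [('class:my-text', 'line one')]
print(get_line(5))  # []  (out-of-range line numbers yield an empty fragment list)

# DynamicLexer defers lexer selection to a callable; if the callable returns
# None, an unstyled SimpleLexer is used as the fallback.
dynamic = DynamicLexer(lambda: simple)
print(dynamic.lex_document(doc)(1))  # [('class:my-text', 'line two')]

# The invalidation hash is the identity of the currently selected lexer, so
# cached output is refreshed when get_lexer starts returning a different object.
print(dynamic.invalidation_hash() == id(simple))  # True
```

Note that `lex_document` returns a per-line callable rather than styling the whole document up front, which lets callers style only the lines that are actually visible.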