path: root/contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py
author     Nikita Slyusarev <nslus@yandex-team.com>    2022-02-10 16:46:53 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>    2022-02-10 16:46:53 +0300
commit     469afdc4e2587bf62ecdd096b75a0baa444c4012 (patch)
tree       49e222ea1c5804306084bb3ae065bb702625360f /contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py
parent     cd77cecfc03a3eaf87816af28a33067c4f0cdb59 (diff)
download   ydb-469afdc4e2587bf62ecdd096b75a0baa444c4012.tar.gz
Restoring authorship annotation for Nikita Slyusarev <nslus@yandex-team.com>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py')
-rw-r--r--  contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py  118
1 file changed, 59 insertions(+), 59 deletions(-)
diff --git a/contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py b/contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py
index 3fa16de535..a4fb7ed0f5 100644
--- a/contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py
+++ b/contrib/python/prompt-toolkit/py2/prompt_toolkit/layout/utils.py
@@ -1,14 +1,14 @@
from __future__ import unicode_literals
from prompt_toolkit.utils import get_cwidth
-from prompt_toolkit.token import Token
+from prompt_toolkit.token import Token
__all__ = (
'token_list_len',
'token_list_width',
'token_list_to_text',
'explode_tokens',
- 'split_lines',
+ 'split_lines',
'find_window_for_buffer_name',
)
@@ -20,8 +20,8 @@ def token_list_len(tokenlist):
:param tokenlist: List of (token, text) or (token, text, mouse_handler)
tuples.
"""
- ZeroWidthEscape = Token.ZeroWidthEscape
- return sum(len(item[1]) for item in tokenlist if item[0] != ZeroWidthEscape)
+ ZeroWidthEscape = Token.ZeroWidthEscape
+ return sum(len(item[1]) for item in tokenlist if item[0] != ZeroWidthEscape)
def token_list_width(tokenlist):
@@ -32,16 +32,16 @@ def token_list_width(tokenlist):
:param tokenlist: List of (token, text) or (token, text, mouse_handler)
tuples.
"""
- ZeroWidthEscape = Token.ZeroWidthEscape
- return sum(get_cwidth(c) for item in tokenlist for c in item[1] if item[0] != ZeroWidthEscape)
+ ZeroWidthEscape = Token.ZeroWidthEscape
+ return sum(get_cwidth(c) for item in tokenlist for c in item[1] if item[0] != ZeroWidthEscape)
def token_list_to_text(tokenlist):
"""
Concatenate all the text parts again.
"""
- ZeroWidthEscape = Token.ZeroWidthEscape
- return ''.join(item[1] for item in tokenlist if item[0] != ZeroWidthEscape)
+ ZeroWidthEscape = Token.ZeroWidthEscape
+ return ''.join(item[1] for item in tokenlist if item[0] != ZeroWidthEscape)
def iter_token_lines(tokenlist):
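A minimal usage sketch of the three helpers touched above (not part of this commit), based on the docstrings shown and the prompt_toolkit 1.x API; the token values other than Token.ZeroWidthEscape are illustrative:

    # Token.ZeroWidthEscape entries are skipped when measuring or
    # reconstructing text.
    from prompt_toolkit.token import Token
    from prompt_toolkit.layout.utils import (
        token_list_len, token_list_width, token_list_to_text)

    tokens = [
        (Token.Keyword, 'print'),
        (Token.Text, ' '),
        (Token.ZeroWidthEscape, '\x1b]0;title\x07'),  # not counted
        (Token.String, 'hi'),
    ]

    print(token_list_len(tokens))      # 8  -> 'print' + ' ' + 'hi'
    print(token_list_width(tokens))    # 8  (all characters here are single-width)
    print(token_list_to_text(tokens))  # 'print hi'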
@@ -62,7 +62,7 @@ def iter_token_lines(tokenlist):
def split_lines(tokenlist):
"""
Take a single list of (Token, text) tuples and yield one such list for each
- line. Just like str.split, this will yield at least one item.
+ line. Just like str.split, this will yield at least one item.
:param tokenlist: List of (token, text) or (token, text, mouse_handler)
tuples.
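A short sketch of split_lines behaviour (again illustrative, not from this commit), following the docstring above and the code in the hunks below; a trailing '\n' yields one extra line, which may contain an empty-string part:

    from prompt_toolkit.token import Token
    from prompt_toolkit.layout.utils import split_lines

    tokens = [(Token.Text, 'first\nsecond\n')]

    for line in split_lines(tokens):
        print(line)
    # [(Token.Text, 'first')]
    # [(Token.Text, 'second')]
    # [(Token.Text, '')]   <- extra line because the input ends with '\n'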
@@ -82,8 +82,8 @@ def split_lines(tokenlist):
line = []
line.append((token, parts[-1]))
- # Note that parts[-1] can be empty, and that's fine. It happens
- # in the case of [(Token.SetCursorPosition, '')].
+ # Note that parts[-1] can be empty, and that's fine. It happens
+ # in the case of [(Token.SetCursorPosition, '')].
# For (token, text, mouse_handler) tuples.
# I know, partly copy/paste, but understandable and more efficient
@@ -100,67 +100,67 @@ def split_lines(tokenlist):
line.append((token, parts[-1], mouse_handler))
- # Always yield the last line, even when this is an empty line. This ensures
- # that when `tokenlist` ends with a newline character, an additional empty
- # line is yielded. (Otherwise, there's no way to differentiate between the
- # cases where `tokenlist` does and doesn't end with a newline.)
- yield line
-
-
-class _ExplodedList(list):
- """
- Wrapper around a list, that marks it as 'exploded'.
-
- As soon as items are added or the list is extended, the new items are
- automatically exploded as well.
- """
- def __init__(self, *a, **kw):
- super(_ExplodedList, self).__init__(*a, **kw)
- self.exploded = True
-
- def append(self, item):
- self.extend([item])
-
- def extend(self, lst):
- super(_ExplodedList, self).extend(explode_tokens(lst))
-
- def insert(self, index, item):
- raise NotImplementedError # TODO
-
- # TODO: When creating a copy() or [:], return also an _ExplodedList.
-
- def __setitem__(self, index, value):
- """
- Ensure that when `(Token, 'long string')` is set, the string will be
- exploded.
- """
- if not isinstance(index, slice):
- index = slice(index, index + 1)
- value = explode_tokens([value])
- super(_ExplodedList, self).__setitem__(index, value)
-
-
+ # Always yield the last line, even when this is an empty line. This ensures
+ # that when `tokenlist` ends with a newline character, an additional empty
+ # line is yielded. (Otherwise, there's no way to differentiate between the
+ # cases where `tokenlist` does and doesn't end with a newline.)
+ yield line
+
+
+class _ExplodedList(list):
+ """
+ Wrapper around a list, that marks it as 'exploded'.
+
+ As soon as items are added or the list is extended, the new items are
+ automatically exploded as well.
+ """
+ def __init__(self, *a, **kw):
+ super(_ExplodedList, self).__init__(*a, **kw)
+ self.exploded = True
+
+ def append(self, item):
+ self.extend([item])
+
+ def extend(self, lst):
+ super(_ExplodedList, self).extend(explode_tokens(lst))
+
+ def insert(self, index, item):
+ raise NotImplementedError # TODO
+
+ # TODO: When creating a copy() or [:], return also an _ExplodedList.
+
+ def __setitem__(self, index, value):
+ """
+ Ensure that when `(Token, 'long string')` is set, the string will be
+ exploded.
+ """
+ if not isinstance(index, slice):
+ index = slice(index, index + 1)
+ value = explode_tokens([value])
+ super(_ExplodedList, self).__setitem__(index, value)
+
+
def explode_tokens(tokenlist):
"""
Turn a list of (token, text) tuples into another list where each string is
exactly one character.
- It should be fine to call this function several times. Calling this on a
- list that is already exploded, is a null operation.
-
+ It should be fine to call this function several times. Calling this on a
+ list that is already exploded, is a null operation.
+
:param tokenlist: List of (token, text) tuples.
"""
- # When the tokenlist is already exploded, don't explode again.
- if getattr(tokenlist, 'exploded', False):
- return tokenlist
-
+ # When the tokenlist is already exploded, don't explode again.
+ if getattr(tokenlist, 'exploded', False):
+ return tokenlist
+
result = []
for token, string in tokenlist:
for c in string:
result.append((token, c))
- return _ExplodedList(result)
+ return _ExplodedList(result)
def find_window_for_buffer_name(cli, buffer_name):
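An illustrative sketch of explode_tokens and the _ExplodedList wrapper shown above (assumes the prompt_toolkit 1.x module path in this diff): every multi-character string becomes one tuple per character, a second call is a null operation, and items appended later are exploded as well.

    from prompt_toolkit.token import Token
    from prompt_toolkit.layout.utils import explode_tokens

    exploded = explode_tokens([(Token.Text, 'abc')])
    print(exploded)           # [(Token.Text, 'a'), (Token.Text, 'b'), (Token.Text, 'c')]
    print(exploded.exploded)  # True

    # Calling it again on an already-exploded list returns the same object.
    print(explode_tokens(exploded) is exploded)   # True

    # Appending a multi-character item explodes it automatically.
    exploded.append((Token.Text, 'de'))
    print(exploded[-2:])      # [(Token.Text, 'd'), (Token.Text, 'e')]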