author     robot-piglet <robot-piglet@yandex-team.com>   2024-06-25 17:16:00 +0300
committer  robot-piglet <robot-piglet@yandex-team.com>   2024-06-25 17:25:06 +0300
commit     da76f0a36e74126cec432deca81e964d16c2520e
tree       18e1d8ce9f53a73a6a74945844a8d6a17d3077f2
parent     d9e4fe8aeca226856d2780ca903622285610658e
download   ydb-da76f0a36e74126cec432deca81e964d16c2520e.tar.gz
Intermediate changes
16 files changed, 600 insertions, 213 deletions
diff --git a/contrib/python/more-itertools/py3/.dist-info/METADATA b/contrib/python/more-itertools/py3/.dist-info/METADATA index f54f1ff279..fb41b0cfe6 100644 --- a/contrib/python/more-itertools/py3/.dist-info/METADATA +++ b/contrib/python/more-itertools/py3/.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: more-itertools -Version: 10.2.0 +Version: 10.3.0 Summary: More routines for operating on iterables, beyond itertools Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked Author-email: Erik Rose <erikrose@grinchcentral.com> @@ -87,8 +87,6 @@ Python iterables. | | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, | | | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, | | | `zip_broadcast <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_broadcast>`_, | -| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, | -| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, | | | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, | | | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, | | | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, | @@ -101,6 +99,7 @@ Python iterables. | | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, | | | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, | | | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, | +| | `join_mappings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.join_mappings>`_, | | | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, | | | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, | | | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, | @@ -131,12 +130,26 @@ Python iterables. 
| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, | | | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_, | | | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_, | +| | `unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique>`_, | | | `duplicates_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_everseen>`_, | | | `duplicates_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_justseen>`_, | | | `classify_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.classify_unique>`_, | | | `longest_common_prefix <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.longest_common_prefix>`_, | | | `takewhile_inclusive <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.takewhile_inclusive>`_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Math | `dft <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dft>`_, | +| | `idft <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.idft>`_, | +| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, | +| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, | +| | `factor <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.factor>`_, | +| | `matmul <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.matmul>`_, | +| | `polynomial_from_roots <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_from_roots>`_, | +| | `polynomial_derivative <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_derivative>`_, | +| | `polynomial_eval <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_eval>`_, | +| | `sieve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sieve>`_, | +| | `sum_of_squares <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sum_of_squares>`_, | +| | `totient <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.totient>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, | | | `distinct_combinations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, | | | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, | @@ -149,6 +162,7 @@ Python iterables. 
| | `gray_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.gray_product>`_, | | | `outer_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.outer_product>`_, | | | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, | +| | `powerset_of_sets <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset_of_sets>`_, | | | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, | | | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, | | | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, | @@ -180,15 +194,8 @@ Python iterables. | | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, | | | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, | | | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_, | -| | `polynomial_from_roots <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_from_roots>`_, | -| | `polynomial_eval <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_eval>`_, | -| | `polynomial_derivative <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_derivative>`_, | -| | `sieve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sieve>`_, | -| | `factor <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.factor>`_, | -| | `matmul <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.matmul>`_, | -| | `sum_of_squares <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sum_of_squares>`_, | -| | `totient <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.totient>`_, | | | `reshape <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.reshape>`_ | +| | `doublestarmap <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.doublestarmap>`_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/contrib/python/more-itertools/py3/README.rst b/contrib/python/more-itertools/py3/README.rst index 0786bf3f3e..9f3e9c671f 100644 --- a/contrib/python/more-itertools/py3/README.rst +++ b/contrib/python/more-itertools/py3/README.rst @@ -63,8 +63,6 @@ Python iterables. 
| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, | | | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, | | | `zip_broadcast <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_broadcast>`_, | -| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, | -| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, | | | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, | | | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, | | | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, | @@ -77,6 +75,7 @@ Python iterables. | | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, | | | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, | | | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, | +| | `join_mappings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.join_mappings>`_, | | | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, | | | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, | | | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, | @@ -107,12 +106,26 @@ Python iterables. | | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, | | | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_, | | | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_, | +| | `unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique>`_, | | | `duplicates_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_everseen>`_, | | | `duplicates_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_justseen>`_, | | | `classify_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.classify_unique>`_, | | | `longest_common_prefix <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.longest_common_prefix>`_, | | | `takewhile_inclusive <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.takewhile_inclusive>`_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Math | `dft <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dft>`_, | +| | `idft <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.idft>`_, | +| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, | +| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, | +| | `factor <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.factor>`_, | +| | `matmul <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.matmul>`_, | +| | `polynomial_from_roots 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_from_roots>`_, | +| | `polynomial_derivative <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_derivative>`_, | +| | `polynomial_eval <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_eval>`_, | +| | `sieve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sieve>`_, | +| | `sum_of_squares <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sum_of_squares>`_, | +| | `totient <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.totient>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, | | | `distinct_combinations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, | | | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, | @@ -125,6 +138,7 @@ Python iterables. | | `gray_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.gray_product>`_, | | | `outer_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.outer_product>`_, | | | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, | +| | `powerset_of_sets <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset_of_sets>`_, | | | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, | | | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, | | | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, | @@ -156,15 +170,8 @@ Python iterables. 
| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, | | | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, | | | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_, | -| | `polynomial_from_roots <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_from_roots>`_, | -| | `polynomial_eval <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_eval>`_, | -| | `polynomial_derivative <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.polynomial_derivative>`_, | -| | `sieve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sieve>`_, | -| | `factor <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.factor>`_, | -| | `matmul <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.matmul>`_, | -| | `sum_of_squares <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sum_of_squares>`_, | -| | `totient <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.totient>`_, | | | `reshape <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.reshape>`_ | +| | `doublestarmap <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.doublestarmap>`_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/contrib/python/more-itertools/py3/more_itertools/__init__.py b/contrib/python/more-itertools/py3/more_itertools/__init__.py index aff94a9abd..9c4662fc31 100644 --- a/contrib/python/more-itertools/py3/more_itertools/__init__.py +++ b/contrib/python/more-itertools/py3/more_itertools/__init__.py @@ -3,4 +3,4 @@ from .more import * # noqa from .recipes import * # noqa -__version__ = '10.2.0' +__version__ = '10.3.0' diff --git a/contrib/python/more-itertools/py3/more_itertools/more.py b/contrib/python/more-itertools/py3/more_itertools/more.py index dd711a4763..7b481907da 100644 --- a/contrib/python/more-itertools/py3/more_itertools/more.py +++ b/contrib/python/more-itertools/py3/more_itertools/more.py @@ -1,3 +1,4 @@ +import math import warnings from collections import Counter, defaultdict, deque, abc @@ -6,6 +7,7 @@ from functools import cached_property, partial, reduce, wraps from heapq import heapify, heapreplace, heappop from itertools import ( chain, + combinations, compress, count, cycle, @@ -19,7 +21,7 @@ from itertools import ( zip_longest, product, ) -from math import exp, factorial, floor, log, perm, comb +from math import comb, e, exp, factorial, floor, fsum, log, perm, tau from queue import Empty, Queue from random import random, randrange, uniform from operator import itemgetter, mul, sub, gt, lt, ge, le @@ -61,11 +63,13 @@ __all__ = [ 'consumer', 'count_cycle', 'countable', + 'dft', 'difference', 'distinct_combinations', 'distinct_permutations', 'distribute', 'divide', + 'doublestarmap', 'duplicates_everseen', 'duplicates_justseen', 'classify_unique', @@ -77,6 +81,7 @@ __all__ = [ 'groupby_transform', 'ichunked', 'iequals', + 'idft', 'ilen', 'interleave', 'interleave_evenly', @@ -86,6 +91,7 @@ __all__ = [ 'islice_extended', 'iterate', 'iter_suppress', + 'join_mappings', 'last', 'locate', 'longest_common_prefix', @@ -109,6 +115,7 @@ __all__ = [ 'partitions', 'peekable', 'permutation_index', + 
'powerset_of_sets', 'product_index', 'raise_', 'repeat_each', @@ -148,6 +155,9 @@ __all__ = [ 'zip_offset', ] +# math.sumprod is available for Python 3.12+ +_fsumprod = getattr(math, 'sumprod', lambda x, y: fsum(map(mul, x, y))) + def chunked(iterable, n, strict=False): """Break *iterable* into lists of length *n*: @@ -550,10 +560,10 @@ def one(iterable, too_short=None, too_long=None): try: first_value = next(it) - except StopIteration as e: + except StopIteration as exc: raise ( too_short or ValueError('too few items in iterable (expected 1)') - ) from e + ) from exc try: second_value = next(it) @@ -840,26 +850,31 @@ def windowed(seq, n, fillvalue=None, step=1): if n < 0: raise ValueError('n must be >= 0') if n == 0: - yield tuple() + yield () return if step < 1: raise ValueError('step must be >= 1') - window = deque(maxlen=n) - i = n - for _ in map(window.append, seq): - i -= 1 - if not i: - i = step - yield tuple(window) - - size = len(window) - if size == 0: + iterable = iter(seq) + + # Generate first window + window = deque(islice(iterable, n), maxlen=n) + + # Deal with the first window not being full + if not window: + return + if len(window) < n: + yield tuple(window) + ((fillvalue,) * (n - len(window))) return - elif size < n: - yield tuple(chain(window, repeat(fillvalue, n - size))) - elif 0 < i < min(step, n): - window += (fillvalue,) * i + yield tuple(window) + + # Create the filler for the next windows. The padding ensures + # we have just enough elements to fill the last window. + padding = (fillvalue,) * (n - 1 if step >= n else step - 1) + filler = map(window.append, chain(iterable, padding)) + + # Generate the rest of the windows + for _ in islice(filler, step - 1, None, step): yield tuple(window) @@ -1151,8 +1166,8 @@ def interleave_evenly(iterables, lengths=None): # those iterables for which the error is negative are yielded # ("diagonal step" in Bresenham) - for i, e in enumerate(errors): - if e < 0: + for i, e_ in enumerate(errors): + if e_ < 0: yield next(iters_secondary[i]) to_yield -= 1 errors[i] += delta_primary @@ -1184,26 +1199,38 @@ def collapse(iterable, base_type=None, levels=None): ['a', ['b'], 'c', ['d']] """ + stack = deque() + # Add our first node group, treat the iterable as a single node + stack.appendleft((0, repeat(iterable, 1))) - def walk(node, level): - if ( - ((levels is not None) and (level > levels)) - or isinstance(node, (str, bytes)) - or ((base_type is not None) and isinstance(node, base_type)) - ): - yield node - return + while stack: + node_group = stack.popleft() + level, nodes = node_group - try: - tree = iter(node) - except TypeError: - yield node - return - else: - for child in tree: - yield from walk(child, level + 1) + # Check if beyond max level + if levels is not None and level > levels: + yield from nodes + continue - yield from walk(iterable, 0) + for node in nodes: + # Check if done iterating + if isinstance(node, (str, bytes)) or ( + (base_type is not None) and isinstance(node, base_type) + ): + yield node + # Otherwise try to create child nodes + else: + try: + tree = iter(node) + except TypeError: + yield node + else: + # Save our current location + stack.appendleft(node_group) + # Append the new child node + stack.appendleft((level + 1, tree)) + # Break to process child node + break def side_effect(func, iterable, chunk_size=None, before=None, after=None): @@ -1516,28 +1543,41 @@ def padded(iterable, fillvalue=None, n=None, next_multiple=False): [1, 2, 3, '?', '?'] If *next_multiple* is ``True``, *fillvalue* will be emitted 
until the - number of items emitted is a multiple of *n*:: + number of items emitted is a multiple of *n*: >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True)) [1, 2, 3, 4, None, None] If *n* is ``None``, *fillvalue* will be emitted indefinitely. + To create an *iterable* of exactly size *n*, you can truncate with + :func:`islice`. + + >>> list(islice(padded([1, 2, 3], '?'), 5)) + [1, 2, 3, '?', '?'] + >>> list(islice(padded([1, 2, 3, 4, 5, 6, 7, 8], '?'), 5)) + [1, 2, 3, 4, 5] + """ - it = iter(iterable) + iterable = iter(iterable) + iterable_with_repeat = chain(iterable, repeat(fillvalue)) + if n is None: - yield from chain(it, repeat(fillvalue)) + return iterable_with_repeat elif n < 1: raise ValueError('n must be at least 1') - else: - item_count = 0 - for item in it: - yield item - item_count += 1 + elif next_multiple: - remaining = (n - item_count) % n if next_multiple else n - item_count - for _ in range(remaining): - yield fillvalue + def slice_generator(): + for first in iterable: + yield (first,) + yield islice(iterable_with_repeat, n - 1) + + # While elements exist produce slices of size n + return chain.from_iterable(slice_generator()) + else: + # Ensure the first batch is at least size n then iterate + return chain(islice(iterable_with_repeat, n), iterable) def repeat_each(iterable, n=2): @@ -1592,7 +1632,9 @@ def distribute(n, iterable): [[1], [2], [3], [], []] This function uses :func:`itertools.tee` and may require significant - storage. If you need the order items in the smaller iterables to match the + storage. + + If you need the order items in the smaller iterables to match the original iterable, see :func:`divide`. """ @@ -1840,9 +1882,9 @@ def divide(n, iterable): >>> [list(c) for c in children] [[1], [2], [3], [], []] - This function will exhaust the iterable before returning and may require - significant storage. If order is not important, see :func:`distribute`, - which does not first pull the iterable into memory. + This function will exhaust the iterable before returning. + If order is not important, see :func:`distribute`, which does not first + pull the iterable into memory. 
""" if n < 1: @@ -3296,25 +3338,38 @@ def only(iterable, default=None, too_long=None): return first_value -class _IChunk: - def __init__(self, iterable, n): - self._it = islice(iterable, n) - self._cache = deque() +def _ichunk(iterable, n): + cache = deque() + chunk = islice(iterable, n) + + def generator(): + while True: + if cache: + yield cache.popleft() + else: + try: + item = next(chunk) + except StopIteration: + return + else: + yield item - def fill_cache(self): - self._cache.extend(self._it) + def materialize_next(n=1): + # if n not specified materialize everything + if n is None: + cache.extend(chunk) + return len(cache) - def __iter__(self): - return self + to_cache = n - len(cache) - def __next__(self): - try: - return next(self._it) - except StopIteration: - if self._cache: - return self._cache.popleft() - else: - raise + # materialize up to n + if to_cache > 0: + cache.extend(islice(chunk, to_cache)) + + # return number materialized up to n + return min(n, len(cache)) + + return (generator(), materialize_next) def ichunked(iterable, n): @@ -3338,19 +3393,19 @@ def ichunked(iterable, n): [8, 9, 10, 11] """ - source = peekable(iter(iterable)) - ichunk_marker = object() + iterable = iter(iterable) while True: + # Create new chunk + chunk, materialize_next = _ichunk(iterable, n) + # Check to see whether we're at the end of the source iterable - item = source.peek(ichunk_marker) - if item is ichunk_marker: + if not materialize_next(): return - chunk = _IChunk(source, n) yield chunk - # Advance the source iterable and fill previous chunk's cache - chunk.fill_cache() + # Fill previous chunk's cache + materialize_next(None) def iequals(*iterables): @@ -3864,6 +3919,7 @@ def nth_permutation(iterable, r, index): raise ValueError else: c = perm(n, r) + assert c > 0 # factortial(n)>0, and r<n so perm(n,r) is never zero if index < 0: index += c @@ -3871,9 +3927,6 @@ def nth_permutation(iterable, r, index): if not 0 <= index < c: raise IndexError - if c == 0: - return tuple() - result = [0] * r q = index * factorial(n) // c if r < n else index for d in range(1, n + 1): @@ -3946,6 +3999,12 @@ def value_chain(*args): >>> list(value_chain('12', '34', ['56', '78'])) ['12', '34', '56', '78'] + Pre- or postpend a single element to an iterable: + + >>> list(value_chain(1, [2, 3, 4, 5, 6])) + [1, 2, 3, 4, 5, 6] + >>> list(value_chain([1, 2, 3, 4, 5], 6)) + [1, 2, 3, 4, 5, 6] Multiple levels of nesting are not flattened. 
@@ -4154,53 +4213,41 @@ def chunked_even(iterable, n): [[1, 2, 3], [4, 5, 6], [7]] """ + iterable = iter(iterable) - len_method = getattr(iterable, '__len__', None) - - if len_method is None: - return _chunked_even_online(iterable, n) - else: - return _chunked_even_finite(iterable, len_method(), n) - - -def _chunked_even_online(iterable, n): - buffer = [] - maxbuf = n + (n - 2) * (n - 1) - for x in iterable: - buffer.append(x) - if len(buffer) == maxbuf: - yield buffer[:n] - buffer = buffer[n:] - yield from _chunked_even_finite(buffer, len(buffer), n) + # Initialize a buffer to process the chunks while keeping + # some back to fill any underfilled chunks + min_buffer = (n - 1) * (n - 2) + buffer = list(islice(iterable, min_buffer)) + # Append items until we have a completed chunk + for _ in islice(map(buffer.append, iterable), n, None, n): + yield buffer[:n] + del buffer[:n] -def _chunked_even_finite(iterable, N, n): - if N < 1: + # Check if any chunks need addition processing + if not buffer: return + length = len(buffer) - # Lists are either size `full_size <= n` or `partial_size = full_size - 1` - q, r = divmod(N, n) + # Chunks are either size `full_size <= n` or `partial_size = full_size - 1` + q, r = divmod(length, n) num_lists = q + (1 if r > 0 else 0) - q, r = divmod(N, num_lists) + q, r = divmod(length, num_lists) full_size = q + (1 if r > 0 else 0) partial_size = full_size - 1 - num_full = N - partial_size * num_lists - num_partial = num_lists - num_full + num_full = length - partial_size * num_lists - # Yield num_full lists of full_size + # Yield chunks of full size partial_start_idx = num_full * full_size if full_size > 0: for i in range(0, partial_start_idx, full_size): - yield list(islice(iterable, i, i + full_size)) + yield buffer[i : i + full_size] - # Yield num_partial lists of partial_size + # Yield chunks of partial size if partial_size > 0: - for i in range( - partial_start_idx, - partial_start_idx + (num_partial * partial_size), - partial_size, - ): - yield list(islice(iterable, i, i + partial_size)) + for i in range(partial_start_idx, length, partial_size): + yield buffer[i : i + partial_size] def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False): @@ -4419,12 +4466,12 @@ def minmax(iterable_or_value, *others, key=None, default=_marker): try: lo = hi = next(it) - except StopIteration as e: + except StopIteration as exc: if default is _marker: raise ValueError( '`minmax()` argument is an empty iterable. ' 'Provide a `default` value to suppress this error.' - ) from e + ) from exc return default # Different branches depending on the presence of key. This saves a lot @@ -4654,3 +4701,106 @@ def filter_map(func, iterable): y = func(x) if y is not None: yield y + + +def powerset_of_sets(iterable): + """Yields all possible subsets of the iterable. + + >>> list(powerset_of_sets([1, 2, 3])) # doctest: +SKIP + [set(), {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}] + >>> list(powerset_of_sets([1, 1, 0])) # doctest: +SKIP + [set(), {1}, {0}, {0, 1}] + + :func:`powerset_of_sets` takes care to minimize the number + of hash operations performed. + """ + sets = tuple(map(set, dict.fromkeys(map(frozenset, zip(iterable))))) + for r in range(len(sets) + 1): + yield from starmap(set().union, combinations(sets, r)) + + +def join_mappings(**field_to_map): + """ + Joins multiple mappings together using their common keys. 
+ + >>> user_scores = {'elliot': 50, 'claris': 60} + >>> user_times = {'elliot': 30, 'claris': 40} + >>> join_mappings(score=user_scores, time=user_times) + {'elliot': {'score': 50, 'time': 30}, 'claris': {'score': 60, 'time': 40}} + """ + ret = defaultdict(dict) + + for field_name, mapping in field_to_map.items(): + for key, value in mapping.items(): + ret[key][field_name] = value + + return dict(ret) + + +def _complex_sumprod(v1, v2): + """High precision sumprod() for complex numbers. + Used by :func:`dft` and :func:`idft`. + """ + + r1 = chain((p.real for p in v1), (-p.imag for p in v1)) + r2 = chain((q.real for q in v2), (q.imag for q in v2)) + i1 = chain((p.real for p in v1), (p.imag for p in v1)) + i2 = chain((q.imag for q in v2), (q.real for q in v2)) + return complex(_fsumprod(r1, r2), _fsumprod(i1, i2)) + + +def dft(xarr): + """Discrete Fourier Tranform. *xarr* is a sequence of complex numbers. + Yields the components of the corresponding transformed output vector. + + >>> import cmath + >>> xarr = [1, 2-1j, -1j, -1+2j] + >>> Xarr = [2, -2-2j, -2j, 4+4j] + >>> all(map(cmath.isclose, dft(xarr), Xarr)) + True + + See :func:`idft` for the inverse Discrete Fourier Transform. + """ + N = len(xarr) + roots_of_unity = [e ** (n / N * tau * -1j) for n in range(N)] + for k in range(N): + coeffs = [roots_of_unity[k * n % N] for n in range(N)] + yield _complex_sumprod(xarr, coeffs) + + +def idft(Xarr): + """Inverse Discrete Fourier Tranform. *Xarr* is a sequence of + complex numbers. Yields the components of the corresponding + inverse-transformed output vector. + + >>> import cmath + >>> xarr = [1, 2-1j, -1j, -1+2j] + >>> Xarr = [2, -2-2j, -2j, 4+4j] + >>> all(map(cmath.isclose, idft(Xarr), xarr)) + True + + See :func:`dft` for the Discrete Fourier Transform. + """ + N = len(Xarr) + roots_of_unity = [e ** (n / N * tau * 1j) for n in range(N)] + for k in range(N): + coeffs = [roots_of_unity[k * n % N] for n in range(N)] + yield _complex_sumprod(Xarr, coeffs) / N + + +def doublestarmap(func, iterable): + """Apply *func* to every item of *iterable* by dictionary unpacking + the item into *func*. + + The difference between :func:`itertools.starmap` and :func:`doublestarmap` + parallels the distinction between ``func(*a)`` and ``func(**a)``. + + >>> iterable = [{'a': 1, 'b': 2}, {'a': 40, 'b': 60}] + >>> list(doublestarmap(lambda a, b: a + b, iterable)) + [3, 100] + + ``TypeError`` will be raised if *func*'s signature doesn't match the + mapping contained in *iterable* or if *iterable* does not contain mappings. + """ + for item in iterable: + yield func(**item) diff --git a/contrib/python/more-itertools/py3/more_itertools/more.pyi b/contrib/python/more-itertools/py3/more_itertools/more.pyi index 9a5fc911a3..e946023259 100644 --- a/contrib/python/more-itertools/py3/more_itertools/more.pyi +++ b/contrib/python/more-itertools/py3/more_itertools/more.pyi @@ -1,4 +1,5 @@ """Stubs for more_itertools.more""" + from __future__ import annotations from types import TracebackType @@ -9,8 +10,10 @@ from typing import ( ContextManager, Generic, Hashable, + Mapping, Iterable, Iterator, + Mapping, overload, Reversible, Sequence, @@ -602,6 +605,7 @@ class countable(Generic[_T], Iterator[_T]): def __init__(self, iterable: Iterable[_T]) -> None: ... def __iter__(self) -> countable[_T]: ... def __next__(self) -> _T: ... + items_seen: int def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ... 
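The hunks above add several new helpers to more_itertools in 10.3.0: powerset_of_sets, join_mappings, dft/idft, and doublestarmap. The following is a minimal usage sketch derived from the docstrings shown in this diff, assuming more-itertools >= 10.3.0 is installed; expected outputs in the comments come from those docstrings.

import cmath
from more_itertools import dft, idft, doublestarmap, join_mappings, powerset_of_sets

# join_mappings: combine several mappings that share keys into one dict of dicts
user_scores = {'elliot': 50, 'claris': 60}
user_times = {'elliot': 30, 'claris': 40}
print(join_mappings(score=user_scores, time=user_times))
# {'elliot': {'score': 50, 'time': 30}, 'claris': {'score': 60, 'time': 40}}

# doublestarmap: like itertools.starmap, but unpacks each item with ** instead of *
print(list(doublestarmap(lambda a, b: a + b, [{'a': 1, 'b': 2}, {'a': 40, 'b': 60}])))
# [3, 100]

# powerset_of_sets: all subsets, deduplicated and yielded as real set objects
print(list(powerset_of_sets([1, 1, 0])))
# [set(), {1}, {0}, {0, 1}]  (per the docstring; exact set display may vary)

# dft/idft: discrete Fourier transform and its inverse; a round trip recovers the input
xarr = [1, 2 - 1j, -1j, -1 + 2j]
Xarr = list(dft(xarr))           # idft needs a sequence, so materialize the generator
print(all(map(cmath.isclose, idft(Xarr), xarr)))
# True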
def zip_broadcast( @@ -693,3 +697,13 @@ def filter_map( func: Callable[[_T], _V | None], iterable: Iterable[_T], ) -> Iterator[_V]: ... +def powerset_of_sets(iterable: Iterable[_T]) -> Iterator[set[_T]]: ... +def join_mappings( + **field_to_map: Mapping[_T, _V] +) -> dict[_T, dict[str, _V]]: ... +def doublestarmap( + func: Callable[..., _T], + iterable: Iterable[Mapping[str, Any]], +) -> Iterator[_T]: ... +def dft(xarr: Sequence[complex]) -> Iterator[complex]: ... +def idft(Xarr: Sequence[complex]) -> Iterator[complex]: ... diff --git a/contrib/python/more-itertools/py3/more_itertools/recipes.py b/contrib/python/more-itertools/py3/more_itertools/recipes.py index 145e3cb5bd..b32fa95533 100644 --- a/contrib/python/more-itertools/py3/more_itertools/recipes.py +++ b/contrib/python/more-itertools/py3/more_itertools/recipes.py @@ -7,6 +7,7 @@ Some backward-compatible usability improvements have been made. .. [1] http://docs.python.org/library/itertools.html#recipes """ + import math import operator @@ -74,6 +75,7 @@ __all__ = [ 'totient', 'transpose', 'triplewise', + 'unique', 'unique_everseen', 'unique_justseen', ] @@ -198,7 +200,7 @@ def nth(iterable, n, default=None): return next(islice(iterable, n, None), default) -def all_equal(iterable): +def all_equal(iterable, key=None): """ Returns ``True`` if all the elements are equal to each other. @@ -207,9 +209,16 @@ def all_equal(iterable): >>> all_equal('aaab') False + A function that accepts a single argument and returns a transformed version + of each input item can be specified with *key*: + + >>> all_equal('AaaA', key=str.casefold) + True + >>> all_equal([1, 2, 3], key=lambda x: x < 10) + True + """ - g = groupby(iterable) - return next(g, True) and not next(g, False) + return len(list(islice(groupby(iterable, key), 2))) <= 1 def quantify(iterable, pred=bool): @@ -410,16 +419,11 @@ def roundrobin(*iterables): iterables is small). """ - # Recipe credited to George Sakkis - pending = len(iterables) - nexts = cycle(iter(it).__next__ for it in iterables) - while pending: - try: - for next in nexts: - yield next() - except StopIteration: - pending -= 1 - nexts = cycle(islice(nexts, pending)) + # Algorithm credited to George Sakkis + iterators = map(iter, iterables) + for num_active in range(len(iterables), 0, -1): + iterators = cycle(islice(iterators, num_active)) + yield from map(next, iterators) def partition(pred, iterable): @@ -458,16 +462,14 @@ def powerset(iterable): :func:`powerset` will operate on iterables that aren't :class:`set` instances, so repeated elements in the input will produce repeated elements - in the output. Use :func:`unique_everseen` on the input to avoid generating - duplicates: + in the output. >>> seq = [1, 1, 0] >>> list(powerset(seq)) [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)] - >>> from more_itertools import unique_everseen - >>> list(powerset(unique_everseen(seq))) - [(), (1,), (0,), (1, 0)] + For a variant that efficiently yields actual :class:`set` instances, see + :func:`powerset_of_sets`. """ s = list(iterable) return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) @@ -533,6 +535,25 @@ def unique_justseen(iterable, key=None): return map(next, map(operator.itemgetter(1), groupby(iterable, key))) +def unique(iterable, key=None, reverse=False): + """Yields unique elements in sorted order. + + >>> list(unique([[1, 2], [3, 4], [1, 2]])) + [[1, 2], [3, 4]] + + *key* and *reverse* are passed to :func:`sorted`. 
+ + >>> list(unique('ABBcCAD', str.casefold)) + ['A', 'B', 'c', 'D'] + >>> list(unique('ABBcCAD', str.casefold, reverse=True)) + ['D', 'c', 'B', 'A'] + + The elements in *iterable* need not be hashable, but they must be + comparable for sorting to work. + """ + return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key) + + def iter_except(func, exception, first=None): """Yields results from a function repeatedly until an exception is raised. @@ -827,8 +848,6 @@ def iter_index(iterable, value, start=0, stop=None): """Yield the index of each place in *iterable* that *value* occurs, beginning with index *start* and ending before index *stop*. - See :func:`locate` for a more general means of finding the indexes - associated with particular values. >>> list(iter_index('AABCADEAF', 'A')) [0, 1, 4, 7] @@ -836,6 +855,19 @@ def iter_index(iterable, value, start=0, stop=None): [1, 4, 7] >>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive [1, 4] + + The behavior for non-scalar *values* matches the built-in Python types. + + >>> list(iter_index('ABCDABCD', 'AB')) + [0, 4] + >>> list(iter_index([0, 1, 2, 3, 0, 1, 2, 3], [0, 1])) + [] + >>> list(iter_index([[0, 1], [2, 3], [0, 1], [2, 3]], [0, 1])) + [0, 2] + + See :func:`locate` for a more general means of finding the indexes + associated with particular values. + """ seq_index = getattr(iterable, 'index', None) if seq_index is None: @@ -1006,7 +1038,9 @@ def totient(n): >>> totient(12) 4 """ - for p in unique_justseen(factor(n)): + # The itertools docs use unique_justseen instead of set; see + # https://github.com/more-itertools/more-itertools/issues/823 + for p in set(factor(n)): n = n // p * (p - 1) return n diff --git a/contrib/python/more-itertools/py3/more_itertools/recipes.pyi b/contrib/python/more-itertools/py3/more_itertools/recipes.pyi index ed4c19db49..739acec05f 100644 --- a/contrib/python/more-itertools/py3/more_itertools/recipes.pyi +++ b/contrib/python/more-itertools/py3/more_itertools/recipes.pyi @@ -1,4 +1,5 @@ """Stubs for more_itertools.recipes""" + from __future__ import annotations from typing import ( @@ -28,7 +29,9 @@ def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ... def nth(iterable: Iterable[_T], n: int) -> _T | None: ... @overload def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... -def all_equal(iterable: Iterable[_T]) -> bool: ... +def all_equal( + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... +) -> bool: ... def quantify( iterable: Iterable[_T], pred: Callable[[_T], bool] = ... ) -> int: ... @@ -58,6 +61,11 @@ def unique_everseen( def unique_justseen( iterable: Iterable[_T], key: Callable[[_T], object] | None = ... ) -> Iterator[_T]: ... +def unique( + iterable: Iterable[_T], + key: Callable[[_T], object] | None = ..., + reverse: bool = False, +) -> Iterator[_T]: ... 
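The recipes changes above add a new unique() helper (sorted deduplication built on unique_justseen) and give all_equal() an optional key callable. A short sketch of both, again based on the docstrings and tests in this diff and assuming more-itertools >= 10.3.0:

from more_itertools import all_equal, unique

# unique(): sort, then drop adjacent duplicates; works on unhashable but comparable items
print(list(unique([[1, 2], [3, 4], [1, 2]])))
# [[1, 2], [3, 4]]
print(list(unique('ABBcCAD', key=str.casefold, reverse=True)))
# ['D', 'c', 'B', 'A']

# all_equal() now accepts a key: items compare equal after the key is applied
print(all_equal('AaaA', key=str.casefold))          # True
print(all_equal([1, 2, 3], key=lambda x: x < 10))   # True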
@overload def iter_except( func: Callable[[], _T], diff --git a/contrib/python/more-itertools/py3/tests/test_more.py b/contrib/python/more-itertools/py3/tests/test_more.py index 741ef1a468..fda4c0984a 100644 --- a/contrib/python/more-itertools/py3/tests/test_more.py +++ b/contrib/python/more-itertools/py3/tests/test_more.py @@ -1,3 +1,4 @@ +import cmath import warnings from collections import Counter, abc @@ -23,7 +24,7 @@ from itertools import ( ) from operator import add, mul, itemgetter from pickle import loads, dumps -from random import seed, Random +from random import Random, random, randrange, seed from statistics import mean from string import ascii_letters from sys import version_info @@ -772,6 +773,11 @@ class WindowedTests(TestCase): with self.assertRaises(ValueError): list(mi.windowed([1, 2, 3, 4, 5], -1)) + def test_empty_seq(self): + actual = list(mi.windowed([], 3)) + expected = [] + self.assertEqual(actual, expected) + class SubstringsTests(TestCase): def test_basic(self): @@ -2121,9 +2127,9 @@ class SortTogetherTest(TestCase): mi.sort_together( iterables, key_list=(0, 2), - key=lambda state, number: number - if state == 'CT' - else 2 * number, + key=lambda state, number: ( + number if state == 'CT' else 2 * number + ), ), [ ('CT', 'GA', 'CT', 'CT', 'GA', 'GA'), @@ -3984,22 +3990,41 @@ class IchunkedTests(TestCase): self.assertRaises(RuntimeError, next, it) def test_memory_in_order(self): - # No items should be kept in memory when a chunk is produced - all_chunks = mi.ichunked(count(), 4) + gen_numbers = [] + + def gen(): + for gen_number in count(): + gen_numbers.append(gen_number) + yield gen_number + + # No items should be kept in memory when a ichunked is first called + all_chunks = mi.ichunked(gen(), 4) + self.assertEqual(gen_numbers, []) + + # The first item of each chunk should be generated on chunk generation first_chunk = next(all_chunks) - self.assertEqual(len(first_chunk._cache), 0) + self.assertEqual(gen_numbers, [0]) # If we don't read a chunk before getting its successor, its contents # will be cached second_chunk = next(all_chunks) - self.assertEqual(len(first_chunk._cache), 4) + self.assertEqual(gen_numbers, [0, 1, 2, 3, 4]) + + # Check if we can read in cached values + self.assertEqual(list(first_chunk), [0, 1, 2, 3]) + self.assertEqual(list(second_chunk), [4, 5, 6, 7]) - # If we read in order, there again should be nothing cached - mi.consume(first_chunk) - mi.consume(second_chunk) + # Again only the most recent chunk should have an item cached third_chunk = next(all_chunks) - for chunk in (first_chunk, second_chunk, third_chunk): - self.assertEqual(len(chunk._cache), 0) + self.assertEqual(len(gen_numbers), 9) + + # No new item should be cached when reading past the first number + next(third_chunk) + self.assertEqual(len(gen_numbers), 9) + + # we should not be able to read spent chunks + self.assertEqual(list(first_chunk), []) + self.assertEqual(list(second_chunk), []) class DistinctCombinationsTests(TestCase): @@ -5489,6 +5514,10 @@ class ConstrainedBatchesTests(TestCase): [(record_3, record_5), (record_10,), (record_2,)], ) + def test_bad_max(self): + with self.assertRaises(ValueError): + list(mi.constrained_batches([], 0)) + class GrayProductTests(TestCase): def test_basic(self): @@ -5733,3 +5762,107 @@ class FilterMapTests(TestCase): ) expected = [1, 2, 3] self.assertEqual(actual, expected) + + +class PowersetOfSetsTests(TestCase): + def test_simple(self): + iterable = [0, 1, 2] + actual = list(mi.powerset_of_sets(iterable)) + expected = [set(), {0}, 
{1}, {2}, {0, 1}, {0, 2}, {1, 2}, {0, 1, 2}] + self.assertEqual(actual, expected) + + def test_hash_count(self): + hash_count = 0 + + class Str(str): + def __hash__(true_self): + nonlocal hash_count + hash_count += 1 + return super.__hash__(true_self) + + iterable = map(Str, 'ABBBCDD') + self.assertEqual(len(list(mi.powerset_of_sets(iterable))), 128) + self.assertLessEqual(hash_count, 14) + + +class JoinMappingTests(TestCase): + def test_basic(self): + salary_map = {'e1': 12, 'e2': 23, 'e3': 34} + dept_map = {'e1': 'eng', 'e2': 'sales', 'e3': 'eng'} + service_map = {'e1': 5, 'e2': 9, 'e3': 2} + field_to_map = { + 'salary': salary_map, + 'dept': dept_map, + 'service': service_map, + } + expected = { + 'e1': {'salary': 12, 'dept': 'eng', 'service': 5}, + 'e2': {'salary': 23, 'dept': 'sales', 'service': 9}, + 'e3': {'salary': 34, 'dept': 'eng', 'service': 2}, + } + self.assertEqual(dict(mi.join_mappings(**field_to_map)), expected) + + def test_empty(self): + self.assertEqual(dict(mi.join_mappings()), {}) + + +class DiscreteFourierTransformTests(TestCase): + def test_basic(self): + # Example calculation from: + # https://en.wikipedia.org/wiki/Discrete_Fourier_transform#Example + xarr = [1, 2 - 1j, -1j, -1 + 2j] + Xarr = [2, -2 - 2j, -2j, 4 + 4j] + self.assertTrue(all(map(cmath.isclose, mi.dft(xarr), Xarr))) + self.assertTrue(all(map(cmath.isclose, mi.idft(Xarr), xarr))) + + def test_roundtrip(self): + for _ in range(1_000): + N = randrange(35) + xarr = [complex(random(), random()) for i in range(N)] + Xarr = list(mi.dft(xarr)) + assert all(map(cmath.isclose, mi.idft(Xarr), xarr)) + + +class DoubleStarMapTests(TestCase): + def test_construction(self): + iterable = [{'price': 1.23}, {'price': 42}, {'price': 0.1}] + actual = list(mi.doublestarmap('{price:.2f}'.format, iterable)) + expected = ['1.23', '42.00', '0.10'] + self.assertEqual(actual, expected) + + def test_identity(self): + iterable = [{'x': 1}, {'x': 2}, {'x': 3}] + actual = list(mi.doublestarmap(lambda x: x, iterable)) + expected = [1, 2, 3] + self.assertEqual(actual, expected) + + def test_adding(self): + iterable = [{'a': 1, 'b': 2}, {'a': 3, 'b': 4}] + actual = list(mi.doublestarmap(lambda a, b: a + b, iterable)) + expected = [3, 7] + self.assertEqual(actual, expected) + + def test_mismatch_function_smaller(self): + iterable = [{'a': 1, 'b': 2}, {'a': 3, 'b': 4}] + with self.assertRaises(TypeError): + list(mi.doublestarmap(lambda a: a, iterable)) + + def test_mismatch_function_different(self): + iterable = [{'a': 1}, {'a': 2}] + with self.assertRaises(TypeError): + list(mi.doublestarmap(lambda x: x, iterable)) + + def test_mismatch_function_larger(self): + iterable = [{'a': 1}, {'a': 2}] + with self.assertRaises(TypeError): + list(mi.doublestarmap(lambda a, b: a + b, iterable)) + + def test_no_mapping(self): + iterable = [1, 2, 3, 4] + with self.assertRaises(TypeError): + list(mi.doublestarmap(lambda x: x, iterable)) + + def test_empty(self): + actual = list(mi.doublestarmap(lambda x: x, [])) + expected = [] + self.assertEqual(actual, expected) diff --git a/contrib/python/more-itertools/py3/tests/test_recipes.py b/contrib/python/more-itertools/py3/tests/test_recipes.py index ee5a5233b5..0035e58d05 100644 --- a/contrib/python/more-itertools/py3/tests/test_recipes.py +++ b/contrib/python/more-itertools/py3/tests/test_recipes.py @@ -134,33 +134,30 @@ class NthTests(TestCase): class AllEqualTests(TestCase): - """Tests for ``all_equal()``""" - def test_true(self): - """Everything is equal""" self.assertTrue(mi.all_equal('aaaaaa')) 
self.assertTrue(mi.all_equal([0, 0, 0, 0])) def test_false(self): - """Not everything is equal""" self.assertFalse(mi.all_equal('aaaaab')) self.assertFalse(mi.all_equal([0, 0, 0, 1])) def test_tricky(self): - """Not everything is identical, but everything is equal""" items = [1, complex(1, 0), 1.0] self.assertTrue(mi.all_equal(items)) def test_empty(self): - """Return True if the iterable is empty""" self.assertTrue(mi.all_equal('')) self.assertTrue(mi.all_equal([])) def test_one(self): - """Return True if the iterable is singular""" self.assertTrue(mi.all_equal('0')) self.assertTrue(mi.all_equal([0])) + def test_key(self): + self.assertTrue(mi.all_equal('4٤໔4৪', key=int)) + self.assertFalse(mi.all_equal('Abc', key=str.casefold)) + class QuantifyTests(TestCase): """Tests for ``quantify()``""" @@ -268,6 +265,12 @@ class PairwiseTests(TestCase): p = mi.pairwise("a") self.assertRaises(StopIteration, lambda: next(p)) + def test_coverage(self): + from more_itertools import recipes + + p = recipes._pairwise([1, 2, 3]) + self.assertEqual([(1, 2), (2, 3)], list(p)) + class GrouperTests(TestCase): def test_basic(self): @@ -392,45 +395,55 @@ class PowersetTests(TestCase): class UniqueEverseenTests(TestCase): - """Tests for ``unique_everseen()``""" - def test_everseen(self): - """ensure duplicate elements are ignored""" u = mi.unique_everseen('AAAABBBBCCDAABBB') self.assertEqual(['A', 'B', 'C', 'D'], list(u)) def test_custom_key(self): - """ensure the custom key comparison works""" u = mi.unique_everseen('aAbACCc', key=str.lower) self.assertEqual(list('abC'), list(u)) def test_unhashable(self): - """ensure things work for unhashable items""" iterable = ['a', [1, 2, 3], [1, 2, 3], 'a'] u = mi.unique_everseen(iterable) self.assertEqual(list(u), ['a', [1, 2, 3]]) def test_unhashable_key(self): - """ensure things work for unhashable items with a custom key""" iterable = ['a', [1, 2, 3], [1, 2, 3], 'a'] u = mi.unique_everseen(iterable, key=lambda x: x) self.assertEqual(list(u), ['a', [1, 2, 3]]) class UniqueJustseenTests(TestCase): - """Tests for ``unique_justseen()``""" - def test_justseen(self): - """ensure only last item is remembered""" u = mi.unique_justseen('AAAABBBCCDABB') self.assertEqual(list('ABCDAB'), list(u)) def test_custom_key(self): - """ensure the custom key comparison works""" u = mi.unique_justseen('AABCcAD', str.lower) self.assertEqual(list('ABCAD'), list(u)) +class UniqueTests(TestCase): + def test_basic(self): + iterable = [0, 1, 1, 8, 9, 9, 9, 8, 8, 1, 9, 9] + actual = list(mi.unique(iterable)) + expected = [0, 1, 8, 9] + self.assertEqual(actual, expected) + + def test_key(self): + iterable = ['1', '1', '10', '10', '2', '2', '20', '20'] + actual = list(mi.unique(iterable, key=int)) + expected = ['1', '2', '10', '20'] + self.assertEqual(actual, expected) + + def test_reverse(self): + iterable = ['1', '1', '10', '10', '2', '2', '20', '20'] + actual = list(mi.unique(iterable, key=int, reverse=True)) + expected = ['20', '10', '2', '1'] + self.assertEqual(actual, expected) + + class IterExceptTests(TestCase): """Tests for ``iter_except()``""" @@ -698,7 +711,7 @@ class NthPermutationTests(TestCase): n = factorial(len(iterable)) // factorial(len(iterable) - r) for index in [-1 - n, n + 1]: with self.assertRaises(IndexError): - mi.nth_combination(iterable, r, index) + mi.nth_permutation(iterable, r, index) def test_invalid_r(self): iterable = 'abcde' @@ -706,7 +719,7 @@ class NthPermutationTests(TestCase): n = factorial(len(iterable)) // factorial(len(iterable) - r) for r in [-1, n + 1]: 
with self.assertRaises(ValueError): - mi.nth_combination(iterable, r, 0) + mi.nth_permutation(iterable, r, 0) class PrependTests(TestCase): diff --git a/contrib/python/more-itertools/py3/ya.make b/contrib/python/more-itertools/py3/ya.make index a9228cc50e..e902c9d552 100644 --- a/contrib/python/more-itertools/py3/ya.make +++ b/contrib/python/more-itertools/py3/ya.make @@ -2,7 +2,7 @@ PY3_LIBRARY() -VERSION(10.2.0) +VERSION(10.3.0) LICENSE(MIT) diff --git a/contrib/python/prompt-toolkit/py3/.dist-info/METADATA b/contrib/python/prompt-toolkit/py3/.dist-info/METADATA index fcd3151a1c..c4f9a4a3ff 100644 --- a/contrib/python/prompt-toolkit/py3/.dist-info/METADATA +++ b/contrib/python/prompt-toolkit/py3/.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: prompt_toolkit -Version: 3.0.46 +Version: 3.0.47 Summary: Library for building powerful interactive command lines in Python Home-page: https://github.com/prompt-toolkit/python-prompt-toolkit Author: Jonathan Slenders diff --git a/contrib/python/prompt-toolkit/py3/prompt_toolkit/__init__.py b/contrib/python/prompt-toolkit/py3/prompt_toolkit/__init__.py index 9f194f1b44..7f6f30251c 100644 --- a/contrib/python/prompt-toolkit/py3/prompt_toolkit/__init__.py +++ b/contrib/python/prompt-toolkit/py3/prompt_toolkit/__init__.py @@ -28,7 +28,7 @@ from .formatted_text import ANSI, HTML from .shortcuts import PromptSession, print_formatted_text, prompt # Don't forget to update in `docs/conf.py`! -__version__ = "3.0.46" +__version__ = "3.0.47" assert pep440.match(__version__) diff --git a/contrib/python/prompt-toolkit/py3/prompt_toolkit/layout/screen.py b/contrib/python/prompt-toolkit/py3/prompt_toolkit/layout/screen.py index 0f19f52a8d..475f540d11 100644 --- a/contrib/python/prompt-toolkit/py3/prompt_toolkit/layout/screen.py +++ b/contrib/python/prompt-toolkit/py3/prompt_toolkit/layout/screen.py @@ -169,7 +169,7 @@ class Screen: #: Escape sequences to be injected. self.zero_width_escapes: defaultdict[int, defaultdict[int, str]] = defaultdict( - lambda: defaultdict(lambda: "") + lambda: defaultdict(str) ) #: Position of the cursor. diff --git a/contrib/python/prompt-toolkit/py3/prompt_toolkit/shortcuts/prompt.py b/contrib/python/prompt-toolkit/py3/prompt_toolkit/shortcuts/prompt.py index 115d890075..d0732bc133 100644 --- a/contrib/python/prompt-toolkit/py3/prompt_toolkit/shortcuts/prompt.py +++ b/contrib/python/prompt-toolkit/py3/prompt_toolkit/shortcuts/prompt.py @@ -324,6 +324,10 @@ class PromptSession(Generic[_T]): :param input: `Input` object. (Note that the preferred way to change the input/output is by creating an `AppSession`.) :param output: `Output` object. + :param interrupt_exception: The exception type that will be raised when + there is a keyboard interrupt (control-c keypress). + :param eof_exception: The exception type that will be raised when there is + an end-of-file/exit event (control-d keypress). 
""" _fields = ( @@ -410,6 +414,8 @@ class PromptSession(Generic[_T]): refresh_interval: float = 0, input: Input | None = None, output: Output | None = None, + interrupt_exception: type[BaseException] = KeyboardInterrupt, + eof_exception: type[BaseException] = EOFError, ) -> None: history = history or InMemoryHistory() clipboard = clipboard or InMemoryClipboard() @@ -459,6 +465,8 @@ class PromptSession(Generic[_T]): self.reserve_space_for_menu = reserve_space_for_menu self.tempfile_suffix = tempfile_suffix self.tempfile = tempfile + self.interrupt_exception = interrupt_exception + self.eof_exception = eof_exception # Create buffers, layout and Application. self.history = history @@ -811,7 +819,7 @@ class PromptSession(Generic[_T]): @handle("<sigint>") def _keyboard_interrupt(event: E) -> None: "Abort when Control-C has been pressed." - event.app.exit(exception=KeyboardInterrupt, style="class:aborting") + event.app.exit(exception=self.interrupt_exception(), style="class:aborting") @Condition def ctrl_d_condition() -> bool: @@ -826,7 +834,7 @@ class PromptSession(Generic[_T]): @handle("c-d", filter=ctrl_d_condition & default_focused) def _eof(event: E) -> None: "Exit when Control-D has been pressed." - event.app.exit(exception=EOFError, style="class:exiting") + event.app.exit(exception=self.eof_exception(), style="class:exiting") suspend_supported = Condition(suspend_to_background_supported) diff --git a/contrib/python/prompt-toolkit/py3/prompt_toolkit/widgets/base.py b/contrib/python/prompt-toolkit/py3/prompt_toolkit/widgets/base.py index 709b7a9499..e2330e9e1f 100644 --- a/contrib/python/prompt-toolkit/py3/prompt_toolkit/widgets/base.py +++ b/contrib/python/prompt-toolkit/py3/prompt_toolkit/widgets/base.py @@ -59,7 +59,7 @@ from prompt_toolkit.layout.controls import ( FormattedTextControl, GetLinePrefixCallable, ) -from prompt_toolkit.layout.dimension import AnyDimension, to_dimension +from prompt_toolkit.layout.dimension import AnyDimension from prompt_toolkit.layout.dimension import Dimension as D from prompt_toolkit.layout.margins import ( ConditionalMargin, @@ -636,31 +636,44 @@ class Box: modal: bool = False, key_bindings: KeyBindings | None = None, ) -> None: - if padding is None: - padding = D(preferred=0) - - def get(value: AnyDimension) -> D: - if value is None: - value = padding - return to_dimension(value) - - self.padding_left = get(padding_left) - self.padding_right = get(padding_right) - self.padding_top = get(padding_top) - self.padding_bottom = get(padding_bottom) + self.padding = padding + self.padding_left = padding_left + self.padding_right = padding_right + self.padding_top = padding_top + self.padding_bottom = padding_bottom self.body = body + def left() -> AnyDimension: + if self.padding_left is None: + return self.padding + return self.padding_left + + def right() -> AnyDimension: + if self.padding_right is None: + return self.padding + return self.padding_right + + def top() -> AnyDimension: + if self.padding_top is None: + return self.padding + return self.padding_top + + def bottom() -> AnyDimension: + if self.padding_bottom is None: + return self.padding + return self.padding_bottom + self.container = HSplit( [ - Window(height=self.padding_top, char=char), + Window(height=top, char=char), VSplit( [ - Window(width=self.padding_left, char=char), + Window(width=left, char=char), body, - Window(width=self.padding_right, char=char), + Window(width=right, char=char), ] ), - Window(height=self.padding_bottom, char=char), + Window(height=bottom, char=char), ], 
width=width, height=height, diff --git a/contrib/python/prompt-toolkit/py3/ya.make b/contrib/python/prompt-toolkit/py3/ya.make index f8e402db87..cdfd5cd4ca 100644 --- a/contrib/python/prompt-toolkit/py3/ya.make +++ b/contrib/python/prompt-toolkit/py3/ya.make @@ -2,7 +2,7 @@ PY3_LIBRARY() -VERSION(3.0.46) +VERSION(3.0.47) LICENSE(BSD-3-Clause) |
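Beyond the version bump, the prompt-toolkit 3.0.47 changes above let PromptSession raise caller-chosen exception types on Ctrl-C and Ctrl-D instead of the hard-coded KeyboardInterrupt and EOFError. A hedged sketch of how the new parameters might be used follows; it needs an interactive terminal to run, and the CancelledByUser/SessionClosed names are hypothetical application-level exceptions, not part of prompt-toolkit.

from prompt_toolkit import PromptSession

# Hypothetical application-specific exceptions (illustration only).
class CancelledByUser(Exception):
    pass

class SessionClosed(Exception):
    pass

session = PromptSession(
    interrupt_exception=CancelledByUser,  # raised on Ctrl-C instead of KeyboardInterrupt
    eof_exception=SessionClosed,          # raised on Ctrl-D instead of EOFError
)

try:
    text = session.prompt('> ')
    print('You typed:', text)
except CancelledByUser:
    print('Input cancelled')
except SessionClosed:
    print('Session closed')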