author     AlexSm <alex@ydb.tech>            2024-01-26 16:00:50 +0100
committer  GitHub <noreply@github.com>       2024-01-26 16:00:50 +0100
commit     7ebcfd058d924bcc8c23da70e034f7415687885c (patch)
tree       e4f00d163c77528c1855f2d7af54a8be83fc1ccb /contrib/python/more-itertools/py3/more_itertools
parent     64ca2dcd06312b9eef624054ceb5f787e11be79a (diff)
parent     6d79e7793c2c462134f4b4a7d911abc7b9b0766f (diff)
download   ydb-7ebcfd058d924bcc8c23da70e034f7415687885c.tar.gz
Merge pull request #1260 from ydb-platform/mergelibs10
mergelibs10
Diffstat (limited to 'contrib/python/more-itertools/py3/more_itertools')
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/__init__.py    2
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/more.py       143
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/recipes.py    107
3 files changed, 187 insertions, 65 deletions
diff --git a/contrib/python/more-itertools/py3/more_itertools/__init__.py b/contrib/python/more-itertools/py3/more_itertools/__init__.py
index 28ffadcf8d..aff94a9abd 100644
--- a/contrib/python/more-itertools/py3/more_itertools/__init__.py
+++ b/contrib/python/more-itertools/py3/more_itertools/__init__.py
@@ -3,4 +3,4 @@
from .more import * # noqa
from .recipes import * # noqa
-__version__ = '10.1.0'
+__version__ = '10.2.0'
diff --git a/contrib/python/more-itertools/py3/more_itertools/more.py b/contrib/python/more-itertools/py3/more_itertools/more.py
index 59c2f1a499..dd711a4763 100644
--- a/contrib/python/more-itertools/py3/more_itertools/more.py
+++ b/contrib/python/more-itertools/py3/more_itertools/more.py
@@ -19,7 +19,7 @@ from itertools import (
zip_longest,
product,
)
-from math import exp, factorial, floor, log
+from math import exp, factorial, floor, log, perm, comb
from queue import Empty, Queue
from random import random, randrange, uniform
from operator import itemgetter, mul, sub, gt, lt, ge, le
@@ -68,8 +68,10 @@ __all__ = [
'divide',
'duplicates_everseen',
'duplicates_justseen',
+ 'classify_unique',
'exactly_n',
'filter_except',
+ 'filter_map',
'first',
'gray_product',
'groupby_transform',
@@ -83,6 +85,7 @@ __all__ = [
'is_sorted',
'islice_extended',
'iterate',
+ 'iter_suppress',
'last',
'locate',
'longest_common_prefix',
@@ -198,15 +201,14 @@ def first(iterable, default=_marker):
``next(iter(iterable), default)``.
"""
- try:
- return next(iter(iterable))
- except StopIteration as e:
- if default is _marker:
- raise ValueError(
- 'first() was called on an empty iterable, and no '
- 'default value was provided.'
- ) from e
- return default
+ for item in iterable:
+ return item
+ if default is _marker:
+ raise ValueError(
+ 'first() was called on an empty iterable, and no '
+ 'default value was provided.'
+ )
+ return default
def last(iterable, default=_marker):
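For reference, the reworked ``first`` keeps its previous behavior; the loop form simply avoids chaining a ``StopIteration`` into the ``ValueError``. A quick doctest-style sketch of the two non-error paths (an empty iterable with no default still raises ``ValueError``):

    >>> from more_itertools import first
    >>> first('ABC')
    'A'
    >>> first([], 'missing')
    'missing'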
@@ -582,6 +584,9 @@ def strictly_n(iterable, n, too_short=None, too_long=None):
>>> list(strictly_n(iterable, n))
['a', 'b', 'c', 'd']
+ Note that the returned iterable must be consumed in order for the check to
+ be made.
+
By default, *too_short* and *too_long* are functions that raise
``ValueError``.
@@ -919,7 +924,7 @@ def substrings_indexes(seq, reverse=False):
class bucket:
- """Wrap *iterable* and return an object that buckets it iterable into
+ """Wrap *iterable* and return an object that buckets the iterable into
child iterables based on a *key* function.
>>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
@@ -3222,6 +3227,8 @@ class time_limited:
stops if the time elapsed is greater than *limit_seconds*. If your time
limit is 1 second, but it takes 2 seconds to generate the first item from
the iterable, the function will run for 2 seconds and not yield anything.
+ As a special case, when *limit_seconds* is zero, the iterator never
+ returns anything.
"""
@@ -3237,6 +3244,9 @@ class time_limited:
return self
def __next__(self):
+ if self.limit_seconds == 0:
+ self.timed_out = True
+ raise StopIteration
item = next(self._iterable)
if monotonic() - self._start_time > self.limit_seconds:
self.timed_out = True
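A minimal doctest-style illustration of the zero-limit special case added above (behavior as documented in this hunk):

    >>> from itertools import count
    >>> from more_itertools import time_limited
    >>> limited = time_limited(0, count())
    >>> list(limited)
    []
    >>> limited.timed_out
    True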
@@ -3356,7 +3366,7 @@ def iequals(*iterables):
>>> iequals("abc", "acb")
False
- Not to be confused with :func:`all_equals`, which checks whether all
+ Not to be confused with :func:`all_equal`, which checks whether all
elements of iterable are equal to each other.
"""
@@ -3853,7 +3863,7 @@ def nth_permutation(iterable, r, index):
elif not 0 <= r < n:
raise ValueError
else:
- c = factorial(n) // factorial(n - r)
+ c = perm(n, r)
if index < 0:
index += c
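The ``math.perm`` and ``math.comb`` substitutions in this and the following hunks are drop-in replacements for the factorial expressions they remove (both functions exist since Python 3.8), for example:

    >>> from math import comb, factorial, perm
    >>> perm(8, 3) == factorial(8) // factorial(8 - 3)
    True
    >>> comb(8, 3) == factorial(8) // (factorial(3) * factorial(8 - 3))
    True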
@@ -3898,7 +3908,7 @@ def nth_combination_with_replacement(iterable, r, index):
if (r < 0) or (r > n):
raise ValueError
- c = factorial(n + r - 1) // (factorial(r) * factorial(n - 1))
+ c = comb(n + r - 1, r)
if index < 0:
index += c
@@ -3911,9 +3921,7 @@ def nth_combination_with_replacement(iterable, r, index):
while r:
r -= 1
while n >= 0:
- num_combs = factorial(n + r - 1) // (
- factorial(r) * factorial(n - 1)
- )
+ num_combs = comb(n + r - 1, r)
if index < num_combs:
break
n -= 1
@@ -4015,9 +4023,9 @@ def combination_index(element, iterable):
for i, j in enumerate(reversed(indexes), start=1):
j = n - j
if i <= j:
- index += factorial(j) // (factorial(i) * factorial(j - i))
+ index += comb(j, i)
- return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+ return comb(n + 1, k + 1) - index
def combination_with_replacement_index(element, iterable):
@@ -4057,7 +4065,7 @@ def combination_with_replacement_index(element, iterable):
break
else:
raise ValueError(
- 'element is not a combination with replacment of iterable'
+ 'element is not a combination with replacement of iterable'
)
n = len(pool)
@@ -4066,11 +4074,13 @@ def combination_with_replacement_index(element, iterable):
occupations[p] += 1
index = 0
+ cumulative_sum = 0
for k in range(1, n):
- j = l + n - 1 - k - sum(occupations[:k])
+ cumulative_sum += occupations[k - 1]
+ j = l + n - 1 - k - cumulative_sum
i = n - k
if i <= j:
- index += factorial(j) // (factorial(i) * factorial(j - i))
+ index += comb(j, i)
return index
@@ -4296,7 +4306,7 @@ def duplicates_everseen(iterable, key=None):
>>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
- This function is analagous to :func:`unique_everseen` and is subject to
+ This function is analogous to :func:`unique_everseen` and is subject to
the same performance considerations.
"""
@@ -4326,12 +4336,54 @@ def duplicates_justseen(iterable, key=None):
>>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
- This function is analagous to :func:`unique_justseen`.
+ This function is analogous to :func:`unique_justseen`.
"""
return flatten(g for _, g in groupby(iterable, key) for _ in g)
+def classify_unique(iterable, key=None):
+ """Classify each element in terms of its uniqueness.
+
+ For each element in the input iterable, return a 3-tuple consisting of:
+
+ 1. The element itself
+ 2. ``False`` if the element is equal to the one preceding it in the input,
+ ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`)
+ 3. ``False`` if this element has been seen anywhere in the input before,
+ ``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`)
+
+ >>> list(classify_unique('otto')) # doctest: +NORMALIZE_WHITESPACE
+ [('o', True, True),
+ ('t', True, True),
+ ('t', False, False),
+ ('o', True, False)]
+
+ This function is analogous to :func:`unique_everseen` and is subject to
+ the same performance considerations.
+
+ """
+ seen_set = set()
+ seen_list = []
+ use_key = key is not None
+ previous = None
+
+ for i, element in enumerate(iterable):
+ k = key(element) if use_key else element
+ is_unique_justseen = not i or previous != k
+ previous = k
+ is_unique_everseen = False
+ try:
+ if k not in seen_set:
+ seen_set.add(k)
+ is_unique_everseen = True
+ except TypeError:
+ if k not in seen_list:
+ seen_list.append(k)
+ is_unique_everseen = True
+ yield element, is_unique_justseen, is_unique_everseen
+
+
def minmax(iterable_or_value, *others, key=None, default=_marker):
"""Returns both the smallest and largest items in an iterable
or the largest of two or more arguments.
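As a small usage sketch for the new ``classify_unique`` (derived from its doctest above), the third flag recovers ``unique_everseen``-style filtering:

    >>> from more_itertools import classify_unique
    >>> [e for e, justseen, everseen in classify_unique('otto') if everseen]
    ['o', 't']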
@@ -4529,10 +4581,8 @@ def takewhile_inclusive(predicate, iterable):
:func:`takewhile` would return ``[1, 4]``.
"""
for x in iterable:
- if predicate(x):
- yield x
- else:
- yield x
+ yield x
+ if not predicate(x):
break
@@ -4567,3 +4617,40 @@ def outer_product(func, xs, ys, *args, **kwargs):
starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)),
n=len(ys),
)
+
+
+def iter_suppress(iterable, *exceptions):
+ """Yield each of the items from *iterable*. If the iteration raises one of
+ the specified *exceptions*, that exception will be suppressed and iteration
+ will stop.
+
+ >>> from itertools import chain
+ >>> def breaks_at_five(x):
+ ... while True:
+ ... if x >= 5:
+ ... raise RuntimeError
+ ... yield x
+ ... x += 1
+ >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError)
+ >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError)
+ >>> list(chain(it_1, it_2))
+ [1, 2, 3, 4, 2, 3, 4]
+ """
+ try:
+ yield from iterable
+ except exceptions:
+ return
+
+
+def filter_map(func, iterable):
+ """Apply *func* to every element of *iterable*, yielding only those which
+ are not ``None``.
+
+ >>> elems = ['1', 'a', '2', 'b', '3']
+ >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems))
+ [1, 2, 3]
+ """
+ for x in iterable:
+ y = func(x)
+ if y is not None:
+ yield y
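A usage sketch for the two new helpers above; the ``read_int`` and ``parse_all`` helpers here are hypothetical. ``filter_map`` drops ``None`` results in a single pass, while ``iter_suppress`` stops cleanly when the wrapped iterable raises:

    >>> from more_itertools import filter_map, iter_suppress
    >>> def read_int(text):
    ...     return int(text) if text.isdigit() else None
    >>> list(filter_map(read_int, ['12', 'n/a', '7']))
    [12, 7]
    >>> def parse_all(texts):
    ...     for text in texts:
    ...         yield int(text)  # raises ValueError at the first bad value
    >>> list(iter_suppress(parse_all(['12', 'n/a', '7']), ValueError))
    [12]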
diff --git a/contrib/python/more-itertools/py3/more_itertools/recipes.py b/contrib/python/more-itertools/py3/more_itertools/recipes.py
index a0bdbecec6..145e3cb5bd 100644
--- a/contrib/python/more-itertools/py3/more_itertools/recipes.py
+++ b/contrib/python/more-itertools/py3/more_itertools/recipes.py
@@ -28,6 +28,7 @@ from itertools import (
zip_longest,
)
from random import randrange, sample, choice
+from sys import hexversion
__all__ = [
'all_equal',
@@ -56,6 +57,7 @@ __all__ = [
'powerset',
'prepend',
'quantify',
+ 'reshape',
'random_combination_with_replacement',
'random_combination',
'random_permutation',
@@ -69,6 +71,7 @@ __all__ = [
'tabulate',
'tail',
'take',
+ 'totient',
'transpose',
'triplewise',
'unique_everseen',
@@ -492,7 +495,7 @@ def unique_everseen(iterable, key=None):
>>> list(unique_everseen(iterable, key=tuple)) # Faster
[[1, 2], [2, 3]]
- Similary, you may want to convert unhashable ``set`` objects with
+ Similarly, you may want to convert unhashable ``set`` objects with
``key=frozenset``. For ``dict`` objects,
``key=lambda x: frozenset(x.items())`` can be used.
@@ -524,6 +527,9 @@ def unique_justseen(iterable, key=None):
['A', 'B', 'C', 'A', 'D']
"""
+ if key is None:
+ return map(operator.itemgetter(0), groupby(iterable))
+
return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
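The new ``key is None`` branch works because ``groupby`` already collapses each run of equal items into one ``(key, group)`` pair, so taking the group keys is equivalent to advancing each group; the shortcut in isolation:

    >>> from itertools import groupby
    >>> from operator import itemgetter
    >>> list(map(itemgetter(0), groupby('AAAABBBCCDAABBB')))
    ['A', 'B', 'C', 'D', 'A', 'B']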
@@ -817,35 +823,34 @@ def polynomial_from_roots(roots):
return list(reduce(convolve, factors, [1]))
-def iter_index(iterable, value, start=0):
+def iter_index(iterable, value, start=0, stop=None):
"""Yield the index of each place in *iterable* that *value* occurs,
- beginning with index *start*.
+ beginning with index *start* and ending before index *stop*.
See :func:`locate` for a more general means of finding the indexes
associated with particular values.
>>> list(iter_index('AABCADEAF', 'A'))
[0, 1, 4, 7]
+ >>> list(iter_index('AABCADEAF', 'A', 1)) # start index is inclusive
+ [1, 4, 7]
+ >>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive
+ [1, 4]
"""
- try:
- seq_index = iterable.index
- except AttributeError:
+ seq_index = getattr(iterable, 'index', None)
+ if seq_index is None:
# Slow path for general iterables
- it = islice(iterable, start, None)
- i = start - 1
- try:
- while True:
- i = i + operator.indexOf(it, value) + 1
+ it = islice(iterable, start, stop)
+ for i, element in enumerate(it, start):
+ if element is value or element == value:
yield i
- except ValueError:
- pass
else:
# Fast path for sequences
+ stop = len(iterable) if stop is None else stop
i = start - 1
try:
while True:
- i = seq_index(value, i + 1)
- yield i
+ yield (i := seq_index(value, i + 1, stop))
except ValueError:
pass
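The rewritten slow path also works for plain iterators that have no ``index`` method, tracking positions with ``enumerate``; for example:

    >>> from more_itertools import iter_index
    >>> list(iter_index(iter('AABCADEAF'), 'A', 1, 7))
    [1, 4]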
@@ -856,47 +861,52 @@ def sieve(n):
>>> list(sieve(30))
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
"""
+ if n > 2:
+ yield 2
+ start = 3
data = bytearray((0, 1)) * (n // 2)
- data[:3] = 0, 0, 0
limit = math.isqrt(n) + 1
- for p in compress(range(limit), data):
+ for p in iter_index(data, 1, start, limit):
+ yield from iter_index(data, 1, start, p * p)
data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
- data[2] = 1
- return iter_index(data, 1) if n > 2 else iter([])
+ start = p * p
+ yield from iter_index(data, 1, start)
-def _batched(iterable, n):
- """Batch data into lists of length *n*. The last batch may be shorter.
+def _batched(iterable, n, *, strict=False):
+ """Batch data into tuples of length *n*. If the number of items in
+ *iterable* is not divisible by *n*:
+ * The last batch will be shorter if *strict* is ``False``.
+ * :exc:`ValueError` will be raised if *strict* is ``True``.
>>> list(batched('ABCDEFG', 3))
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
- On Python 3.12 and above, this is an alias for :func:`itertools.batched`.
+ On Python 3.13 and above, this is an alias for :func:`itertools.batched`.
"""
if n < 1:
raise ValueError('n must be at least one')
it = iter(iterable)
- while True:
- batch = tuple(islice(it, n))
- if not batch:
- break
+ while batch := tuple(islice(it, n)):
+ if strict and len(batch) != n:
+ raise ValueError('batched(): incomplete batch')
yield batch
-try:
+if hexversion >= 0x30D00A2:
from itertools import batched as itertools_batched
-except ImportError:
- batched = _batched
-else:
- def batched(iterable, n):
- return itertools_batched(iterable, n)
+ def batched(iterable, n, *, strict=False):
+ return itertools_batched(iterable, n, strict=strict)
+
+else:
+ batched = _batched
batched.__doc__ = _batched.__doc__
def transpose(it):
- """Swap the rows and columns of the input.
+ """Swap the rows and columns of the input matrix.
>>> list(transpose([(1, 2, 3), (11, 22, 33)]))
[(1, 11), (2, 22), (3, 33)]
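To illustrate the new *strict* flag on ``batched`` (the error message is the one raised in the code above):

    >>> from more_itertools import batched
    >>> list(batched('ABCDEF', 3, strict=True))
    [('A', 'B', 'C'), ('D', 'E', 'F')]
    >>> list(batched('ABCDEFG', 3, strict=True))
    Traceback (most recent call last):
        ...
    ValueError: batched(): incomplete batch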
@@ -907,8 +917,20 @@ def transpose(it):
return _zip_strict(*it)
+def reshape(matrix, cols):
+ """Reshape the 2-D input *matrix* to have a column count given by *cols*.
+
+ >>> matrix = [(0, 1), (2, 3), (4, 5)]
+ >>> cols = 3
+ >>> list(reshape(matrix, cols))
+ [(0, 1, 2), (3, 4, 5)]
+ """
+ return batched(chain.from_iterable(matrix), cols)
+
+
def matmul(m1, m2):
"""Multiply two matrices.
+
>>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
[(49, 80), (41, 60)]
@@ -921,13 +943,12 @@ def matmul(m1, m2):
def factor(n):
"""Yield the prime factors of n.
+
>>> list(factor(360))
[2, 2, 2, 3, 3, 5]
"""
for prime in sieve(math.isqrt(n) + 1):
- while True:
- if n % prime:
- break
+ while not n % prime:
yield prime
n //= prime
if n == 1:
@@ -975,3 +996,17 @@ def polynomial_derivative(coefficients):
n = len(coefficients)
powers = reversed(range(1, n))
return list(map(operator.mul, coefficients, powers))
+
+
+def totient(n):
+ """Return the count of natural numbers up to *n* that are coprime with *n*.
+
+ >>> totient(9)
+ 6
+ >>> totient(12)
+ 4
+ """
+ for p in unique_justseen(factor(n)):
+ n = n // p * (p - 1)
+
+ return n
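The new ``totient`` applies Euler's product formula, phi(n) = n * product(1 - 1/p) over the distinct prime factors p of n, using the integer-safe update ``n = n // p * (p - 1)``; for example:

    >>> from more_itertools import totient
    >>> totient(30)  # distinct primes 2, 3, 5: 30 * 1/2 * 2/3 * 4/5
    8
    >>> totient(1)   # no prime factors, so n is returned unchanged
    1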