author     Devtools Arcadia <arcadia-devtools@yandex-team.ru>  2022-02-07 18:08:42 +0300
committer  Devtools Arcadia <arcadia-devtools@mous.vla.yp-c.yandex.net>  2022-02-07 18:08:42 +0300
commit     1110808a9d39d4b808aef724c861a2e1a38d2a69 (patch)
tree       e26c9fed0de5d9873cce7e00bc214573dc2195b7 /contrib/python/more-itertools
download   ydb-1110808a9d39d4b808aef724c861a2e1a38d2a69.tar.gz

intermediate changes
ref: cde9a383711a11544ce7e107a78147fb96cc4029
Diffstat (limited to 'contrib/python/more-itertools')
-rw-r--r--  contrib/python/more-itertools/py2/.dist-info/METADATA | 460
-rw-r--r--  contrib/python/more-itertools/py2/.dist-info/top_level.txt | 1
-rw-r--r--  contrib/python/more-itertools/py2/LICENSE | 19
-rw-r--r--  contrib/python/more-itertools/py2/README.rst | 154
-rw-r--r--  contrib/python/more-itertools/py2/more_itertools/__init__.py | 2
-rw-r--r--  contrib/python/more-itertools/py2/more_itertools/more.py | 2333
-rw-r--r--  contrib/python/more-itertools/py2/more_itertools/recipes.py | 577
-rw-r--r--  contrib/python/more-itertools/py2/more_itertools/tests/__init__.py | 0
-rw-r--r--  contrib/python/more-itertools/py2/more_itertools/tests/test_more.py | 2313
-rw-r--r--  contrib/python/more-itertools/py2/more_itertools/tests/test_recipes.py | 616
-rw-r--r--  contrib/python/more-itertools/py2/patches/01-fix-tests.patch | 18
-rw-r--r--  contrib/python/more-itertools/py2/tests/ya.make | 18
-rw-r--r--  contrib/python/more-itertools/py2/ya.make | 34
-rw-r--r--  contrib/python/more-itertools/py3/.dist-info/METADATA | 521
-rw-r--r--  contrib/python/more-itertools/py3/.dist-info/top_level.txt | 1
-rw-r--r--  contrib/python/more-itertools/py3/LICENSE | 19
-rw-r--r--  contrib/python/more-itertools/py3/README.rst | 200
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/__init__.py | 4
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/__init__.pyi | 2
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/more.py | 4317
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/more.pyi | 664
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/py.typed | 0
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/recipes.py | 698
-rw-r--r--  contrib/python/more-itertools/py3/more_itertools/recipes.pyi | 112
-rw-r--r--  contrib/python/more-itertools/py3/patches/01-fix-tests.patch | 17
-rw-r--r--  contrib/python/more-itertools/py3/tests/__init__.py | 0
-rw-r--r--  contrib/python/more-itertools/py3/tests/test_more.py | 5033
-rw-r--r--  contrib/python/more-itertools/py3/tests/test_recipes.py | 765
-rw-r--r--  contrib/python/more-itertools/py3/tests/ya.make | 16
-rw-r--r--  contrib/python/more-itertools/py3/ya.make | 34
-rw-r--r--  contrib/python/more-itertools/ya.make | 20
31 files changed, 18968 insertions, 0 deletions
diff --git a/contrib/python/more-itertools/py2/.dist-info/METADATA b/contrib/python/more-itertools/py2/.dist-info/METADATA
new file mode 100644
index 0000000000..e712d08090
--- /dev/null
+++ b/contrib/python/more-itertools/py2/.dist-info/METADATA
@@ -0,0 +1,460 @@
+Metadata-Version: 2.1
+Name: more-itertools
+Version: 5.0.0
+Summary: More routines for operating on iterables, beyond itertools
+Home-page: https://github.com/erikrose/more-itertools
+Author: Erik Rose
+Author-email: erikrose@grinchcentral.com
+License: MIT
+Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development :: Libraries
+Requires-Dist: six (<2.0.0,>=1.0.0)
+
+==============
+More Itertools
+==============
+
+.. image:: https://coveralls.io/repos/github/erikrose/more-itertools/badge.svg?branch=master
+ :target: https://coveralls.io/github/erikrose/more-itertools?branch=master
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
+----
+
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+| | `padnone <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padnone>`_, |
+| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+| | `collate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collate>`_, |
+| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+| | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+| | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_, |
+| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+| | `accumulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.accumulate>`_, |
+| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/latest/api.html>`_.
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/erikrose/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
+
+
+Version History
+===============
+
+
+
+5.0.0
+-----
+
+* New itertools:
+ * split_into (thanks to rovyko; see the sketch at the end of this entry)
+ * unzip (thanks to bmintz)
+ * substrings (thanks to pylang)
+
+* Changes to existing itertools:
+ * ilen was optimized a bit (thanks to MSeifert04, achampion, and bmintz)
+ * first_true now returns ``None`` by default. This is the reason for the major version bump - see below. (thanks to sk and OJFord)
+
+* Other changes:
+ * Some code for old Python versions was removed (thanks to hugovk)
+ * Some documentation mistakes were corrected (thanks to belm0 and hugovk)
+ * Tests now run properly on 32-bit versions of Python (thanks to Millak)
+ * Newer versions of CPython and PyPy are now tested against
+
+The major version update is due to the change in the default return value of
+first_true. It's now ``None``.
+
+.. code-block:: python
+
+ >>> from more_itertools import first_true
+ >>> iterable = [0, '', False, [], ()] # All of these are falsy
+ >>> answer = first_true(iterable)
+ >>> print(answer)
+ None
+
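+As a quick illustration of the new ``split_into``, which splits an iterable
+into pieces whose lengths are given by a second iterable (a sketch; the
+values below are invented for the example):
+
+.. code-block:: python
+
+ >>> from more_itertools import split_into
+ >>> list(split_into([1, 2, 3, 4, 5, 6], [1, 2, 3]))
+ [[1], [2, 3], [4, 5, 6]]
+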
+4.3.0
+-----
+
+* New itertools:
+ * last (thanks to tmshn)
+ * replace (thanks to pylang)
+ * rlocate (thanks to jferard and pylang)
+
+* Improvements to existing itertools:
+ * locate can now search for multiple items
+
+* Other changes:
+ * The docs now include a nice table of tools (thanks MSeifert04)
+
+4.2.0
+-----
+
+* New itertools:
+ * map_reduce (thanks to pylang)
+ * prepend (from the `Python 3.7 docs <https://docs.python.org/3.7/library/itertools.html#itertools-recipes>`_)
+
+* Improvements to existing itertools:
+ * bucket now complies with PEP 479 (thanks to irmen)
+
+* Other changes:
+ * Python 3.7 is now supported (thanks to irmen)
+ * Python 3.3 is no longer supported
+ * The test suite no longer requires third-party modules to run
+ * The API docs now include links to source code
+
+4.1.0
+-----
+
+* New itertools:
+ * split_at (thanks to michael-celani)
+ * circular_shifts (thanks to hiqua)
+ * make_decorator - see the blog post `Yo, I heard you like decorators <https://sites.google.com/site/bbayles/index/decorator_factory>`_
+ for a tour (thanks to pylang)
+ * always_reversible (thanks to michael-celani)
+ * nth_combination (from the `Python 3.7 docs <https://docs.python.org/3.7/library/itertools.html#itertools-recipes>`_)
+
+* Improvements to existing itertools:
+ * seekable now has an ``elements`` method to return cached items (see the sketch below).
+ * The performance tradeoffs between roundrobin and
+ interleave_longest are now documented (thanks michael-celani,
+ pylang, and MSeifert04)
+
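+A minimal sketch of the ``elements`` method mentioned above (the values are
+invented for the example; ``elements`` returns the items the seekable has
+cached so far):
+
+.. code-block:: python
+
+ >>> from more_itertools import seekable
+ >>> s = seekable(iter('abcdefg'))
+ >>> next(s), next(s), next(s)
+ ('a', 'b', 'c')
+ >>> list(s.elements())  # the cached items
+ ['a', 'b', 'c']
+ >>> s.seek(0)  # rewind using the cache
+ >>> next(s)
+ 'a'
+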
+4.0.1
+-----
+
+* No code changes - this release fixes how the docs display on PyPI.
+
+4.0.0
+-----
+
+* New itertools:
+ * consecutive_groups (Based on the example in the `Python 2.4 docs <https://docs.python.org/release/2.4.4/lib/itertools-example.html>`_)
+ * seekable (If you're looking for how to "reset" an iterator,
+ you're in luck!)
+ * exactly_n (thanks to michael-celani)
+ * run_length.encode and run_length.decode
+ * difference
+
+* Improvements to existing itertools:
+ * The number of items between filler elements in intersperse can
+ now be specified (thanks to pylang); see the example at the end of this entry
+ * distinct_permutations and peekable got some minor
+ adjustments (thanks to MSeifert04)
+ * always_iterable now returns an iterator object. It also now
+ allows different types to be considered iterable (thanks to jaraco)
+ * bucket can now limit the keys it stores in memory
+ * one now allows for custom exceptions (thanks to kalekundert)
+
+* Other changes:
+ * A few typos were fixed (thanks to EdwardBetts)
+ * All tests can now be run with ``python setup.py test``
+
+The major version update is due to the change in the return value of always_iterable.
+It now always returns iterator objects:
+
+.. code-block:: python
+
+ >>> from more_itertools import always_iterable
+ # Non-iterable objects are wrapped with iter(tuple(obj))
+ >>> always_iterable(12345)
+ <tuple_iterator object at 0x7fb24c9488d0>
+ >>> list(always_iterable(12345))
+ [12345]
+ # Iterable objects are wrapped with iter()
+ >>> always_iterable([1, 2, 3, 4, 5])
+ <list_iterator object at 0x7fb24c948c50>
+
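+A short example of the configurable spacing in ``intersperse`` (the same
+behaviour is documented in its docstring; the values here are illustrative):
+
+.. code-block:: python
+
+ >>> from more_itertools import intersperse
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+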
+3.2.0
+-----
+
+* New itertools:
+ * lstrip, rstrip, and strip
+ (thanks to MSeifert04 and pylang)
+ * islice_extended
+* Improvements to existing itertools:
+ * Some bugs with slicing peekable-wrapped iterables were fixed
+
+3.1.0
+-----
+
+* New itertools:
+ * numeric_range (Thanks to BebeSparkelSparkel and MSeifert04)
+ * count_cycle (Thanks to BebeSparkelSparkel)
+ * locate (Thanks to pylang and MSeifert04)
+* Improvements to existing itertools:
+ * A few itertools are now slightly faster due to some function
+ optimizations. (Thanks to MSeifert04)
+* The docs have been substantially revised with installation notes,
+ categories for library functions, links, and more. (Thanks to pylang)
+
+
+3.0.0
+-----
+
+* Removed itertools:
+ * ``context`` has been removed due to a design flaw - see below for
+ replacement options. (thanks to NeilGirdhar)
+* Improvements to existing itertools:
+ * ``side_effect`` now supports ``before`` and ``after`` keyword
+ arguments; see the sketch at the end of this entry. (Thanks to yardsale8)
+* PyPy and PyPy3 are now supported.
+
+The major version change is due to the removal of the ``context`` function.
+Replace it with standard ``with`` statement context management:
+
+.. code-block:: python
+
+ # Don't use context() anymore
+ file_obj = StringIO()
+ consume(print(x, file=f) for f in context(file_obj) for x in u'123')
+
+ # Use a with statement instead
+ file_obj = StringIO()
+ with file_obj as f:
+ consume(print(x, file=f) for x in u'123')
+
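+A minimal sketch of the new ``before`` and ``after`` keywords on
+``side_effect`` (the callables and values below are invented for the
+example):
+
+.. code-block:: python
+
+ >>> from more_itertools import side_effect
+ >>> seen = []
+ >>> it = side_effect(seen.append, [1, 2, 3],
+ ...                  before=lambda: seen.append('start'),
+ ...                  after=lambda: seen.append('done'))
+ >>> list(it)
+ [1, 2, 3]
+ >>> seen
+ ['start', 1, 2, 3, 'done']
+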
+2.6.0
+-----
+
+* New itertools:
+ * ``adjacent`` and ``groupby_transform`` (Thanks to diazona)
+ * ``always_iterable`` (Thanks to jaraco)
+ * (Removed in 3.0.0) ``context`` (Thanks to yardsale8)
+ * ``divide`` (Thanks to mozbhearsum)
+* Improvements to existing itertools:
+ * ``ilen`` is now slightly faster. (Thanks to wbolster)
+ * ``peekable`` can now prepend items to an iterable. (Thanks to diazona)
+
+2.5.0
+-----
+
+* New itertools:
+ * ``distribute`` (Thanks to mozbhearsum and coady)
+ * ``sort_together`` (Thanks to clintval)
+ * ``stagger`` and ``zip_offset`` (Thanks to joshbode)
+ * ``padded``
+* Improvements to existing itertools:
+ * ``peekable`` now handles negative indexes and slices with negative
+ components properly.
+ * ``intersperse`` is now slightly faster. (Thanks to pylang)
+ * ``windowed`` now accepts a ``step`` keyword argument.
+ (Thanks to pylang)
+* Python 3.6 is now supported.
+
+2.4.1
+-----
+
+* Move docs 100% to readthedocs.io.
+
+2.4
+-----
+
+* New itertools:
+ * ``accumulate``, ``all_equal``, ``first_true``, ``partition``, and
+ ``tail`` from the itertools documentation.
+ * ``bucket`` (Thanks to Rosuav and cvrebert)
+ * ``collapse`` (Thanks to abarnet)
+ * ``interleave`` and ``interleave_longest`` (Thanks to abarnet)
+ * ``side_effect`` (Thanks to nvie)
+ * ``sliced`` (Thanks to j4mie and coady)
+ * ``split_before`` and ``split_after`` (Thanks to astronouth7303)
+ * ``spy`` (Thanks to themiurgo and mathieulongtin)
+* Improvements to existing itertools:
+ * ``chunked`` is now simpler and more friendly to garbage collection.
+ (Contributed by coady, with thanks to piskvorky)
+ * ``collate`` now delegates to ``heapq.merge`` when possible.
+ (Thanks to kmike and julianpistorius)
+ * ``peekable``-wrapped iterables are now indexable and sliceable.
+ Iterating through ``peekable``-wrapped iterables is also faster.
+ * ``one`` and ``unique_to_each`` have been simplified.
+ (Thanks to coady)
+
+
+2.3
+-----
+
+* Added ``one`` from ``jaraco.util.itertools``. (Thanks, jaraco!)
+* Added ``distinct_permutations`` and ``unique_to_each``. (Contributed by
+ bbayles)
+* Added ``windowed``. (Contributed by bbayles, with thanks to buchanae,
+ jaraco, and abarnert)
+* Simplified the implementation of ``chunked``. (Thanks, nvie!)
+* Python 3.5 is now supported. Python 2.6 is no longer supported.
+* Python 3 is now supported directly; there is no 2to3 step.
+
+2.2
+-----
+
+* Added ``iterate`` and ``with_iter``. (Thanks, abarnert!)
+
+2.1
+-----
+
+* Added (tested!) implementations of the recipes from the itertools
+ documentation. (Thanks, Chris Lonnen!)
+* Added ``ilen``. (Thanks for the inspiration, Matt Basta!)
+
+2.0
+-----
+
+* ``chunked`` now returns lists rather than tuples. After all, they're
+ homogeneous. This slightly backward-incompatible change is the reason for
+ the major version bump.
+* Added ``@consumer``.
+* Improved test machinery.
+
+1.1
+-----
+
+* Added ``first`` function.
+* Added Python 3 support.
+* Added a default arg to ``peekable.peek()``.
+* Noted how to easily test whether a peekable iterator is exhausted.
+* Rewrote documentation.
+
+1.0
+-----
+
+* Initial release, with ``collate``, ``peekable``, and ``chunked``. Could
+ really use better docs.
+
diff --git a/contrib/python/more-itertools/py2/.dist-info/top_level.txt b/contrib/python/more-itertools/py2/.dist-info/top_level.txt
new file mode 100644
index 0000000000..a5035befb3
--- /dev/null
+++ b/contrib/python/more-itertools/py2/.dist-info/top_level.txt
@@ -0,0 +1 @@
+more_itertools
diff --git a/contrib/python/more-itertools/py2/LICENSE b/contrib/python/more-itertools/py2/LICENSE
new file mode 100644
index 0000000000..0a523bece3
--- /dev/null
+++ b/contrib/python/more-itertools/py2/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/contrib/python/more-itertools/py2/README.rst b/contrib/python/more-itertools/py2/README.rst
new file mode 100644
index 0000000000..d918eb684f
--- /dev/null
+++ b/contrib/python/more-itertools/py2/README.rst
@@ -0,0 +1,154 @@
+==============
+More Itertools
+==============
+
+.. image:: https://coveralls.io/repos/github/erikrose/more-itertools/badge.svg?branch=master
+ :target: https://coveralls.io/github/erikrose/more-itertools?branch=master
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
+----
+
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+| | `padnone <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padnone>`_, |
+| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+| | `collate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collate>`_, |
+| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+| | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+| | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_, |
+| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+| | `accumulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.accumulate>`_, |
+| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/latest/api.html>`_.
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/erikrose/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
diff --git a/contrib/python/more-itertools/py2/more_itertools/__init__.py b/contrib/python/more-itertools/py2/more_itertools/__init__.py
new file mode 100644
index 0000000000..bba462c3db
--- /dev/null
+++ b/contrib/python/more-itertools/py2/more_itertools/__init__.py
@@ -0,0 +1,2 @@
+from more_itertools.more import * # noqa
+from more_itertools.recipes import * # noqa
diff --git a/contrib/python/more-itertools/py2/more_itertools/more.py b/contrib/python/more-itertools/py2/more_itertools/more.py
new file mode 100644
index 0000000000..bd32a26130
--- /dev/null
+++ b/contrib/python/more-itertools/py2/more_itertools/more.py
@@ -0,0 +1,2333 @@
+from __future__ import print_function
+
+from collections import Counter, defaultdict, deque
+from functools import partial, wraps
+from heapq import merge
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ takewhile,
+ tee
+)
+from operator import itemgetter, lt, gt, sub
+from sys import maxsize, version_info
+try:
+ from collections.abc import Sequence
+except ImportError:
+ from collections import Sequence
+
+from six import binary_type, string_types, text_type
+from six.moves import filter, map, range, zip, zip_longest
+
+from .recipes import consume, flatten, take
+
+__all__ = [
+ 'adjacent',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'chunked',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'consecutive_groups',
+ 'consumer',
+ 'count_cycle',
+ 'difference',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'exactly_n',
+ 'first',
+ 'groupby_transform',
+ 'ilen',
+ 'interleave_longest',
+ 'interleave',
+ 'intersperse',
+ 'islice_extended',
+ 'iterate',
+ 'last',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_reduce',
+ 'numeric_range',
+ 'one',
+ 'padded',
+ 'peekable',
+ 'replace',
+ 'rlocate',
+ 'rstrip',
+ 'run_length',
+ 'seekable',
+ 'SequenceView',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_at',
+ 'split_after',
+ 'split_before',
+ 'split_into',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'substrings',
+ 'unique_to_each',
+ 'unzip',
+ 'windowed',
+ 'with_iter',
+ 'zip_offset',
+]
+
+_marker = object()
+
+
+def chunked(iterable, n):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+ If the length of *iterable* is not evenly divisible by *n*, the last
+ returned list will be shorter:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ :func:`chunked` is useful for splitting up a computation on a large number
+ of keys into batches, to be pickled and sent off to worker processes. One
+ example is operations on rows in MySQL, which does not implement
+ server-side cursors properly and would otherwise load the entire dataset
+ into RAM on the client.
+
+ """
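+ # iter() with a sentinel: keep calling take(n, ...) on the same iterator
+ # until it returns the empty list, i.e. until the source is exhausted.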
+ return iter(partial(take, n, iter(iterable)), [])
+
+
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration:
+ # I'm on the edge about raising ValueError instead of StopIteration. At
+ # the moment, ValueError wins, because the caller could conceivably
+ # want to do something different with flow control when I raise the
+ # exception, and it's weird to explicitly catch StopIteration.
+ if default is _marker:
+ raise ValueError('first() was called on an empty iterable, and no '
+ 'default value was provided.')
+ return default
+
+
+def last(iterable, default=_marker):
+ """Return the last item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> last([0, 1, 2, 3])
+ 3
+ >>> last([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ try:
+ try:
+ # Try to access the last item directly
+ return iterable[-1]
+ except (TypeError, AttributeError, KeyError):
+ # If not slice-able, iterate entirely using length-1 deque
+ return deque(iterable, maxlen=1)[0]
+ except IndexError: # If the iterable was empty
+ if default is _marker:
+ raise ValueError('last() was called on an empty iterable, and no '
+ 'default value was provided.')
+ return default
+
+
+class peekable(object):
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+ :func:`next`, index 1 is the item after that, and so on.
+ The values up to the given index will be cached:
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+ >>> if not p: # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def __nonzero__(self):
+ # For Python 2 compatibility
+ return self.__bool__()
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
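+ # deque.extendleft reverses its input, so reverse *items* first to keep
+ # them in the given order at the front of the cache.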
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ next = __next__ # For Python 2 compatibility
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
+
+def _collate(*iterables, **kwargs):
+ """Helper for ``collate()``, called when the user is using the ``reverse``
+ or ``key`` keyword arguments on Python versions below 3.5.
+
+ """
+ key = kwargs.pop('key', lambda a: a)
+ reverse = kwargs.pop('reverse', False)
+
+ min_or_max = partial(max if reverse else min, key=itemgetter(0))
+ peekables = [peekable(it) for it in iterables]
+ peekables = [p for p in peekables if p] # Kill empties.
+ while peekables:
+ _, p = min_or_max((key(p.peek()), p) for p in peekables)
+ yield next(p)
+ peekables = [x for x in peekables if x]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+ :func:`collate` to, for example, perform an n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 2.7, this function delegates to :func:`heapq.merge` if neither
+ of the keyword arguments are specified. On Python 3.5+, this function
+ is an alias for :func:`heapq.merge`.
+
+ """
+ if not kwargs:
+ return merge(*iterables)
+
+ return _collate(*iterables, **kwargs)
+
+
+# If using Python version 3.5 or greater, heapq.merge() will be faster than
+# collate - use that instead.
+if version_info >= (3, 5, 0):
+ _collate_docstring = collate.__doc__
+ collate = partial(merge)
+ collate.__doc__ = _collate_docstring
+
+
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # This approach was selected because benchmarks showed it's likely the
+ # fastest of the known implementations at the time of writing.
+ # See GitHub tracker: #236, #230.
+ counter = count()
+ deque(zip(iterable, counter), maxlen=0)
+ return next(counter)
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ for item in iterable:
+ yield item
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too few items in iterable (expected 1)
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too many items in iterable (expected 1)
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. If there is more than one, both items will be discarded.
+ See :func:`spy` or :func:`peekable` to check iterable contents less
+ destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ value = next(it)
+ except StopIteration:
+ raise too_short or ValueError('too few items in iterable (expected 1)')
+
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ raise too_long or ValueError('too many items in iterable (expected 1)')
+
+ return value
+
+
+def distinct_permutations(iterable):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ """
+ def perm_unique_helper(item_counts, perm, i):
+ """Internal helper function
+
+ :arg item_counts: Stores the unique items in ``iterable`` and how many
+ times they are repeated
+ :arg perm: The permutation that is being built for output
+ :arg i: The index of the permutation being modified
+
+ The output permutations are built up recursively; the distinct items
+ are placed until their repetitions are exhausted.
+ """
+ if i < 0:
+ yield tuple(perm)
+ else:
+ for item in item_counts:
+ if item_counts[item] <= 0:
+ continue
+ perm[i] = item
+ item_counts[item] -= 1
+ for x in perm_unique_helper(item_counts, perm, i - 1):
+ yield x
+ item_counts[item] += 1
+
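+    # Count how many times each distinct item occurs up front; the helper
+    # places items by decrementing these counts, so duplicate permutations are
+    # never generated in the first place.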
+ item_counts = Counter(iterable)
+ length = sum(item_counts.values())
+
+ return perm_unique_helper(item_counts, [None] * length, length - 1)
+
+
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values::
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
+ it = iter(seq)
+ window = deque([], n)
+ append = window.append
+
+ # Initial deque fill
+ for _ in range(n):
+ append(next(it, fillvalue))
+ yield tuple(window)
+
+ # Appending new items to the right causes old items to fall off the left
+ i = 0
+ for item in it:
+ append(item)
+ i = (i + 1) % step
+ if i % step == 0:
+ yield tuple(window)
+
+ # If there are items from the iterable in the window, pad with the given
+ # value and emit them.
+ if (i % step) and (step - i < n):
+ for _ in range(step - i):
+ append(fillvalue)
+ yield tuple(window)
+
+
+def substrings(iterable, join_func=None):
+ """Yield all of the substrings of *iterable*.
+
+ >>> [''.join(s) for s in substrings('more')]
+ ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more']
+
+ Note that non-string iterables can also be subdivided.
+
+ >>> list(substrings([0, 1, 2]))
+ [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)]
+
+ """
+ # The length-1 substrings
+ seq = []
+ for item in iter(iterable):
+ seq.append(item)
+ yield (item,)
+ seq = tuple(seq)
+ item_count = len(seq)
+
+ # And the rest
+ for n in range(2, item_count + 1):
+ for i in range(item_count - n + 1):
+ yield seq[i:i + n]
+
+
+class bucket(object):
+ """Wrap *iterable* and return an object that buckets it iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0])
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
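+            # Put the probed item back at the front of the cache so it is not
+            # lost to later iteration over this bucket.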
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
+ return head, chain(head, it)
+
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def collapse(iterable, base_type=None, levels=None):
+ """Flatten an iterable with multiple levels of nesting (e.g., a list of
+ lists of tuples) into non-iterable types.
+
+ >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
+ >>> list(collapse(iterable))
+ [1, 2, 3, 4, 5, 6]
+
+ String types are not considered iterable and will not be collapsed.
+ To avoid collapsing other types, specify *base_type*:
+
+ >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
+ >>> list(collapse(iterable, base_type=tuple))
+ ['ab', ('cd', 'ef'), 'gh', 'ij']
+
+ Specify *levels* to stop flattening after a certain level:
+
+ >>> iterable = [('a', ['b']), ('c', ['d'])]
+ >>> list(collapse(iterable)) # Fully flattened
+ ['a', 'b', 'c', 'd']
+ >>> list(collapse(iterable, levels=1)) # Only one level flattened
+ ['a', ['b'], 'c', ['d']]
+
+ """
+ def walk(node, level):
+ if (
+ ((levels is not None) and (level > levels)) or
+ isinstance(node, string_types) or
+ ((base_type is not None) and isinstance(node, base_type))
+ ):
+ yield node
+ return
+
+ try:
+ tree = iter(node)
+ except TypeError:
+ yield node
+ return
+ else:
+ for child in tree:
+ for x in walk(child, level + 1):
+ yield x
+
+ for x in walk(iterable, 0):
+ yield x
+
+
+def side_effect(func, iterable, chunk_size=None, before=None, after=None):
+ """Invoke *func* on each item in *iterable* (or on each *chunk_size* group
+ of items) before yielding the item.
+
+ `func` must be a function that takes a single argument. Its return value
+ will be discarded.
+
+ *before* and *after* are optional functions that take no arguments. They
+ will be executed before iteration starts and after it ends, respectively.
+
+ `side_effect` can be used for logging, updating progress bars, or anything
+ that is not functionally "pure."
+
+ Emitting a status message:
+
+ >>> from more_itertools import consume
+ >>> func = lambda item: print('Received {}'.format(item))
+ >>> consume(side_effect(func, range(2)))
+ Received 0
+ Received 1
+
+ Operating on chunks of items:
+
+ >>> pair_sums = []
+ >>> func = lambda chunk: pair_sums.append(sum(chunk))
+ >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2))
+ [0, 1, 2, 3, 4, 5]
+ >>> list(pair_sums)
+ [1, 5, 9]
+
+ Writing to a file-like object:
+
+ >>> from io import StringIO
+ >>> from more_itertools import consume
+ >>> f = StringIO()
+ >>> func = lambda x: print(x, file=f)
+ >>> before = lambda: print(u'HEADER', file=f)
+ >>> after = f.close
+ >>> it = [u'a', u'b', u'c']
+ >>> consume(side_effect(func, it, before=before, after=after))
+ >>> f.closed
+ True
+
+ """
+ try:
+ if before is not None:
+ before()
+
+ if chunk_size is None:
+ for item in iterable:
+ func(item)
+ yield item
+ else:
+ for chunk in chunked(iterable, chunk_size):
+ func(chunk)
+ for item in chunk:
+ yield item
+ finally:
+ if after is not None:
+ after()
+
+
+def sliced(seq, n):
+ """Yield slices of length *n* from the sequence *seq*.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
+ [(1, 2, 3), (4, 5, 6)]
+
+ If the length of the sequence is not divisible by the requested slice
+ length, the last slice will be shorter.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
+ [(1, 2, 3), (4, 5, 6), (7, 8)]
+
+ This function will only work for iterables that support slicing.
+ For non-sliceable iterables, see :func:`chunked`.
+
+ """
+ return takewhile(bool, (seq[i: i + n] for i in count(0, n)))
+
+
+def split_at(iterable, pred):
+ """Yield lists of items from *iterable*, where each list is delimited by
+ an item where callable *pred* returns ``True``. The lists do not include
+ the delimiting items.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b'))
+ [['a'], ['c', 'd', 'c'], ['a']]
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1))
+ [[0], [2], [4], [6], [8], []]
+ """
+ buf = []
+ for item in iterable:
+ if pred(item):
+ yield buf
+ buf = []
+ else:
+ buf.append(item)
+ yield buf
+
+
+def split_before(iterable, pred):
+ """Yield lists of items from *iterable*, where each list starts with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_before('OneTwo', lambda s: s.isupper()))
+ [['O', 'n', 'e'], ['T', 'w', 'o']]
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+
+ """
+ buf = []
+ for item in iterable:
+ if pred(item) and buf:
+ yield buf
+ buf = []
+ buf.append(item)
+ yield buf
+
+
+def split_after(iterable, pred):
+ """Yield lists of items from *iterable*, where each list ends with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_after('one1two2', lambda s: s.isdigit()))
+ [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0))
+ [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
+
+ """
+ buf = []
+ for item in iterable:
+ buf.append(item)
+ if pred(item) and buf:
+ yield buf
+ buf = []
+ if buf:
+ yield buf
+
+
+def split_into(iterable, sizes):
+ """Yield a list of sequential items from *iterable* of length 'n' for each
+ integer 'n' in *sizes*.
+
+ >>> list(split_into([1,2,3,4,5,6], [1,2,3]))
+ [[1], [2, 3], [4, 5, 6]]
+
+ If the sum of *sizes* is smaller than the length of *iterable*, then the
+ remaining items of *iterable* will not be returned.
+
+ >>> list(split_into([1,2,3,4,5,6], [2,3]))
+ [[1, 2], [3, 4, 5]]
+
+ If the sum of *sizes* is larger than the length of *iterable*, fewer items
+ will be returned in the iteration that overruns *iterable* and further
+ lists will be empty:
+
+ >>> list(split_into([1,2,3,4], [1,2,3,4]))
+ [[1], [2, 3], [4], []]
+
+ When a ``None`` object is encountered in *sizes*, the returned list will
+    contain items up to the end of *iterable*, the same way that
+    :func:`itertools.islice` does:
+
+ >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None]))
+ [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]
+
+ :func:`split_into` can be useful for grouping a series of items where the
+    sizes of the groups are not uniform. An example would be a table row in
+    which multiple columns represent elements of the same feature (e.g. a
+    point represented by x, y, z), but the grouping is not the same for all
+    columns.
+ """
+ # convert the iterable argument into an iterator so its contents can
+ # be consumed by islice in case it is a generator
+ it = iter(iterable)
+
+ for size in sizes:
+ if size is None:
+ yield list(it)
+ return
+ else:
+ yield list(islice(it, size))
+
+
+def padded(iterable, fillvalue=None, n=None, next_multiple=False):
+ """Yield the elements from *iterable*, followed by *fillvalue*, such that
+ at least *n* items are emitted.
+
+ >>> list(padded([1, 2, 3], '?', 5))
+ [1, 2, 3, '?', '?']
+
+ If *next_multiple* is ``True``, *fillvalue* will be emitted until the
+ number of items emitted is a multiple of *n*::
+
+ >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
+ [1, 2, 3, 4, None, None]
+
+ If *n* is ``None``, *fillvalue* will be emitted indefinitely.
+
+ """
+ it = iter(iterable)
+ if n is None:
+ for item in chain(it, repeat(fillvalue)):
+ yield item
+ elif n < 1:
+ raise ValueError('n must be at least 1')
+ else:
+ item_count = 0
+ for item in it:
+ yield item
+ item_count += 1
+
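+        # With next_multiple, pad out to the next multiple of n; otherwise pad
+        # only up to n items total (a negative remainder makes range() empty).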
+ remaining = (n - item_count) % n if next_multiple else n - item_count
+ for _ in range(remaining):
+ yield fillvalue
+
+
+def distribute(n, iterable):
+ """Distribute the items from *iterable* among *n* smaller iterables.
+
+ >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 3, 5]
+ >>> list(group_2)
+ [2, 4, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 4, 7], [2, 5], [3, 6]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = distribute(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function uses :func:`itertools.tee` and may require significant
+    storage. If you need the order of items in the smaller iterables to match
+    the original iterable, see :func:`divide`.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
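+    # Give each of the n children every n-th item of an independent copy of
+    # the iterator, each starting at a different offset.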
+ children = tee(iterable, n)
+ return [islice(it, index, None, n) for index, it in enumerate(children)]
+
+
+def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
+ """Yield tuples whose elements are offset from *iterable*.
+ The amount by which the `i`-th item in each tuple is offset is given by
+ the `i`-th item in *offsets*.
+
+ >>> list(stagger([0, 1, 2, 3]))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ >>> list(stagger(range(8), offsets=(0, 2, 4)))
+ [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]
+
+ By default, the sequence will end when the final element of a tuple is the
+ last item in the iterable. To continue until the first element of a tuple
+ is the last item in the iterable, set *longest* to ``True``::
+
+ >>> list(stagger([0, 1, 2, 3], longest=True))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ children = tee(iterable, len(offsets))
+
+ return zip_offset(
+ *children, offsets=offsets, longest=longest, fillvalue=fillvalue
+ )
+
+
+def zip_offset(*iterables, **kwargs):
+ """``zip`` the input *iterables* together, but offset the `i`-th iterable
+ by the `i`-th item in *offsets*.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
+
+ This can be used as a lightweight alternative to SciPy or pandas to analyze
+ data sets in which some series have a lead or lag relationship.
+
+ By default, the sequence will end when the shortest iterable is exhausted.
+ To continue until the longest iterable is exhausted, set *longest* to
+ ``True``.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ offsets = kwargs['offsets']
+ longest = kwargs.get('longest', False)
+ fillvalue = kwargs.get('fillvalue', None)
+
+ if len(iterables) != len(offsets):
+ raise ValueError("Number of iterables and offsets didn't match")
+
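+    # A negative offset delays an iterable by prepending fill values; a
+    # positive offset advances it by skipping that many leading items.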
+ staggered = []
+ for it, n in zip(iterables, offsets):
+ if n < 0:
+ staggered.append(chain(repeat(fillvalue, -n), it))
+ elif n > 0:
+ staggered.append(islice(it, n, None))
+ else:
+ staggered.append(it)
+
+ if longest:
+ return zip_longest(*staggered, fillvalue=fillvalue)
+
+ return zip(*staggered)
+
+
+def sort_together(iterables, key_list=(0,), reverse=False):
+ """Return the input iterables sorted together, with *key_list* as the
+ priority for sorting. All iterables are trimmed to the length of the
+ shortest one.
+
+ This can be used like the sorting function in a spreadsheet. If each
+ iterable represents a column of data, the key list determines which
+ columns are used for sorting.
+
+ By default, all iterables are sorted using the ``0``-th iterable::
+
+ >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
+ >>> sort_together(iterables)
+ [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]
+
+ Set a different key list to sort according to another iterable.
+ Specifying multiple keys dictates how ties are broken::
+
+ >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
+ >>> sort_together(iterables, key_list=(1, 2))
+ [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]
+
+ Set *reverse* to ``True`` to sort in descending order.
+
+ >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
+ [(3, 2, 1), ('a', 'b', 'c')]
+
+ """
+ return list(zip(*sorted(zip(*iterables),
+ key=itemgetter(*key_list),
+ reverse=reverse)))
+
+
+def unzip(iterable):
+ """The inverse of :func:`zip`, this function disaggregates the elements
+ of the zipped *iterable*.
+
+ The ``i``-th iterable contains the ``i``-th element from each element
+    of the zipped iterable. The first element is used to determine the
+ length of the remaining elements.
+
+ >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> letters, numbers = unzip(iterable)
+ >>> list(letters)
+ ['a', 'b', 'c', 'd']
+ >>> list(numbers)
+ [1, 2, 3, 4]
+
+ This is similar to using ``zip(*iterable)``, but it avoids reading
+ *iterable* into memory. Note, however, that this function uses
+ :func:`itertools.tee` and thus may require significant storage.
+
+ """
+ head, iterable = spy(iter(iterable))
+ if not head:
+ # empty iterable, e.g. zip([], [], [])
+ return ()
+ # spy returns a one-length iterable as head
+ head = head[0]
+ iterables = tee(iterable, len(head))
+
+ def itemgetter(i):
+ def getter(obj):
+ try:
+ return obj[i]
+ except IndexError:
+ # basically if we have an iterable like
+ # iter([(1, 2, 3), (4, 5), (6,)])
+ # the second unzipped iterable would fail at the third tuple
+ # since it would try to access tup[1]
+ # same with the third unzipped iterable and the second tuple
+ # to support these "improperly zipped" iterables,
+ # we create a custom itemgetter
+ # which just stops the unzipped iterables
+ # at first length mismatch
+ raise StopIteration
+ return getter
+
+ return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables))
+
+
+def divide(n, iterable):
+ """Divide the elements from *iterable* into *n* parts, maintaining
+ order.
+
+ >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 2, 3]
+ >>> list(group_2)
+ [4, 5, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 2, 3], [4, 5], [6, 7]]
+
+ If the length of the iterable is smaller than n, then the last returned
+ iterables will be empty:
+
+ >>> children = divide(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function will exhaust the iterable before returning and may require
+ significant storage. If order is not important, see :func:`distribute`,
+ which does not first pull the iterable into memory.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ seq = tuple(iterable)
+ q, r = divmod(len(seq), n)
+
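+    # divmod splits the items into n groups of q, with r left over; the first
+    # r groups each receive one of the extra items.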
+ ret = []
+ for i in range(n):
+ start = (i * q) + (i if i < r else r)
+ stop = ((i + 1) * q) + (i + 1 if i + 1 < r else r)
+ ret.append(iter(seq[start:stop]))
+
+ return ret
+
+
+def always_iterable(obj, base_type=(text_type, binary_type)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
+
+
+def adjacent(predicate, iterable, distance=1):
+ """Return an iterable over `(bool, item)` tuples where the `item` is
+ drawn from *iterable* and the `bool` indicates whether
+ that item satisfies the *predicate* or is adjacent to an item that does.
+
+ For example, to find whether items are adjacent to a ``3``::
+
+ >>> list(adjacent(lambda x: x == 3, range(6)))
+ [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
+
+ Set *distance* to change what counts as adjacent. For example, to find
+ whether items are two places away from a ``3``:
+
+ >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
+ [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]
+
+ This is useful for contextualizing the results of a search function.
+ For example, a code comparison tool might want to identify lines that
+ have changed, but also surrounding lines to give the viewer of the diff
+ context.
+
+ The predicate function will only be called once for each item in the
+ iterable.
+
+ See also :func:`groupby_transform`, which can be used with this function
+ to group ranges of items with the same `bool` value.
+
+ """
+ # Allow distance=0 mainly for testing that it reproduces results with map()
+ if distance < 0:
+ raise ValueError('distance must be at least 0')
+
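+    # Pad the stream of predicate results with False at both ends, then slide
+    # a window of width 2 * distance + 1 over it: any(window) is True exactly
+    # when an item within *distance* positions satisfied the predicate.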
+ i1, i2 = tee(iterable)
+ padding = [False] * distance
+ selected = chain(padding, map(predicate, i1), padding)
+ adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1))
+ return zip(adjacent_to_selected, i2)
+
+
+def groupby_transform(iterable, keyfunc=None, valuefunc=None):
+ """An extension of :func:`itertools.groupby` that transforms the values of
+ *iterable* after grouping them.
+ *keyfunc* is a function used to compute a grouping key for each item.
+ *valuefunc* is a function for transforming the items after grouping.
+
+ >>> iterable = 'AaaABbBCcA'
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: x.lower()
+ >>> grouper = groupby_transform(iterable, keyfunc, valuefunc)
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [('A', 'aaaa'), ('B', 'bbb'), ('C', 'cc'), ('A', 'a')]
+
+ *keyfunc* and *valuefunc* default to identity functions if they are not
+ specified.
+
+ :func:`groupby_transform` is useful when grouping elements of an iterable
+ using a separate iterable as the key. To do this, :func:`zip` the iterables
+ and pass a *keyfunc* that extracts the first element and a *valuefunc*
+ that extracts the second element::
+
+ >>> from operator import itemgetter
+ >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
+ >>> values = 'abcdefghi'
+ >>> iterable = zip(keys, values)
+ >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
+
+ Note that the order of items in the iterable is significant.
+ Only adjacent items are grouped together, so if you don't want any
+ duplicate groups, you should sort the iterable by the key function.
+
+ """
+ valuefunc = (lambda x: x) if valuefunc is None else valuefunc
+ return ((k, map(valuefunc, g)) for k, g in groupby(iterable, keyfunc))
+
+
+def numeric_range(*args):
+ """An extension of the built-in ``range()`` function whose arguments can
+ be any orderable numeric type.
+
+ With only *stop* specified, *start* defaults to ``0`` and *step*
+ defaults to ``1``. The output items will match the type of *stop*:
+
+ >>> list(numeric_range(3.5))
+ [0.0, 1.0, 2.0, 3.0]
+
+ With only *start* and *stop* specified, *step* defaults to ``1``. The
+ output items will match the type of *start*:
+
+ >>> from decimal import Decimal
+ >>> start = Decimal('2.1')
+ >>> stop = Decimal('5.1')
+ >>> list(numeric_range(start, stop))
+ [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]
+
+ With *start*, *stop*, and *step* specified the output items will match
+ the type of ``start + step``:
+
+ >>> from fractions import Fraction
+ >>> start = Fraction(1, 2) # Start at 1/2
+ >>> stop = Fraction(5, 2) # End at 5/2
+ >>> step = Fraction(1, 2) # Count by 1/2
+ >>> list(numeric_range(start, stop, step))
+ [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]
+
+ If *step* is zero, ``ValueError`` is raised. Negative steps are supported:
+
+ >>> list(numeric_range(3, -1, -1.0))
+ [3.0, 2.0, 1.0, 0.0]
+
+ Be aware of the limitations of floating point numbers; the representation
+ of the yielded numbers may be surprising.
+
+ """
+ argc = len(args)
+ if argc == 1:
+ stop, = args
+ start = type(stop)(0)
+ step = 1
+ elif argc == 2:
+ start, stop = args
+ step = 1
+ elif argc == 3:
+ start, stop, step = args
+ else:
+ err_msg = 'numeric_range takes at most 3 arguments, got {}'
+ raise TypeError(err_msg.format(argc))
+
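+    # Generate start, start + step, start + 2 * step, ... and cut the stream
+    # off once it reaches stop: partial(gt, stop)(x) is stop > x, so values
+    # are kept while they are still on the near side of stop.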
+ values = (start + (step * n) for n in count())
+ if step > 0:
+ return takewhile(partial(gt, stop), values)
+ elif step < 0:
+ return takewhile(partial(lt, stop), values)
+ else:
+ raise ValueError('numeric_range arg 3 must not be zero')
+
+
+def count_cycle(iterable, n=None):
+ """Cycle through the items from *iterable* up to *n* times, yielding
+ the number of completed cycles along with each item. If *n* is omitted the
+ process repeats indefinitely.
+
+ >>> list(count_cycle('AB', 3))
+ [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]
+
+ """
+ iterable = tuple(iterable)
+ if not iterable:
+ return iter(())
+ counter = count() if n is None else range(n)
+ return ((i, item) for i in counter for item in iterable)
+
+
+def locate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
+ [1, 2, 4]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item.
+
+ >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
+ [1, 3]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(locate(iterable, pred=pred, window_size=3))
+ [1, 5, 9]
+
+ Use with :func:`seekable` to find indexes and then retrieve the associated
+ items:
+
+ >>> from itertools import count
+ >>> from more_itertools import seekable
+ >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
+ >>> it = seekable(source)
+ >>> pred = lambda x: x > 100
+ >>> indexes = locate(it, pred=pred)
+ >>> i = next(indexes)
+ >>> it.seek(i)
+ >>> next(it)
+ 106
+
+ """
+ if window_size is None:
+ return compress(count(), map(pred, iterable))
+
+ if window_size < 1:
+ raise ValueError('window size must be at least 1')
+
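+    # Call pred with each window's items unpacked as arguments; compress()
+    # keeps the starting index of every window for which pred is truthy.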
+ it = windowed(iterable, window_size, fillvalue=_marker)
+ return compress(count(), starmap(pred, it))
+
+
+def lstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the beginning
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the start of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(lstrip(iterable, pred))
+ [1, 2, None, 3, False, None]
+
+    This function is analogous to :func:`str.lstrip`, and is essentially
+    a wrapper for :func:`itertools.dropwhile`.
+
+ """
+ return dropwhile(pred, iterable)
+
+
+def rstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the end
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the end of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(rstrip(iterable, pred))
+ [None, False, None, 1, 2, None, 3]
+
+ This function is analogous to :func:`str.rstrip`.
+
+ """
+ cache = []
+ cache_append = cache.append
+ for x in iterable:
+ if pred(x):
+ cache_append(x)
+ else:
+ for y in cache:
+ yield y
+ del cache[:]
+ yield x
+
+
+def strip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the
+ beginning and end for which *pred* returns ``True``.
+
+ For example, to remove a set of items from both ends of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(strip(iterable, pred))
+ [1, 2, None, 3]
+
+ This function is analogous to :func:`str.strip`.
+
+ """
+ return rstrip(lstrip(iterable, pred), pred)
+
+
+def islice_extended(iterable, *args):
+ """An extension of :func:`itertools.islice` that supports negative values
+ for *stop*, *start*, and *step*.
+
+ >>> iterable = iter('abcdefgh')
+ >>> list(islice_extended(iterable, -4, -1))
+ ['e', 'f', 'g']
+
+ Slices with negative values require some caching of *iterable*, but this
+ function takes care to minimize the amount of memory required.
+
+ For example, you can use a negative step with an infinite iterator:
+
+ >>> from itertools import count
+ >>> list(islice_extended(count(), 110, 99, -2))
+ [110, 108, 106, 104, 102, 100]
+
+ """
+ s = slice(*args)
+ start = s.start
+ stop = s.stop
+ if s.step == 0:
+ raise ValueError('step argument must be a non-zero integer or None.')
+ step = s.step or 1
+
+ it = iter(iterable)
+
+ if step > 0:
+ start = 0 if (start is None) else start
+
+ if (start < 0):
+ # Consume all but the last -start items
+ cache = deque(enumerate(it, 1), maxlen=-start)
+ len_iter = cache[-1][0] if cache else 0
+
+ # Adjust start to be positive
+ i = max(len_iter + start, 0)
+
+ # Adjust stop to be positive
+ if stop is None:
+ j = len_iter
+ elif stop >= 0:
+ j = min(stop, len_iter)
+ else:
+ j = max(len_iter + stop, 0)
+
+ # Slice the cache
+ n = j - i
+ if n <= 0:
+ return
+
+ for index, item in islice(cache, 0, n, step):
+ yield item
+ elif (stop is not None) and (stop < 0):
+ # Advance to the start position
+ next(islice(it, start, start), None)
+
+ # When stop is negative, we have to carry -stop items while
+ # iterating
+ cache = deque(islice(it, -stop), maxlen=-stop)
+
+ for index, item in enumerate(it):
+ cached_item = cache.popleft()
+ if index % step == 0:
+ yield cached_item
+ cache.append(item)
+ else:
+ # When both start and stop are positive we have the normal case
+ for item in islice(it, start, stop, step):
+ yield item
+ else:
+ start = -1 if (start is None) else start
+
+ if (stop is not None) and (stop < 0):
+ # Consume all but the last items
+ n = -stop - 1
+ cache = deque(enumerate(it, 1), maxlen=n)
+ len_iter = cache[-1][0] if cache else 0
+
+ # If start and stop are both negative they are comparable and
+ # we can just slice. Otherwise we can adjust start to be negative
+ # and then slice.
+ if start < 0:
+ i, j = start, stop
+ else:
+ i, j = min(start - len_iter, -1), None
+
+ for index, item in list(cache)[i:j:step]:
+ yield item
+ else:
+ # Advance to the stop position
+ if stop is not None:
+ m = stop + 1
+ next(islice(it, m, m), None)
+
+ # stop is positive, so if start is negative they are not comparable
+ # and we need the rest of the items.
+ if start < 0:
+ i = start
+ n = None
+ # stop is None and start is positive, so we just need items up to
+ # the start index.
+ elif stop is None:
+ i = None
+ n = start + 1
+ # Both stop and start are positive, so they are comparable.
+ else:
+ i = None
+ n = start - stop
+ if n <= 0:
+ return
+
+ cache = list(islice(it, n))
+
+ for item in cache[i::step]:
+ yield item
+
+
+def always_reversible(iterable):
+ """An extension of :func:`reversed` that supports all iterables, not
+ just those which implement the ``Reversible`` or ``Sequence`` protocols.
+
+ >>> print(*always_reversible(x for x in range(3)))
+ 2 1 0
+
+ If the iterable is already reversible, this function returns the
+ result of :func:`reversed()`. If the iterable is not reversible,
+ this function will cache the remaining items in the iterable and
+ yield them in reverse order, which may require significant storage.
+ """
+ try:
+ return reversed(iterable)
+ except TypeError:
+ return reversed(list(iterable))
+
+
+def consecutive_groups(iterable, ordering=lambda x: x):
+ """Yield groups of consecutive items using :func:`itertools.groupby`.
+ The *ordering* function determines whether two items are adjacent by
+ returning their position.
+
+ By default, the ordering function is the identity function. This is
+ suitable for finding runs of numbers:
+
+ >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
+ >>> for group in consecutive_groups(iterable):
+ ... print(list(group))
+ [1]
+ [10, 11, 12]
+ [20]
+ [30, 31, 32, 33]
+ [40]
+
+ For finding runs of adjacent letters, try using the :meth:`index` method
+ of a string of letters:
+
+ >>> from string import ascii_lowercase
+ >>> iterable = 'abcdfgilmnop'
+ >>> ordering = ascii_lowercase.index
+ >>> for group in consecutive_groups(iterable, ordering):
+ ... print(list(group))
+ ['a', 'b', 'c', 'd']
+ ['f', 'g']
+ ['i']
+ ['l', 'm', 'n', 'o', 'p']
+
+ """
+ for k, g in groupby(
+ enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
+ ):
+ yield map(itemgetter(1), g)
+
+
+def difference(iterable, func=sub):
+ """By default, compute the first difference of *iterable* using
+ :func:`operator.sub`.
+
+ >>> iterable = [0, 1, 3, 6, 10]
+ >>> list(difference(iterable))
+ [0, 1, 2, 3, 4]
+
+ This is the opposite of :func:`accumulate`'s default behavior:
+
+ >>> from more_itertools import accumulate
+ >>> iterable = [0, 1, 2, 3, 4]
+ >>> list(accumulate(iterable))
+ [0, 1, 3, 6, 10]
+ >>> list(difference(accumulate(iterable)))
+ [0, 1, 2, 3, 4]
+
+ By default *func* is :func:`operator.sub`, but other functions can be
+ specified. They will be applied as follows::
+
+ A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
+
+ For example, to do progressive division:
+
+ >>> iterable = [1, 2, 6, 24, 120] # Factorial sequence
+ >>> func = lambda x, y: x // y
+ >>> list(difference(iterable, func))
+ [1, 2, 3, 4, 5]
+
+ """
+ a, b = tee(iterable)
+ try:
+ item = next(b)
+ except StopIteration:
+ return iter([])
+ return chain([item], map(lambda x: func(x[1], x[0]), zip(a, b)))
+
+
+class SequenceView(Sequence):
+ """Return a read-only view of the sequence object *target*.
+
+ :class:`SequenceView` objects are analogous to Python's built-in
+ "dictionary view" types. They provide a dynamic view of a sequence's items,
+ meaning that when the sequence updates, so does the view.
+
+ >>> seq = ['0', '1', '2']
+ >>> view = SequenceView(seq)
+ >>> view
+ SequenceView(['0', '1', '2'])
+ >>> seq.append('3')
+ >>> view
+ SequenceView(['0', '1', '2', '3'])
+
+ Sequence views support indexing, slicing, and length queries. They act
+ like the underlying sequence, except they don't allow assignment:
+
+ >>> view[1]
+ '1'
+ >>> view[1:-1]
+ ['1', '2']
+ >>> len(view)
+ 4
+
+ Sequence views are useful as an alternative to copying, as they don't
+ require (much) extra storage.
+
+ """
+ def __init__(self, target):
+ if not isinstance(target, Sequence):
+ raise TypeError
+ self._target = target
+
+ def __getitem__(self, index):
+ return self._target[index]
+
+ def __len__(self):
+ return len(self._target)
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, repr(self._target))
+
+
+class seekable(object):
+ """Wrap an iterator to allow for seeking backward and forward. This
+ progressively caches the items in the source iterable so they can be
+ re-visited.
+
+ Call :meth:`seek` with an index to seek to that position in the source
+ iterable.
+
+ To "reset" an iterator, seek to ``0``:
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> next(it)
+ '3'
+
+ You can also seek forward:
+
+ >>> it = seekable((str(n) for n in range(20)))
+ >>> it.seek(10)
+ >>> next(it)
+ '10'
+ >>> it.seek(20) # Seeking past the end of the source isn't a problem
+ >>> list(it)
+ []
+ >>> it.seek(0) # Resetting works even after hitting the end
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+
+ The cache grows as the source iterable progresses, so beware of wrapping
+ very large or infinite iterables.
+
+ You may view the contents of the cache with the :meth:`elements` method.
+ That returns a :class:`SequenceView`, a view that updates automatically:
+
+ >>> it = seekable((str(n) for n in range(10)))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> elements = it.elements()
+ >>> elements
+ SequenceView(['0', '1', '2'])
+ >>> next(it)
+ '3'
+ >>> elements
+ SequenceView(['0', '1', '2', '3'])
+
+ """
+
+ def __init__(self, iterable):
+ self._source = iter(iterable)
+ self._cache = []
+ self._index = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._index is not None:
+ try:
+ item = self._cache[self._index]
+ except IndexError:
+ self._index = None
+ else:
+ self._index += 1
+ return item
+
+ item = next(self._source)
+ self._cache.append(item)
+ return item
+
+ next = __next__
+
+ def elements(self):
+ return SequenceView(self._cache)
+
+ def seek(self, index):
+ self._index = index
+ remainder = index - len(self._cache)
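+        # Seeking past the cached region consumes items from the source until
+        # the cache reaches the requested index (or the source is exhausted).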
+ if remainder > 0:
+ consume(self, remainder)
+
+
+class run_length(object):
+ """
+ :func:`run_length.encode` compresses an iterable with run-length encoding.
+ It yields groups of repeated items with the count of how many times they
+ were repeated:
+
+ >>> uncompressed = 'abbcccdddd'
+ >>> list(run_length.encode(uncompressed))
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+
+ :func:`run_length.decode` decompresses an iterable that was previously
+ compressed with run-length encoding. It yields the items of the
+ decompressed iterable:
+
+ >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> list(run_length.decode(compressed))
+ ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']
+
+ """
+
+ @staticmethod
+ def encode(iterable):
+ return ((k, ilen(g)) for k, g in groupby(iterable))
+
+ @staticmethod
+ def decode(iterable):
+ return chain.from_iterable(repeat(k, n) for k, n in iterable)
+
+
+def exactly_n(iterable, n, predicate=bool):
+ """Return ``True`` if exactly ``n`` items in the iterable are ``True``
+ according to the *predicate* function.
+
+ >>> exactly_n([True, True, False], 2)
+ True
+ >>> exactly_n([True, True, False], 1)
+ False
+ >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
+ True
+
+ The iterable will be advanced until ``n + 1`` truthy items are encountered,
+ so avoid calling it on infinite iterables.
+
+ """
+ return len(take(n + 1, filter(predicate, iterable))) == n
+
+
+def circular_shifts(iterable):
+ """Return a list of circular shifts of *iterable*.
+
+ >>> circular_shifts(range(4))
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ """
+ lst = list(iterable)
+ return take(len(lst), windowed(cycle(lst), len(lst)))
+
+
+def make_decorator(wrapping_func, result_index=0):
+ """Return a decorator version of *wrapping_func*, which is a function that
+ modifies an iterable. *result_index* is the position in that function's
+ signature where the iterable goes.
+
+ This lets you use itertools on the "production end," i.e. at function
+ definition. This can augment what the function returns without changing the
+ function's code.
+
+ For example, to produce a decorator version of :func:`chunked`:
+
+ >>> from more_itertools import chunked
+ >>> chunker = make_decorator(chunked, result_index=0)
+ >>> @chunker(3)
+ ... def iter_range(n):
+ ... return iter(range(n))
+ ...
+ >>> list(iter_range(9))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ To only allow truthy items to be returned:
+
+ >>> truth_serum = make_decorator(filter, result_index=1)
+ >>> @truth_serum(bool)
+ ... def boolean_test():
+ ... return [0, 1, '', ' ', False, True]
+ ...
+ >>> list(boolean_test())
+ [1, ' ', True]
+
+ The :func:`peekable` and :func:`seekable` wrappers make for practical
+ decorators:
+
+ >>> from more_itertools import peekable
+ >>> peekable_function = make_decorator(peekable)
+ >>> @peekable_function()
+ ... def str_range(*args):
+ ... return (str(x) for x in range(*args))
+ ...
+ >>> it = str_range(1, 20, 2)
+ >>> next(it), next(it), next(it)
+ ('1', '3', '5')
+ >>> it.peek()
+ '7'
+ >>> next(it)
+ '7'
+
+ """
+ # See https://sites.google.com/site/bbayles/index/decorator_factory for
+ # notes on how this works.
+ def decorator(*wrapping_args, **wrapping_kwargs):
+ def outer_wrapper(f):
+ def inner_wrapper(*args, **kwargs):
+ result = f(*args, **kwargs)
+ wrapping_args_ = list(wrapping_args)
+ wrapping_args_.insert(result_index, result)
+ return wrapping_func(*wrapping_args_, **wrapping_kwargs)
+
+ return inner_wrapper
+
+ return outer_wrapper
+
+ return decorator
+
+
+def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
+ """Return a dictionary that maps the items in *iterable* to categories
+ defined by *keyfunc*, transforms them with *valuefunc*, and
+ then summarizes them by category with *reducefunc*.
+
+ *valuefunc* defaults to the identity function if it is unspecified.
+ If *reducefunc* is unspecified, no summarization takes place:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> result = map_reduce('abbccc', keyfunc)
+ >>> sorted(result.items())
+ [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]
+
+ Specifying *valuefunc* transforms the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc)
+ >>> sorted(result.items())
+ [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]
+
+ Specifying *reducefunc* summarizes the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> reducefunc = sum
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc)
+ >>> sorted(result.items())
+ [('A', 1), ('B', 2), ('C', 3)]
+
+ You may want to filter the input iterable before applying the map/reduce
+ procedure:
+
+ >>> all_items = range(30)
+ >>> items = [x for x in all_items if 10 <= x <= 20] # Filter
+ >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1
+ >>> categories = map_reduce(items, keyfunc=keyfunc)
+ >>> sorted(categories.items())
+ [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])]
+ >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum)
+ >>> sorted(summaries.items())
+ [(0, 90), (1, 75)]
+
+ Note that all items in the iterable are gathered into a list before the
+ summarization step, which may require significant storage.
+
+ The returned object is a :obj:`collections.defaultdict` with the
+ ``default_factory`` set to ``None``, such that it behaves like a normal
+ dictionary.
+
+ """
+ valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
+
+ ret = defaultdict(list)
+ for item in iterable:
+ key = keyfunc(item)
+ value = valuefunc(item)
+ ret[key].append(value)
+
+ if reducefunc is not None:
+ for key, value_list in ret.items():
+ ret[key] = reducefunc(value_list)
+
+ ret.default_factory = None
+ return ret
+
+
+def rlocate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``, starting from the right and moving left.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4
+ [4, 2, 1]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item:
+
+ >>> iterable = iter('abcb')
+ >>> pred = lambda x: x == 'b'
+ >>> list(rlocate(iterable, pred))
+ [3, 1]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(rlocate(iterable, pred=pred, window_size=3))
+ [9, 5, 1]
+
+    Beware: this function will not terminate when given an infinite iterable.
+ If *iterable* is reversible, ``rlocate`` will reverse it and search from
+ the right. Otherwise, it will search from the left and return the results
+ in reverse order.
+
+    See :func:`locate` for other example applications.
+
+ """
+ if window_size is None:
+ try:
+ len_iter = len(iterable)
+ return (
+ len_iter - i - 1 for i in locate(reversed(iterable), pred)
+ )
+ except TypeError:
+ pass
+
+ return reversed(list(locate(iterable, pred, window_size)))
+
+
+def replace(iterable, pred, substitutes, count=None, window_size=1):
+ """Yield the items from *iterable*, replacing the items for which *pred*
+ returns ``True`` with the items from the iterable *substitutes*.
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = (2, 3)
+ >>> list(replace(iterable, pred, substitutes))
+ [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]
+
+ If *count* is given, the number of replacements will be limited:
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = [None]
+ >>> list(replace(iterable, pred, substitutes, count=2))
+ [1, 1, None, 1, 1, None, 1, 1, 0]
+
+ Use *window_size* to control the number of items passed as arguments to
+ *pred*. This allows for locating and replacing subsequences.
+
+ >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
+ >>> window_size = 3
+ >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred
+ >>> substitutes = [3, 4] # Splice in these items
+ >>> list(replace(iterable, pred, substitutes, window_size=window_size))
+ [3, 4, 5, 3, 4, 5]
+
+ """
+ if window_size < 1:
+ raise ValueError('window_size must be at least 1')
+
+ # Save the substitutes iterable, since it's used more than once
+ substitutes = tuple(substitutes)
+
+ # Add padding such that the number of windows matches the length of the
+ # iterable
+ it = chain(iterable, [_marker] * (window_size - 1))
+ windows = windowed(it, window_size)
+
+ n = 0
+ for w in windows:
+ # If the current window matches our predicate (and we haven't hit
+ # our maximum number of replacements), splice in the substitutes
+ # and then consume the following windows that overlap with this one.
+ # For example, if the iterable is (0, 1, 2, 3, 4...)
+ # and the window size is 2, we have (0, 1), (1, 2), (2, 3)...
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2)
+ if pred(*w):
+ if (count is None) or (n < count):
+ n += 1
+ for s in substitutes:
+ yield s
+ consume(windows, window_size - 1)
+ continue
+
+ # If there was no match (or we've reached the replacement limit),
+ # yield the first item from the window.
+ if w and (w[0] is not _marker):
+ yield w[0]
diff --git a/contrib/python/more-itertools/py2/more_itertools/recipes.py b/contrib/python/more-itertools/py2/more_itertools/recipes.py
new file mode 100644
index 0000000000..3b455d4eb8
--- /dev/null
+++ b/contrib/python/more-itertools/py2/more_itertools/recipes.py
@@ -0,0 +1,577 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+from collections import deque
+from itertools import (
+ chain, combinations, count, cycle, groupby, islice, repeat, starmap, tee
+)
+import operator
+from random import randrange, sample, choice
+
+from six import PY2
+from six.moves import filter, filterfalse, map, range, zip, zip_longest
+
+__all__ = [
+ 'accumulate',
+ 'all_equal',
+ 'consume',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
+def accumulate(iterable, func=operator.add):
+ """
+ Return an iterator whose items are the accumulated results of a function
+ (specified by the optional *func* argument) that takes two arguments.
+ By default, returns accumulated sums with :func:`operator.add`.
+
+ >>> list(accumulate([1, 2, 3, 4, 5])) # Running sum
+ [1, 3, 6, 10, 15]
+ >>> list(accumulate([1, 2, 3], func=operator.mul)) # Running product
+ [1, 2, 6]
+ >>> list(accumulate([0, 1, -1, 2, 3, 2], func=max)) # Running maximum
+ [0, 1, 1, 2, 3, 3]
+
+ This function is available in the ``itertools`` module for Python 3.2 and
+ greater.
+
+ """
+ it = iter(iterable)
+ try:
+ total = next(it)
+ except StopIteration:
+ return
+ else:
+ yield total
+
+ for element in it:
+ total = func(total, element)
+ yield total
+
+
+def take(n, iterable):
+ """Return first *n* items of the iterable as a list.
+
+ >>> take(3, range(10))
+ [0, 1, 2]
+ >>> take(5, range(3))
+ [0, 1, 2]
+
+    Effectively a short replacement for ``next``-based iterator consumption
+    when you want more than one item, but not the whole iterator.
+
+ """
+ return list(islice(iterable, n))
+
+
+def tabulate(function, start=0):
+ """Return an iterator over the results of ``func(start)``,
+ ``func(start + 1)``, ``func(start + 2)``...
+
+ *func* should be a function that accepts one integer argument.
+
+    If *start* is not specified it defaults to 0. The argument passed to
+    *function* is incremented each time the iterator is advanced.
+
+ >>> square = lambda x: x ** 2
+ >>> iterator = tabulate(square, -3)
+ >>> take(4, iterator)
+ [9, 4, 1, 0]
+
+ """
+ return map(function, count(start))
+
+
+def tail(n, iterable):
+ """Return an iterator over the last *n* items of *iterable*.
+
+ >>> t = tail(3, 'ABCDEFG')
+ >>> list(t)
+ ['E', 'F', 'G']
+
+ """
+ return iter(deque(iterable, maxlen=n))
+
+
+def consume(iterator, n=None):
+ """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
+ entirely.
+
+ Efficiently exhausts an iterator without returning values. Defaults to
+ consuming the whole iterator, but an optional second argument may be
+ provided to limit consumption.
+
+ >>> i = (x for x in range(10))
+ >>> next(i)
+ 0
+ >>> consume(i, 3)
+ >>> next(i)
+ 4
+ >>> consume(i)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ If the iterator has fewer items remaining than the provided limit, the
+ whole iterator will be consumed.
+
+ >>> i = (x for x in range(3))
+ >>> consume(i, 5)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ """
+ # Use functions that consume iterators at C speed.
+ if n is None:
+ # feed the entire iterator into a zero-length deque
+ deque(iterator, maxlen=0)
+ else:
+ # advance to the empty slice starting at position n
+ next(islice(iterator, n, n), None)
+
+
+def nth(iterable, n, default=None):
+ """Returns the nth item or a default value.
+
+ >>> l = range(10)
+ >>> nth(l, 3)
+ 3
+ >>> nth(l, 20, "zebra")
+ 'zebra'
+
+ """
+ return next(islice(iterable, n, None), default)
+
+
+def all_equal(iterable):
+ """
+ Returns ``True`` if all the elements are equal to each other.
+
+ >>> all_equal('aaaa')
+ True
+ >>> all_equal('aaab')
+ False
+
+ """
+ g = groupby(iterable)
+ return next(g, True) and not next(g, False)
+
+
+def quantify(iterable, pred=bool):
+ """Return the how many times the predicate is true.
+
+ >>> quantify([True, False, True])
+ 2
+
+ """
+ return sum(map(pred, iterable))
+
+
+def padnone(iterable):
+ """Returns the sequence of elements and then returns ``None`` indefinitely.
+
+ >>> take(5, padnone(range(3)))
+ [0, 1, 2, None, None]
+
+ Useful for emulating the behavior of the built-in :func:`map` function.
+
+ See also :func:`padded`.
+
+ """
+ return chain(iterable, repeat(None))
+
+
+def ncycles(iterable, n):
+ """Returns the sequence elements *n* times
+
+ >>> list(ncycles(["a", "b"], 3))
+ ['a', 'b', 'a', 'b', 'a', 'b']
+
+ """
+ return chain.from_iterable(repeat(tuple(iterable), n))
+
+
+def dotproduct(vec1, vec2):
+ """Returns the dot product of the two iterables.
+
+ >>> dotproduct([10, 10], [20, 20])
+ 400
+
+ """
+ return sum(map(operator.mul, vec1, vec2))
+
+
+def flatten(listOfLists):
+ """Return an iterator flattening one level of nesting in a list of lists.
+
+ >>> list(flatten([[0, 1], [2, 3]]))
+ [0, 1, 2, 3]
+
+ See also :func:`collapse`, which can flatten multiple levels of nesting.
+
+ """
+ return chain.from_iterable(listOfLists)
+
+
+def repeatfunc(func, times=None, *args):
+ """Call *func* with *args* repeatedly, returning an iterable over the
+ results.
+
+ If *times* is specified, the iterable will terminate after that many
+ repetitions:
+
+ >>> from operator import add
+ >>> times = 4
+ >>> args = 3, 5
+ >>> list(repeatfunc(add, times, *args))
+ [8, 8, 8, 8]
+
+ If *times* is ``None`` the iterable will not terminate:
+
+ >>> from random import randrange
+ >>> times = None
+ >>> args = 1, 11
+ >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
+ [2, 4, 8, 1, 8, 4]
+
+ """
+ if times is None:
+ return starmap(func, repeat(args))
+ return starmap(func, repeat(args, times))
+
+
+def pairwise(iterable):
+    """Returns an iterator of overlapping pairs of items from the original iterable.
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ """
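+    # Advance the second copy of the iterator by one so that zip() pairs each
+    # item with its successor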
+ a, b = tee(iterable)
+ next(b, None)
+ return zip(a, b)
+
+
+def grouper(n, iterable, fillvalue=None):
+ """Collect data into fixed-length chunks or blocks.
+
+ >>> list(grouper(3, 'ABCDEFG', 'x'))
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
+
+ """
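+    # The same iterator object repeated n times: zip_longest() then pulls n
+    # consecutive items for each group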
+ args = [iter(iterable)] * n
+ return zip_longest(fillvalue=fillvalue, *args)
+
+
+def roundrobin(*iterables):
+ """Yields an item from each iterable, alternating between them.
+
+ >>> list(roundrobin('ABC', 'D', 'EF'))
+ ['A', 'D', 'E', 'B', 'F', 'C']
+
+ This function produces the same output as :func:`interleave_longest`, but
+ may perform better for some inputs (in particular when the number of
+ iterables is small).
+
+ """
+ # Recipe credited to George Sakkis
+ pending = len(iterables)
+ if PY2:
+ nexts = cycle(iter(it).next for it in iterables)
+ else:
+ nexts = cycle(iter(it).__next__ for it in iterables)
+ while pending:
+ try:
+ for next in nexts:
+ yield next()
+ except StopIteration:
+ pending -= 1
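+            # Drop the exhausted iterable and rebuild the cycle from the rest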
+ nexts = cycle(islice(nexts, pending))
+
+
+def partition(pred, iterable):
+ """
+ Returns a 2-tuple of iterables derived from the input iterable.
+ The first yields the items that have ``pred(item) == False``.
+ The second yields the items that have ``pred(item) == True``.
+
+ >>> is_odd = lambda x: x % 2 != 0
+ >>> iterable = range(10)
+ >>> even_items, odd_items = partition(is_odd, iterable)
+ >>> list(even_items), list(odd_items)
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+
+ """
+ # partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
+ t1, t2 = tee(iterable)
+ return filterfalse(pred, t1), filter(pred, t2)
+
+
+def powerset(iterable):
+ """Yields all possible subsets of the iterable.
+
+ >>> list(powerset([1, 2, 3]))
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+
+ :func:`powerset` will operate on iterables that aren't :class:`set`
+ instances, so repeated elements in the input will produce repeated elements
+ in the output. Use :func:`unique_everseen` on the input to avoid generating
+ duplicates:
+
+ >>> seq = [1, 1, 0]
+ >>> list(powerset(seq))
+ [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
+ >>> from more_itertools import unique_everseen
+ >>> list(powerset(unique_everseen(seq)))
+ [(), (1,), (0,), (1, 0)]
+
+ """
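+    # Chain together the combinations of every length from 0 to len(s)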
+ s = list(iterable)
+ return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
+
+
+def unique_everseen(iterable, key=None):
+ """
+ Yield unique elements, preserving order.
+
+ >>> list(unique_everseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D']
+ >>> list(unique_everseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'D']
+
+ Sequences with a mix of hashable and unhashable items can be used.
+ The function will be slower (i.e., `O(n^2)`) for unhashable items.
+
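+    For example (an illustrative case), unhashable list items fall back to the
+    slower list-based lookup:
+
+    >>> list(unique_everseen([[1, 2], [2, 3], [1, 2]]))
+    [[1, 2], [2, 3]]
+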
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ if key is None:
+ for element in iterable:
+ try:
+ if element not in seenset:
+ seenset_add(element)
+ yield element
+ except TypeError:
+ if element not in seenlist:
+ seenlist_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ try:
+ if k not in seenset:
+ seenset_add(k)
+ yield element
+ except TypeError:
+ if k not in seenlist:
+ seenlist_add(k)
+ yield element
+
+
+def unique_justseen(iterable, key=None):
+ """Yields elements in order, ignoring serial duplicates
+
+ >>> list(unique_justseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D', 'A', 'B']
+ >>> list(unique_justseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'A', 'D']
+
+ """
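+    # groupby() batches consecutive items with the same key; itemgetter(1)
+    # extracts each group and next() takes its first element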
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
+def first_true(iterable, default=None, pred=None):
+ """
+ Returns the first true value in the iterable.
+
+ If no true value is found, returns *default*
+
+ If *pred* is not None, returns the first item for which
+    ``pred(item) == True``.
+
+ >>> first_true(range(10))
+ 1
+ >>> first_true(range(10), pred=lambda x: x > 5)
+ 6
+ >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
+ 'missing'
+
+ """
+ return next(filter(pred, iterable), default)
+
+
+def random_product(*args, **kwds):
+ """Draw an item at random from each of the input iterables.
+
+ >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
+ ('c', 3, 'Z')
+
+ If *repeat* is provided as a keyword argument, that many items will be
+ drawn from each iterable.
+
+ >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
+ ('a', 2, 'd', 3)
+
+    This is equivalent to taking a random selection from
+    ``itertools.product(*args, **kwds)``.
+
+ """
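+    # Replicate the pools *repeat* times, as itertools.product() would, then
+    # draw one item from each pool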
+ pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)
+ return tuple(choice(pool) for pool in pools)
+
+
+def random_permutation(iterable, r=None):
+ """Return a random *r* length permutation of the elements in *iterable*.
+
+ If *r* is not specified or is ``None``, then *r* defaults to the length of
+ *iterable*.
+
+ >>> random_permutation(range(5)) # doctest:+SKIP
+ (3, 4, 0, 1, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.permutations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ return tuple(sample(pool, r))
+
+
+def random_combination(iterable, r):
+ """Return a random *r* length subsequence of the elements in *iterable*.
+
+ >>> random_combination(range(5), 3) # doctest:+SKIP
+ (2, 3, 4)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(sample(range(n), r))
+ return tuple(pool[i] for i in indices)
+
+
+def random_combination_with_replacement(iterable, r):
+ """Return a random *r* length subsequence of elements in *iterable*,
+ allowing individual elements to be repeated.
+
+ >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
+ (0, 0, 1, 2, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations_with_replacement(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(randrange(n) for i in range(r))
+ return tuple(pool[i] for i in indices)
+
+
+def nth_combination(iterable, r, index):
+ """Equivalent to ``list(combinations(iterable, r))[index]``.
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`nth_combination` computes the subsequence at
+ sort position *index* directly, without computing the previous
+ subsequences.
+
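+    For example, the combination at sort position 5 of the 3-item
+    combinations of ``range(5)``:
+
+    >>> nth_combination(range(5), 3, 5)
+    (0, 3, 4)
+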
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
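+    # Total number of r-length combinations of the n pool items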
+ c = 1
+ k = min(r, n - r)
+ for i in range(1, k + 1):
+ c = c * (n - k + i) // i
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
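+    # Choose each element of the combination in turn, skipping over the blocks
+    # of combinations that come before the one at *index*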
+ result = []
+ while r:
+ c, n, r = c * r // n, n - 1, r - 1
+ while index >= c:
+ index -= c
+ c, n = c * (n - r) // n, n - 1
+ result.append(pool[-1 - n])
+
+ return tuple(result)
+
+
+def prepend(value, iterator):
+ """Yield *value*, followed by the elements in *iterator*.
+
+ >>> value = '0'
+ >>> iterator = ['1', '2', '3']
+ >>> list(prepend(value, iterator))
+ ['0', '1', '2', '3']
+
+ To prepend multiple values, see :func:`itertools.chain`.
+
+ """
+ return chain([value], iterator)
diff --git a/contrib/python/more-itertools/py2/more_itertools/tests/__init__.py b/contrib/python/more-itertools/py2/more_itertools/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/more-itertools/py2/more_itertools/tests/__init__.py
diff --git a/contrib/python/more-itertools/py2/more_itertools/tests/test_more.py b/contrib/python/more-itertools/py2/more_itertools/tests/test_more.py
new file mode 100644
index 0000000000..5f7e13df41
--- /dev/null
+++ b/contrib/python/more-itertools/py2/more_itertools/tests/test_more.py
@@ -0,0 +1,2313 @@
+from __future__ import division, print_function, unicode_literals
+
+from collections import OrderedDict
+from decimal import Decimal
+from doctest import DocTestSuite
+from fractions import Fraction
+from functools import partial, reduce
+from heapq import merge
+from io import StringIO
+from itertools import (
+ chain,
+ count,
+ groupby,
+ islice,
+ permutations,
+ product,
+ repeat,
+)
+from operator import add, mul, itemgetter
+from unittest import TestCase
+
+from six.moves import filter, map, range, zip
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.more'))
+ return tests
+
+
+class CollateTests(TestCase):
+ """Unit tests for ``collate()``"""
+ # Also accidentally tests peekable, though that could use its own tests
+
+ def test_default(self):
+ """Test with the default `key` function."""
+ iterables = [range(4), range(7), range(3, 6)]
+ self.assertEqual(
+ sorted(reduce(list.__add__, [list(it) for it in iterables])),
+ list(mi.collate(*iterables))
+ )
+
+ def test_key(self):
+ """Test using a custom `key` function."""
+ iterables = [range(5, 0, -1), range(4, 0, -1)]
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, key=lambda x: -x))
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Be nice if passed an empty list of iterables."""
+ self.assertEqual([], list(mi.collate()))
+
+ def test_one(self):
+ """Work when only 1 iterable is passed."""
+ self.assertEqual([0, 1], list(mi.collate(range(2))))
+
+ def test_reverse(self):
+ """Test the `reverse` kwarg."""
+ iterables = [range(4, 0, -1), range(7, 0, -1), range(3, 6, -1)]
+
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, reverse=True))
+ self.assertEqual(actual, expected)
+
+ def test_alias(self):
+ self.assertNotEqual(merge.__doc__, mi.collate.__doc__)
+ self.assertNotEqual(partial.__doc__, mi.collate.__doc__)
+
+
+class ChunkedTests(TestCase):
+ """Tests for ``chunked()``"""
+
+ def test_even(self):
+ """Test when ``n`` divides evenly into the length of the iterable."""
+ self.assertEqual(
+ list(mi.chunked('ABCDEF', 3)), [['A', 'B', 'C'], ['D', 'E', 'F']]
+ )
+
+ def test_odd(self):
+ """Test when ``n`` does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.chunked('ABCDE', 3)), [['A', 'B', 'C'], ['D', 'E']]
+ )
+
+
+class FirstTests(TestCase):
+ """Tests for ``first()``"""
+
+ def test_many(self):
+ """Test that it works on many-item iterables."""
+ # Also try it on a generator expression to make sure it works on
+ # whatever those return, across Python versions.
+ self.assertEqual(mi.first(x for x in range(4)), 0)
+
+ def test_one(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.first([3]), 3)
+
+ def test_empty_stop_iteration(self):
+        """It should raise ValueError for empty iterables."""
+ self.assertRaises(ValueError, lambda: mi.first([]))
+
+ def test_default(self):
+ """It should return the provided default arg for empty iterables."""
+ self.assertEqual(mi.first([], 'boo'), 'boo')
+
+
+class IterOnlyRange:
+    """User-defined iterable class which only supports ``__iter__``.
+
+    It is not specified to inherit ``object``, so indexing on an instance will
+ raise an ``AttributeError`` rather than ``TypeError`` in Python 2.
+
+ >>> r = IterOnlyRange(5)
+ >>> r[0] # doctest: +SKIP
+ AttributeError: IterOnlyRange instance has no attribute '__getitem__'
+
+ Note: In Python 3, ``TypeError`` will be raised because ``object`` is
+ inherited implicitly by default.
+
+ >>> r[0] # doctest: +SKIP
+ TypeError: 'IterOnlyRange' object does not support indexing
+ """
+ def __init__(self, n):
+ """Set the length of the range."""
+ self.n = n
+
+ def __iter__(self):
+ """Works same as range()."""
+ return iter(range(self.n))
+
+
+class LastTests(TestCase):
+ """Tests for ``last()``"""
+
+ def test_many_nonsliceable(self):
+ """Test that it works on many-item non-slice-able iterables."""
+ # Also try it on a generator expression to make sure it works on
+ # whatever those return, across Python versions.
+ self.assertEqual(mi.last(x for x in range(4)), 3)
+
+ def test_one_nonsliceable(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.last(x for x in range(1)), 0)
+
+ def test_empty_stop_iteration_nonsliceable(self):
+ """It should raise ValueError for empty non-slice-able iterables."""
+ self.assertRaises(ValueError, lambda: mi.last(x for x in range(0)))
+
+ def test_default_nonsliceable(self):
+ """It should return the provided default arg for empty non-slice-able
+ iterables.
+ """
+ self.assertEqual(mi.last((x for x in range(0)), 'boo'), 'boo')
+
+ def test_many_sliceable(self):
+ """Test that it works on many-item slice-able iterables."""
+ self.assertEqual(mi.last([0, 1, 2, 3]), 3)
+
+ def test_one_sliceable(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.last([3]), 3)
+
+ def test_empty_stop_iteration_sliceable(self):
+ """It should raise ValueError for empty slice-able iterables."""
+ self.assertRaises(ValueError, lambda: mi.last([]))
+
+ def test_default_sliceable(self):
+ """It should return the provided default arg for empty slice-able
+ iterables.
+ """
+ self.assertEqual(mi.last([], 'boo'), 'boo')
+
+ def test_dict(self):
+ """last(dic) and last(dic.keys()) should return same result."""
+ dic = {'a': 1, 'b': 2, 'c': 3}
+ self.assertEqual(mi.last(dic), mi.last(dic.keys()))
+
+ def test_ordereddict(self):
+ """last(dic) should return the last key."""
+ od = OrderedDict()
+ od['a'] = 1
+ od['b'] = 2
+ od['c'] = 3
+ self.assertEqual(mi.last(od), 'c')
+
+ def test_customrange(self):
+ """It should work on custom class where [] raises AttributeError."""
+ self.assertEqual(mi.last(IterOnlyRange(5)), 4)
+
+
+class PeekableTests(TestCase):
+    """Tests for ``peekable()`` behavior not incidentally covered by testing
+ ``collate()``
+
+ """
+ def test_peek_default(self):
+ """Make sure passing a default into ``peek()`` works."""
+ p = mi.peekable([])
+ self.assertEqual(p.peek(7), 7)
+
+ def test_truthiness(self):
+ """Make sure a ``peekable`` tests true iff there are items remaining in
+ the iterable.
+
+ """
+ p = mi.peekable([])
+ self.assertFalse(p)
+
+ p = mi.peekable(range(3))
+ self.assertTrue(p)
+
+ def test_simple_peeking(self):
+ """Make sure ``next`` and ``peek`` advance and don't advance the
+ iterator, respectively.
+
+ """
+ p = mi.peekable(range(10))
+ self.assertEqual(next(p), 0)
+ self.assertEqual(p.peek(), 1)
+ self.assertEqual(next(p), 1)
+
+ def test_indexing(self):
+ """
+ Indexing into the peekable shouldn't advance the iterator.
+ """
+ p = mi.peekable('abcdefghijkl')
+
+ # The 0th index is what ``next()`` will return
+ self.assertEqual(p[0], 'a')
+ self.assertEqual(next(p), 'a')
+
+        # Indexing further into the peekable shouldn't advance the iterator
+ self.assertEqual(p[2], 'd')
+ self.assertEqual(next(p), 'b')
+
+ # The 0th index moves up with the iterator; the last index follows
+ self.assertEqual(p[0], 'c')
+ self.assertEqual(p[9], 'l')
+
+ self.assertEqual(next(p), 'c')
+ self.assertEqual(p[8], 'l')
+
+ # Negative indexing should work too
+ self.assertEqual(p[-2], 'k')
+ self.assertEqual(p[-9], 'd')
+ self.assertRaises(IndexError, lambda: p[-10])
+
+ def test_slicing(self):
+ """Slicing the peekable shouldn't advance the iterator."""
+ seq = list('abcdefghijkl')
+ p = mi.peekable(seq)
+
+ # Slicing the peekable should just be like slicing a re-iterable
+ self.assertEqual(p[1:4], seq[1:4])
+
+ # Advancing the iterator moves the slices up also
+ self.assertEqual(next(p), 'a')
+ self.assertEqual(p[1:4], seq[1:][1:4])
+
+        # Implicit start and stop should work
+ self.assertEqual(p[:5], seq[1:][:5])
+ self.assertEqual(p[:], seq[1:][:])
+
+ # Indexing past the end should work
+ self.assertEqual(p[:100], seq[1:][:100])
+
+ # Steps should work, including negative
+ self.assertEqual(p[::2], seq[1:][::2])
+ self.assertEqual(p[::-1], seq[1:][::-1])
+
+ def test_slicing_reset(self):
+ """Test slicing on a fresh iterable each time"""
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, 4]
+ for slice_args in product(indexes, indexes, steps):
+ it = iter(iterable)
+ p = mi.peekable(it)
+ next(p)
+ index = slice(*slice_args)
+ actual = p[index]
+ expected = iterable[1:][index]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_slicing_error(self):
+ iterable = '01234567'
+ p = mi.peekable(iter(iterable))
+
+ # Prime the cache
+ p.peek()
+ old_cache = list(p._cache)
+
+ # Illegal slice
+ with self.assertRaises(ValueError):
+ p[1:-1:0]
+
+ # Neither the cache nor the iteration should be affected
+ self.assertEqual(old_cache, list(p._cache))
+ self.assertEqual(list(p), list(iterable))
+
+ def test_passthrough(self):
+ """Iterating a peekable without using ``peek()`` or ``prepend()``
+ should just give the underlying iterable's elements (a trivial test but
+ useful to set a baseline in case something goes wrong)"""
+ expected = [1, 2, 3, 4, 5]
+ actual = list(mi.peekable(expected))
+ self.assertEqual(actual, expected)
+
+ # prepend() behavior tests
+
+ def test_prepend(self):
+        """Tests interspersed ``prepend()`` and ``next()`` calls"""
+ it = mi.peekable(range(2))
+ actual = []
+
+ # Test prepend() before next()
+ it.prepend(10)
+ actual += [next(it), next(it)]
+
+ # Test prepend() between next()s
+ it.prepend(11)
+ actual += [next(it), next(it)]
+
+ # Test prepend() after source iterable is consumed
+ it.prepend(12)
+ actual += [next(it)]
+
+ expected = [10, 0, 11, 1, 12]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend(self):
+ """Tests prepending multiple items and getting them in proper order"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ it.prepend(10, 11, 12)
+ it.prepend(20, 21)
+ actual += list(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Tests prepending in front of an empty iterable"""
+ it = mi.peekable([])
+ it.prepend(10)
+ actual = list(it)
+ expected = [10]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_truthiness(self):
+ """Tests that ``__bool__()`` or ``__nonzero__()`` works properly
+ with ``prepend()``"""
+ it = mi.peekable(range(5))
+ self.assertTrue(it)
+ actual = list(it)
+ self.assertFalse(it)
+ it.prepend(10)
+ self.assertTrue(it)
+ actual += [next(it)]
+ self.assertFalse(it)
+ expected = [0, 1, 2, 3, 4, 10]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend_peek(self):
+ """Tests prepending multiple elements and getting them in reverse order
+ while peeking"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ self.assertEqual(it.peek(), 2)
+ it.prepend(10, 11, 12)
+ self.assertEqual(it.peek(), 10)
+ it.prepend(20, 21)
+ self.assertEqual(it.peek(), 20)
+ actual += list(it)
+ self.assertFalse(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_after_stop(self):
+ """Test resuming iteration after a previous exhaustion"""
+ it = mi.peekable(range(3))
+ self.assertEqual(list(it), [0, 1, 2])
+ self.assertRaises(StopIteration, lambda: next(it))
+ it.prepend(10)
+ self.assertEqual(next(it), 10)
+ self.assertRaises(StopIteration, lambda: next(it))
+
+ def test_prepend_slicing(self):
+ """Tests interaction between prepending and slicing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+ pseq = [30, 40, 50] + seq # pseq for prepended_seq
+
+ # adapt the specific tests from test_slicing
+ self.assertEqual(p[0], 30)
+ self.assertEqual(p[1:8], pseq[1:8])
+ self.assertEqual(p[1:], pseq[1:])
+ self.assertEqual(p[:5], pseq[:5])
+ self.assertEqual(p[:], pseq[:])
+ self.assertEqual(p[:100], pseq[:100])
+ self.assertEqual(p[::2], pseq[::2])
+ self.assertEqual(p[::-1], pseq[::-1])
+
+ def test_prepend_indexing(self):
+ """Tests interaction between prepending and indexing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+
+ self.assertEqual(p[0], 30)
+ self.assertEqual(next(p), 30)
+ self.assertEqual(p[2], 0)
+ self.assertEqual(next(p), 40)
+ self.assertEqual(p[0], 50)
+ self.assertEqual(p[9], 8)
+ self.assertEqual(next(p), 50)
+ self.assertEqual(p[8], 8)
+ self.assertEqual(p[-2], 18)
+ self.assertEqual(p[-9], 11)
+ self.assertRaises(IndexError, lambda: p[-21])
+
+ def test_prepend_iterable(self):
+ """Tests prepending from an iterable"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(5)))
+ actual = list(it)
+ expected = list(chain(range(5), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_many(self):
+ """Tests that prepending a huge number of elements works"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(20000)))
+ actual = list(it)
+ expected = list(chain(range(20000), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_reversed(self):
+ """Tests prepending from a reversed iterable"""
+ it = mi.peekable(range(3))
+ it.prepend(*reversed((10, 11, 12)))
+ actual = list(it)
+ expected = [12, 11, 10, 0, 1, 2]
+ self.assertEqual(actual, expected)
+
+
+class ConsumerTests(TestCase):
+ """Tests for ``consumer()``"""
+
+ def test_consumer(self):
+ @mi.consumer
+ def eater():
+ while True:
+ x = yield # noqa
+
+ e = eater()
+ e.send('hi') # without @consumer, would raise TypeError
+
+
+class DistinctPermutationsTests(TestCase):
+ def test_distinct_permutations(self):
+ """Make sure the output for ``distinct_permutations()`` is the same as
+ set(permutations(it)).
+
+ """
+ iterable = ['z', 'a', 'a', 'q', 'q', 'q', 'y']
+ test_output = sorted(mi.distinct_permutations(iterable))
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ def test_other_iterables(self):
+        """Make sure ``distinct_permutations()`` accepts different types of
+        iterables.
+
+ """
+ # a generator
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ # an iterator
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+
+class IlenTests(TestCase):
+ def test_ilen(self):
+ """Sanity-checks for ``ilen()``."""
+ # Non-empty
+ self.assertEqual(
+ mi.ilen(filter(lambda x: x % 10 == 0, range(101))), 11
+ )
+
+ # Empty
+ self.assertEqual(mi.ilen((x for x in range(0))), 0)
+
+ # Iterable with __len__
+ self.assertEqual(mi.ilen(list(range(6))), 6)
+
+
+class WithIterTests(TestCase):
+ def test_with_iter(self):
+ s = StringIO('One fish\nTwo fish')
+ initial_words = [line.split()[0] for line in mi.with_iter(s)]
+
+ # Iterable's items should be faithfully represented
+ self.assertEqual(initial_words, ['One', 'Two'])
+ # The file object should be closed
+ self.assertTrue(s.closed)
+
+
+class OneTests(TestCase):
+ def test_basic(self):
+ it = iter(['item'])
+ self.assertEqual(mi.one(it), 'item')
+
+ def test_too_short(self):
+ it = iter([])
+ self.assertRaises(ValueError, lambda: mi.one(it))
+ self.assertRaises(IndexError, lambda: mi.one(it, too_short=IndexError))
+
+ def test_too_long(self):
+ it = count()
+ self.assertRaises(ValueError, lambda: mi.one(it)) # burn 0 and 1
+ self.assertEqual(next(it), 2)
+ self.assertRaises(
+ OverflowError, lambda: mi.one(it, too_long=OverflowError)
+ )
+
+
+class IntersperseTest(TestCase):
+ """ Tests for intersperse() """
+
+ def test_even(self):
+ iterable = (x for x in '01')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1']
+ )
+
+ def test_odd(self):
+ iterable = (x for x in '012')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1', None, '2']
+ )
+
+ def test_nested(self):
+ element = ('a', 'b')
+ iterable = (x for x in '012')
+ actual = list(mi.intersperse(element, iterable))
+ expected = ['0', ('a', 'b'), '1', ('a', 'b'), '2']
+ self.assertEqual(actual, expected)
+
+ def test_not_iterable(self):
+ self.assertRaises(TypeError, lambda: mi.intersperse('x', 1))
+
+ def test_n(self):
+ for n, element, expected in [
+ (1, '_', ['0', '_', '1', '_', '2', '_', '3', '_', '4', '_', '5']),
+ (2, '_', ['0', '1', '_', '2', '3', '_', '4', '5']),
+ (3, '_', ['0', '1', '2', '_', '3', '4', '5']),
+ (4, '_', ['0', '1', '2', '3', '_', '4', '5']),
+ (5, '_', ['0', '1', '2', '3', '4', '_', '5']),
+ (6, '_', ['0', '1', '2', '3', '4', '5']),
+ (7, '_', ['0', '1', '2', '3', '4', '5']),
+ (3, ['a', 'b'], ['0', '1', '2', ['a', 'b'], '3', '4', '5']),
+ ]:
+ iterable = (x for x in '012345')
+ actual = list(mi.intersperse(element, iterable, n=n))
+ self.assertEqual(actual, expected)
+
+ def test_n_zero(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.intersperse('x', '012', n=0))
+ )
+
+
+class UniqueToEachTests(TestCase):
+ """Tests for ``unique_to_each()``"""
+
+ def test_all_unique(self):
+ """When all the input iterables are unique the output should match
+ the input."""
+ iterables = [[1, 2], [3, 4, 5], [6, 7, 8]]
+ self.assertEqual(mi.unique_to_each(*iterables), iterables)
+
+ def test_duplicates(self):
+        """When an input iterable contains duplicates that don't appear in the
+        other iterables, those duplicates should be emitted."""
+ iterables = ["mississippi", "missouri"]
+ self.assertEqual(
+ mi.unique_to_each(*iterables), [['p', 'p'], ['o', 'u', 'r']]
+ )
+
+ def test_mixed(self):
+ """When the input iterables contain different types the function should
+ still behave properly"""
+ iterables = ['x', (i for i in range(3)), [1, 2, 3], tuple()]
+ self.assertEqual(mi.unique_to_each(*iterables), [['x'], [0], [3], []])
+
+
+class WindowedTests(TestCase):
+ """Tests for ``windowed()``"""
+
+ def test_basic(self):
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 3))
+ expected = [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_large_size(self):
+ """
+ When the window size is larger than the iterable, and no fill value is
+        given, ``None`` should be filled in.
+ """
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 6))
+ expected = [(1, 2, 3, 4, 5, None)]
+ self.assertEqual(actual, expected)
+
+ def test_fillvalue(self):
+ """
+ When sizes don't match evenly, the given fill value should be used.
+ """
+ iterable = [1, 2, 3, 4, 5]
+
+ for n, kwargs, expected in [
+ (6, {}, [(1, 2, 3, 4, 5, '!')]), # n > len(iterable)
+ (3, {'step': 3}, [(1, 2, 3), (4, 5, '!')]), # using ``step``
+ ]:
+ actual = list(mi.windowed(iterable, n, fillvalue='!', **kwargs))
+ self.assertEqual(actual, expected)
+
+ def test_zero(self):
+ """When the window size is zero, an empty tuple should be emitted."""
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 0))
+ expected = [tuple()]
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ """When the window size is negative, ValueError should be raised."""
+ with self.assertRaises(ValueError):
+ list(mi.windowed([1, 2, 3, 4, 5], -1))
+
+ def test_step(self):
+ """The window should advance by the number of steps provided"""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ for n, step, expected in [
+ (3, 2, [(1, 2, 3), (3, 4, 5), (5, 6, 7)]), # n > step
+ (3, 3, [(1, 2, 3), (4, 5, 6), (7, None, None)]), # n == step
+ (3, 4, [(1, 2, 3), (5, 6, 7)]), # line up nicely
+ (3, 5, [(1, 2, 3), (6, 7, None)]), # off by one
+ (3, 6, [(1, 2, 3), (7, None, None)]), # off by two
+ (3, 7, [(1, 2, 3)]), # step past the end
+ (7, 8, [(1, 2, 3, 4, 5, 6, 7)]), # step > len(iterable)
+ ]:
+ actual = list(mi.windowed(iterable, n, step=step))
+ self.assertEqual(actual, expected)
+
+ # Step must be greater than or equal to 1
+ with self.assertRaises(ValueError):
+ list(mi.windowed(iterable, 3, step=0))
+
+
+class SubstringsTests(TestCase):
+ def test_basic(self):
+ iterable = (x for x in range(4))
+ actual = list(mi.substrings(iterable))
+ expected = [
+ (0,),
+ (1,),
+ (2,),
+ (3,),
+ (0, 1),
+ (1, 2),
+ (2, 3),
+ (0, 1, 2),
+ (1, 2, 3),
+ (0, 1, 2, 3),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_strings(self):
+ iterable = 'abc'
+ actual = list(mi.substrings(iterable))
+ expected = [
+ ('a',), ('b',), ('c',), ('a', 'b'), ('b', 'c'), ('a', 'b', 'c')
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ iterable = iter([])
+ actual = list(mi.substrings(iterable))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_order(self):
+ iterable = [2, 0, 1]
+ actual = list(mi.substrings(iterable))
+ expected = [(2,), (0,), (1,), (2, 0), (0, 1), (2, 0, 1)]
+ self.assertEqual(actual, expected)
+
+
+class BucketTests(TestCase):
+ """Tests for ``bucket()``"""
+
+ def test_basic(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ # In-order access
+ self.assertEqual(list(D[10]), [10, 11, 12])
+
+ # Out of order access
+ self.assertEqual(list(D[30]), [30, 31, 33])
+ self.assertEqual(list(D[20]), [20, 21, 22, 23])
+
+ self.assertEqual(list(D[40]), []) # Nothing in here!
+
+ def test_in(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ self.assertIn(10, D)
+ self.assertNotIn(40, D)
+ self.assertIn(20, D)
+ self.assertNotIn(21, D)
+
+ # Checking in-ness shouldn't advance the iterator
+ self.assertEqual(next(D[10]), 10)
+
+ def test_validator(self):
+ iterable = count(0)
+ key = lambda x: int(str(x)[0]) # First digit of each number
+ validator = lambda x: 0 < x < 10 # No leading zeros
+ D = mi.bucket(iterable, key, validator=validator)
+ self.assertEqual(mi.take(3, D[1]), [1, 10, 11])
+ self.assertNotIn(0, D) # Non-valid entries don't return True
+ self.assertNotIn(0, D._cache) # Don't store non-valid entries
+ self.assertEqual(list(D[0]), [])
+
+
+class SpyTests(TestCase):
+ """Tests for ``spy()``"""
+
+ def test_basic(self):
+ original_iterable = iter('abcdefg')
+ head, new_iterable = mi.spy(original_iterable)
+ self.assertEqual(head, ['a'])
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_unpacking(self):
+ original_iterable = iter('abcdefg')
+ (first, second, third), new_iterable = mi.spy(original_iterable, 3)
+ self.assertEqual(first, 'a')
+ self.assertEqual(second, 'b')
+ self.assertEqual(third, 'c')
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_too_many(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 4)
+ self.assertEqual(head, ['a', 'b', 'c'])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+ def test_zero(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 0)
+ self.assertEqual(head, [])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+
+class InterleaveTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_inf = count()
+ actual = list(mi.interleave(it_list, it_str, it_inf))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', 3]
+ self.assertEqual(actual, expected)
+
+
+class InterleaveLongestTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave_longest([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave_longest([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_gen = (x for x in range(3))
+ actual = list(mi.interleave_longest(it_list, it_str, it_gen))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', '5']
+ self.assertEqual(actual, expected)
+
+
+class TestCollapse(TestCase):
+ """Tests for ``collapse()``"""
+
+ def test_collapse(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l)), [1, 2, 3, 4, 5])
+
+ def test_collapse_to_string(self):
+ l = [["s1"], "s2", [["s3"], "s4"], [[["s5"]]]]
+ self.assertEqual(list(mi.collapse(l)), ["s1", "s2", "s3", "s4", "s5"])
+
+ def test_collapse_flatten(self):
+ l = [[1], [2], [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=1)), list(mi.flatten(l)))
+
+ def test_collapse_to_level(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=2)), [1, 2, 3, 4, [5]])
+ self.assertEqual(
+ list(mi.collapse(mi.collapse(l, levels=1), levels=1)),
+ list(mi.collapse(l, levels=2))
+ )
+
+ def test_collapse_to_list(self):
+ l = (1, [2], (3, [4, (5,)], 'ab'))
+ actual = list(mi.collapse(l, base_type=list))
+ expected = [1, [2], 3, [4, (5,)], 'ab']
+ self.assertEqual(actual, expected)
+
+
+class SideEffectTests(TestCase):
+ """Tests for ``side_effect()``"""
+
+ def test_individual(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10)))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 10)
+
+ def test_chunked(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10), 2))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 5)
+
+ def test_before_after(self):
+ f = StringIO()
+ collector = []
+
+ def func(item):
+ print(item, file=f)
+ collector.append(f.getvalue())
+
+ def it():
+ yield 'a'
+ yield 'b'
+ raise RuntimeError('kaboom')
+
+ before = lambda: print('HEADER', file=f)
+ after = f.close
+
+ try:
+ mi.consume(mi.side_effect(func, it(), before=before, after=after))
+ except RuntimeError:
+ pass
+
+ # The iterable should have been written to the file
+ self.assertEqual(collector, ['HEADER\na\n', 'HEADER\na\nb\n'])
+
+ # The file should be closed even though something bad happened
+ self.assertTrue(f.closed)
+
+ def test_before_fails(self):
+ f = StringIO()
+ func = lambda x: print(x, file=f)
+
+ def before():
+ raise RuntimeError('ouch')
+
+ try:
+ mi.consume(
+ mi.side_effect(func, 'abc', before=before, after=f.close)
+ )
+ except RuntimeError:
+ pass
+
+ # The file should be closed even though something bad happened in the
+ # before function
+ self.assertTrue(f.closed)
+
+
+class SlicedTests(TestCase):
+ """Tests for ``sliced()``"""
+
+ def test_even(self):
+ """Test when the length of the sequence is divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 3)), ['ABC', 'DEF', 'GHI'])
+
+ def test_odd(self):
+ """Test when the length of the sequence is not divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 4)), ['ABCD', 'EFGH', 'I'])
+
+ def test_not_sliceable(self):
+ seq = (x for x in 'ABCDEFGHI')
+
+ with self.assertRaises(TypeError):
+ list(mi.sliced(seq, 3))
+
+
+class SplitAtTests(TestCase):
+    """Tests for ``split_at()``"""
+
+ def comp_with_str_split(self, str_to_split, delim):
+ pred = lambda c: c == delim
+ actual = list(map(''.join, mi.split_at(str_to_split, pred)))
+ expected = str_to_split.split(delim)
+ self.assertEqual(actual, expected)
+
+ def test_seperators(self):
+ test_strs = ['', 'abcba', 'aaabbbcccddd', 'e']
+ for s, delim in product(test_strs, 'abcd'):
+ self.comp_with_str_split(s, delim)
+
+
+class SplitBeforeTest(TestCase):
+ """Tests for ``split_before()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_before('xooxoo', lambda c: c == 'x'))
+ expected = [['x', 'o', 'o'], ['x', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_before('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o'], ['x', 'o', 'o'], ['x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_before('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+
+class SplitAfterTest(TestCase):
+ """Tests for ``split_after()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_after('xooxoo', lambda c: c == 'x'))
+ expected = [['x'], ['o', 'o', 'x'], ['o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_after('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o', 'x'], ['o', 'o', 'x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_after('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+
+class SplitIntoTests(TestCase):
+ """Tests for ``split_into()``"""
+
+ def test_iterable_just_right(self):
+ """Size of ``iterable`` equals the sum of ``sizes``."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, 4]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_too_small(self):
+ """Size of ``iterable`` is smaller than sum of ``sizes``. Last return
+ list is shorter as a result."""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ sizes = [2, 3, 4]
+ expected = [[1, 2], [3, 4, 5], [6, 7]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_too_small_extra(self):
+ """Size of ``iterable`` is smaller than sum of ``sizes``. Second last
+ return list is shorter and last return list is empty as a result."""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ sizes = [2, 3, 4, 5]
+ expected = [[1, 2], [3, 4, 5], [6, 7], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_too_large(self):
+ """Size of ``iterable`` is larger than sum of ``sizes``. Not all
+ items of iterable are returned."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, 2]
+ expected = [[1, 2], [3, 4, 5], [6, 7]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_using_none_with_leftover(self):
+ """Last item of ``sizes`` is None when items still remain in
+ ``iterable``. Last list returned stretches to fit all remaining items
+ of ``iterable``."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, None]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_using_none_without_leftover(self):
+ """Last item of ``sizes`` is None when no items remain in
+ ``iterable``. Last list returned is empty."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, 4, None]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_using_none_mid_sizes(self):
+ """None is present in ``sizes`` but is not the last item. Last list
+ returned stretches to fit all remaining items of ``iterable`` but
+ all items in ``sizes`` after None are ignored."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, None, 4]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_empty(self):
+ """``iterable`` argument is empty but ``sizes`` is not. An empty
+ list is returned for each item in ``sizes``."""
+ iterable = []
+ sizes = [2, 4, 2]
+ expected = [[], [], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_empty_using_none(self):
+ """``iterable`` argument is empty but ``sizes`` is not. An empty
+ list is returned for each item in ``sizes`` that is not after a
+ None item."""
+ iterable = []
+ sizes = [2, 4, None, 2]
+ expected = [[], [], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_sizes_empty(self):
+ """``sizes`` argument is empty but ``iterable`` is not. An empty
+ generator is returned."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = []
+ expected = []
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_both_empty(self):
+ """Both ``sizes`` and ``iterable`` arguments are empty. An empty
+ generator is returned."""
+ iterable = []
+ sizes = []
+ expected = []
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_bool_in_sizes(self):
+        """A bool object present in ``sizes`` is treated as a 1 or 0 for
+        ``True`` or ``False``, since bool is a subclass of int."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [3, True, 2, False]
+ expected = [[1, 2, 3], [4], [5, 6], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_invalid_in_sizes(self):
+ """A ValueError is raised if an object in ``sizes`` is neither ``None``
+        nor an integer."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [1, [], 3]
+ with self.assertRaises(ValueError):
+ list(mi.split_into(iterable, sizes))
+
+ def test_invalid_in_sizes_after_none(self):
+        """An item in ``sizes`` that is invalid will not raise an error if it
+ comes after a ``None`` item."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [3, 4, None, []]
+ expected = [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_generator_iterable_integrity(self):
+        """Check that if ``iterable`` is an iterator, only as many items as
+        the sum of ``sizes`` are consumed."""
+ iterable = (i for i in range(10))
+ sizes = [2, 3]
+
+ expected = [[0, 1], [2, 3, 4]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ iterable_expected = [5, 6, 7, 8, 9]
+ iterable_actual = list(iterable)
+ self.assertEqual(iterable_actual, iterable_expected)
+
+ def test_generator_sizes_integrity(self):
+ """Check that if ``sizes`` is an iterator, it is consumed only until a
+ ``None`` item is reached"""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = (i for i in [1, 2, None, 3, 4])
+
+ expected = [[1], [2, 3], [4, 5, 6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ sizes_expected = [3, 4]
+ sizes_actual = list(sizes)
+ self.assertEqual(sizes_actual, sizes_expected)
+
+
+class PaddedTest(TestCase):
+ """Tests for ``padded()``"""
+
+ def test_no_n(self):
+ seq = [1, 2, 3]
+
+ # No fillvalue
+ self.assertEqual(mi.take(5, mi.padded(seq)), [1, 2, 3, None, None])
+
+ # With fillvalue
+ self.assertEqual(
+ mi.take(5, mi.padded(seq, fillvalue='')), [1, 2, 3, '', '']
+ )
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=-1)))
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=0)))
+
+ def test_valid_n(self):
+ seq = [1, 2, 3, 4, 5]
+
+        # No need for padding: len(seq) >= n
+ self.assertEqual(list(mi.padded(seq, n=4)), [1, 2, 3, 4, 5])
+ self.assertEqual(list(mi.padded(seq, n=5)), [1, 2, 3, 4, 5])
+
+ # No fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, n=7)), [1, 2, 3, 4, 5, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=7)), [1, 2, 3, 4, 5, '', '']
+ )
+
+ def test_next_multiple(self):
+ seq = [1, 2, 3, 4, 5, 6]
+
+ # No need for padding: len(seq) % n == 0
+ self.assertEqual(
+ list(mi.padded(seq, n=3, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) < n
+ self.assertEqual(
+ list(mi.padded(seq, n=8, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None]
+ )
+
+ # No padding needed: len(seq) == n
+ self.assertEqual(
+ list(mi.padded(seq, n=6, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+        # Padding needed: len(seq) % n != 0
+ self.assertEqual(
+ list(mi.padded(seq, n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, '', '']
+ )
+
+
+class DistributeTest(TestCase):
+ """Tests for distribute()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.distribute(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.distribute(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 3, 5, 7, 9], [2, 4, 6, 8, 10]]),
+ (3, [[1, 4, 7, 10], [2, 5, 8], [3, 6, 9]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.distribute(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.distribute(6, iterable)],
+ [[1], [2], [3], [4], [], []]
+ )
+
+
+class StaggerTest(TestCase):
+ """Tests for ``stagger()``"""
+
+ def test_default(self):
+ iterable = [0, 1, 2, 3]
+ actual = list(mi.stagger(iterable))
+ expected = [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ self.assertEqual(actual, expected)
+
+ def test_offsets(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ ((-2, 0, 2), [('', 0, 2), ('', 1, 3)]),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3)]),
+ ((1, 2), [(1, 2), (2, 3)]),
+ ]:
+ all_groups = mi.stagger(iterable, offsets=offsets, fillvalue='')
+ self.assertEqual(list(all_groups), expected)
+
+ def test_longest(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ (
+ (-1, 0, 1),
+ [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, ''), (3, '', '')]
+ ),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3), (3, '')]),
+ ((1, 2), [(1, 2), (2, 3), (3, '')]),
+ ]:
+ all_groups = mi.stagger(
+ iterable, offsets=offsets, fillvalue='', longest=True
+ )
+ self.assertEqual(list(all_groups), expected)
+
+
+class ZipOffsetTest(TestCase):
+ """Tests for ``zip_offset()``"""
+
+ def test_shortest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), fillvalue='')
+ )
+ expected = [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_longest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), longest=True)
+ )
+ expected = [
+ (None, 0, 1),
+ (0, 1, 2),
+ (1, 2, 3),
+ (2, 3, 4),
+ (3, 4, 5),
+ (None, 5, 6),
+ (None, None, 7),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_mismatch(self):
+ iterables = [0, 1, 2], [2, 3, 4]
+ offsets = (-1, 0, 1)
+ self.assertRaises(
+ ValueError,
+ lambda: list(mi.zip_offset(*iterables, offsets=offsets))
+ )
+
+
+class UnzipTests(TestCase):
+ """Tests for unzip()"""
+
+ def test_empty_iterable(self):
+ self.assertEqual(list(mi.unzip([])), [])
+ # in reality zip([], [], []) is equivalent to iter([])
+ # but it doesn't hurt to test both
+ self.assertEqual(list(mi.unzip(zip([], [], []))), [])
+
+ def test_length_one_iterable(self):
+ xs, ys, zs = mi.unzip(zip([1], [2], [3]))
+ self.assertEqual(list(xs), [1])
+ self.assertEqual(list(ys), [2])
+ self.assertEqual(list(zs), [3])
+
+ def test_normal_case(self):
+ xs, ys, zs = range(10), range(1, 11), range(2, 12)
+ zipped = zip(xs, ys, zs)
+ xs, ys, zs = mi.unzip(zipped)
+ self.assertEqual(list(xs), list(range(10)))
+ self.assertEqual(list(ys), list(range(1, 11)))
+ self.assertEqual(list(zs), list(range(2, 12)))
+
+ def test_improperly_zipped(self):
+ zipped = iter([(1, 2, 3), (4, 5), (6,)])
+ xs, ys, zs = mi.unzip(zipped)
+ self.assertEqual(list(xs), [1, 4, 6])
+ self.assertEqual(list(ys), [2, 5])
+ self.assertEqual(list(zs), [3])
+
+ def test_increasingly_zipped(self):
+ zipped = iter([(1, 2), (3, 4, 5), (6, 7, 8, 9)])
+ unzipped = mi.unzip(zipped)
+ # from the docstring:
+ # len(first tuple) is the number of iterables zipped
+ self.assertEqual(len(unzipped), 2)
+ xs, ys = unzipped
+ self.assertEqual(list(xs), [1, 3, 6])
+ self.assertEqual(list(ys), [2, 4, 7])
+
+
+class SortTogetherTest(TestCase):
+ """Tests for sort_together()"""
+
+ def test_key_list(self):
+ """tests `key_list` including default, iterables include duplicates"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (100, 20, 70, 20, 97, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (20, 100, 70, 20, 97, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(2,)),
+ [
+ ('GA', 'CT', 'CT', 'GA', 'GA', 'CT'),
+ ('Aug.', 'July', 'June', 'May', 'May', 'July'),
+ (20, 20, 70, 97, 100, 100)
+ ]
+ )
+
+ def test_invalid_key_list(self):
+ """tests `key_list` for indexes not available in `iterables`"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertRaises(
+ IndexError, lambda: mi.sort_together(iterables, key_list=(5,))
+ )
+
+ def test_reverse(self):
+ """tests `reverse` to ensure a reverse sort for `key_list` iterables"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2), reverse=True),
+ [('GA', 'GA', 'GA', 'CT', 'CT', 'CT'),
+ ('May', 'May', 'Aug.', 'June', 'July', 'July'),
+ (100, 97, 20, 70, 100, 20)]
+ )
+
+ def test_uneven_iterables(self):
+ """tests trimming of iterables to the shortest length before sorting"""
+ iterables = [['GA', 'GA', 'GA', 'CT', 'CT', 'CT', 'MA'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20, 0]]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100)
+ ]
+ )
+
+
+class DivideTest(TestCase):
+ """Tests for divide()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.divide(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.divide(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]),
+ (3, [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.divide(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.divide(6, iterable)],
+ [[1], [2], [3], [4], [], []]
+ )
+
+
+class TestAlwaysIterable(TestCase):
+ """Tests for always_iterable()"""
+ def test_single(self):
+ self.assertEqual(list(mi.always_iterable(1)), [1])
+
+ def test_strings(self):
+ for obj in ['foo', b'bar', 'baz']:
+ actual = list(mi.always_iterable(obj))
+ expected = [obj]
+ self.assertEqual(actual, expected)
+
+ def test_base_type(self):
+ dict_obj = {'a': 1, 'b': 2}
+ str_obj = '123'
+
+ # Default: dicts are iterable like they normally are
+ default_actual = list(mi.always_iterable(dict_obj))
+ default_expected = list(dict_obj)
+ self.assertEqual(default_actual, default_expected)
+
+ # Unitary types set: dicts are not iterable
+ custom_actual = list(mi.always_iterable(dict_obj, base_type=dict))
+ custom_expected = [dict_obj]
+ self.assertEqual(custom_actual, custom_expected)
+
+ # With unitary types set, strings are iterable
+ str_actual = list(mi.always_iterable(str_obj, base_type=None))
+ str_expected = list(str_obj)
+ self.assertEqual(str_actual, str_expected)
+
+ def test_iterables(self):
+ self.assertEqual(list(mi.always_iterable([0, 1])), [0, 1])
+ self.assertEqual(
+ list(mi.always_iterable([0, 1], base_type=list)), [[0, 1]]
+ )
+ self.assertEqual(
+ list(mi.always_iterable(iter('foo'))), ['f', 'o', 'o']
+ )
+ self.assertEqual(list(mi.always_iterable([])), [])
+
+ def test_none(self):
+ self.assertEqual(list(mi.always_iterable(None)), [])
+
+ def test_generator(self):
+ def _gen():
+ yield 0
+ yield 1
+
+ self.assertEqual(list(mi.always_iterable(_gen())), [0, 1])
+
+
+class AdjacentTests(TestCase):
+ def test_typical(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10)))
+ expected = [(True, 0), (True, 1), (False, 2), (False, 3), (True, 4),
+ (True, 5), (True, 6), (False, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_empty_iterable(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, []))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_length_one(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [0]))
+ expected = [(True, 0)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [1]))
+ expected = [(False, 1)]
+ self.assertEqual(actual, expected)
+
+ def test_consecutive_true(self):
+ """Test that when the predicate matches multiple consecutive elements
+ it doesn't repeat elements in the output"""
+ actual = list(mi.adjacent(lambda x: x % 5 < 2, range(10)))
+ expected = [(True, 0), (True, 1), (True, 2), (False, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_distance(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=2))
+ expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=3))
+ expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (True, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_large_distance(self):
+ """Test distance larger than the length of the iterable"""
+ iterable = range(10)
+ actual = list(mi.adjacent(lambda x: x % 5 == 4, iterable, distance=20))
+ expected = list(zip(repeat(True), iterable))
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: False, iterable, distance=20))
+ expected = list(zip(repeat(False), iterable))
+ self.assertEqual(actual, expected)
+
+ def test_zero_distance(self):
+ """Test that adjacent() reduces to zip+map when distance is 0"""
+ iterable = range(1000)
+ predicate = lambda x: x % 4 == 2
+ actual = mi.adjacent(predicate, iterable, 0)
+ expected = zip(map(predicate, iterable), iterable)
+ self.assertTrue(all(a == e for a, e in zip(actual, expected)))
+
+ def test_negative_distance(self):
+ """Test that adjacent() raises an error with negative distance"""
+ pred = lambda x: x
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(1000), -1)
+ )
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(10), -10)
+ )
+
+ def test_grouping(self):
+ """Test interaction of adjacent() with groupby_transform()"""
+ iterable = mi.adjacent(lambda x: x % 5 == 0, range(10))
+ grouper = mi.groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [
+ (True, [0, 1]),
+ (False, [2, 3]),
+ (True, [4, 5, 6]),
+ (False, [7, 8, 9]),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_call_once(self):
+ """Test that the predicate is only called once per item."""
+ already_seen = set()
+ iterable = range(10)
+
+ def predicate(item):
+ self.assertNotIn(item, already_seen)
+ already_seen.add(item)
+ return True
+
+ actual = list(mi.adjacent(predicate, iterable))
+ expected = [(True, x) for x in iterable]
+ self.assertEqual(actual, expected)
+
+
+class GroupByTransformTests(TestCase):
+ def assertAllGroupsEqual(self, groupby1, groupby2):
+ """Compare two groupby objects for equality, both keys and groups."""
+ for a, b in zip(groupby1, groupby2):
+ key1, group1 = a
+ key2, group2 = b
+ self.assertEqual(key1, key2)
+ self.assertListEqual(list(group1), list(group2))
+ self.assertRaises(StopIteration, lambda: next(groupby1))
+ self.assertRaises(StopIteration, lambda: next(groupby2))
+
+ def test_default_funcs(self):
+ """Test that groupby_transform() with default args mimics groupby()"""
+ iterable = [(x // 5, x) for x in range(1000)]
+ actual = mi.groupby_transform(iterable)
+ expected = groupby(iterable)
+ self.assertAllGroupsEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = [(int(x / 5), int(x / 3), x) for x in range(10)]
+
+ # Test the standard usage of grouping one iterable using another's keys
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(0), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2, 3, 4]), (1, [5, 6, 7, 8, 9])]
+ self.assertEqual(actual, expected)
+
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(1), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2]), (1, [3, 4, 5]), (2, [6, 7, 8]), (3, [9])]
+ self.assertEqual(actual, expected)
+
+ # and now for something a little different
+ d = dict(zip(range(10), 'abcdefghij'))
+ grouper = mi.groupby_transform(
+ range(10), keyfunc=lambda x: x // 5, valuefunc=d.get
+ )
+ actual = [(k, ''.join(g)) for k, g in grouper]
+ expected = [(0, 'abcde'), (1, 'fghij')]
+ self.assertEqual(actual, expected)
+
+ def test_no_valuefunc(self):
+ iterable = range(1000)
+
+ def key(x):
+ return x // 5
+
+ actual = mi.groupby_transform(iterable, key, valuefunc=None)
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+ actual = mi.groupby_transform(iterable, key) # default valuefunc
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+
+class NumericRangeTests(TestCase):
+ def test_basic(self):
+ for args, expected in [
+ ((4,), [0, 1, 2, 3]),
+ ((4.0,), [0.0, 1.0, 2.0, 3.0]),
+ ((1.0, 4), [1.0, 2.0, 3.0]),
+ ((1, 4.0), [1, 2, 3]),
+ ((1.0, 5), [1.0, 2.0, 3.0, 4.0]),
+ ((0, 20, 5), [0, 5, 10, 15]),
+ ((0, 20, 5.0), [0.0, 5.0, 10.0, 15.0]),
+ ((0, 10, 3), [0, 3, 6, 9]),
+ ((0, 10, 3.0), [0.0, 3.0, 6.0, 9.0]),
+ ((0, -5, -1), [0, -1, -2, -3, -4]),
+ ((0.0, -5, -1), [0.0, -1.0, -2.0, -3.0, -4.0]),
+ ((1, 2, Fraction(1, 2)), [Fraction(1, 1), Fraction(3, 2)]),
+ ((0,), []),
+ ((0.0,), []),
+ ((1, 0), []),
+ ((1.0, 0.0), []),
+ ((Fraction(2, 1),), [Fraction(0, 1), Fraction(1, 1)]),
+ ((Decimal('2.0'),), [Decimal('0.0'), Decimal('1.0')]),
+ ]:
+ actual = list(mi.numeric_range(*args))
+ self.assertEqual(actual, expected)
+ self.assertTrue(
+ all(type(a) == type(e) for a, e in zip(actual, expected))
+ )
+
+ def test_arg_count(self):
+ self.assertRaises(TypeError, lambda: list(mi.numeric_range()))
+ self.assertRaises(
+ TypeError, lambda: list(mi.numeric_range(0, 1, 2, 3))
+ )
+
+ def test_zero_step(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.numeric_range(1, 2, 0))
+ )
+
+
+class CountCycleTests(TestCase):
+ def test_basic(self):
+ expected = [
+ (0, 'a'), (0, 'b'), (0, 'c'),
+ (1, 'a'), (1, 'b'), (1, 'c'),
+ (2, 'a'), (2, 'b'), (2, 'c'),
+ ]
+ for actual in [
+ mi.take(9, mi.count_cycle('abc')), # n=None
+ list(mi.count_cycle('abc', 3)), # n=3
+ ]:
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ self.assertEqual(list(mi.count_cycle('')), [])
+ self.assertEqual(list(mi.count_cycle('', 2)), [])
+
+ def test_negative(self):
+ self.assertEqual(list(mi.count_cycle('abc', -3)), [])
+
+
+class LocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = [1, 2, 4]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ actual = list(mi.locate(iterable, pred))
+ expected = [0, 3, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_window_size(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda *args: args == ('0', 1)
+ actual = list(mi.locate(iterable, pred, window_size=2))
+ expected = [0, 3]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda a, b, c, d, e: True
+ actual = list(mi.locate(iterable, pred, window_size=5))
+ expected = [0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda: True
+ with self.assertRaises(ValueError):
+ list(mi.locate(iterable, pred, window_size=0))
+
+
+class StripFunctionTests(TestCase):
+ def test_hashable(self):
+ iterable = list('www.example.com')
+ pred = lambda x: x in set('cmowz.')
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), list('example.com'))
+ self.assertEqual(list(mi.rstrip(iterable, pred)), list('www.example'))
+ self.assertEqual(list(mi.strip(iterable, pred)), list('example'))
+
+ def test_not_hashable(self):
+ iterable = [
+ list('http://'), list('www'), list('.example'), list('.com')
+ ]
+ pred = lambda x: x in [list('http://'), list('www'), list('.com')]
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[2:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[2: 3])
+
+ def test_math(self):
+ iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]
+ pred = lambda x: x <= 2
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[3:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:-3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[3:-3])
+
+
+class IsliceExtendedTests(TestCase):
+ def test_all(self):
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+        steps = [1, 2, 3, 4, -1, -2, -3, -4]
+ for slice_args in product(indexes, indexes, steps):
+ try:
+ actual = list(mi.islice_extended(iterable, *slice_args))
+ except Exception as e:
+ self.fail((slice_args, e))
+
+ expected = iterable[slice(*slice_args)]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_zero_step(self):
+ with self.assertRaises(ValueError):
+ list(mi.islice_extended([1, 2, 3], 0, 1, 0))
+
+
+class ConsecutiveGroupsTest(TestCase):
+ def test_numbers(self):
+ iterable = [-10, -8, -7, -6, 1, 2, 4, 5, -1, 7]
+ actual = [list(g) for g in mi.consecutive_groups(iterable)]
+ expected = [[-10], [-8, -7, -6], [1, 2], [4, 5], [-1], [7]]
+ self.assertEqual(actual, expected)
+
+ def test_custom_ordering(self):
+ iterable = ['1', '10', '11', '20', '21', '22', '30', '31']
+ ordering = lambda x: int(x)
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [['1'], ['10', '11'], ['20', '21', '22'], ['30', '31']]
+ self.assertEqual(actual, expected)
+
+ def test_exotic_ordering(self):
+ iterable = [
+ ('a', 'b', 'c', 'd'),
+ ('a', 'c', 'b', 'd'),
+ ('a', 'c', 'd', 'b'),
+ ('a', 'd', 'b', 'c'),
+ ('d', 'b', 'c', 'a'),
+ ('d', 'c', 'a', 'b'),
+ ]
+ ordering = list(permutations('abcd')).index
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [
+ [('a', 'b', 'c', 'd')],
+ [('a', 'c', 'b', 'd'), ('a', 'c', 'd', 'b'), ('a', 'd', 'b', 'c')],
+ [('d', 'b', 'c', 'a'), ('d', 'c', 'a', 'b')],
+ ]
+ self.assertEqual(actual, expected)
+
+
+class DifferenceTest(TestCase):
+ def test_normal(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable))
+ expected = [10, 10, 10, 10, 10]
+ self.assertEqual(actual, expected)
+
+ def test_custom(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable, add))
+ expected = [10, 30, 50, 70, 90]
+ self.assertEqual(actual, expected)
+
+ def test_roundtrip(self):
+ original = list(range(100))
+ accumulated = mi.accumulate(original)
+ actual = list(mi.difference(accumulated))
+ self.assertEqual(actual, original)
+
+ def test_one(self):
+ self.assertEqual(list(mi.difference([0])), [0])
+
+ def test_empty(self):
+ self.assertEqual(list(mi.difference([])), [])
+
+
+class SeekableTest(TestCase):
+ def test_exhaustion_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(list(s), iterable) # Normal iteration
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0)
+ self.assertEqual(list(s), iterable) # Back in action
+
+ def test_partial_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(5, s), iterable[:5]) # Normal iteration
+
+ s.seek(1)
+ self.assertEqual(list(s), iterable[1:]) # Get the rest of the iterable
+
+ def test_forward(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(3) # Skip over index 2
+ self.assertEqual(list(s), iterable[3:]) # Result is similar to slicing
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_past_end(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(20)
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_elements(self):
+ iterable = map(str, count())
+
+ s = mi.seekable(iterable)
+ mi.take(10, s)
+
+ elements = s.elements()
+ self.assertEqual(
+ [elements[i] for i in range(10)], [str(n) for n in range(10)]
+ )
+ self.assertEqual(len(elements), 10)
+
+ mi.take(10, s)
+ self.assertEqual(list(elements), [str(n) for n in range(20)])
+
+
+class SequenceViewTests(TestCase):
+ def test_init(self):
+ view = mi.SequenceView((1, 2, 3))
+ self.assertEqual(repr(view), "SequenceView((1, 2, 3))")
+ self.assertRaises(TypeError, lambda: mi.SequenceView({}))
+
+ def test_update(self):
+ seq = [1, 2, 3]
+ view = mi.SequenceView(seq)
+ self.assertEqual(len(view), 3)
+ self.assertEqual(repr(view), "SequenceView([1, 2, 3])")
+
+ seq.pop()
+ self.assertEqual(len(view), 2)
+ self.assertEqual(repr(view), "SequenceView([1, 2])")
+
+ def test_indexing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ for i in range(-len(seq), len(seq)):
+ self.assertEqual(view[i], seq[i])
+
+ def test_slicing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ n = len(seq)
+ indexes = list(range(-n - 1, n + 1)) + [None]
+ steps = list(range(-n, n + 1))
+ steps.remove(0)
+ for slice_args in product(indexes, indexes, steps):
+ i = slice(*slice_args)
+ self.assertEqual(view[i], seq[i])
+
+ def test_abc_methods(self):
+ # collections.Sequence should provide all of this functionality
+ seq = ('a', 'b', 'c', 'd', 'e', 'f', 'f')
+ view = mi.SequenceView(seq)
+
+ # __contains__
+ self.assertIn('b', view)
+ self.assertNotIn('g', view)
+
+ # __iter__
+ self.assertEqual(list(iter(view)), list(seq))
+
+ # __reversed__
+ self.assertEqual(list(reversed(view)), list(reversed(seq)))
+
+ # index
+ self.assertEqual(view.index('b'), 1)
+
+ # count
+        self.assertEqual(view.count('f'), 2)
+
+
+class RunLengthTest(TestCase):
+ def test_encode(self):
+ iterable = (int(str(n)[0]) for n in count(800))
+ actual = mi.take(4, mi.run_length.encode(iterable))
+ expected = [(8, 100), (9, 100), (1, 1000), (2, 1000)]
+ self.assertEqual(actual, expected)
+
+ def test_decode(self):
+ iterable = [('d', 4), ('c', 3), ('b', 2), ('a', 1)]
+ actual = ''.join(mi.run_length.decode(iterable))
+ expected = 'ddddcccbba'
+ self.assertEqual(actual, expected)
+
+
+class ExactlyNTests(TestCase):
+ """Tests for ``exactly_n()``"""
+
+ def test_true(self):
+ """Iterable has ``n`` ``True`` elements"""
+ self.assertTrue(mi.exactly_n([True, False, True], 2))
+ self.assertTrue(mi.exactly_n([1, 1, 1, 0], 3))
+ self.assertTrue(mi.exactly_n([False, False], 0))
+ self.assertTrue(mi.exactly_n(range(100), 10, lambda x: x < 10))
+
+ def test_false(self):
+ """Iterable does not have ``n`` ``True`` elements"""
+ self.assertFalse(mi.exactly_n([True, False, False], 2))
+ self.assertFalse(mi.exactly_n([True, True, False], 1))
+ self.assertFalse(mi.exactly_n([False], 1))
+ self.assertFalse(mi.exactly_n([True], -1))
+ self.assertFalse(mi.exactly_n(repeat(True), 100))
+
+ def test_empty(self):
+ """Return ``True`` if the iterable is empty and ``n`` is 0"""
+ self.assertTrue(mi.exactly_n([], 0))
+ self.assertFalse(mi.exactly_n([], 1))
+
+
+class AlwaysReversibleTests(TestCase):
+ """Tests for ``always_reversible()``"""
+
+ def test_regular_reversed(self):
+ self.assertEqual(list(reversed(range(10))),
+ list(mi.always_reversible(range(10))))
+ self.assertEqual(list(reversed([1, 2, 3])),
+ list(mi.always_reversible([1, 2, 3])))
+ self.assertEqual(reversed([1, 2, 3]).__class__,
+ mi.always_reversible([1, 2, 3]).__class__)
+
+ def test_nonseq_reversed(self):
+ # Create a non-reversible generator from a sequence
+ with self.assertRaises(TypeError):
+ reversed(x for x in range(10))
+
+ self.assertEqual(list(reversed(range(10))),
+ list(mi.always_reversible(x for x in range(10))))
+ self.assertEqual(list(reversed([1, 2, 3])),
+ list(mi.always_reversible(x for x in [1, 2, 3])))
+ self.assertNotEqual(reversed((1, 2)).__class__,
+ mi.always_reversible(x for x in (1, 2)).__class__)
+
+
+class CircularShiftsTests(TestCase):
+ def test_empty(self):
+ # empty iterable -> empty list
+ self.assertEqual(list(mi.circular_shifts([])), [])
+
+ def test_simple_circular_shifts(self):
+        # test a simple iterator case
+ self.assertEqual(
+ mi.circular_shifts(range(4)),
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ )
+
+ def test_duplicates(self):
+ # test non-distinct entries
+ self.assertEqual(
+ mi.circular_shifts([0, 1, 0, 1]),
+ [(0, 1, 0, 1), (1, 0, 1, 0), (0, 1, 0, 1), (1, 0, 1, 0)]
+ )
+
+
+class MakeDecoratorTests(TestCase):
+ def test_basic(self):
+ slicer = mi.make_decorator(islice)
+
+ @slicer(1, 10, 2)
+ def user_function(arg_1, arg_2, kwarg_1=None):
+ self.assertEqual(arg_1, 'arg_1')
+ self.assertEqual(arg_2, 'arg_2')
+ self.assertEqual(kwarg_1, 'kwarg_1')
+ return map(str, count())
+
+ it = user_function('arg_1', 'arg_2', kwarg_1='kwarg_1')
+ actual = list(it)
+ expected = ['1', '3', '5', '7', '9']
+ self.assertEqual(actual, expected)
+
+ def test_result_index(self):
+ def stringify(*args, **kwargs):
+ self.assertEqual(args[0], 'arg_0')
+ iterable = args[1]
+ self.assertEqual(args[2], 'arg_2')
+ self.assertEqual(kwargs['kwarg_1'], 'kwarg_1')
+ return map(str, iterable)
+
+ stringifier = mi.make_decorator(stringify, result_index=1)
+
+ @stringifier('arg_0', 'arg_2', kwarg_1='kwarg_1')
+ def user_function(n):
+ return count(n)
+
+ it = user_function(1)
+ actual = mi.take(5, it)
+ expected = ['1', '2', '3', '4', '5']
+ self.assertEqual(actual, expected)
+
+ def test_wrap_class(self):
+ seeker = mi.make_decorator(mi.seekable)
+
+ @seeker()
+ def user_function(n):
+ return map(str, range(n))
+
+ it = user_function(5)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+ it.seek(0)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+
+class MapReduceTests(TestCase):
+ def test_default(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ actual = sorted(mi.map_reduce(iterable, keyfunc).items())
+ expected = [(0, ['0', '1']), (1, ['2', '3']), (2, ['4'])]
+ self.assertEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ actual = sorted(mi.map_reduce(iterable, keyfunc, valuefunc).items())
+ expected = [(0, [0, 1]), (1, [2, 3]), (2, [4])]
+ self.assertEqual(actual, expected)
+
+ def test_reducefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ reducefunc = lambda value_list: reduce(mul, value_list, 1)
+ actual = sorted(
+ mi.map_reduce(iterable, keyfunc, valuefunc, reducefunc).items()
+ )
+ expected = [(0, 0), (1, 6), (2, 4)]
+ self.assertEqual(actual, expected)
+
+ def test_ret(self):
+ d = mi.map_reduce([1, 0, 2, 0, 1, 0], bool)
+ self.assertEqual(d, {False: [0, 0, 0], True: [1, 2, 1]})
+ self.assertRaises(KeyError, lambda: d[None].append(1))
+
+
+class RlocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it))
+ expected = [4, 2, 1]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it, pred))
+ expected = [6, 5, 3, 0]
+ self.assertEqual(actual, expected)
+
+ def test_efficient_reversal(self):
+ iterable = range(9 ** 9) # Is efficiently reversible
+ target = 9 ** 9 - 2
+ pred = lambda x: x == target # Find-able from the right
+ actual = next(mi.rlocate(iterable, pred))
+ self.assertEqual(actual, target)
+
+ def test_window_size(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda *args: args == ('0', 1)
+ for it in (iterable, iter(iterable)):
+ actual = list(mi.rlocate(it, pred, window_size=2))
+ expected = [3, 0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda a, b, c, d, e: True
+ for it in (iterable, iter(iterable)):
+            actual = list(mi.rlocate(it, pred, window_size=5))
+ expected = [0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda: True
+ for it in (iterable, iter(iterable)):
+ with self.assertRaises(ValueError):
+                list(mi.rlocate(it, pred, window_size=0))
+
+
+class ReplaceTests(TestCase):
+ def test_basic(self):
+ iterable = range(10)
+ pred = lambda x: x % 2 == 0
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes))
+ expected = [1, 3, 5, 7, 9]
+ self.assertEqual(actual, expected)
+
+ def test_count(self):
+ iterable = range(10)
+ pred = lambda x: x % 2 == 0
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, count=4))
+ expected = [1, 3, 5, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size(self):
+ iterable = range(10)
+ pred = lambda *args: args == (0, 1, 2)
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=3))
+ expected = [3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_end(self):
+ iterable = range(10)
+ pred = lambda *args: args == (7, 8, 9)
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=3))
+ expected = [0, 1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_count(self):
+ iterable = range(10)
+ pred = lambda *args: (args == (0, 1, 2)) or (args == (7, 8, 9))
+ substitutes = []
+ actual = list(
+ mi.replace(iterable, pred, substitutes, count=1, window_size=3)
+ )
+ expected = [3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = range(4)
+ pred = lambda a, b, c, d, e: True
+ substitutes = [5, 6, 7]
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=5))
+ expected = [5, 6, 7]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = range(10)
+ pred = lambda *args: True
+ substitutes = []
+ with self.assertRaises(ValueError):
+ list(mi.replace(iterable, pred, substitutes, window_size=0))
+
+ def test_iterable_substitutes(self):
+ iterable = range(5)
+ pred = lambda x: x % 2 == 0
+ substitutes = iter('__')
+ actual = list(mi.replace(iterable, pred, substitutes))
+ expected = ['_', '_', 1, '_', '_', 3, '_', '_']
+ self.assertEqual(actual, expected)
diff --git a/contrib/python/more-itertools/py2/more_itertools/tests/test_recipes.py b/contrib/python/more-itertools/py2/more_itertools/tests/test_recipes.py
new file mode 100644
index 0000000000..b3cfb62f46
--- /dev/null
+++ b/contrib/python/more-itertools/py2/more_itertools/tests/test_recipes.py
@@ -0,0 +1,616 @@
+from doctest import DocTestSuite
+from unittest import TestCase
+
+from itertools import combinations
+from six.moves import range
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.recipes'))
+ return tests
+
+
+class AccumulateTests(TestCase):
+ """Tests for ``accumulate()``"""
+
+ def test_empty(self):
+ """Test that an empty input returns an empty output"""
+ self.assertEqual(list(mi.accumulate([])), [])
+
+ def test_default(self):
+ """Test accumulate with the default function (addition)"""
+ self.assertEqual(list(mi.accumulate([1, 2, 3])), [1, 3, 6])
+
+ def test_bogus_function(self):
+ """Test accumulate with an invalid function"""
+ with self.assertRaises(TypeError):
+ list(mi.accumulate([1, 2, 3], func=lambda x: x))
+
+ def test_custom_function(self):
+ """Test accumulate with a custom function"""
+ self.assertEqual(
+ list(mi.accumulate((1, 2, 3, 2, 1), func=max)), [1, 2, 3, 3, 3]
+ )
+
+
+class TakeTests(TestCase):
+ """Tests for ``take()``"""
+
+ def test_simple_take(self):
+ """Test basic usage"""
+ t = mi.take(5, range(10))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+ def test_null_take(self):
+ """Check the null case"""
+ t = mi.take(0, range(10))
+ self.assertEqual(t, [])
+
+ def test_negative_take(self):
+ """Make sure taking negative items results in a ValueError"""
+ self.assertRaises(ValueError, lambda: mi.take(-3, range(10)))
+
+ def test_take_too_much(self):
+ """Taking more than an iterator has remaining should return what the
+ iterator has remaining.
+
+ """
+ t = mi.take(10, range(5))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+
+class TabulateTests(TestCase):
+ """Tests for ``tabulate()``"""
+
+ def test_simple_tabulate(self):
+ """Test the happy path"""
+ t = mi.tabulate(lambda x: x)
+ f = tuple([next(t) for _ in range(3)])
+ self.assertEqual(f, (0, 1, 2))
+
+ def test_count(self):
+ """Ensure tabulate accepts specific count"""
+ t = mi.tabulate(lambda x: 2 * x, -1)
+ f = (next(t), next(t), next(t))
+ self.assertEqual(f, (-2, 0, 2))
+
+
+class TailTests(TestCase):
+ """Tests for ``tail()``"""
+
+ def test_greater(self):
+ """Length of iterable is greater than requested tail"""
+ self.assertEqual(list(mi.tail(3, 'ABCDEFG')), ['E', 'F', 'G'])
+
+ def test_equal(self):
+ """Length of iterable is equal to the requested tail"""
+ self.assertEqual(
+ list(mi.tail(7, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+ def test_less(self):
+ """Length of iterable is less than requested tail"""
+ self.assertEqual(
+ list(mi.tail(8, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+
+class ConsumeTests(TestCase):
+ """Tests for ``consume()``"""
+
+ def test_sanity(self):
+ """Test basic functionality"""
+ r = (x for x in range(10))
+ mi.consume(r, 3)
+ self.assertEqual(3, next(r))
+
+ def test_null_consume(self):
+ """Check the null case"""
+ r = (x for x in range(10))
+ mi.consume(r, 0)
+ self.assertEqual(0, next(r))
+
+ def test_negative_consume(self):
+ """Check that negative consumsion throws an error"""
+ r = (x for x in range(10))
+ self.assertRaises(ValueError, lambda: mi.consume(r, -1))
+
+ def test_total_consume(self):
+ """Check that iterator is totally consumed by default"""
+ r = (x for x in range(10))
+ mi.consume(r)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class NthTests(TestCase):
+ """Tests for ``nth()``"""
+
+ def test_basic(self):
+ """Make sure the nth item is returned"""
+ l = range(10)
+ for i, v in enumerate(l):
+ self.assertEqual(mi.nth(l, i), v)
+
+ def test_default(self):
+ """Ensure a default value is returned when nth item not found"""
+ l = range(3)
+ self.assertEqual(mi.nth(l, 100, "zebra"), "zebra")
+
+ def test_negative_item_raises(self):
+ """Ensure asking for a negative item raises an exception"""
+ self.assertRaises(ValueError, lambda: mi.nth(range(10), -3))
+
+
+class AllEqualTests(TestCase):
+ """Tests for ``all_equal()``"""
+
+ def test_true(self):
+ """Everything is equal"""
+ self.assertTrue(mi.all_equal('aaaaaa'))
+ self.assertTrue(mi.all_equal([0, 0, 0, 0]))
+
+ def test_false(self):
+ """Not everything is equal"""
+ self.assertFalse(mi.all_equal('aaaaab'))
+ self.assertFalse(mi.all_equal([0, 0, 0, 1]))
+
+ def test_tricky(self):
+ """Not everything is identical, but everything is equal"""
+ items = [1, complex(1, 0), 1.0]
+ self.assertTrue(mi.all_equal(items))
+
+ def test_empty(self):
+ """Return True if the iterable is empty"""
+ self.assertTrue(mi.all_equal(''))
+ self.assertTrue(mi.all_equal([]))
+
+ def test_one(self):
+ """Return True if the iterable is singular"""
+ self.assertTrue(mi.all_equal('0'))
+ self.assertTrue(mi.all_equal([0]))
+
+
+class QuantifyTests(TestCase):
+ """Tests for ``quantify()``"""
+
+ def test_happy_path(self):
+ """Make sure True count is returned"""
+ q = [True, False, True]
+ self.assertEqual(mi.quantify(q), 2)
+
+ def test_custom_predicate(self):
+ """Ensure non-default predicates return as expected"""
+ q = range(10)
+ self.assertEqual(mi.quantify(q, lambda x: x % 2 == 0), 5)
+
+
+class PadnoneTests(TestCase):
+ """Tests for ``padnone()``"""
+
+ def test_happy_path(self):
+ """wrapper iterator should return None indefinitely"""
+ r = range(2)
+ p = mi.padnone(r)
+ self.assertEqual([0, 1, None, None], [next(p) for _ in range(4)])
+
+
+class NcyclesTests(TestCase):
+ """Tests for ``nyclces()``"""
+
+ def test_happy_path(self):
+ """cycle a sequence three times"""
+ r = ["a", "b", "c"]
+ n = mi.ncycles(r, 3)
+ self.assertEqual(
+ ["a", "b", "c", "a", "b", "c", "a", "b", "c"],
+ list(n)
+ )
+
+ def test_null_case(self):
+ """asking for 0 cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), 0)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+    def test_pathological_case(self):
+ """asking for negative cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), -10)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+
+class DotproductTests(TestCase):
+ """Tests for ``dotproduct()``'"""
+
+ def test_happy_path(self):
+ """simple dotproduct example"""
+ self.assertEqual(400, mi.dotproduct([10, 10], [20, 20]))
+
+
+class FlattenTests(TestCase):
+ """Tests for ``flatten()``"""
+
+ def test_basic_usage(self):
+ """ensure list of lists is flattened one level"""
+ f = [[0, 1, 2], [3, 4, 5]]
+ self.assertEqual(list(range(6)), list(mi.flatten(f)))
+
+ def test_single_level(self):
+ """ensure list of lists is flattened only one level"""
+ f = [[0, [1, 2]], [[3, 4], 5]]
+ self.assertEqual([0, [1, 2], [3, 4], 5], list(mi.flatten(f)))
+
+
+class RepeatfuncTests(TestCase):
+ """Tests for ``repeatfunc()``"""
+
+ def test_simple_repeat(self):
+ """test simple repeated functions"""
+ r = mi.repeatfunc(lambda: 5)
+ self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)])
+
+ def test_finite_repeat(self):
+ """ensure limited repeat when times is provided"""
+ r = mi.repeatfunc(lambda: 5, times=5)
+ self.assertEqual([5, 5, 5, 5, 5], list(r))
+
+ def test_added_arguments(self):
+ """ensure arguments are applied to the function"""
+ r = mi.repeatfunc(lambda x: x, 2, 3)
+ self.assertEqual([3, 3], list(r))
+
+ def test_null_times(self):
+ """repeat 0 should return an empty iterator"""
+ r = mi.repeatfunc(range, 0, 3)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class PairwiseTests(TestCase):
+ """Tests for ``pairwise()``"""
+
+ def test_base_case(self):
+ """ensure an iterable will return pairwise"""
+ p = mi.pairwise([1, 2, 3])
+ self.assertEqual([(1, 2), (2, 3)], list(p))
+
+ def test_short_case(self):
+ """ensure an empty iterator if there's not enough values to pair"""
+ p = mi.pairwise("a")
+ self.assertRaises(StopIteration, lambda: next(p))
+
+
+class GrouperTests(TestCase):
+ """Tests for ``grouper()``"""
+
+ def test_even(self):
+ """Test when group size divides evenly into the length of
+ the iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDEF')), [('A', 'B', 'C'), ('D', 'E', 'F')]
+ )
+
+ def test_odd(self):
+ """Test when group size does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDE')), [('A', 'B', 'C'), ('D', 'E', None)]
+ )
+
+ def test_fill_value(self):
+ """Test that the fill value is used to pad the final group"""
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDE', 'x')),
+ [('A', 'B', 'C'), ('D', 'E', 'x')]
+ )
+
+
+class RoundrobinTests(TestCase):
+ """Tests for ``roundrobin()``"""
+
+ def test_even_groups(self):
+ """Ensure ordered output from evenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABC', [1, 2, 3], range(3))),
+ ['A', 1, 0, 'B', 2, 1, 'C', 3, 2]
+ )
+
+ def test_uneven_groups(self):
+ """Ensure ordered output from unevenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABCD', [1, 2], range(0))),
+ ['A', 1, 'B', 2, 'C', 'D']
+ )
+
+
+class PartitionTests(TestCase):
+ """Tests for ``partition()``"""
+
+ def test_bool(self):
+ """Test when pred() returns a boolean"""
+ lesser, greater = mi.partition(lambda x: x > 5, range(10))
+ self.assertEqual(list(lesser), [0, 1, 2, 3, 4, 5])
+ self.assertEqual(list(greater), [6, 7, 8, 9])
+
+ def test_arbitrary(self):
+ """Test when pred() returns an integer"""
+ divisibles, remainders = mi.partition(lambda x: x % 3, range(10))
+ self.assertEqual(list(divisibles), [0, 3, 6, 9])
+ self.assertEqual(list(remainders), [1, 2, 4, 5, 7, 8])
+
+
+class PowersetTests(TestCase):
+ """Tests for ``powerset()``"""
+
+ def test_combinatorics(self):
+ """Ensure a proper enumeration"""
+ p = mi.powerset([1, 2, 3])
+ self.assertEqual(
+ list(p),
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+ )
+
+
+class UniqueEverseenTests(TestCase):
+ """Tests for ``unique_everseen()``"""
+
+ def test_everseen(self):
+ """ensure duplicate elements are ignored"""
+ u = mi.unique_everseen('AAAABBBBCCDAABBB')
+ self.assertEqual(
+ ['A', 'B', 'C', 'D'],
+ list(u)
+ )
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_everseen('aAbACCc', key=str.lower)
+ self.assertEqual(list('abC'), list(u))
+
+ def test_unhashable(self):
+ """ensure things work for unhashable items"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+ def test_unhashable_key(self):
+ """ensure things work for unhashable items with a custom key"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable, key=lambda x: x)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+
+class UniqueJustseenTests(TestCase):
+ """Tests for ``unique_justseen()``"""
+
+ def test_justseen(self):
+ """ensure only last item is remembered"""
+ u = mi.unique_justseen('AAAABBBCCDABB')
+ self.assertEqual(list('ABCDAB'), list(u))
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_justseen('AABCcAD', str.lower)
+ self.assertEqual(list('ABCAD'), list(u))
+
+
+class IterExceptTests(TestCase):
+ """Tests for ``iter_except()``"""
+
+ def test_exact_exception(self):
+ """ensure the exact specified exception is caught"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, IndexError)
+ self.assertEqual(list(i), [3, 2, 1])
+
+ def test_generic_exception(self):
+ """ensure the generic exception can be caught"""
+ l = [1, 2]
+ i = mi.iter_except(l.pop, Exception)
+ self.assertEqual(list(i), [2, 1])
+
+ def test_uncaught_exception_is_raised(self):
+ """ensure a non-specified exception is raised"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, KeyError)
+ self.assertRaises(IndexError, lambda: list(i))
+
+ def test_first(self):
+ """ensure first is run before the function"""
+ l = [1, 2, 3]
+ f = lambda: 25
+ i = mi.iter_except(l.pop, IndexError, f)
+ self.assertEqual(list(i), [25, 3, 2, 1])
+
+
+class FirstTrueTests(TestCase):
+ """Tests for ``first_true()``"""
+
+ def test_something_true(self):
+ """Test with no keywords"""
+ self.assertEqual(mi.first_true(range(10)), 1)
+
+ def test_nothing_true(self):
+ """Test default return value."""
+ self.assertIsNone(mi.first_true([0, 0, 0]))
+
+ def test_default(self):
+ """Test with a default keyword"""
+ self.assertEqual(mi.first_true([0, 0, 0], default='!'), '!')
+
+ def test_pred(self):
+ """Test with a custom predicate"""
+ self.assertEqual(
+ mi.first_true([2, 4, 6], pred=lambda x: x % 3 == 0), 6
+ )
+
+
+class RandomProductTests(TestCase):
+ """Tests for ``random_product()``
+
+ Since random.choice() has different results with the same seed across
+    Python versions 2.x and 3.x, these tests use highly probable events to
+ create predictable outcomes across platforms.
+ """
+
+ def test_simple_lists(self):
+ """Ensure that one item is chosen from each list in each pair.
+ Also ensure that each item from each list eventually appears in
+ the chosen combinations.
+
+ Odds are roughly 1 in 7.1 * 10e16 that one item from either list will
+ not be chosen after 100 samplings of one item from each list. Just to
+ be safe, better use a known random seed, too.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ n, m = zip(*[mi.random_product(nums, lets) for _ in range(100)])
+ n, m = set(n), set(m)
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+ def test_list_with_repeat(self):
+ """ensure multiple items are chosen, and that they appear to be chosen
+ from one list then the next, in proper order.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ r = list(mi.random_product(nums, lets, repeat=100))
+ self.assertEqual(2 * 100, len(r))
+ n, m = set(r[::2]), set(r[1::2])
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+
+class RandomPermutationTests(TestCase):
+ """Tests for ``random_permutation()``"""
+
+ def test_full_permutation(self):
+ """ensure every item from the iterable is returned in a new ordering
+
+        15 elements have a 1 in 1.3 * 10e12 chance of appearing in sorted
+        order, so we fix a seed value just to be sure.
+
+ """
+ i = range(15)
+ r = mi.random_permutation(i)
+ self.assertEqual(set(i), set(r))
+        if tuple(i) == r:
+ raise AssertionError("Values were not permuted")
+
+ def test_partial_permutation(self):
+ """ensure all returned items are from the iterable, that the returned
+ permutation is of the desired length, and that all items eventually
+ get returned.
+
+ Sampling 100 permutations of length 5 from a set of 15 leaves a
+ (2/3)^100 chance that an item will not be chosen. Multiplied by 15
+ items, there is a 1 in 2.6e16 chance that at least 1 item will not
+ show up in the resulting output. Using a random seed will fix that.
+
+ """
+ items = range(15)
+ item_set = set(items)
+ all_items = set()
+ for _ in range(100):
+ permutation = mi.random_permutation(items, 5)
+ self.assertEqual(len(permutation), 5)
+ permutation_set = set(permutation)
+ self.assertLessEqual(permutation_set, item_set)
+ all_items |= permutation_set
+ self.assertEqual(all_items, item_set)
+
+
+class RandomCombinationTests(TestCase):
+ """Tests for ``random_combination()``"""
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+ def test_no_replacement(self):
+ """ensure that elements are sampled without replacement"""
+ items = range(15)
+ for _ in range(50):
+ combination = mi.random_combination(items, len(items))
+ self.assertEqual(len(combination), len(set(combination)))
+ self.assertRaises(
+ ValueError, lambda: mi.random_combination(items, len(items) + 1)
+ )
+
+
+class RandomCombinationWithReplacementTests(TestCase):
+ """Tests for ``random_combination_with_replacement()``"""
+
+ def test_replacement(self):
+ """ensure that elements are sampled with replacement"""
+ items = range(5)
+ combo = mi.random_combination_with_replacement(items, len(items) * 2)
+ self.assertEqual(2 * len(items), len(combo))
+ if len(set(combo)) == len(combo):
+ raise AssertionError("Combination contained no duplicates")
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination_with_replacement(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+
+class NthCombinationTests(TestCase):
+ def test_basic(self):
+ iterable = 'abcdefg'
+ r = 4
+ for index, expected in enumerate(combinations(iterable, r)):
+ actual = mi.nth_combination(iterable, r, index)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.nth_combination(range(180), 4, 2000000)
+ expected = (2, 12, 35, 126)
+ self.assertEqual(actual, expected)
+
+ def test_invalid_r(self):
+ for r in (-1, 3):
+ with self.assertRaises(ValueError):
+ mi.nth_combination([], r, 0)
+
+ def test_invalid_index(self):
+ with self.assertRaises(IndexError):
+ mi.nth_combination('abcdefg', 3, -36)
+
+
+class PrependTests(TestCase):
+ def test_basic(self):
+ value = 'a'
+ iterator = iter('bcdefg')
+ actual = list(mi.prepend(value, iterator))
+ expected = list('abcdefg')
+ self.assertEqual(actual, expected)
+
+ def test_multiple(self):
+ value = 'ab'
+ iterator = iter('cdefg')
+ actual = tuple(mi.prepend(value, iterator))
+ expected = ('ab',) + tuple('cdefg')
+ self.assertEqual(actual, expected)
diff --git a/contrib/python/more-itertools/py2/patches/01-fix-tests.patch b/contrib/python/more-itertools/py2/patches/01-fix-tests.patch
new file mode 100644
index 0000000000..85602736df
--- /dev/null
+++ b/contrib/python/more-itertools/py2/patches/01-fix-tests.patch
@@ -0,0 +1,18 @@
+--- contrib/python/more-itertools/py2/more_itertools/tests/test_more.py (index)
++++ contrib/python/more-itertools/py2/more_itertools/tests/test_more.py (working tree)
+@@ -122,13 +122,13 @@ class IterOnlyRange:
+ raise an ``AttributeError`` rather than ``TypeError`` in Python 2.
+
+ >>> r = IterOnlyRange(5)
+- >>> r[0]
++ >>> r[0] # doctest: +SKIP
+ AttributeError: IterOnlyRange instance has no attribute '__getitem__'
+
+ Note: In Python 3, ``TypeError`` will be raised because ``object`` is
+ inherited implicitly by default.
+
+- >>> r[0]
++ >>> r[0] # doctest: +SKIP
+ TypeError: 'IterOnlyRange' object does not support indexing
+ """
+ def __init__(self, n):
diff --git a/contrib/python/more-itertools/py2/tests/ya.make b/contrib/python/more-itertools/py2/tests/ya.make
new file mode 100644
index 0000000000..8aecf61cc6
--- /dev/null
+++ b/contrib/python/more-itertools/py2/tests/ya.make
@@ -0,0 +1,18 @@
+PY2TEST()
+
+OWNER(g:python-contrib)
+
+PEERDIR(
+ contrib/python/more-itertools
+)
+
+SRCDIR(contrib/python/more-itertools/py2/more_itertools/tests)
+
+TEST_SRCS(
+ test_more.py
+ test_recipes.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/more-itertools/py2/ya.make b/contrib/python/more-itertools/py2/ya.make
new file mode 100644
index 0000000000..0a914e4f42
--- /dev/null
+++ b/contrib/python/more-itertools/py2/ya.make
@@ -0,0 +1,34 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+OWNER(g:python-contrib)
+
+VERSION(5.0.0)
+
+LICENSE(MIT)
+
+PEERDIR(
+ contrib/python/six
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ more_itertools/__init__.py
+ more_itertools/more.py
+ more_itertools/recipes.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/more-itertools/py2/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/more-itertools/py3/.dist-info/METADATA b/contrib/python/more-itertools/py3/.dist-info/METADATA
new file mode 100644
index 0000000000..9efacdd745
--- /dev/null
+++ b/contrib/python/more-itertools/py3/.dist-info/METADATA
@@ -0,0 +1,521 @@
+Metadata-Version: 2.1
+Name: more-itertools
+Version: 8.12.0
+Summary: More routines for operating on iterables, beyond itertools
+Home-page: https://github.com/more-itertools/more-itertools
+Author: Erik Rose
+Author-email: erikrose@grinchcentral.com
+License: MIT
+Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.5
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+==============
+More Itertools
+==============
+
+.. image:: https://readthedocs.org/projects/more-itertools/badge/?version=latest
+ :target: https://more-itertools.readthedocs.io/en/stable/
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+| | `ichunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ichunked>`_, |
+| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+| | `split_into <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_into>`_, |
+| | `split_when <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_when>`_, |
+| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+| | `unzip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unzip>`_, |
+| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+| | `substrings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings>`_, |
+| | `substrings_indexes <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings_indexes>`_, |
+| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+| | `windowed_complete <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed_complete>`_, |
+| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_, |
+| | `triplewise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.triplewise>`_, |
+| | `sliding_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliding_window>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+| | `mark_ends <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.mark_ends>`_, |
+| | `repeat_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeat_last>`_, |
+| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+| | `pad_none <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pad_none>`_, |
+| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+| | `interleave_evenly <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_evenly>`_, |
+| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+| | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, |
+| | `zip_broadcast <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_broadcast>`_, |
+| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, |
+| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, |
+| | `value_chain <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.value_chain>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+| | `sample <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sample>`_, |
+| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+| | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, |
+| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+| | `all_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_unique>`_, |
+| | `minmax <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.minmax>`_, |
+| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+| | `only <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.only>`_, |
+| | `strictly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strictly_n>`_, |
+| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+| | `filter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.filter_except>`_, |
+| | `map_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_except>`_, |
+| | `nth_or_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_or_last>`_, |
+| | `unique_in_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_in_window>`_, |
+| | `before_and_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.before_and_after>`_, |
+| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+| | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertoo ls.unique_everseen>`_, |
+| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_, |
+| | `duplicates_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_everseen>`_, |
+| | `duplicates_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_justseen>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+| | `distinct_combinations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, |
+| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+| | `partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partitions>`_, |
+| | `set_partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.set_partitions>`_, |
+| | `product_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.product_index>`_, |
+| | `combination_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.combination_index>`_, |
+| | `permutation_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.permutation_index>`_, |
+| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+| | `nth_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_product>`_, |
+| | `nth_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_permutation>`_, |
+| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+| | `countable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.countable>`_, |
+| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+| | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+| | `time_limited <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.time_limited>`_, |
+| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/stable/api.html>`_.
+
+
+Links elsewhere
+===============
+
+Blog posts about ``more-itertools``:
+
+* `Yo, I heard you like decorators <https://www.bbayles.com/index/decorator_factory>`__
+* `Tour of Python Itertools <https://martinheinz.dev/blog/16>`__ (`Alternate <https://dev.to/martinheinz/tour-of-python-itertools-4122>`__)
+* `Real-World Python More Itertools <https://www.gidware.com/real-world-more-itertools/>`_
+
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/more-itertools/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
+
+
+Version History
+===============
+
+8.12.0
+------
+
+* Bug fixes
+ * Some documentation issues were fixed (thanks to Masynchin, spookylukey, astrojuanlu, and stephengmatthews)
+ * Python 3.5 support was temporarily restored (thanks to mattbonnell)
+
+8.11.0
+------
+
+* New functions
+ * The before_and_after, sliding_window, and triplewise recipes from the Python 3.10 docs were added
+ * duplicates_everseen and duplicates_justseen (thanks to OrBin and DavidPratt512)
+ * minmax (thanks to Ricocotam, MSeifert04, and ruancomelli)
+ * strictly_n (thanks to hwalinga and NotWearingPants)
+ * unique_in_window
+
+* Changes to existing functions
+ * groupby_transform had its type stub improved (thanks to mjk4 and ruancomelli)
+ * is_sorted now accepts a ``strict`` parameter (thanks to Dutcho and ruancomelli)
+ * zip_broadcast was updated to fix a bug (thanks to kalekundert)
+
+8.10.0
+------
+
+* Changes to existing functions
+ * The type stub for iter_except was improved (thanks to MarcinKonowalczyk)
+
+* Other changes:
+ * Type stubs now ship with the source release (thanks to saaketp)
+ * The Sphinx docs were improved (thanks to MarcinKonowalczyk)
+
+8.9.0
+-----
+
+* New functions
+ * interleave_evenly (thanks to mbugert)
+ * repeat_each (thanks to FinalSh4re)
+ * chunked_even (thanks to valtron)
+ * map_if (thanks to sassbalint)
+ * zip_broadcast (thanks to kalekundert)
+
+* Changes to existing functions
+ * The type stub for chunked was improved (thanks to PhilMacKay)
+  * The type stubs for zip_equal and zip_offset were improved (thanks to maffoo)
+ * Building Sphinx docs locally was improved (thanks to MarcinKonowalczyk)
+
+8.8.0
+-----
+
+* New functions
+ * countable (thanks to krzysieq)
+
+* Changes to existing functions
+  * split_before was updated to handle empty collections (thanks to TiunovNN)
+ * unique_everseen got a performance boost (thanks to Numerlor)
+ * The type hint for value_chain was corrected (thanks to vr2262)
+
+8.7.0
+-----
+
+* New functions
+ * convolve (from the Python itertools docs)
+ * product_index, combination_index, and permutation_index (thanks to N8Brooks)
+ * value_chain (thanks to jenstroeger)
+
+* Changes to existing functions
+ * distinct_combinations now uses a non-recursive algorithm (thanks to knutdrand)
+ * pad_none is now the preferred name for padnone, though the latter remains available.
+ * pairwise will now use the Python standard library implementation on Python 3.10+
+ * sort_together now accepts a ``key`` argument (thanks to brianmaissy)
+ * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis)
+ * time_limited can now indicate whether its iterator has expired (thanks to roysmith)
+ * The implementation of unique_everseen was improved (thanks to plammens)
+
+* Other changes:
+ * Various documentation updates (thanks to cthoyt, Evantm, and cyphase)
+
+8.6.0
+-----
+
+* New itertools
+ * all_unique (thanks to brianmaissy)
+ * nth_product and nth_permutation (thanks to N8Brooks)
+
+* Changes to existing itertools
+ * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool)
+
+* Other changes
+ * Python 3.5 has reached its end of life and is no longer supported.
+ * Python 3.9 is officially supported.
+ * Various documentation fixes (thanks to timgates42)
+
+8.5.0
+-----
+
+* New itertools
+ * windowed_complete (thanks to MarcinKonowalczyk)
+
+* Changes to existing itertools:
+ * The is_sorted implementation was improved (thanks to cool-RR)
+  * groupby_transform now accepts a ``reducefunc`` parameter.
+ * The last implementation was improved (thanks to brianmaissy)
+
+* Other changes
+ * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0)
+ * The tests for distinct_combinations were improved (thanks to Minabsapi)
+ * Automated tests now run on GitHub Actions. All commits now check:
+ * That unit tests pass
+ * That the examples in docstrings work
+ * That test coverage remains high (using `coverage`)
+ * For linting errors (using `flake8`)
+ * For consistent style (using `black`)
+ * That the type stubs work (using `mypy`)
+ * That the docs build correctly (using `sphinx`)
+ * That packages build correctly (using `twine`)
+
+8.4.0
+-----
+
+* New itertools
+ * mark_ends (thanks to kalekundert)
+ * is_sorted
+
+* Changes to existing itertools:
+ * islice_extended can now be used with real slices (thanks to cool-RR)
+ * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky)
+
+* Other changes
+ * Automated tests now enforce code style (using `black <https://github.com/psf/black>`__)
+ * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf)
+ * The test configuration for mypy was updated (thanks to blueyed)
+
+
+8.3.0
+-----
+
+* New itertools
+ * zip_equal (thanks to frankier and alexmojaki)
+
+* Changes to existing itertools:
+  * split_at, split_before, split_after, and split_when all got a ``maxsplit`` parameter (thanks to jferard and ilai-deutel)
+ * split_at now accepts a ``keep_separator`` parameter (thanks to jferard)
+ * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel)
+ * The windowed implementation was improved (thanks to SergBobrovsky)
+ * The spy implementation was improved (thanks to has2k1)
+
+* Other changes
+ * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel)
+ * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne)
+
+8.2.0
+-----
+
+* Bug fixes
+ * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel)
+
+* Changes to existing itertools:
+ * numeric_range now behaves more like the built-in range. (thanks to jferard)
+ * bucket now allows for enumerating keys. (thanks to alexchandel)
+  * sliced should now work for numpy arrays. (thanks to sswingle)
+ * seekable now has a ``maxlen`` parameter.
+
+8.1.0
+-----
+
+* Bug fixes
+ * partition works with ``pred=None`` again. (thanks to MSeifert04)
+
+* New itertools
+ * sample (thanks to tommyod)
+ * nth_or_last (thanks to d-ryzhikov)
+
+* Changes to existing itertools:
+ * The implementation for divide was improved. (thanks to jferard)
+
+8.0.2
+-----
+
+* Bug fixes
+ * The type stub files are now part of the wheel distribution (thanks to keisheiled)
+
+8.0.1
+-----
+
+* Bug fixes
+ * The type stub files now work for functions imported from the
+ root package (thanks to keisheiled)
+
+8.0.0
+-----
+
+* New itertools and other additions
+ * This library now ships type hints for use with mypy.
+ (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance)
+ * split_when (thanks to jferard)
+ * repeat_last (thanks to d-ryzhikov)
+
+* Changes to existing itertools:
+ * The implementation for set_partitions was improved. (thanks to jferard)
+ * partition was optimized for expensive predicates. (thanks to stevecj)
+ * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky)
+ * The implementation for difference was improved. (thanks to Jabbey92)
+
+* Other changes
+ * Python 3.4 has reached its end of life and is no longer supported.
+ * Python 3.8 is officially supported. (thanks to jdufresne)
+ * The ``collate`` function has been deprecated.
+ It raises a ``DeprecationWarning`` if used, and will be removed in a future release.
+ * one and only now provide more informative error messages. (thanks to gabbard)
+ * Unit tests were moved outside of the main package (thanks to jdufresne)
+ * Various documentation fixes (thanks to kriomant, gabbard, jdufresne)
+
+
+7.2.0
+-----
+
+* New itertools
+ * distinct_combinations
+ * set_partitions (thanks to kbarrett)
+ * filter_except
+ * map_except
+
+7.1.0
+-----
+
+* New itertools
+ * ichunked (thanks davebelais and youtux)
+ * only (thanks jaraco)
+
+* Changes to existing itertools:
+ * numeric_range now supports ranges specified by
+ ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests).
+ * difference now supports an *initial* keyword argument.
+
+
+* Other changes
+ * Various documentation fixes (thanks raimon49, pylang)
+
+7.0.0
+-----
+
+* New itertools:
+ * time_limited
+ * partitions (thanks to rominf and Saluev)
+ * substrings_indexes (thanks to rominf)
+
+* Changes to existing itertools:
+ * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet)
+
+The major version update is due to the change in the default behavior of
+collapse. It now treats ``bytes`` objects the same as ``str`` objects.
+This aligns its behavior with always_iterable.
+
+.. code-block:: python
+
+ >>> from more_itertools import collapse
+ >>> iterable = [[1, 2], b'345', [6]]
+ >>> print(list(collapse(iterable)))
+ [1, 2, b'345', 6]
+
+6.0.0
+-----
+
+* Major changes:
+ * Python 2.7 is no longer supported. The 5.0.0 release will be the last
+ version targeting Python 2.7.
+ * All future releases will target the active versions of Python 3.
+ As of 2019, those are Python 3.4 and above.
+ * The ``six`` library is no longer a dependency.
+ * The accumulate function is no longer part of this library. You
+ may import a better version from the standard ``itertools`` module.
+
+* Changes to existing itertools:
+  * The order of the parameters in grouper has changed to match
+    the latest recipe in the itertools documentation. Use of the old order
+    will be supported in this release, but will emit a ``DeprecationWarning``.
+    The legacy behavior will be dropped in a future release. (thanks to jaraco)
+ * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values <https://stackoverflow.com/questions/6284396/permutations-with-unique-values>`_ at StackOverflow.)
+ * An unused parameter was removed from substrings. (thanks to pylang)
+
+* Other changes:
+ * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04)
+ * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk)
+
+
diff --git a/contrib/python/more-itertools/py3/.dist-info/top_level.txt b/contrib/python/more-itertools/py3/.dist-info/top_level.txt
new file mode 100644
index 0000000000..a5035befb3
--- /dev/null
+++ b/contrib/python/more-itertools/py3/.dist-info/top_level.txt
@@ -0,0 +1 @@
+more_itertools
diff --git a/contrib/python/more-itertools/py3/LICENSE b/contrib/python/more-itertools/py3/LICENSE
new file mode 100644
index 0000000000..0a523bece3
--- /dev/null
+++ b/contrib/python/more-itertools/py3/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/contrib/python/more-itertools/py3/README.rst b/contrib/python/more-itertools/py3/README.rst
new file mode 100644
index 0000000000..4df22091a4
--- /dev/null
+++ b/contrib/python/more-itertools/py3/README.rst
@@ -0,0 +1,200 @@
+==============
+More Itertools
+==============
+
+.. image:: https://readthedocs.org/projects/more-itertools/badge/?version=latest
+ :target: https://more-itertools.readthedocs.io/en/stable/
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+| | `ichunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ichunked>`_, |
+| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+| | `split_into <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_into>`_, |
+| | `split_when <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_when>`_, |
+| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+| | `unzip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unzip>`_, |
+| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+| | `substrings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings>`_, |
+| | `substrings_indexes <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings_indexes>`_, |
+| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+| | `windowed_complete <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed_complete>`_, |
+| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_, |
+| | `triplewise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.triplewise>`_, |
+| | `sliding_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliding_window>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+| | `mark_ends <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.mark_ends>`_, |
+| | `repeat_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeat_last>`_, |
+| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+| | `pad_none <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pad_none>`_, |
+| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+| | `interleave_evenly <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_evenly>`_, |
+| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+| | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, |
+| | `zip_broadcast <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_broadcast>`_, |
+| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, |
+| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, |
+| | `value_chain <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.value_chain>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+| | `sample <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sample>`_, |
+| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+| | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, |
+| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+| | `all_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_unique>`_, |
+| | `minmax <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.minmax>`_, |
+| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+| | `only <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.only>`_, |
+| | `strictly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strictly_n>`_, |
+| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+| | `filter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.filter_except>`_, |
+| | `map_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_except>`_, |
+| | `nth_or_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_or_last>`_, |
+| | `unique_in_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_in_window>`_, |
+| | `before_and_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.before_and_after>`_, |
+| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+|                        | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_,                                           |
+| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_, |
+| | `duplicates_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_everseen>`_, |
+| | `duplicates_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_justseen>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+| | `distinct_combinations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, |
+| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+| | `partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partitions>`_, |
+| | `set_partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.set_partitions>`_, |
+| | `product_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.product_index>`_, |
+| | `combination_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.combination_index>`_, |
+| | `permutation_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.permutation_index>`_, |
+| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+| | `nth_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_product>`_, |
+| | `nth_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_permutation>`_, |
+| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+| | `countable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.countable>`_, |
+| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+| | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+| | `time_limited <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.time_limited>`_, |
+| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/stable/api.html>`_.
+
+
+Links elsewhere
+===============
+
+Blog posts about ``more-itertools``:
+
+* `Yo, I heard you like decorators <https://www.bbayles.com/index/decorator_factory>`__
+* `Tour of Python Itertools <https://martinheinz.dev/blog/16>`__ (`Alternate <https://dev.to/martinheinz/tour-of-python-itertools-4122>`__)
+* `Real-World Python More Itertools <https://www.gidware.com/real-world-more-itertools/>`_
+
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/more-itertools/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
diff --git a/contrib/python/more-itertools/py3/more_itertools/__init__.py b/contrib/python/more-itertools/py3/more_itertools/__init__.py
new file mode 100644
index 0000000000..ea38bef1f6
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/__init__.py
@@ -0,0 +1,4 @@
+from .more import * # noqa
+from .recipes import * # noqa
+
+__version__ = '8.12.0'
diff --git a/contrib/python/more-itertools/py3/more_itertools/__init__.pyi b/contrib/python/more-itertools/py3/more_itertools/__init__.pyi
new file mode 100644
index 0000000000..96f6e36c7f
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/__init__.pyi
@@ -0,0 +1,2 @@
+from .more import *
+from .recipes import *
diff --git a/contrib/python/more-itertools/py3/more_itertools/more.py b/contrib/python/more-itertools/py3/more_itertools/more.py
new file mode 100644
index 0000000000..630af973f2
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/more.py
@@ -0,0 +1,4317 @@
+import warnings
+
+from collections import Counter, defaultdict, deque, abc
+from collections.abc import Sequence
+from concurrent.futures import ThreadPoolExecutor
+from functools import partial, reduce, wraps
+from heapq import merge, heapify, heapreplace, heappop
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ takewhile,
+ tee,
+ zip_longest,
+)
+from math import exp, factorial, floor, log
+from queue import Empty, Queue
+from random import random, randrange, uniform
+from operator import itemgetter, mul, sub, gt, lt, ge, le
+from sys import hexversion, maxsize
+from time import monotonic
+
+from .recipes import (
+ consume,
+ flatten,
+ pairwise,
+ powerset,
+ take,
+ unique_everseen,
+)
+
+__all__ = [
+ 'AbortThread',
+ 'SequenceView',
+ 'UnequalIterablesError',
+ 'adjacent',
+ 'all_unique',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'callback_iter',
+ 'chunked',
+ 'chunked_even',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'combination_index',
+ 'consecutive_groups',
+ 'consumer',
+ 'count_cycle',
+ 'countable',
+ 'difference',
+ 'distinct_combinations',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'duplicates_everseen',
+ 'duplicates_justseen',
+ 'exactly_n',
+ 'filter_except',
+ 'first',
+ 'groupby_transform',
+ 'ichunked',
+ 'ilen',
+ 'interleave',
+ 'interleave_evenly',
+ 'interleave_longest',
+ 'intersperse',
+ 'is_sorted',
+ 'islice_extended',
+ 'iterate',
+ 'last',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_except',
+ 'map_if',
+ 'map_reduce',
+ 'mark_ends',
+ 'minmax',
+ 'nth_or_last',
+ 'nth_permutation',
+ 'nth_product',
+ 'numeric_range',
+ 'one',
+ 'only',
+ 'padded',
+ 'partitions',
+ 'peekable',
+ 'permutation_index',
+ 'product_index',
+ 'raise_',
+ 'repeat_each',
+ 'repeat_last',
+ 'replace',
+ 'rlocate',
+ 'rstrip',
+ 'run_length',
+ 'sample',
+ 'seekable',
+ 'set_partitions',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_after',
+ 'split_at',
+ 'split_before',
+ 'split_into',
+ 'split_when',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'strictly_n',
+ 'substrings',
+ 'substrings_indexes',
+ 'time_limited',
+ 'unique_in_window',
+ 'unique_to_each',
+ 'unzip',
+ 'value_chain',
+ 'windowed',
+ 'windowed_complete',
+ 'with_iter',
+ 'zip_broadcast',
+ 'zip_equal',
+ 'zip_offset',
+]
+
+
+_marker = object()
+
+
+def chunked(iterable, n, strict=False):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+    By default, the last yielded list will have fewer than *n* elements
+ if the length of *iterable* is not divisible by *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ If the length of *iterable* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ list is yielded.
+
+ """
+ iterator = iter(partial(take, n, iter(iterable)), [])
+ if strict:
+ if n is None:
+ raise ValueError('n must not be None when using strict mode.')
+
+ def ret():
+ for chunk in iterator:
+ if len(chunk) != n:
+ raise ValueError('iterable is not divisible by n.')
+ yield chunk
+
+ return iter(ret())
+ else:
+ return iterator
+
+
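+# A brief usage sketch (illustrative comment only, not library code): with
+# strict=True, a short final chunk raises ValueError instead of being yielded.
+#   list(chunked([1, 2, 3, 4], 2, strict=True))     # [[1, 2], [3, 4]]
+#   list(chunked([1, 2, 3, 4, 5], 2, strict=True))  # raises ValueError
+
+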
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration as e:
+ if default is _marker:
+ raise ValueError(
+ 'first() was called on an empty iterable, and no '
+ 'default value was provided.'
+ ) from e
+ return default
+
+
+def last(iterable, default=_marker):
+ """Return the last item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> last([0, 1, 2, 3])
+ 3
+ >>> last([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ try:
+ if isinstance(iterable, Sequence):
+ return iterable[-1]
+ # Work around https://bugs.python.org/issue38525
+ elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0):
+ return next(reversed(iterable))
+ else:
+ return deque(iterable, maxlen=1)[-1]
+ except (IndexError, TypeError, StopIteration):
+ if default is _marker:
+ raise ValueError(
+ 'last() was called on an empty iterable, and no default was '
+ 'provided.'
+ )
+ return default
+
+
+def nth_or_last(iterable, n, default=_marker):
+ """Return the nth or the last item of *iterable*,
+ or *default* if *iterable* is empty.
+
+ >>> nth_or_last([0, 1, 2, 3], 2)
+ 2
+ >>> nth_or_last([0, 1], 2)
+ 1
+ >>> nth_or_last([], 0, 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ return last(islice(iterable, n + 1), default=default)
+
+
+class peekable:
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+    :func:`next`, index 1 is the item after that, and so on. The values up
+    to the given index will be cached.
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+ >>> if not p: # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+    :func:`collate` to, for example, perform an n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 3.5+, this function is an alias for :func:`heapq.merge`.
+
+ """
+ warnings.warn(
+ "collate is no longer part of more_itertools, use heapq.merge",
+ DeprecationWarning,
+ )
+ return merge(*iterables, **kwargs)
+
+
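+# As the deprecation warning above suggests, heapq.merge is the drop-in
+# replacement; a brief sketch of the equivalent call (not library code):
+#   from heapq import merge
+#   list(merge('ACDZ', 'AZ', 'JKL'))
+#   # ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+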
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # This approach was selected because benchmarks showed it's likely the
+ # fastest of the known implementations at the time of writing.
+ # See GitHub tracker: #236, #230.
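+    # zip() advances `counter` once per item, the zero-length deque consumes
+    # the pairs without storing them, and the next value of `counter` is
+    # therefore the number of items seen.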
+ counter = count()
+ deque(zip(iterable, counter), maxlen=0)
+ return next(counter)
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ yield from iterable
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+    ValueError: too few items in iterable (expected 1)
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 'too',
+ 'many', and perhaps more.
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check iterable
+ contents less destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ first_value = next(it)
+ except StopIteration as e:
+ raise (
+ too_short or ValueError('too few items in iterable (expected 1)')
+ ) from e
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def raise_(exception, *args):
+ raise exception(*args)
+
+
+def strictly_n(iterable, n, too_short=None, too_long=None):
+ """Validate that *iterable* has exactly *n* items and return them if
+ it does. If it has fewer than *n* items, call function *too_short*
+ with those items. If it has more than *n* items, call function
+ *too_long* with the first ``n + 1`` items.
+
+ >>> iterable = ['a', 'b', 'c', 'd']
+ >>> n = 4
+ >>> list(strictly_n(iterable, n))
+ ['a', 'b', 'c', 'd']
+
+ By default, *too_short* and *too_long* are functions that raise
+ ``ValueError``.
+
+ >>> list(strictly_n('ab', 3)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+    ValueError: Too few items in iterable (got 2)
+
+ >>> list(strictly_n('abc', 2)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+    ValueError: Too many items in iterable (got at least 3)
+
+ You can instead supply functions that do something else.
+ *too_short* will be called with the number of items in *iterable*.
+ *too_long* will be called with `n + 1`.
+
+ >>> def too_short(item_count):
+ ... raise RuntimeError
+ >>> it = strictly_n('abcd', 6, too_short=too_short)
+ >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ >>> def too_long(item_count):
+ ... print('The boss is going to hear about this')
+ >>> it = strictly_n('abcdef', 4, too_long=too_long)
+ >>> list(it)
+ The boss is going to hear about this
+ ['a', 'b', 'c', 'd']
+
+ """
+ if too_short is None:
+ too_short = lambda item_count: raise_(
+ ValueError,
+ 'Too few items in iterable (got {})'.format(item_count),
+ )
+
+ if too_long is None:
+ too_long = lambda item_count: raise_(
+ ValueError,
+ 'Too many items in iterable (got at least {})'.format(item_count),
+ )
+
+ it = iter(iterable)
+ for i in range(n):
+ try:
+ item = next(it)
+ except StopIteration:
+ too_short(i)
+ return
+ else:
+ yield item
+
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ too_long(n + 1)
+
+
+def distinct_permutations(iterable, r=None):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ If *r* is given, only the *r*-length permutations are yielded.
+
+ >>> sorted(distinct_permutations([1, 0, 1], r=2))
+ [(0, 1), (1, 0), (1, 1)]
+ >>> sorted(distinct_permutations(range(3), r=2))
+ [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+
+ """
+ # Algorithm: https://w.wiki/Qai
+ def _full(A):
+ while True:
+ # Yield the permutation we have
+ yield tuple(A)
+
+ # Find the largest index i such that A[i] < A[i + 1]
+ for i in range(size - 2, -1, -1):
+ if A[i] < A[i + 1]:
+ break
+ # If no such index exists, this permutation is the last one
+ else:
+ return
+
+            # Find the largest index j greater than i such that A[i] < A[j]
+ for j in range(size - 1, i, -1):
+ if A[i] < A[j]:
+ break
+
+ # Swap the value of A[i] with that of A[j], then reverse the
+ # sequence from A[i + 1] to form the new permutation
+ A[i], A[j] = A[j], A[i]
+ A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1]
+
+ # Algorithm: modified from the above
+ def _partial(A, r):
+ # Split A into the first r items and the last r items
+ head, tail = A[:r], A[r:]
+ right_head_indexes = range(r - 1, -1, -1)
+ left_tail_indexes = range(len(tail))
+
+ while True:
+ # Yield the permutation we have
+ yield tuple(head)
+
+ # Starting from the right, find the first index of the head with
+ # value smaller than the maximum value of the tail - call it i.
+ pivot = tail[-1]
+ for i in right_head_indexes:
+ if head[i] < pivot:
+ break
+ pivot = head[i]
+ else:
+ return
+
+ # Starting from the left, find the first value of the tail
+ # with a value greater than head[i] and swap.
+ for j in left_tail_indexes:
+ if tail[j] > head[i]:
+ head[i], tail[j] = tail[j], head[i]
+ break
+ # If we didn't find one, start from the right and find the first
+ # index of the head with a value greater than head[i] and swap.
+ else:
+ for j in right_head_indexes:
+ if head[j] > head[i]:
+ head[i], head[j] = head[j], head[i]
+ break
+
+ # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)]
+ tail += head[: i - r : -1] # head[i + 1:][::-1]
+ i += 1
+ head[i:], tail[:] = tail[: r - i], tail[r - i :]
+
+ items = sorted(iterable)
+
+ size = len(items)
+ if r is None:
+ r = size
+
+ if 0 < r <= size:
+ return _full(items) if (r == size) else _partial(items, r)
+
+ return iter(() if r else ((),))
+
+
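+# A worked instance of the counting formula above (an illustrative comment,
+# not library code): for [1, 0, 1], n = 3 and the element counts are 2 and 1,
+# so there are 3! / (2! * 1!) = 3 distinct permutations, matching the doctest.
+
+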
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values:
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ To slide into the iterable's items, use :func:`chain` to add filler items
+ to the left:
+
+ >>> iterable = [1, 2, 3, 4]
+ >>> n = 3
+ >>> padding = [None] * (n - 1)
+ >>> list(windowed(chain(padding, iterable), 3))
+ [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)]
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
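+    # Fill a fixed-size deque: the first full window is emitted after n items,
+    # then a window every `step` items; short tails are padded with fillvalue.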
+ window = deque(maxlen=n)
+ i = n
+ for _ in map(window.append, seq):
+ i -= 1
+ if not i:
+ i = step
+ yield tuple(window)
+
+ size = len(window)
+ if size < n:
+ yield tuple(chain(window, repeat(fillvalue, n - size)))
+ elif 0 < i < min(step, n):
+ window += (fillvalue,) * i
+ yield tuple(window)
+
+
+def substrings(iterable):
+ """Yield all of the substrings of *iterable*.
+
+ >>> [''.join(s) for s in substrings('more')]
+ ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more']
+
+ Note that non-string iterables can also be subdivided.
+
+ >>> list(substrings([0, 1, 2]))
+ [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)]
+
+ """
+ # The length-1 substrings
+ seq = []
+ for item in iter(iterable):
+ seq.append(item)
+ yield (item,)
+ seq = tuple(seq)
+ item_count = len(seq)
+
+ # And the rest
+ for n in range(2, item_count + 1):
+ for i in range(item_count - n + 1):
+ yield seq[i : i + n]
+
+
+def substrings_indexes(seq, reverse=False):
+ """Yield all substrings and their positions in *seq*
+
+ The items yielded will be a tuple of the form ``(substr, i, j)``, where
+ ``substr == seq[i:j]``.
+
+ This function only works for iterables that support slicing, such as
+ ``str`` objects.
+
+ >>> for item in substrings_indexes('more'):
+ ... print(item)
+ ('m', 0, 1)
+ ('o', 1, 2)
+ ('r', 2, 3)
+ ('e', 3, 4)
+ ('mo', 0, 2)
+ ('or', 1, 3)
+ ('re', 2, 4)
+ ('mor', 0, 3)
+ ('ore', 1, 4)
+ ('more', 0, 4)
+
+ Set *reverse* to ``True`` to yield the same items in the opposite order.
+
+
+ """
+ r = range(1, len(seq) + 1)
+ if reverse:
+ r = reversed(r)
+ return (
+ (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1)
+ )
+
+
+class bucket:
+ """Wrap *iterable* and return an object that buckets it iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character
+ >>> sorted(list(s)) # Get the keys
+ ['a', 'b', 'c']
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __iter__(self):
+ for item in self._it:
+ item_value = self._key(item)
+ if self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ yield from self._cache.keys()
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
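+    # Return a copy of the head so callers can't mutate the list that is
+    # chained back onto the iterator.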
+ return head.copy(), chain(head, it)
+
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def interleave_evenly(iterables, lengths=None):
+ """
+ Interleave multiple iterables so that their elements are evenly distributed
+ throughout the output sequence.
+
+ >>> iterables = [1, 2, 3, 4, 5], ['a', 'b']
+ >>> list(interleave_evenly(iterables))
+ [1, 2, 'a', 3, 4, 'b', 5]
+
+ >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]]
+ >>> list(interleave_evenly(iterables))
+ [1, 6, 4, 2, 7, 3, 8, 5]
+
+ This function requires iterables of known length. Iterables without
+ ``__len__()`` can be used by manually specifying lengths with *lengths*:
+
+ >>> from itertools import combinations, repeat
+ >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']]
+ >>> lengths = [4 * (4 - 1) // 2, 3]
+ >>> list(interleave_evenly(iterables, lengths=lengths))
+ [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c']
+
+ Based on Bresenham's algorithm.
+ """
+ if lengths is None:
+ try:
+ lengths = [len(it) for it in iterables]
+ except TypeError:
+ raise ValueError(
+ 'Iterable lengths could not be determined automatically. '
+ 'Specify them with the lengths keyword.'
+ )
+ elif len(iterables) != len(lengths):
+ raise ValueError('Mismatching number of iterables and lengths.')
+
+ dims = len(lengths)
+
+ # sort iterables by length, descending
+ lengths_permute = sorted(
+ range(dims), key=lambda i: lengths[i], reverse=True
+ )
+ lengths_desc = [lengths[i] for i in lengths_permute]
+ iters_desc = [iter(iterables[i]) for i in lengths_permute]
+
+ # the longest iterable is the primary one (Bresenham: the longest
+ # distance along an axis)
+ delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:]
+ iter_primary, iters_secondary = iters_desc[0], iters_desc[1:]
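+    # Bresenham-style error terms for the secondary iterables; an item is
+    # yielded from a secondary iterable whenever its error drops below zero.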
+ errors = [delta_primary // dims] * len(deltas_secondary)
+
+ to_yield = sum(lengths)
+ while to_yield:
+ yield next(iter_primary)
+ to_yield -= 1
+ # update errors for each secondary iterable
+ errors = [e - delta for e, delta in zip(errors, deltas_secondary)]
+
+ # those iterables for which the error is negative are yielded
+ # ("diagonal step" in Bresenham)
+ for i, e in enumerate(errors):
+ if e < 0:
+ yield next(iters_secondary[i])
+ to_yield -= 1
+ errors[i] += delta_primary
+
+
+def collapse(iterable, base_type=None, levels=None):
+ """Flatten an iterable with multiple levels of nesting (e.g., a list of
+ lists of tuples) into non-iterable types.
+
+ >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
+ >>> list(collapse(iterable))
+ [1, 2, 3, 4, 5, 6]
+
+ Binary and text strings are not considered iterable and
+ will not be collapsed.
+
+ To avoid collapsing other types, specify *base_type*:
+
+ >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
+ >>> list(collapse(iterable, base_type=tuple))
+ ['ab', ('cd', 'ef'), 'gh', 'ij']
+
+ Specify *levels* to stop flattening after a certain level:
+
+ >>> iterable = [('a', ['b']), ('c', ['d'])]
+ >>> list(collapse(iterable)) # Fully flattened
+ ['a', 'b', 'c', 'd']
+ >>> list(collapse(iterable, levels=1)) # Only one level flattened
+ ['a', ['b'], 'c', ['d']]
+
+ """
+
+ def walk(node, level):
+ if (
+ ((levels is not None) and (level > levels))
+ or isinstance(node, (str, bytes))
+ or ((base_type is not None) and isinstance(node, base_type))
+ ):
+ yield node
+ return
+
+ try:
+ tree = iter(node)
+ except TypeError:
+ yield node
+ return
+ else:
+ for child in tree:
+ yield from walk(child, level + 1)
+
+ yield from walk(iterable, 0)
+
+
+def side_effect(func, iterable, chunk_size=None, before=None, after=None):
+ """Invoke *func* on each item in *iterable* (or on each *chunk_size* group
+ of items) before yielding the item.
+
+ `func` must be a function that takes a single argument. Its return value
+ will be discarded.
+
+ *before* and *after* are optional functions that take no arguments. They
+ will be executed before iteration starts and after it ends, respectively.
+
+ `side_effect` can be used for logging, updating progress bars, or anything
+ that is not functionally "pure."
+
+ Emitting a status message:
+
+ >>> from more_itertools import consume
+ >>> func = lambda item: print('Received {}'.format(item))
+ >>> consume(side_effect(func, range(2)))
+ Received 0
+ Received 1
+
+ Operating on chunks of items:
+
+ >>> pair_sums = []
+ >>> func = lambda chunk: pair_sums.append(sum(chunk))
+ >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2))
+ [0, 1, 2, 3, 4, 5]
+ >>> list(pair_sums)
+ [1, 5, 9]
+
+ Writing to a file-like object:
+
+ >>> from io import StringIO
+ >>> from more_itertools import consume
+ >>> f = StringIO()
+ >>> func = lambda x: print(x, file=f)
+ >>> before = lambda: print(u'HEADER', file=f)
+ >>> after = f.close
+ >>> it = [u'a', u'b', u'c']
+ >>> consume(side_effect(func, it, before=before, after=after))
+ >>> f.closed
+ True
+
+ """
+ try:
+ if before is not None:
+ before()
+
+ if chunk_size is None:
+ for item in iterable:
+ func(item)
+ yield item
+ else:
+ for chunk in chunked(iterable, chunk_size):
+ func(chunk)
+ yield from chunk
+ finally:
+ if after is not None:
+ after()
+
+
+def sliced(seq, n, strict=False):
+ """Yield slices of length *n* from the sequence *seq*.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
+ [(1, 2, 3), (4, 5, 6)]
+
+    By default, the last yielded slice will have fewer than *n* elements
+ if the length of *seq* is not divisible by *n*:
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
+ [(1, 2, 3), (4, 5, 6), (7, 8)]
+
+ If the length of *seq* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ slice is yielded.
+
+ This function will only work for iterables that support slicing.
+ For non-sliceable iterables, see :func:`chunked`.
+
+ """
+ iterator = takewhile(len, (seq[i : i + n] for i in count(0, n)))
+ if strict:
+
+ def ret():
+ for _slice in iterator:
+ if len(_slice) != n:
+ raise ValueError("seq is not divisible by n.")
+ yield _slice
+
+ return iter(ret())
+ else:
+ return iterator
+
+
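+    # Look one item ahead: a is the current item and b the next one, so we
+    # know whether a is the last item before yielding it.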
+def split_at(iterable, pred, maxsplit=-1, keep_separator=False):
+ """Yield lists of items from *iterable*, where each list is delimited by
+ an item where callable *pred* returns ``True``.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b'))
+ [['a'], ['c', 'd', 'c'], ['a']]
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1))
+ [[0], [2], [4], [6], [8], []]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2))
+ [[0], [2], [4, 5, 6, 7, 8, 9]]
+
+ By default, the delimiting items are not included in the output.
+    To include them, set *keep_separator* to ``True``.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True))
+ [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ if pred(item):
+ yield buf
+ if keep_separator:
+ yield [item]
+ if maxsplit == 1:
+ yield list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ else:
+ buf.append(item)
+ yield buf
+
+
+def split_before(iterable, pred, maxsplit=-1):
+ """Yield lists of items from *iterable*, where each list ends just before
+ an item for which callable *pred* returns ``True``:
+
+ >>> list(split_before('OneTwo', lambda s: s.isupper()))
+ [['O', 'n', 'e'], ['T', 'w', 'o']]
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ if pred(item) and buf:
+ yield buf
+ if maxsplit == 1:
+ yield [item] + list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ buf.append(item)
+ if buf:
+ yield buf
+
+
+def split_after(iterable, pred, maxsplit=-1):
+ """Yield lists of items from *iterable*, where each list ends with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_after('one1two2', lambda s: s.isdigit()))
+ [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0))
+ [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2))
+ [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ buf.append(item)
+ if pred(item) and buf:
+ yield buf
+ if maxsplit == 1:
+ yield list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ if buf:
+ yield buf
+
+
+def split_when(iterable, pred, maxsplit=-1):
+ """Split *iterable* into pieces based on the output of *pred*.
+ *pred* should be a function that takes successive pairs of items and
+ returns ``True`` if the iterable should be split in between them.
+
+ For example, to find runs of increasing numbers, split the iterable when
+ element ``i`` is larger than element ``i + 1``:
+
+ >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y))
+ [[1, 2, 3, 3], [2, 5], [2, 4], [2]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2],
+ ... lambda x, y: x > y, maxsplit=2))
+ [[1, 2, 3, 3], [2, 5], [2, 4, 2]]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ it = iter(iterable)
+ try:
+ cur_item = next(it)
+ except StopIteration:
+ return
+
+ buf = [cur_item]
+ for next_item in it:
+ if pred(cur_item, next_item):
+ yield buf
+ if maxsplit == 1:
+ yield [next_item] + list(it)
+ return
+ buf = []
+ maxsplit -= 1
+
+ buf.append(next_item)
+ cur_item = next_item
+
+ yield buf
+
+
+def split_into(iterable, sizes):
+ """Yield a list of sequential items from *iterable* of length 'n' for each
+ integer 'n' in *sizes*.
+
+ >>> list(split_into([1,2,3,4,5,6], [1,2,3]))
+ [[1], [2, 3], [4, 5, 6]]
+
+ If the sum of *sizes* is smaller than the length of *iterable*, then the
+ remaining items of *iterable* will not be returned.
+
+ >>> list(split_into([1,2,3,4,5,6], [2,3]))
+ [[1, 2], [3, 4, 5]]
+
+ If the sum of *sizes* is larger than the length of *iterable*, fewer items
+ will be returned in the iteration that overruns *iterable* and further
+ lists will be empty:
+
+ >>> list(split_into([1,2,3,4], [1,2,3,4]))
+ [[1], [2, 3], [4], []]
+
+ When a ``None`` object is encountered in *sizes*, the returned list will
+    contain items up to the end of *iterable*, the same way that
+    :func:`itertools.islice` does:
+
+ >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None]))
+ [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]
+
+ :func:`split_into` can be useful for grouping a series of items where the
+    sizes of the groups are not uniform. An example would be a row from a
+    table in which multiple columns represent elements of the same feature
+    (e.g. a point represented by x, y, z), but the format is not the same for
+ all columns.
+ """
+ # convert the iterable argument into an iterator so its contents can
+ # be consumed by islice in case it is a generator
+ it = iter(iterable)
+
+ for size in sizes:
+ if size is None:
+ yield list(it)
+ return
+ else:
+ yield list(islice(it, size))
+
+
+def padded(iterable, fillvalue=None, n=None, next_multiple=False):
+ """Yield the elements from *iterable*, followed by *fillvalue*, such that
+ at least *n* items are emitted.
+
+ >>> list(padded([1, 2, 3], '?', 5))
+ [1, 2, 3, '?', '?']
+
+ If *next_multiple* is ``True``, *fillvalue* will be emitted until the
+ number of items emitted is a multiple of *n*::
+
+ >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
+ [1, 2, 3, 4, None, None]
+
+ If *n* is ``None``, *fillvalue* will be emitted indefinitely.
+
+ """
+ it = iter(iterable)
+ if n is None:
+ yield from chain(it, repeat(fillvalue))
+ elif n < 1:
+ raise ValueError('n must be at least 1')
+ else:
+ item_count = 0
+ for item in it:
+ yield item
+ item_count += 1
+
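+        # Pad to the next multiple of n if requested; otherwise pad up to
+        # exactly n items (a negative count makes range() empty).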
+ remaining = (n - item_count) % n if next_multiple else n - item_count
+ for _ in range(remaining):
+ yield fillvalue
+
+
+def repeat_each(iterable, n=2):
+ """Repeat each element in *iterable* *n* times.
+
+ >>> list(repeat_each('ABC', 3))
+ ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C']
+ """
+ return chain.from_iterable(map(repeat, iterable, repeat(n)))
+
+
+def repeat_last(iterable, default=None):
+ """After the *iterable* is exhausted, keep yielding its last element.
+
+ >>> list(islice(repeat_last(range(3)), 5))
+ [0, 1, 2, 2, 2]
+
+ If the iterable is empty, yield *default* forever::
+
+ >>> list(islice(repeat_last(range(0), 42), 5))
+ [42, 42, 42, 42, 42]
+
+ """
+ item = _marker
+ for item in iterable:
+ yield item
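+    # If the loop never ran, item is still the sentinel and default is used.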
+ final = default if item is _marker else item
+ yield from repeat(final)
+
+
+def distribute(n, iterable):
+ """Distribute the items from *iterable* among *n* smaller iterables.
+
+ >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 3, 5]
+ >>> list(group_2)
+ [2, 4, 6]
+
+    If the length of *iterable* is not evenly divisible by *n*, then the
+    lengths of the returned iterables will not be identical:
+
+ >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 4, 7], [2, 5], [3, 6]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = distribute(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function uses :func:`itertools.tee` and may require significant
+    storage. If you need the order of items in the smaller iterables to match
+    the original iterable, see :func:`divide`.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ children = tee(iterable, n)
+ return [islice(it, index, None, n) for index, it in enumerate(children)]
+
+
+def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
+ """Yield tuples whose elements are offset from *iterable*.
+ The amount by which the `i`-th item in each tuple is offset is given by
+ the `i`-th item in *offsets*.
+
+ >>> list(stagger([0, 1, 2, 3]))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ >>> list(stagger(range(8), offsets=(0, 2, 4)))
+ [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]
+
+ By default, the sequence will end when the final element of a tuple is the
+ last item in the iterable. To continue until the first element of a tuple
+ is the last item in the iterable, set *longest* to ``True``::
+
+ >>> list(stagger([0, 1, 2, 3], longest=True))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ children = tee(iterable, len(offsets))
+
+ return zip_offset(
+ *children, offsets=offsets, longest=longest, fillvalue=fillvalue
+ )
+
+
+class UnequalIterablesError(ValueError):
+ def __init__(self, details=None):
+ msg = 'Iterables have different lengths'
+ if details is not None:
+ msg += (': index 0 has length {}; index {} has length {}').format(
+ *details
+ )
+
+ super().__init__(msg)
+
+
+def _zip_equal_generator(iterables):
+ for combo in zip_longest(*iterables, fillvalue=_marker):
+ for val in combo:
+ if val is _marker:
+ raise UnequalIterablesError()
+ yield combo
+
+
+def _zip_equal(*iterables):
+ # Check whether the iterables are all the same size.
+ try:
+ first_size = len(iterables[0])
+ for i, it in enumerate(iterables[1:], 1):
+ size = len(it)
+ if size != first_size:
+ break
+ else:
+ # If we didn't break out, we can use the built-in zip.
+ return zip(*iterables)
+
+ # If we did break out, there was a mismatch.
+ raise UnequalIterablesError(details=(first_size, i, size))
+ # If any one of the iterables didn't have a length, start reading
+ # them until one runs out.
+ except TypeError:
+ return _zip_equal_generator(iterables)
+
+
+def zip_equal(*iterables):
+ """``zip`` the input *iterables* together, but raise
+ ``UnequalIterablesError`` if they aren't all the same length.
+
+ >>> it_1 = range(3)
+ >>> it_2 = iter('abc')
+ >>> list(zip_equal(it_1, it_2))
+ [(0, 'a'), (1, 'b'), (2, 'c')]
+
+ >>> it_1 = range(3)
+ >>> it_2 = iter('abcd')
+ >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ more_itertools.more.UnequalIterablesError: Iterables have different
+ lengths
+
+ """
+ if hexversion >= 0x30A00A6:
+ warnings.warn(
+ (
+ 'zip_equal will be removed in a future version of '
+ 'more-itertools. Use the builtin zip function with '
+ 'strict=True instead.'
+ ),
+ DeprecationWarning,
+ )
+
+ return _zip_equal(*iterables)
+
+
+def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
+ """``zip`` the input *iterables* together, but offset the `i`-th iterable
+ by the `i`-th item in *offsets*.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
+
+ This can be used as a lightweight alternative to SciPy or pandas to analyze
+ data sets in which some series have a lead or lag relationship.
+
+ By default, the sequence will end when the shortest iterable is exhausted.
+ To continue until the longest iterable is exhausted, set *longest* to
+ ``True``.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ if len(iterables) != len(offsets):
+ raise ValueError("Number of iterables and offsets didn't match")
+
+ staggered = []
+ for it, n in zip(iterables, offsets):
+ if n < 0:
+ staggered.append(chain(repeat(fillvalue, -n), it))
+ elif n > 0:
+ staggered.append(islice(it, n, None))
+ else:
+ staggered.append(it)
+
+ if longest:
+ return zip_longest(*staggered, fillvalue=fillvalue)
+
+ return zip(*staggered)
+
+
+def sort_together(iterables, key_list=(0,), key=None, reverse=False):
+ """Return the input iterables sorted together, with *key_list* as the
+ priority for sorting. All iterables are trimmed to the length of the
+ shortest one.
+
+ This can be used like the sorting function in a spreadsheet. If each
+ iterable represents a column of data, the key list determines which
+ columns are used for sorting.
+
+ By default, all iterables are sorted using the ``0``-th iterable::
+
+ >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
+ >>> sort_together(iterables)
+ [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]
+
+ Set a different key list to sort according to another iterable.
+ Specifying multiple keys dictates how ties are broken::
+
+ >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
+ >>> sort_together(iterables, key_list=(1, 2))
+ [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]
+
+ To sort by a function of the elements of the iterable, pass a *key*
+ function. Its arguments are the elements of the iterables corresponding to
+ the key list::
+
+ >>> names = ('a', 'b', 'c')
+ >>> lengths = (1, 2, 3)
+ >>> widths = (5, 2, 1)
+ >>> def area(length, width):
+ ... return length * width
+ >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area)
+ [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)]
+
+ Set *reverse* to ``True`` to sort in descending order.
+
+ >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
+ [(3, 2, 1), ('a', 'b', 'c')]
+
+ """
+ if key is None:
+ # if there is no key function, the key argument to sorted is an
+ # itemgetter
+ key_argument = itemgetter(*key_list)
+ else:
+ # if there is a key function, call it with the items at the offsets
+ # specified by the key function as arguments
+ key_list = list(key_list)
+ if len(key_list) == 1:
+ # if key_list contains a single item, pass the item at that offset
+ # as the only argument to the key function
+ key_offset = key_list[0]
+ key_argument = lambda zipped_items: key(zipped_items[key_offset])
+ else:
+ # if key_list contains multiple items, use itemgetter to return a
+ # tuple of items, which we pass as *args to the key function
+ get_key_items = itemgetter(*key_list)
+ key_argument = lambda zipped_items: key(
+ *get_key_items(zipped_items)
+ )
+
+ return list(
+ zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse))
+ )
+
+
+def unzip(iterable):
+ """The inverse of :func:`zip`, this function disaggregates the elements
+ of the zipped *iterable*.
+
+ The ``i``-th iterable contains the ``i``-th element from each element
+    of the zipped iterable. The first element is used to determine the
+ length of the remaining elements.
+
+ >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> letters, numbers = unzip(iterable)
+ >>> list(letters)
+ ['a', 'b', 'c', 'd']
+ >>> list(numbers)
+ [1, 2, 3, 4]
+
+ This is similar to using ``zip(*iterable)``, but it avoids reading
+ *iterable* into memory. Note, however, that this function uses
+ :func:`itertools.tee` and thus may require significant storage.
+
+ """
+ head, iterable = spy(iter(iterable))
+ if not head:
+ # empty iterable, e.g. zip([], [], [])
+ return ()
+ # spy returns a one-length iterable as head
+ head = head[0]
+ iterables = tee(iterable, len(head))
+
+ def itemgetter(i):
+ def getter(obj):
+ try:
+ return obj[i]
+ except IndexError:
+ # basically if we have an iterable like
+ # iter([(1, 2, 3), (4, 5), (6,)])
+ # the second unzipped iterable would fail at the third tuple
+ # since it would try to access tup[1]
+ # same with the third unzipped iterable and the second tuple
+ # to support these "improperly zipped" iterables,
+ # we create a custom itemgetter
+ # which just stops the unzipped iterables
+ # at first length mismatch
+ raise StopIteration
+
+ return getter
+
+ return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables))
+
+
+def divide(n, iterable):
+ """Divide the elements from *iterable* into *n* parts, maintaining
+ order.
+
+ >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 2, 3]
+ >>> list(group_2)
+ [4, 5, 6]
+
+    If the length of *iterable* is not evenly divisible by *n*, then the
+    lengths of the returned iterables will not be identical:
+
+ >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 2, 3], [4, 5], [6, 7]]
+
+    If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = divide(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function will exhaust the iterable before returning and may require
+ significant storage. If order is not important, see :func:`distribute`,
+ which does not first pull the iterable into memory.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ try:
+ iterable[:0]
+ except TypeError:
+ seq = tuple(iterable)
+ else:
+ seq = iterable
+
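+    # The first r chunks get q + 1 items apiece; the remaining chunks get q.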
+ q, r = divmod(len(seq), n)
+
+ ret = []
+ stop = 0
+ for i in range(1, n + 1):
+ start = stop
+ stop += q + 1 if i <= r else q
+ ret.append(iter(seq[start:stop]))
+
+ return ret
+
+
+def always_iterable(obj, base_type=(str, bytes)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
+
+
+def adjacent(predicate, iterable, distance=1):
+ """Return an iterable over `(bool, item)` tuples where the `item` is
+ drawn from *iterable* and the `bool` indicates whether
+ that item satisfies the *predicate* or is adjacent to an item that does.
+
+ For example, to find whether items are adjacent to a ``3``::
+
+ >>> list(adjacent(lambda x: x == 3, range(6)))
+ [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
+
+ Set *distance* to change what counts as adjacent. For example, to find
+ whether items are two places away from a ``3``:
+
+ >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
+ [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]
+
+ This is useful for contextualizing the results of a search function.
+ For example, a code comparison tool might want to identify lines that
+ have changed, but also surrounding lines to give the viewer of the diff
+ context.
+
+ The predicate function will only be called once for each item in the
+ iterable.
+
+ See also :func:`groupby_transform`, which can be used with this function
+ to group ranges of items with the same `bool` value.
+
+ """
+ # Allow distance=0 mainly for testing that it reproduces results with map()
+ if distance < 0:
+ raise ValueError('distance must be at least 0')
+
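+    # Pad the stream of predicate results on both sides so that every item
+    # sees a full window of 2 * distance + 1 results centered on itself.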
+ i1, i2 = tee(iterable)
+ padding = [False] * distance
+ selected = chain(padding, map(predicate, i1), padding)
+ adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1))
+ return zip(adjacent_to_selected, i2)
+
+
+def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None):
+ """An extension of :func:`itertools.groupby` that can apply transformations
+ to the grouped data.
+
+ * *keyfunc* is a function computing a key value for each item in *iterable*
+ * *valuefunc* is a function that transforms the individual items from
+ *iterable* after grouping
+ * *reducefunc* is a function that transforms each group of items
+
+ >>> iterable = 'aAAbBBcCC'
+ >>> keyfunc = lambda k: k.upper()
+ >>> valuefunc = lambda v: v.lower()
+ >>> reducefunc = lambda g: ''.join(g)
+ >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc))
+ [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]
+
+ Each optional argument defaults to an identity function if not specified.
+
+ :func:`groupby_transform` is useful when grouping elements of an iterable
+ using a separate iterable as the key. To do this, :func:`zip` the iterables
+ and pass a *keyfunc* that extracts the first element and a *valuefunc*
+ that extracts the second element::
+
+ >>> from operator import itemgetter
+ >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
+ >>> values = 'abcdefghi'
+ >>> iterable = zip(keys, values)
+ >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
+
+ Note that the order of items in the iterable is significant.
+ Only adjacent items are grouped together, so if you don't want any
+ duplicate groups, you should sort the iterable by the key function.
+
+ """
+ ret = groupby(iterable, keyfunc)
+ if valuefunc:
+ ret = ((k, map(valuefunc, g)) for k, g in ret)
+ if reducefunc:
+ ret = ((k, reducefunc(g)) for k, g in ret)
+
+ return ret
+
+
+class numeric_range(abc.Sequence, abc.Hashable):
+ """An extension of the built-in ``range()`` function whose arguments can
+ be any orderable numeric type.
+
+ With only *stop* specified, *start* defaults to ``0`` and *step*
+ defaults to ``1``. The output items will match the type of *stop*:
+
+ >>> list(numeric_range(3.5))
+ [0.0, 1.0, 2.0, 3.0]
+
+ With only *start* and *stop* specified, *step* defaults to ``1``. The
+ output items will match the type of *start*:
+
+ >>> from decimal import Decimal
+ >>> start = Decimal('2.1')
+ >>> stop = Decimal('5.1')
+ >>> list(numeric_range(start, stop))
+ [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]
+
+ With *start*, *stop*, and *step* specified the output items will match
+ the type of ``start + step``:
+
+ >>> from fractions import Fraction
+ >>> start = Fraction(1, 2) # Start at 1/2
+ >>> stop = Fraction(5, 2) # End at 5/2
+ >>> step = Fraction(1, 2) # Count by 1/2
+ >>> list(numeric_range(start, stop, step))
+ [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]
+
+ If *step* is zero, ``ValueError`` is raised. Negative steps are supported:
+
+ >>> list(numeric_range(3, -1, -1.0))
+ [3.0, 2.0, 1.0, 0.0]
+
+ Be aware of the limitations of floating point numbers; the representation
+ of the yielded numbers may be surprising.
+
+ ``datetime.datetime`` objects can be used for *start* and *stop*, if *step*
+ is a ``datetime.timedelta`` object:
+
+ >>> import datetime
+ >>> start = datetime.datetime(2019, 1, 1)
+ >>> stop = datetime.datetime(2019, 1, 3)
+ >>> step = datetime.timedelta(days=1)
+ >>> items = iter(numeric_range(start, stop, step))
+ >>> next(items)
+ datetime.datetime(2019, 1, 1, 0, 0)
+ >>> next(items)
+ datetime.datetime(2019, 1, 2, 0, 0)
+
+ """
+
+ _EMPTY_HASH = hash(range(0, 0))
+
+ def __init__(self, *args):
+ argc = len(args)
+ if argc == 1:
+ (self._stop,) = args
+ self._start = type(self._stop)(0)
+ self._step = type(self._stop - self._start)(1)
+ elif argc == 2:
+ self._start, self._stop = args
+ self._step = type(self._stop - self._start)(1)
+ elif argc == 3:
+ self._start, self._stop, self._step = args
+ elif argc == 0:
+ raise TypeError(
+ 'numeric_range expected at least '
+ '1 argument, got {}'.format(argc)
+ )
+ else:
+ raise TypeError(
+ 'numeric_range expected at most '
+ '3 arguments, got {}'.format(argc)
+ )
+
+ self._zero = type(self._step)(0)
+ if self._step == self._zero:
+ raise ValueError('numeric_range() arg 3 must not be zero')
+ self._growing = self._step > self._zero
+ self._init_len()
+
+ def __bool__(self):
+ if self._growing:
+ return self._start < self._stop
+ else:
+ return self._start > self._stop
+
+ def __contains__(self, elem):
+ if self._growing:
+ if self._start <= elem < self._stop:
+ return (elem - self._start) % self._step == self._zero
+ else:
+ if self._start >= elem > self._stop:
+ return (self._start - elem) % (-self._step) == self._zero
+
+ return False
+
+ def __eq__(self, other):
+ if isinstance(other, numeric_range):
+ empty_self = not bool(self)
+ empty_other = not bool(other)
+ if empty_self or empty_other:
+ return empty_self and empty_other # True if both empty
+ else:
+ return (
+ self._start == other._start
+ and self._step == other._step
+ and self._get_by_index(-1) == other._get_by_index(-1)
+ )
+ else:
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self._get_by_index(key)
+ elif isinstance(key, slice):
+ step = self._step if key.step is None else key.step * self._step
+
+ if key.start is None or key.start <= -self._len:
+ start = self._start
+ elif key.start >= self._len:
+ start = self._stop
+ else: # -self._len < key.start < self._len
+ start = self._get_by_index(key.start)
+
+ if key.stop is None or key.stop >= self._len:
+ stop = self._stop
+ elif key.stop <= -self._len:
+ stop = self._start
+ else: # -self._len < key.stop < self._len
+ stop = self._get_by_index(key.stop)
+
+ return numeric_range(start, stop, step)
+ else:
+ raise TypeError(
+ 'numeric range indices must be '
+ 'integers or slices, not {}'.format(type(key).__name__)
+ )
+
+ def __hash__(self):
+ if self:
+ return hash((self._start, self._get_by_index(-1), self._step))
+ else:
+ return self._EMPTY_HASH
+
+ def __iter__(self):
+ values = (self._start + (n * self._step) for n in count())
+ if self._growing:
+ return takewhile(partial(gt, self._stop), values)
+ else:
+ return takewhile(partial(lt, self._stop), values)
+
+ def __len__(self):
+ return self._len
+
+ def _init_len(self):
+ if self._growing:
+ start = self._start
+ stop = self._stop
+ step = self._step
+ else:
+ start = self._stop
+ stop = self._start
+ step = -self._step
+ distance = stop - start
+ if distance <= self._zero:
+ self._len = 0
+ else: # distance > 0 and step > 0: regular euclidean division
+ q, r = divmod(distance, step)
+ self._len = int(q) + int(r != self._zero)
+
+ def __reduce__(self):
+ return numeric_range, (self._start, self._stop, self._step)
+
+ def __repr__(self):
+ if self._step == 1:
+ return "numeric_range({}, {})".format(
+ repr(self._start), repr(self._stop)
+ )
+ else:
+ return "numeric_range({}, {}, {})".format(
+ repr(self._start), repr(self._stop), repr(self._step)
+ )
+
+ def __reversed__(self):
+ return iter(
+ numeric_range(
+ self._get_by_index(-1), self._start - self._step, -self._step
+ )
+ )
+
+ def count(self, value):
+ return int(value in self)
+
+ def index(self, value):
+ if self._growing:
+ if self._start <= value < self._stop:
+ q, r = divmod(value - self._start, self._step)
+ if r == self._zero:
+ return int(q)
+ else:
+ if self._start >= value > self._stop:
+ q, r = divmod(self._start - value, -self._step)
+ if r == self._zero:
+ return int(q)
+
+ raise ValueError("{} is not in numeric range".format(value))
+
+ def _get_by_index(self, i):
+ if i < 0:
+ i += self._len
+ if i < 0 or i >= self._len:
+ raise IndexError("numeric range object index out of range")
+ return self._start + i * self._step
+
+
+def count_cycle(iterable, n=None):
+ """Cycle through the items from *iterable* up to *n* times, yielding
+ the number of completed cycles along with each item. If *n* is omitted the
+ process repeats indefinitely.
+
+ >>> list(count_cycle('AB', 3))
+ [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]
+
+ """
+ iterable = tuple(iterable)
+ if not iterable:
+ return iter(())
+ counter = count() if n is None else range(n)
+ return ((i, item) for i in counter for item in iterable)
+
+
+def mark_ends(iterable):
+ """Yield 3-tuples of the form ``(is_first, is_last, item)``.
+
+ >>> list(mark_ends('ABC'))
+ [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')]
+
+ Use this when looping over an iterable to take special action on its first
+ and/or last items:
+
+ >>> iterable = ['Header', 100, 200, 'Footer']
+ >>> total = 0
+ >>> for is_first, is_last, item in mark_ends(iterable):
+ ... if is_first:
+ ... continue # Skip the header
+ ... if is_last:
+ ... continue # Skip the footer
+ ... total += item
+ >>> print(total)
+ 300
+ """
+ it = iter(iterable)
+
+ try:
+ b = next(it)
+ except StopIteration:
+ return
+
+ try:
+ for i in count():
+ a = b
+ b = next(it)
+ yield i == 0, False, a
+
+ except StopIteration:
+ yield i == 0, True, a
+
+
+def locate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
+ [1, 2, 4]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item.
+
+ >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
+ [1, 3]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(locate(iterable, pred=pred, window_size=3))
+ [1, 5, 9]
+
+ Use with :func:`seekable` to find indexes and then retrieve the associated
+ items:
+
+ >>> from itertools import count
+ >>> from more_itertools import seekable
+ >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
+ >>> it = seekable(source)
+ >>> pred = lambda x: x > 100
+ >>> indexes = locate(it, pred=pred)
+ >>> i = next(indexes)
+ >>> it.seek(i)
+ >>> next(it)
+ 106
+
+ """
+ if window_size is None:
+ return compress(count(), map(pred, iterable))
+
+ if window_size < 1:
+ raise ValueError('window size must be at least 1')
+
+ it = windowed(iterable, window_size, fillvalue=_marker)
+ return compress(count(), starmap(pred, it))
+
+
+def lstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the beginning
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the start of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(lstrip(iterable, pred))
+ [1, 2, None, 3, False, None]
+
+    This function is analogous to :func:`str.lstrip`, and is essentially
+    a wrapper for :func:`itertools.dropwhile`.
+
+ """
+ return dropwhile(pred, iterable)
+
+
+def rstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the end
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the end of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(rstrip(iterable, pred))
+ [None, False, None, 1, 2, None, 3]
+
+ This function is analogous to :func:`str.rstrip`.
+
+ """
+ cache = []
+ cache_append = cache.append
+ cache_clear = cache.clear
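+    # Buffer trailing items that satisfy pred; flush the buffer as soon as a
+    # non-matching item appears, so only a true suffix is ever dropped.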
+ for x in iterable:
+ if pred(x):
+ cache_append(x)
+ else:
+ yield from cache
+ cache_clear()
+ yield x
+
+
+def strip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the
+ beginning and end for which *pred* returns ``True``.
+
+ For example, to remove a set of items from both ends of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(strip(iterable, pred))
+ [1, 2, None, 3]
+
+ This function is analogous to :func:`str.strip`.
+
+ """
+ return rstrip(lstrip(iterable, pred), pred)
+
+
+class islice_extended:
+ """An extension of :func:`itertools.islice` that supports negative values
+ for *stop*, *start*, and *step*.
+
+ >>> iterable = iter('abcdefgh')
+ >>> list(islice_extended(iterable, -4, -1))
+ ['e', 'f', 'g']
+
+ Slices with negative values require some caching of *iterable*, but this
+ function takes care to minimize the amount of memory required.
+
+ For example, you can use a negative step with an infinite iterator:
+
+ >>> from itertools import count
+ >>> list(islice_extended(count(), 110, 99, -2))
+ [110, 108, 106, 104, 102, 100]
+
+ You can also use slice notation directly:
+
+ >>> iterable = map(str, count())
+ >>> it = islice_extended(iterable)[10:20:2]
+ >>> list(it)
+ ['10', '12', '14', '16', '18']
+
+ """
+
+ def __init__(self, iterable, *args):
+ it = iter(iterable)
+ if args:
+ self._iterable = _islice_helper(it, slice(*args))
+ else:
+ self._iterable = it
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iterable)
+
+ def __getitem__(self, key):
+ if isinstance(key, slice):
+ return islice_extended(_islice_helper(self._iterable, key))
+
+ raise TypeError('islice_extended.__getitem__ argument must be a slice')
+
+
+def _islice_helper(it, s):
+ start = s.start
+ stop = s.stop
+ if s.step == 0:
+ raise ValueError('step argument must be a non-zero integer or None.')
+ step = s.step or 1
+
+ if step > 0:
+ start = 0 if (start is None) else start
+
+ if start < 0:
+ # Consume all but the last -start items
+ cache = deque(enumerate(it, 1), maxlen=-start)
+ len_iter = cache[-1][0] if cache else 0
+
+ # Adjust start to be positive
+ i = max(len_iter + start, 0)
+
+ # Adjust stop to be positive
+ if stop is None:
+ j = len_iter
+ elif stop >= 0:
+ j = min(stop, len_iter)
+ else:
+ j = max(len_iter + stop, 0)
+
+ # Slice the cache
+ n = j - i
+ if n <= 0:
+ return
+
+ for index, item in islice(cache, 0, n, step):
+ yield item
+ elif (stop is not None) and (stop < 0):
+ # Advance to the start position
+ next(islice(it, start, start), None)
+
+ # When stop is negative, we have to carry -stop items while
+ # iterating
+ cache = deque(islice(it, -stop), maxlen=-stop)
+
+ for index, item in enumerate(it):
+ cached_item = cache.popleft()
+ if index % step == 0:
+ yield cached_item
+ cache.append(item)
+ else:
+ # When both start and stop are positive we have the normal case
+ yield from islice(it, start, stop, step)
+ else:
+ start = -1 if (start is None) else start
+
+ if (stop is not None) and (stop < 0):
+ # Consume all but the last items
+ n = -stop - 1
+ cache = deque(enumerate(it, 1), maxlen=n)
+ len_iter = cache[-1][0] if cache else 0
+
+ # If start and stop are both negative they are comparable and
+ # we can just slice. Otherwise we can adjust start to be negative
+ # and then slice.
+ if start < 0:
+ i, j = start, stop
+ else:
+ i, j = min(start - len_iter, -1), None
+
+ for index, item in list(cache)[i:j:step]:
+ yield item
+ else:
+ # Advance to the stop position
+ if stop is not None:
+ m = stop + 1
+ next(islice(it, m, m), None)
+
+ # stop is positive, so if start is negative they are not comparable
+ # and we need the rest of the items.
+ if start < 0:
+ i = start
+ n = None
+ # stop is None and start is positive, so we just need items up to
+ # the start index.
+ elif stop is None:
+ i = None
+ n = start + 1
+ # Both stop and start are positive, so they are comparable.
+ else:
+ i = None
+ n = start - stop
+ if n <= 0:
+ return
+
+ cache = list(islice(it, n))
+
+ yield from cache[i::step]
+
+
+def always_reversible(iterable):
+ """An extension of :func:`reversed` that supports all iterables, not
+ just those which implement the ``Reversible`` or ``Sequence`` protocols.
+
+ >>> print(*always_reversible(x for x in range(3)))
+ 2 1 0
+
+ If the iterable is already reversible, this function returns the
+ result of :func:`reversed()`. If the iterable is not reversible,
+ this function will cache the remaining items in the iterable and
+ yield them in reverse order, which may require significant storage.
+ """
+ try:
+ return reversed(iterable)
+ except TypeError:
+ return reversed(list(iterable))
+
+
+def consecutive_groups(iterable, ordering=lambda x: x):
+ """Yield groups of consecutive items using :func:`itertools.groupby`.
+ The *ordering* function determines whether two items are adjacent by
+ returning their position.
+
+ By default, the ordering function is the identity function. This is
+ suitable for finding runs of numbers:
+
+ >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
+ >>> for group in consecutive_groups(iterable):
+ ... print(list(group))
+ [1]
+ [10, 11, 12]
+ [20]
+ [30, 31, 32, 33]
+ [40]
+
+ For finding runs of adjacent letters, try using the :meth:`index` method
+ of a string of letters:
+
+ >>> from string import ascii_lowercase
+ >>> iterable = 'abcdfgilmnop'
+ >>> ordering = ascii_lowercase.index
+ >>> for group in consecutive_groups(iterable, ordering):
+ ... print(list(group))
+ ['a', 'b', 'c', 'd']
+ ['f', 'g']
+ ['i']
+ ['l', 'm', 'n', 'o', 'p']
+
+    Each group of consecutive items is an iterator that shares its source with
+    *iterable*. When an output group is advanced, the previous group is
+ no longer available unless its elements are copied (e.g., into a ``list``).
+
+ >>> iterable = [1, 2, 11, 12, 21, 22]
+ >>> saved_groups = []
+ >>> for group in consecutive_groups(iterable):
+ ... saved_groups.append(list(group)) # Copy group elements
+ >>> saved_groups
+ [[1, 2], [11, 12], [21, 22]]
+
+ """
+ for k, g in groupby(
+ enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
+ ):
+ yield map(itemgetter(1), g)
+
+
+def difference(iterable, func=sub, *, initial=None):
+ """This function is the inverse of :func:`itertools.accumulate`. By default
+ it will compute the first difference of *iterable* using
+ :func:`operator.sub`:
+
+ >>> from itertools import accumulate
+ >>> iterable = accumulate([0, 1, 2, 3, 4]) # produces 0, 1, 3, 6, 10
+ >>> list(difference(iterable))
+ [0, 1, 2, 3, 4]
+
+ *func* defaults to :func:`operator.sub`, but other functions can be
+ specified. They will be applied as follows::
+
+ A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
+
+ For example, to do progressive division:
+
+ >>> iterable = [1, 2, 6, 24, 120]
+ >>> func = lambda x, y: x // y
+ >>> list(difference(iterable, func))
+ [1, 2, 3, 4, 5]
+
+ If the *initial* keyword is set, the first element will be skipped when
+ computing successive differences.
+
+ >>> it = [10, 11, 13, 16] # from accumulate([1, 2, 3], initial=10)
+ >>> list(difference(it, initial=10))
+ [1, 2, 3]
+
+ """
+ a, b = tee(iterable)
+ try:
+ first = [next(b)]
+ except StopIteration:
+ return iter([])
+
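+    # When *initial* was given to accumulate(), the first item of the input is
+    # that initial value rather than real data, so it is dropped from the
+    # output instead of being yielded back.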
+ if initial is not None:
+ first = []
+
+ return chain(first, starmap(func, zip(b, a)))
+
+
+class SequenceView(Sequence):
+ """Return a read-only view of the sequence object *target*.
+
+ :class:`SequenceView` objects are analogous to Python's built-in
+ "dictionary view" types. They provide a dynamic view of a sequence's items,
+ meaning that when the sequence updates, so does the view.
+
+ >>> seq = ['0', '1', '2']
+ >>> view = SequenceView(seq)
+ >>> view
+ SequenceView(['0', '1', '2'])
+ >>> seq.append('3')
+ >>> view
+ SequenceView(['0', '1', '2', '3'])
+
+ Sequence views support indexing, slicing, and length queries. They act
+ like the underlying sequence, except they don't allow assignment:
+
+ >>> view[1]
+ '1'
+ >>> view[1:-1]
+ ['1', '2']
+ >>> len(view)
+ 4
+
+ Sequence views are useful as an alternative to copying, as they don't
+ require (much) extra storage.
+
+ """
+
+ def __init__(self, target):
+ if not isinstance(target, Sequence):
+ raise TypeError
+ self._target = target
+
+ def __getitem__(self, index):
+ return self._target[index]
+
+ def __len__(self):
+ return len(self._target)
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, repr(self._target))
+
+
+class seekable:
+ """Wrap an iterator to allow for seeking backward and forward. This
+ progressively caches the items in the source iterable so they can be
+ re-visited.
+
+ Call :meth:`seek` with an index to seek to that position in the source
+ iterable.
+
+ To "reset" an iterator, seek to ``0``:
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> next(it)
+ '3'
+
+ You can also seek forward:
+
+ >>> it = seekable((str(n) for n in range(20)))
+ >>> it.seek(10)
+ >>> next(it)
+ '10'
+ >>> it.seek(20) # Seeking past the end of the source isn't a problem
+ >>> list(it)
+ []
+ >>> it.seek(0) # Resetting works even after hitting the end
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+
+ Call :meth:`peek` to look ahead one item without advancing the iterator:
+
+ >>> it = seekable('1234')
+ >>> it.peek()
+ '1'
+ >>> list(it)
+ ['1', '2', '3', '4']
+ >>> it.peek(default='empty')
+ 'empty'
+
+ Before the iterator is at its end, calling :func:`bool` on it will return
+ ``True``. After it will return ``False``:
+
+ >>> it = seekable('5678')
+ >>> bool(it)
+ True
+ >>> list(it)
+ ['5', '6', '7', '8']
+ >>> bool(it)
+ False
+
+ You may view the contents of the cache with the :meth:`elements` method.
+ That returns a :class:`SequenceView`, a view that updates automatically:
+
+ >>> it = seekable((str(n) for n in range(10)))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> elements = it.elements()
+ >>> elements
+ SequenceView(['0', '1', '2'])
+ >>> next(it)
+ '3'
+ >>> elements
+ SequenceView(['0', '1', '2', '3'])
+
+ By default, the cache grows as the source iterable progresses, so beware of
+ wrapping very large or infinite iterables. Supply *maxlen* to limit the
+ size of the cache (this of course limits how far back you can seek).
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()), maxlen=2)
+ >>> next(it), next(it), next(it), next(it)
+ ('0', '1', '2', '3')
+ >>> list(it.elements())
+ ['2', '3']
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it), next(it)
+ ('2', '3', '4', '5')
+ >>> next(it)
+ '6'
+
+ """
+
+ def __init__(self, iterable, maxlen=None):
+ self._source = iter(iterable)
+ if maxlen is None:
+ self._cache = []
+ else:
+ self._cache = deque([], maxlen)
+ self._index = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._index is not None:
+ try:
+ item = self._cache[self._index]
+ except IndexError:
+ self._index = None
+ else:
+ self._index += 1
+ return item
+
+ item = next(self._source)
+ self._cache.append(item)
+ return item
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ try:
+ peeked = next(self)
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
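+        # Step the cache index back by one so the peeked item is returned
+        # again by the next call to __next__().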
+ if self._index is None:
+ self._index = len(self._cache)
+ self._index -= 1
+ return peeked
+
+ def elements(self):
+ return SequenceView(self._cache)
+
+ def seek(self, index):
+ self._index = index
+ remainder = index - len(self._cache)
+ if remainder > 0:
+ consume(self, remainder)
+
+
+class run_length:
+ """
+ :func:`run_length.encode` compresses an iterable with run-length encoding.
+ It yields groups of repeated items with the count of how many times they
+ were repeated:
+
+ >>> uncompressed = 'abbcccdddd'
+ >>> list(run_length.encode(uncompressed))
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+
+ :func:`run_length.decode` decompresses an iterable that was previously
+ compressed with run-length encoding. It yields the items of the
+ decompressed iterable:
+
+ >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> list(run_length.decode(compressed))
+ ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']
+
+ """
+
+ @staticmethod
+ def encode(iterable):
+ return ((k, ilen(g)) for k, g in groupby(iterable))
+
+ @staticmethod
+ def decode(iterable):
+ return chain.from_iterable(repeat(k, n) for k, n in iterable)
+
+
+def exactly_n(iterable, n, predicate=bool):
+ """Return ``True`` if exactly ``n`` items in the iterable are ``True``
+ according to the *predicate* function.
+
+ >>> exactly_n([True, True, False], 2)
+ True
+ >>> exactly_n([True, True, False], 1)
+ False
+ >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
+ True
+
+ The iterable will be advanced until ``n + 1`` truthy items are encountered,
+ so avoid calling it on infinite iterables.
+
+ """
+ return len(take(n + 1, filter(predicate, iterable))) == n
+
+
+def circular_shifts(iterable):
+ """Return a list of circular shifts of *iterable*.
+
+ >>> circular_shifts(range(4))
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ """
+ lst = list(iterable)
+ return take(len(lst), windowed(cycle(lst), len(lst)))
+
+
+def make_decorator(wrapping_func, result_index=0):
+ """Return a decorator version of *wrapping_func*, which is a function that
+ modifies an iterable. *result_index* is the position in that function's
+ signature where the iterable goes.
+
+ This lets you use itertools on the "production end," i.e. at function
+ definition. This can augment what the function returns without changing the
+ function's code.
+
+ For example, to produce a decorator version of :func:`chunked`:
+
+ >>> from more_itertools import chunked
+ >>> chunker = make_decorator(chunked, result_index=0)
+ >>> @chunker(3)
+ ... def iter_range(n):
+ ... return iter(range(n))
+ ...
+ >>> list(iter_range(9))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ To only allow truthy items to be returned:
+
+ >>> truth_serum = make_decorator(filter, result_index=1)
+ >>> @truth_serum(bool)
+ ... def boolean_test():
+ ... return [0, 1, '', ' ', False, True]
+ ...
+ >>> list(boolean_test())
+ [1, ' ', True]
+
+ The :func:`peekable` and :func:`seekable` wrappers make for practical
+ decorators:
+
+ >>> from more_itertools import peekable
+ >>> peekable_function = make_decorator(peekable)
+ >>> @peekable_function()
+ ... def str_range(*args):
+ ... return (str(x) for x in range(*args))
+ ...
+ >>> it = str_range(1, 20, 2)
+ >>> next(it), next(it), next(it)
+ ('1', '3', '5')
+ >>> it.peek()
+ '7'
+ >>> next(it)
+ '7'
+
+ """
+ # See https://sites.google.com/site/bbayles/index/decorator_factory for
+ # notes on how this works.
+ def decorator(*wrapping_args, **wrapping_kwargs):
+ def outer_wrapper(f):
+ def inner_wrapper(*args, **kwargs):
+ result = f(*args, **kwargs)
+ wrapping_args_ = list(wrapping_args)
+ wrapping_args_.insert(result_index, result)
+ return wrapping_func(*wrapping_args_, **wrapping_kwargs)
+
+ return inner_wrapper
+
+ return outer_wrapper
+
+ return decorator
+
+
+def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
+ """Return a dictionary that maps the items in *iterable* to categories
+ defined by *keyfunc*, transforms them with *valuefunc*, and
+ then summarizes them by category with *reducefunc*.
+
+ *valuefunc* defaults to the identity function if it is unspecified.
+ If *reducefunc* is unspecified, no summarization takes place:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> result = map_reduce('abbccc', keyfunc)
+ >>> sorted(result.items())
+ [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]
+
+ Specifying *valuefunc* transforms the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc)
+ >>> sorted(result.items())
+ [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]
+
+ Specifying *reducefunc* summarizes the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> reducefunc = sum
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc)
+ >>> sorted(result.items())
+ [('A', 1), ('B', 2), ('C', 3)]
+
+ You may want to filter the input iterable before applying the map/reduce
+ procedure:
+
+ >>> all_items = range(30)
+ >>> items = [x for x in all_items if 10 <= x <= 20] # Filter
+ >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1
+ >>> categories = map_reduce(items, keyfunc=keyfunc)
+ >>> sorted(categories.items())
+ [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])]
+ >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum)
+ >>> sorted(summaries.items())
+ [(0, 90), (1, 75)]
+
+ Note that all items in the iterable are gathered into a list before the
+ summarization step, which may require significant storage.
+
+ The returned object is a :obj:`collections.defaultdict` with the
+ ``default_factory`` set to ``None``, such that it behaves like a normal
+ dictionary.
+
+ """
+ valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
+
+ ret = defaultdict(list)
+ for item in iterable:
+ key = keyfunc(item)
+ value = valuefunc(item)
+ ret[key].append(value)
+
+ if reducefunc is not None:
+ for key, value_list in ret.items():
+ ret[key] = reducefunc(value_list)
+
+ ret.default_factory = None
+ return ret
+
+
+def rlocate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``, starting from the right and moving left.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4
+ [4, 2, 1]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item:
+
+ >>> iterable = iter('abcb')
+ >>> pred = lambda x: x == 'b'
+ >>> list(rlocate(iterable, pred))
+ [3, 1]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(rlocate(iterable, pred=pred, window_size=3))
+ [9, 5, 1]
+
+ Beware, this function won't return anything for infinite iterables.
+ If *iterable* is reversible, ``rlocate`` will reverse it and search from
+ the right. Otherwise, it will search from the left and return the results
+ in reverse order.
+
+    See :func:`locate` for other example applications.
+
+ """
+ if window_size is None:
+ try:
+ len_iter = len(iterable)
+ return (len_iter - i - 1 for i in locate(reversed(iterable), pred))
+ except TypeError:
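+            # *iterable* has no len(); fall back to the left-to-right scan
+            # below and reverse the collected results.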
+ pass
+
+ return reversed(list(locate(iterable, pred, window_size)))
+
+
+def replace(iterable, pred, substitutes, count=None, window_size=1):
+ """Yield the items from *iterable*, replacing the items for which *pred*
+ returns ``True`` with the items from the iterable *substitutes*.
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = (2, 3)
+ >>> list(replace(iterable, pred, substitutes))
+ [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]
+
+ If *count* is given, the number of replacements will be limited:
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = [None]
+ >>> list(replace(iterable, pred, substitutes, count=2))
+ [1, 1, None, 1, 1, None, 1, 1, 0]
+
+ Use *window_size* to control the number of items passed as arguments to
+ *pred*. This allows for locating and replacing subsequences.
+
+ >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
+ >>> window_size = 3
+ >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred
+ >>> substitutes = [3, 4] # Splice in these items
+ >>> list(replace(iterable, pred, substitutes, window_size=window_size))
+ [3, 4, 5, 3, 4, 5]
+
+ """
+ if window_size < 1:
+ raise ValueError('window_size must be at least 1')
+
+ # Save the substitutes iterable, since it's used more than once
+ substitutes = tuple(substitutes)
+
+ # Add padding such that the number of windows matches the length of the
+ # iterable
+ it = chain(iterable, [_marker] * (window_size - 1))
+ windows = windowed(it, window_size)
+
+ n = 0
+ for w in windows:
+ # If the current window matches our predicate (and we haven't hit
+ # our maximum number of replacements), splice in the substitutes
+ # and then consume the following windows that overlap with this one.
+ # For example, if the iterable is (0, 1, 2, 3, 4...)
+ # and the window size is 2, we have (0, 1), (1, 2), (2, 3)...
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2)
+ if pred(*w):
+ if (count is None) or (n < count):
+ n += 1
+ yield from substitutes
+ consume(windows, window_size - 1)
+ continue
+
+ # If there was no match (or we've reached the replacement limit),
+ # yield the first item from the window.
+ if w and (w[0] is not _marker):
+ yield w[0]
+
+
+def partitions(iterable):
+ """Yield all possible order-preserving partitions of *iterable*.
+
+ >>> iterable = 'abc'
+ >>> for part in partitions(iterable):
+ ... print([''.join(p) for p in part])
+ ['abc']
+ ['a', 'bc']
+ ['ab', 'c']
+ ['a', 'b', 'c']
+
+ This is unrelated to :func:`partition`.
+
+ """
+ sequence = list(iterable)
+ n = len(sequence)
+ for i in powerset(range(1, n)):
+ yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))]
+
+
+def set_partitions(iterable, k=None):
+ """
+ Yield the set partitions of *iterable* into *k* parts. Set partitions are
+ not order-preserving.
+
+ >>> iterable = 'abc'
+ >>> for part in set_partitions(iterable, 2):
+ ... print([''.join(p) for p in part])
+ ['a', 'bc']
+ ['ab', 'c']
+ ['b', 'ac']
+
+
+ If *k* is not given, every set partition is generated.
+
+ >>> iterable = 'abc'
+ >>> for part in set_partitions(iterable):
+ ... print([''.join(p) for p in part])
+ ['abc']
+ ['a', 'bc']
+ ['ab', 'c']
+ ['b', 'ac']
+ ['a', 'b', 'c']
+
+ """
+ L = list(iterable)
+ n = len(L)
+ if k is not None:
+ if k < 1:
+ raise ValueError(
+ "Can't partition in a negative or zero number of groups"
+ )
+ elif k > n:
+ return
+
+ def set_partitions_helper(L, k):
+ n = len(L)
+ if k == 1:
+ yield [L]
+ elif n == k:
+ yield [[s] for s in L]
+ else:
+ e, *M = L
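+            # Either the first element forms a part on its own (partition the
+            # rest into k - 1 parts), or it joins one of the parts of a
+            # partition of the rest into k parts.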
+ for p in set_partitions_helper(M, k - 1):
+ yield [[e], *p]
+ for p in set_partitions_helper(M, k):
+ for i in range(len(p)):
+ yield p[:i] + [[e] + p[i]] + p[i + 1 :]
+
+ if k is None:
+ for k in range(1, n + 1):
+ yield from set_partitions_helper(L, k)
+ else:
+ yield from set_partitions_helper(L, k)
+
+
+class time_limited:
+ """
+ Yield items from *iterable* until *limit_seconds* have passed.
+ If the time limit expires before all items have been yielded, the
+ ``timed_out`` parameter will be set to ``True``.
+
+ >>> from time import sleep
+ >>> def generator():
+ ... yield 1
+ ... yield 2
+ ... sleep(0.2)
+ ... yield 3
+ >>> iterable = time_limited(0.1, generator())
+ >>> list(iterable)
+ [1, 2]
+ >>> iterable.timed_out
+ True
+
+ Note that the time is checked before each item is yielded, and iteration
+ stops if the time elapsed is greater than *limit_seconds*. If your time
+ limit is 1 second, but it takes 2 seconds to generate the first item from
+ the iterable, the function will run for 2 seconds and not yield anything.
+
+ """
+
+ def __init__(self, limit_seconds, iterable):
+ if limit_seconds < 0:
+ raise ValueError('limit_seconds must be positive')
+ self.limit_seconds = limit_seconds
+ self._iterable = iter(iterable)
+ self._start_time = monotonic()
+ self.timed_out = False
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ item = next(self._iterable)
+ if monotonic() - self._start_time > self.limit_seconds:
+ self.timed_out = True
+ raise StopIteration
+
+ return item
+
+
+def only(iterable, default=None, too_long=None):
+ """If *iterable* has only one item, return it.
+ If it has zero items, return *default*.
+ If it has more than one item, raise the exception given by *too_long*,
+ which is ``ValueError`` by default.
+
+ >>> only([], default='missing')
+ 'missing'
+ >>> only([1])
+ 1
+ >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 1, 2,
+    and perhaps more.
+ >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ TypeError
+
+ Note that :func:`only` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check
+ iterable contents less destructively.
+ """
+ it = iter(iterable)
+ first_value = next(it, default)
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def ichunked(iterable, n):
+ """Break *iterable* into sub-iterables with *n* elements each.
+ :func:`ichunked` is like :func:`chunked`, but it yields iterables
+ instead of lists.
+
+ If the sub-iterables are read in order, the elements of *iterable*
+ won't be stored in memory.
+ If they are read out of order, :func:`itertools.tee` is used to cache
+ elements as necessary.
+
+ >>> from itertools import count
+ >>> all_chunks = ichunked(count(), 4)
+ >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks)
+ >>> list(c_2) # c_1's elements have been cached; c_3's haven't been
+ [4, 5, 6, 7]
+ >>> list(c_1)
+ [0, 1, 2, 3]
+ >>> list(c_3)
+ [8, 9, 10, 11]
+
+ """
+ source = iter(iterable)
+
+ while True:
+ # Check to see whether we're at the end of the source iterable
+ item = next(source, _marker)
+ if item is _marker:
+ return
+
+ # Clone the source and yield an n-length slice
+ source, it = tee(chain([item], source))
+ yield islice(it, n)
+
+ # Advance the source iterable
+ consume(source, n)
+
+
+def distinct_combinations(iterable, r):
+ """Yield the distinct combinations of *r* items taken from *iterable*.
+
+ >>> list(distinct_combinations([0, 0, 1], 2))
+ [(0, 0), (0, 1)]
+
+    Equivalent to ``set(combinations(iterable, r))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ """
+ if r < 0:
+ raise ValueError('r must be non-negative')
+ elif r == 0:
+ yield ()
+ return
+ pool = tuple(iterable)
+ generators = [unique_everseen(enumerate(pool), key=itemgetter(1))]
+ current_combo = [None] * r
+ level = 0
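+    # Each entry in `generators` scans the remaining pool positions for one
+    # level of the combination, skipping duplicate values via unique_everseen;
+    # popping an exhausted generator backtracks to the previous level.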
+ while generators:
+ try:
+ cur_idx, p = next(generators[-1])
+ except StopIteration:
+ generators.pop()
+ level -= 1
+ continue
+ current_combo[level] = p
+ if level + 1 == r:
+ yield tuple(current_combo)
+ else:
+ generators.append(
+ unique_everseen(
+ enumerate(pool[cur_idx + 1 :], cur_idx + 1),
+ key=itemgetter(1),
+ )
+ )
+ level += 1
+
+
+def filter_except(validator, iterable, *exceptions):
+ """Yield the items from *iterable* for which the *validator* function does
+ not raise one of the specified *exceptions*.
+
+ *validator* is called for each item in *iterable*.
+ It should be a function that accepts one argument and raises an exception
+ if that item is not valid.
+
+ >>> iterable = ['1', '2', 'three', '4', None]
+ >>> list(filter_except(int, iterable, ValueError, TypeError))
+ ['1', '2', '4']
+
+ If an exception other than one given by *exceptions* is raised by
+ *validator*, it is raised like normal.
+ """
+ for item in iterable:
+ try:
+ validator(item)
+ except exceptions:
+ pass
+ else:
+ yield item
+
+
+def map_except(function, iterable, *exceptions):
+ """Transform each item from *iterable* with *function* and yield the
+ result, unless *function* raises one of the specified *exceptions*.
+
+ *function* is called to transform each item in *iterable*.
+ It should accept one argument.
+
+ >>> iterable = ['1', '2', 'three', '4', None]
+ >>> list(map_except(int, iterable, ValueError, TypeError))
+ [1, 2, 4]
+
+ If an exception other than one given by *exceptions* is raised by
+ *function*, it is raised like normal.
+ """
+ for item in iterable:
+ try:
+ yield function(item)
+ except exceptions:
+ pass
+
+
+def map_if(iterable, pred, func, func_else=lambda x: x):
+ """Evaluate each item from *iterable* using *pred*. If the result is
+ equivalent to ``True``, transform the item with *func* and yield it.
+ Otherwise, transform the item with *func_else* and yield it.
+
+ *pred*, *func*, and *func_else* should each be functions that accept
+ one argument. By default, *func_else* is the identity function.
+
+ >>> from math import sqrt
+ >>> iterable = list(range(-5, 5))
+ >>> iterable
+ [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
+ >>> list(map_if(iterable, lambda x: x > 3, lambda x: 'toobig'))
+ [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
+ >>> list(map_if(iterable, lambda x: x >= 0,
+ ... lambda x: f'{sqrt(x):.2f}', lambda x: None))
+ [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00']
+ """
+ for item in iterable:
+ yield func(item) if pred(item) else func_else(item)
+
+
+def _sample_unweighted(iterable, k):
+ # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
+ # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".
+
+ # Fill up the reservoir (collection of samples) with the first `k` samples
+ reservoir = take(k, iterable)
+
+ # Generate random number that's the largest in a sample of k U(0,1) numbers
+ # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
+ W = exp(log(random()) / k)
+
+ # The number of elements to skip before changing the reservoir is a random
+ # number with a geometric distribution. Sample it using random() and logs.
+ next_index = k + floor(log(random()) / log(1 - W))
+
+ for index, element in enumerate(iterable, k):
+
+ if index == next_index:
+ reservoir[randrange(k)] = element
+ # The new W is the largest in a sample of k U(0, `old_W`) numbers
+ W *= exp(log(random()) / k)
+ next_index += floor(log(random()) / log(1 - W)) + 1
+
+ return reservoir
+
+
+def _sample_weighted(iterable, k, weights):
+ # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
+ # "Weighted random sampling with a reservoir".
+
+ # Log-transform for numerical stability for weights that are small/large
+ weight_keys = (log(random()) / weight for weight in weights)
+
+ # Fill up the reservoir (collection of samples) with the first `k`
+ # weight-keys and elements, then heapify the list.
+ reservoir = take(k, zip(weight_keys, iterable))
+ heapify(reservoir)
+
+ # The number of jumps before changing the reservoir is a random variable
+ # with an exponential distribution. Sample it using random() and logs.
+ smallest_weight_key, _ = reservoir[0]
+ weights_to_skip = log(random()) / smallest_weight_key
+
+ for weight, element in zip(weights, iterable):
+ if weight >= weights_to_skip:
+ # The notation here is consistent with the paper, but we store
+ # the weight-keys in log-space for better numerical stability.
+ smallest_weight_key, _ = reservoir[0]
+ t_w = exp(weight * smallest_weight_key)
+ r_2 = uniform(t_w, 1) # generate U(t_w, 1)
+ weight_key = log(r_2) / weight
+ heapreplace(reservoir, (weight_key, element))
+ smallest_weight_key, _ = reservoir[0]
+ weights_to_skip = log(random()) / smallest_weight_key
+ else:
+ weights_to_skip -= weight
+
+ # Equivalent to [element for weight_key, element in sorted(reservoir)]
+ return [heappop(reservoir)[1] for _ in range(k)]
+
+
+def sample(iterable, k, weights=None):
+ """Return a *k*-length list of elements chosen (without replacement)
+ from the *iterable*. Like :func:`random.sample`, but works on iterables
+ of unknown length.
+
+ >>> iterable = range(100)
+ >>> sample(iterable, 5) # doctest: +SKIP
+ [81, 60, 96, 16, 4]
+
+ An iterable with *weights* may also be given:
+
+ >>> iterable = range(100)
+ >>> weights = (i * i + 1 for i in range(100))
+    >>> sample(iterable, 5, weights=weights) # doctest: +SKIP
+ [79, 67, 74, 66, 78]
+
+ The algorithm can also be used to generate weighted random permutations.
+ The relative weight of each item determines the probability that it
+ appears late in the permutation.
+
+ >>> data = "abcdefgh"
+ >>> weights = range(1, len(data) + 1)
+ >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP
+ ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
+ """
+ if k == 0:
+ return []
+
+ iterable = iter(iterable)
+ if weights is None:
+ return _sample_unweighted(iterable, k)
+ else:
+ weights = iter(weights)
+ return _sample_weighted(iterable, k, weights)
+
+
+def is_sorted(iterable, key=None, reverse=False, strict=False):
+ """Returns ``True`` if the items of iterable are in sorted order, and
+ ``False`` otherwise. *key* and *reverse* have the same meaning that they do
+ in the built-in :func:`sorted` function.
+
+ >>> is_sorted(['1', '2', '3', '4', '5'], key=int)
+ True
+ >>> is_sorted([5, 4, 3, 1, 2], reverse=True)
+ False
+
+ If *strict*, tests for strict sorting, that is, returns ``False`` if equal
+ elements are found:
+
+ >>> is_sorted([1, 2, 2])
+ True
+ >>> is_sorted([1, 2, 2], strict=True)
+ False
+
+ The function returns ``False`` after encountering the first out-of-order
+ item. If there are no out-of-order items, the iterable is exhausted.
+ """
+
+ compare = (le if reverse else ge) if strict else (lt if reverse else gt)
+ it = iterable if key is None else map(key, iterable)
+ return not any(starmap(compare, pairwise(it)))
+
+
+class AbortThread(BaseException):
+ pass
+
+
+class callback_iter:
+ """Convert a function that uses callbacks to an iterator.
+
+ Let *func* be a function that takes a `callback` keyword argument.
+ For example:
+
+ >>> def func(callback=None):
+ ... for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]:
+ ... if callback:
+ ... callback(i, c)
+ ... return 4
+
+
+ Use ``with callback_iter(func)`` to get an iterator over the parameters
+ that are delivered to the callback.
+
+ >>> with callback_iter(func) as it:
+ ... for args, kwargs in it:
+ ... print(args)
+ (1, 'a')
+ (2, 'b')
+ (3, 'c')
+
+ The function will be called in a background thread. The ``done`` property
+ indicates whether it has completed execution.
+
+ >>> it.done
+ True
+
+ If it completes successfully, its return value will be available
+ in the ``result`` property.
+
+ >>> it.result
+ 4
+
+ Notes:
+
+ * If the function uses some keyword argument besides ``callback``, supply
+ *callback_kwd*.
+ * If it finished executing, but raised an exception, accessing the
+ ``result`` property will raise the same exception.
+ * If it hasn't finished executing, accessing the ``result``
+ property from within the ``with`` block will raise ``RuntimeError``.
+ * If it hasn't finished executing, accessing the ``result`` property from
+ outside the ``with`` block will raise a
+ ``more_itertools.AbortThread`` exception.
+    * Provide *wait_seconds* to adjust how frequently the function is polled for
+ output.
+
+ """
+
+ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
+ self._func = func
+ self._callback_kwd = callback_kwd
+ self._aborted = False
+ self._future = None
+ self._wait_seconds = wait_seconds
+ self._executor = ThreadPoolExecutor(max_workers=1)
+ self._iterator = self._reader()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._aborted = True
+ self._executor.shutdown()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iterator)
+
+ @property
+ def done(self):
+ if self._future is None:
+ return False
+ return self._future.done()
+
+ @property
+ def result(self):
+ if not self.done:
+ raise RuntimeError('Function has not yet completed')
+
+ return self._future.result()
+
+ def _reader(self):
+ q = Queue()
+
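+        # The callback runs in the background thread; each invocation pushes
+        # its arguments onto the queue for this reader to yield.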
+ def callback(*args, **kwargs):
+ if self._aborted:
+ raise AbortThread('canceled by user')
+
+ q.put((args, kwargs))
+
+ self._future = self._executor.submit(
+ self._func, **{self._callback_kwd: callback}
+ )
+
+ while True:
+ try:
+ item = q.get(timeout=self._wait_seconds)
+ except Empty:
+ pass
+ else:
+ q.task_done()
+ yield item
+
+ if self._future.done():
+ break
+
+ remaining = []
+ while True:
+ try:
+ item = q.get_nowait()
+ except Empty:
+ break
+ else:
+ q.task_done()
+ remaining.append(item)
+ q.join()
+ yield from remaining
+
+
+def windowed_complete(iterable, n):
+ """
+ Yield ``(beginning, middle, end)`` tuples, where:
+
+ * Each ``middle`` has *n* items from *iterable*
+ * Each ``beginning`` has the items before the ones in ``middle``
+ * Each ``end`` has the items after the ones in ``middle``
+
+ >>> iterable = range(7)
+ >>> n = 3
+ >>> for beginning, middle, end in windowed_complete(iterable, n):
+ ... print(beginning, middle, end)
+ () (0, 1, 2) (3, 4, 5, 6)
+ (0,) (1, 2, 3) (4, 5, 6)
+ (0, 1) (2, 3, 4) (5, 6)
+ (0, 1, 2) (3, 4, 5) (6,)
+ (0, 1, 2, 3) (4, 5, 6) ()
+
+    Note that *n* must be at least 0 and at most equal to the length of
+ *iterable*.
+
+ This function will exhaust the iterable and may require significant
+ storage.
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+
+ seq = tuple(iterable)
+ size = len(seq)
+
+ if n > size:
+ raise ValueError('n must be <= len(seq)')
+
+ for i in range(size - n + 1):
+ beginning = seq[:i]
+ middle = seq[i : i + n]
+ end = seq[i + n :]
+ yield beginning, middle, end
+
+
+def all_unique(iterable, key=None):
+ """
+ Returns ``True`` if all the elements of *iterable* are unique (no two
+ elements are equal).
+
+ >>> all_unique('ABCB')
+ False
+
+ If a *key* function is specified, it will be used to make comparisons.
+
+ >>> all_unique('ABCb')
+ True
+ >>> all_unique('ABCb', str.lower)
+ False
+
+ The function returns as soon as the first non-unique element is
+ encountered. Iterables with a mix of hashable and unhashable items can
+ be used, but the function will be slower for unhashable items.
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ for element in map(key, iterable) if key else iterable:
+ try:
+ if element in seenset:
+ return False
+ seenset_add(element)
+ except TypeError:
+ if element in seenlist:
+ return False
+ seenlist_add(element)
+ return True
+
+
+def nth_product(index, *args):
+ """Equivalent to ``list(product(*args))[index]``.
+
+ The products of *args* can be ordered lexicographically.
+ :func:`nth_product` computes the product at sort position *index* without
+ computing the previous products.
+
+ >>> nth_product(8, range(2), range(2), range(2), range(2))
+ (1, 0, 0, 0)
+
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pools = list(map(tuple, reversed(args)))
+ ns = list(map(len, pools))
+
+ c = reduce(mul, ns)
+
+ if index < 0:
+ index += c
+
+ if not 0 <= index < c:
+ raise IndexError
+
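+    # Decompose *index* in a mixed-radix system. The pools were reversed, so
+    # the least significant "digit" selects from the last argument; reversing
+    # the result restores the original order.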
+ result = []
+ for pool, n in zip(pools, ns):
+ result.append(pool[index % n])
+ index //= n
+
+ return tuple(reversed(result))
+
+
+def nth_permutation(iterable, r, index):
+ """Equivalent to ``list(permutations(iterable, r))[index]```
+
+ The subsequences of *iterable* that are of length *r* where order is
+ important can be ordered lexicographically. :func:`nth_permutation`
+ computes the subsequence at sort position *index* directly, without
+ computing the previous subsequences.
+
+ >>> nth_permutation('ghijk', 2, 5)
+ ('h', 'i')
+
+    ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = list(iterable)
+ n = len(pool)
+
+ if r is None or r == n:
+ r, c = n, factorial(n)
+ elif not 0 <= r < n:
+ raise ValueError
+ else:
+ c = factorial(n) // factorial(n - r)
+
+ if index < 0:
+ index += c
+
+ if not 0 <= index < c:
+ raise IndexError
+
+ if c == 0:
+ return tuple()
+
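+    # Decompose the index in the factorial number system; each digit is an
+    # index into the shrinking pool of remaining elements.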
+ result = [0] * r
+ q = index * factorial(n) // c if r < n else index
+ for d in range(1, n + 1):
+ q, i = divmod(q, d)
+ if 0 <= n - d < r:
+ result[n - d] = i
+ if q == 0:
+ break
+
+ return tuple(map(pool.pop, result))
+
+
+def value_chain(*args):
+ """Yield all arguments passed to the function in the same order in which
+ they were passed. If an argument itself is iterable then iterate over its
+ values.
+
+ >>> list(value_chain(1, 2, 3, [4, 5, 6]))
+ [1, 2, 3, 4, 5, 6]
+
+ Binary and text strings are not considered iterable and are emitted
+ as-is:
+
+ >>> list(value_chain('12', '34', ['56', '78']))
+ ['12', '34', '56', '78']
+
+
+ Multiple levels of nesting are not flattened.
+
+ """
+ for value in args:
+ if isinstance(value, (str, bytes)):
+ yield value
+ continue
+ try:
+ yield from value
+ except TypeError:
+ yield value
+
+
+def product_index(element, *args):
+ """Equivalent to ``list(product(*args)).index(element)``
+
+ The products of *args* can be ordered lexicographically.
+ :func:`product_index` computes the first index of *element* without
+ computing the previous products.
+
+ >>> product_index([8, 2], range(10), range(5))
+ 42
+
+ ``ValueError`` will be raised if the given *element* isn't in the product
+ of *args*.
+ """
+ index = 0
+
+ for x, pool in zip_longest(element, args, fillvalue=_marker):
+ if x is _marker or pool is _marker:
+ raise ValueError('element is not a product of args')
+
+ pool = tuple(pool)
+ index = index * len(pool) + pool.index(x)
+
+ return index
+
+
+def combination_index(element, iterable):
+ """Equivalent to ``list(combinations(iterable, r)).index(element)``
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`combination_index` computes the index of the
+ first *element*, without computing the previous combinations.
+
+ >>> combination_index('adf', 'abcdefg')
+ 10
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ combinations of *iterable*.
+ """
+ element = enumerate(element)
+ k, y = next(element, (None, None))
+ if k is None:
+ return 0
+
+ indexes = []
+ pool = enumerate(iterable)
+ for n, x in pool:
+ if x == y:
+ indexes.append(n)
+ tmp, y = next(element, (None, None))
+ if tmp is None:
+ break
+ else:
+ k = tmp
+ else:
+ raise ValueError('element is not a combination of iterable')
+
+ n, _ = last(pool, default=(n, None))
+
+    # Python versions below 3.8 don't have math.comb
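+    # Binomial coefficients are therefore computed from factorials. The rank
+    # equals the total number of combinations minus the count of combinations
+    # at or after this one in lexicographic order.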
+ index = 1
+ for i, j in enumerate(reversed(indexes), start=1):
+ j = n - j
+ if i <= j:
+ index += factorial(j) // (factorial(i) * factorial(j - i))
+
+ return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+
+
+def permutation_index(element, iterable):
+ """Equivalent to ``list(permutations(iterable, r)).index(element)```
+
+ The subsequences of *iterable* that are of length *r* where order is
+ important can be ordered lexicographically. :func:`permutation_index`
+ computes the index of the first *element* directly, without computing
+ the previous permutations.
+
+ >>> permutation_index([1, 3, 2], range(5))
+ 19
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ permutations of *iterable*.
+ """
+ index = 0
+ pool = list(iterable)
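+    # Build the Lehmer code: at each step, the rank of x among the remaining
+    # pool elements is the next digit in a factorial-base number.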
+ for i, x in zip(range(len(pool), -1, -1), element):
+ r = pool.index(x)
+ index = index * i + r
+ del pool[r]
+
+ return index
+
+
+class countable:
+ """Wrap *iterable* and keep a count of how many items have been consumed.
+
+ The ``items_seen`` attribute starts at ``0`` and increments as the iterable
+ is consumed:
+
+ >>> iterable = map(str, range(10))
+ >>> it = countable(iterable)
+ >>> it.items_seen
+ 0
+ >>> next(it), next(it)
+ ('0', '1')
+ >>> list(it)
+ ['2', '3', '4', '5', '6', '7', '8', '9']
+ >>> it.items_seen
+ 10
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self.items_seen = 0
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ item = next(self._it)
+ self.items_seen += 1
+
+ return item
+
+
+def chunked_even(iterable, n):
+ """Break *iterable* into lists of approximately length *n*.
+    Items are distributed such that the lengths of the lists differ by at most
+ 1 item.
+
+ >>> iterable = [1, 2, 3, 4, 5, 6, 7]
+ >>> n = 3
+ >>> list(chunked_even(iterable, n)) # List lengths: 3, 2, 2
+ [[1, 2, 3], [4, 5], [6, 7]]
+ >>> list(chunked(iterable, n)) # List lengths: 3, 3, 1
+ [[1, 2, 3], [4, 5, 6], [7]]
+
+ """
+
+ len_method = getattr(iterable, '__len__', None)
+
+ if len_method is None:
+ return _chunked_even_online(iterable, n)
+ else:
+ return _chunked_even_finite(iterable, len_method(), n)
+
+
+def _chunked_even_online(iterable, n):
+ buffer = []
+ maxbuf = n + (n - 2) * (n - 1)
+ for x in iterable:
+ buffer.append(x)
+ if len(buffer) == maxbuf:
+ yield buffer[:n]
+ buffer = buffer[n:]
+ yield from _chunked_even_finite(buffer, len(buffer), n)
+
+
+def _chunked_even_finite(iterable, N, n):
+ if N < 1:
+ return
+
+ # Lists are either size `full_size <= n` or `partial_size = full_size - 1`
+ q, r = divmod(N, n)
+ num_lists = q + (1 if r > 0 else 0)
+ q, r = divmod(N, num_lists)
+ full_size = q + (1 if r > 0 else 0)
+ partial_size = full_size - 1
+ num_full = N - partial_size * num_lists
+ num_partial = num_lists - num_full
+
+ buffer = []
+ iterator = iter(iterable)
+
+ # Yield num_full lists of full_size
+ for x in iterator:
+ buffer.append(x)
+ if len(buffer) == full_size:
+ yield buffer
+ buffer = []
+ num_full -= 1
+ if num_full <= 0:
+ break
+
+ # Yield num_partial lists of partial_size
+ for x in iterator:
+ buffer.append(x)
+ if len(buffer) == partial_size:
+ yield buffer
+ buffer = []
+ num_partial -= 1
+
+
+def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
+ """A version of :func:`zip` that "broadcasts" any scalar
+ (i.e., non-iterable) items into output tuples.
+
+ >>> iterable_1 = [1, 2, 3]
+ >>> iterable_2 = ['a', 'b', 'c']
+ >>> scalar = '_'
+ >>> list(zip_broadcast(iterable_1, iterable_2, scalar))
+ [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]
+
+ The *scalar_types* keyword argument determines what types are considered
+ scalar. It is set to ``(str, bytes)`` by default. Set it to ``None`` to
+ treat strings and byte strings as iterable:
+
+ >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None))
+ [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')]
+
+ If the *strict* keyword argument is ``True``, then
+ ``UnequalIterablesError`` will be raised if any of the iterables have
+    different lengths.
+ """
+
+ def is_scalar(obj):
+ if scalar_types and isinstance(obj, scalar_types):
+ return True
+ try:
+ iter(obj)
+ except TypeError:
+ return True
+ else:
+ return False
+
+ size = len(objects)
+ if not size:
+ return
+
+ iterables, iterable_positions = [], []
+ scalars, scalar_positions = [], []
+ for i, obj in enumerate(objects):
+ if is_scalar(obj):
+ scalars.append(obj)
+ scalar_positions.append(i)
+ else:
+ iterables.append(iter(obj))
+ iterable_positions.append(i)
+
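+    # If every argument is a scalar, there is nothing to iterate over;
+    # broadcast them once as a single tuple.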
+ if len(scalars) == size:
+ yield tuple(objects)
+ return
+
+ zipper = _zip_equal if strict else zip
+ for item in zipper(*iterables):
+ new_item = [None] * size
+
+ for i, elem in zip(iterable_positions, item):
+ new_item[i] = elem
+
+ for i, elem in zip(scalar_positions, scalars):
+ new_item[i] = elem
+
+ yield tuple(new_item)
+
+
+def unique_in_window(iterable, n, key=None):
+ """Yield the items from *iterable* that haven't been seen recently.
+ *n* is the size of the lookback window.
+
+ >>> iterable = [0, 1, 0, 2, 3, 0]
+ >>> n = 3
+ >>> list(unique_in_window(iterable, n))
+ [0, 1, 2, 3, 0]
+
+ The *key* function, if provided, will be used to determine uniqueness:
+
+ >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower()))
+ ['a', 'b', 'c', 'd', 'a']
+
+ The items in *iterable* must be hashable.
+
+ """
+ if n <= 0:
+ raise ValueError('n must be greater than 0')
+
+ window = deque(maxlen=n)
+ uniques = set()
+ use_key = key is not None
+
+ for item in iterable:
+ k = key(item) if use_key else item
+ if k in uniques:
+ continue
+
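+        # The window is full: the oldest key is about to be evicted from the
+        # deque, so stop treating it as recently seen.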
+ if len(uniques) == n:
+ uniques.discard(window[0])
+
+ uniques.add(k)
+ window.append(k)
+
+ yield item
+
+
+def duplicates_everseen(iterable, key=None):
+ """Yield duplicate elements after their first appearance.
+
+ >>> list(duplicates_everseen('mississippi'))
+ ['s', 'i', 's', 's', 'i', 'p', 'i']
+ >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
+ ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
+
+    This function is analogous to :func:`unique_everseen` and is subject to
+ the same performance considerations.
+
+ """
+ seen_set = set()
+ seen_list = []
+ use_key = key is not None
+
+ for element in iterable:
+ k = key(element) if use_key else element
+ try:
+ if k not in seen_set:
+ seen_set.add(k)
+ else:
+ yield element
+ except TypeError:
+ if k not in seen_list:
+ seen_list.append(k)
+ else:
+ yield element
+
+
+def duplicates_justseen(iterable, key=None):
+ """Yields serially-duplicate elements after their first appearance.
+
+ >>> list(duplicates_justseen('mississippi'))
+ ['s', 's', 'p']
+ >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
+ ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
+
+    This function is analogous to :func:`unique_justseen`.
+
+ """
+ return flatten(
+ map(
+ lambda group_tuple: islice_extended(group_tuple[1])[1:],
+ groupby(iterable, key),
+ )
+ )
+
+
+def minmax(iterable_or_value, *others, key=None, default=_marker):
+ """Returns both the smallest and largest items in an iterable
+    or the smallest and largest of two or more arguments.
+
+ >>> minmax([3, 1, 5])
+ (1, 5)
+
+ >>> minmax(4, 2, 6)
+ (2, 6)
+
+ If a *key* function is provided, it will be used to transform the input
+ items for comparison.
+
+ >>> minmax([5, 30], key=str) # '30' sorts before '5'
+ (30, 5)
+
+ If a *default* value is provided, it will be returned if there are no
+ input items.
+
+ >>> minmax([], default=(0, 0))
+ (0, 0)
+
+ Otherwise ``ValueError`` is raised.
+
+ This function is based on the
+ `recipe <http://code.activestate.com/recipes/577916/>`__ by
+ Raymond Hettinger and takes care to minimize the number of comparisons
+ performed.
+ """
+ iterable = (iterable_or_value, *others) if others else iterable_or_value
+
+ it = iter(iterable)
+
+ try:
+ lo = hi = next(it)
+ except StopIteration as e:
+ if default is _marker:
+ raise ValueError(
+ '`minmax()` argument is an empty iterable. '
+ 'Provide a `default` value to suppress this error.'
+ ) from e
+ return default
+
+    # Different branches depending on the presence of key. This saves a lot
+    # of unimportant copies which would significantly slow down the
+    # "key=None" branch.
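+    # zip_longest(it, it, ...) consumes the iterator two items at a time; an
+    # odd trailing item is paired with `lo`, which has already been accounted
+    # for.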
+ if key is None:
+ for x, y in zip_longest(it, it, fillvalue=lo):
+ if y < x:
+ x, y = y, x
+ if x < lo:
+ lo = x
+ if hi < y:
+ hi = y
+
+ else:
+ lo_key = hi_key = key(lo)
+
+ for x, y in zip_longest(it, it, fillvalue=lo):
+
+ x_key, y_key = key(x), key(y)
+
+ if y_key < x_key:
+ x, y, x_key, y_key = y, x, y_key, x_key
+ if x_key < lo_key:
+ lo, lo_key = x, x_key
+ if hi_key < y_key:
+ hi, hi_key = y, y_key
+
+ return lo, hi
diff --git a/contrib/python/more-itertools/py3/more_itertools/more.pyi b/contrib/python/more-itertools/py3/more_itertools/more.pyi
new file mode 100644
index 0000000000..fe7d4bdd7a
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/more.pyi
@@ -0,0 +1,664 @@
+"""Stubs for more_itertools.more"""
+
+from typing import (
+ Any,
+ Callable,
+ Container,
+ Dict,
+ Generic,
+ Hashable,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Reversible,
+ Sequence,
+ Sized,
+ Tuple,
+ Union,
+ TypeVar,
+ type_check_only,
+)
+from types import TracebackType
+from typing_extensions import ContextManager, Protocol, Type, overload
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_U = TypeVar('_U')
+_V = TypeVar('_V')
+_W = TypeVar('_W')
+_T_co = TypeVar('_T_co', covariant=True)
+_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
+_Raisable = Union[BaseException, 'Type[BaseException]']
+
+@type_check_only
+class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
+
+@type_check_only
+class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
+
+def chunked(
+ iterable: Iterable[_T], n: Optional[int], strict: bool = ...
+) -> Iterator[List[_T]]: ...
+@overload
+def first(iterable: Iterable[_T]) -> _T: ...
+@overload
+def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
+@overload
+def last(iterable: Iterable[_T]) -> _T: ...
+@overload
+def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
+@overload
+def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
+@overload
+def nth_or_last(
+ iterable: Iterable[_T], n: int, default: _U
+) -> Union[_T, _U]: ...
+
+class peekable(Generic[_T], Iterator[_T]):
+ def __init__(self, iterable: Iterable[_T]) -> None: ...
+ def __iter__(self) -> peekable[_T]: ...
+ def __bool__(self) -> bool: ...
+ @overload
+ def peek(self) -> _T: ...
+ @overload
+ def peek(self, default: _U) -> Union[_T, _U]: ...
+ def prepend(self, *items: _T) -> None: ...
+ def __next__(self) -> _T: ...
+ @overload
+ def __getitem__(self, index: int) -> _T: ...
+ @overload
+ def __getitem__(self, index: slice) -> List[_T]: ...
+
+def collate(*iterables: Iterable[_T], **kwargs: Any) -> Iterable[_T]: ...
+def consumer(func: _GenFn) -> _GenFn: ...
+def ilen(iterable: Iterable[object]) -> int: ...
+def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
+def with_iter(
+ context_manager: ContextManager[Iterable[_T]],
+) -> Iterator[_T]: ...
+def one(
+ iterable: Iterable[_T],
+ too_short: Optional[_Raisable] = ...,
+ too_long: Optional[_Raisable] = ...,
+) -> _T: ...
+def raise_(exception: _Raisable, *args: Any) -> None: ...
+def strictly_n(
+ iterable: Iterable[_T],
+ n: int,
+ too_short: Optional[_GenFn] = ...,
+ too_long: Optional[_GenFn] = ...,
+) -> List[_T]: ...
+def distinct_permutations(
+ iterable: Iterable[_T], r: Optional[int] = ...
+) -> Iterator[Tuple[_T, ...]]: ...
+def intersperse(
+ e: _U, iterable: Iterable[_T], n: int = ...
+) -> Iterator[Union[_T, _U]]: ...
+def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ...
+@overload
+def windowed(
+ seq: Iterable[_T], n: int, *, step: int = ...
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def windowed(
+ seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
+def substrings_indexes(
+ seq: Sequence[_T], reverse: bool = ...
+) -> Iterator[Tuple[Sequence[_T], int, int]]: ...
+
+class bucket(Generic[_T, _U], Container[_U]):
+ def __init__(
+ self,
+ iterable: Iterable[_T],
+ key: Callable[[_T], _U],
+ validator: Optional[Callable[[object], object]] = ...,
+ ) -> None: ...
+ def __contains__(self, value: object) -> bool: ...
+ def __iter__(self) -> Iterator[_U]: ...
+ def __getitem__(self, value: object) -> Iterator[_T]: ...
+
+def spy(
+ iterable: Iterable[_T], n: int = ...
+) -> Tuple[List[_T], Iterator[_T]]: ...
+def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+def interleave_evenly(
+ iterables: List[Iterable[_T]], lengths: Optional[List[int]] = ...
+) -> Iterator[_T]: ...
+def collapse(
+ iterable: Iterable[Any],
+ base_type: Optional[type] = ...,
+ levels: Optional[int] = ...,
+) -> Iterator[Any]: ...
+@overload
+def side_effect(
+ func: Callable[[_T], object],
+ iterable: Iterable[_T],
+ chunk_size: None = ...,
+ before: Optional[Callable[[], object]] = ...,
+ after: Optional[Callable[[], object]] = ...,
+) -> Iterator[_T]: ...
+@overload
+def side_effect(
+ func: Callable[[List[_T]], object],
+ iterable: Iterable[_T],
+ chunk_size: int,
+ before: Optional[Callable[[], object]] = ...,
+ after: Optional[Callable[[], object]] = ...,
+) -> Iterator[_T]: ...
+def sliced(
+ seq: Sequence[_T], n: int, strict: bool = ...
+) -> Iterator[Sequence[_T]]: ...
+def split_at(
+ iterable: Iterable[_T],
+ pred: Callable[[_T], object],
+ maxsplit: int = ...,
+ keep_separator: bool = ...,
+) -> Iterator[List[_T]]: ...
+def split_before(
+ iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
+) -> Iterator[List[_T]]: ...
+def split_after(
+ iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
+) -> Iterator[List[_T]]: ...
+def split_when(
+ iterable: Iterable[_T],
+ pred: Callable[[_T, _T], object],
+ maxsplit: int = ...,
+) -> Iterator[List[_T]]: ...
+def split_into(
+ iterable: Iterable[_T], sizes: Iterable[Optional[int]]
+) -> Iterator[List[_T]]: ...
+@overload
+def padded(
+ iterable: Iterable[_T],
+ *,
+ n: Optional[int] = ...,
+ next_multiple: bool = ...
+) -> Iterator[Optional[_T]]: ...
+@overload
+def padded(
+ iterable: Iterable[_T],
+ fillvalue: _U,
+ n: Optional[int] = ...,
+ next_multiple: bool = ...,
+) -> Iterator[Union[_T, _U]]: ...
+@overload
+def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
+@overload
+def repeat_last(
+ iterable: Iterable[_T], default: _U
+) -> Iterator[Union[_T, _U]]: ...
+def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
+@overload
+def stagger(
+ iterable: Iterable[_T],
+ offsets: _SizedIterable[int] = ...,
+ longest: bool = ...,
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def stagger(
+ iterable: Iterable[_T],
+ offsets: _SizedIterable[int] = ...,
+ longest: bool = ...,
+ fillvalue: _U = ...,
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+
+class UnequalIterablesError(ValueError):
+ def __init__(
+ self, details: Optional[Tuple[int, int, int]] = ...
+ ) -> None: ...
+
+@overload
+def zip_equal(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
+@overload
+def zip_equal(
+ __iter1: Iterable[_T1], __iter2: Iterable[_T2]
+) -> Iterator[Tuple[_T1, _T2]]: ...
+@overload
+def zip_equal(
+ __iter1: Iterable[_T],
+ __iter2: Iterable[_T],
+ __iter3: Iterable[_T],
+ *iterables: Iterable[_T]
+) -> Iterator[Tuple[_T, ...]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: None = None
+) -> Iterator[Tuple[Optional[_T1]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ __iter2: Iterable[_T2],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: None = None
+) -> Iterator[Tuple[Optional[_T1], Optional[_T2]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T],
+ __iter2: Iterable[_T],
+ __iter3: Iterable[_T],
+ *iterables: Iterable[_T],
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: None = None
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: _U,
+) -> Iterator[Tuple[Union[_T1, _U]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ __iter2: Iterable[_T2],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: _U,
+) -> Iterator[Tuple[Union[_T1, _U], Union[_T2, _U]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T],
+ __iter2: Iterable[_T],
+ __iter3: Iterable[_T],
+ *iterables: Iterable[_T],
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: _U,
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+def sort_together(
+ iterables: Iterable[Iterable[_T]],
+ key_list: Iterable[int] = ...,
+ key: Optional[Callable[..., Any]] = ...,
+ reverse: bool = ...,
+) -> List[Tuple[_T, ...]]: ...
+def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ...
+def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
+def always_iterable(
+ obj: object,
+ base_type: Union[
+ type, Tuple[Union[type, Tuple[Any, ...]], ...], None
+ ] = ...,
+) -> Iterator[Any]: ...
+def adjacent(
+ predicate: Callable[[_T], bool],
+ iterable: Iterable[_T],
+ distance: int = ...,
+) -> Iterator[Tuple[bool, _T]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: None = None,
+ valuefunc: None = None,
+ reducefunc: None = None,
+) -> Iterator[Tuple[_T, Iterator[_T]]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: None,
+ reducefunc: None,
+) -> Iterator[Tuple[_U, Iterator[_T]]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: None,
+ valuefunc: Callable[[_T], _V],
+ reducefunc: None,
+) -> Iterable[Tuple[_T, Iterable[_V]]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: Callable[[_T], _V],
+ reducefunc: None,
+) -> Iterable[Tuple[_U, Iterator[_V]]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: None,
+ valuefunc: None,
+ reducefunc: Callable[[Iterator[_T]], _W],
+) -> Iterable[Tuple[_T, _W]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: None,
+ reducefunc: Callable[[Iterator[_T]], _W],
+) -> Iterable[Tuple[_U, _W]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: None,
+ valuefunc: Callable[[_T], _V],
+ reducefunc: Callable[[Iterable[_V]], _W],
+) -> Iterable[Tuple[_T, _W]]: ...
+@overload
+def groupby_transform(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: Callable[[_T], _V],
+ reducefunc: Callable[[Iterable[_V]], _W],
+) -> Iterable[Tuple[_U, _W]]: ...
+
+class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
+ @overload
+ def __init__(self, __stop: _T) -> None: ...
+ @overload
+ def __init__(self, __start: _T, __stop: _T) -> None: ...
+ @overload
+ def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ...
+ def __bool__(self) -> bool: ...
+ def __contains__(self, elem: object) -> bool: ...
+ def __eq__(self, other: object) -> bool: ...
+ @overload
+ def __getitem__(self, key: int) -> _T: ...
+ @overload
+ def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ...
+ def __hash__(self) -> int: ...
+ def __iter__(self) -> Iterator[_T]: ...
+ def __len__(self) -> int: ...
+ def __reduce__(
+ self,
+ ) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ...
+ def __repr__(self) -> str: ...
+ def __reversed__(self) -> Iterator[_T]: ...
+ def count(self, value: _T) -> int: ...
+ def index(self, value: _T) -> int: ... # type: ignore
+
+def count_cycle(
+ iterable: Iterable[_T], n: Optional[int] = ...
+) -> Iterable[Tuple[int, _T]]: ...
+def mark_ends(
+ iterable: Iterable[_T],
+) -> Iterable[Tuple[bool, bool, _T]]: ...
+def locate(
+ iterable: Iterable[object],
+ pred: Callable[..., Any] = ...,
+ window_size: Optional[int] = ...,
+) -> Iterator[int]: ...
+def lstrip(
+ iterable: Iterable[_T], pred: Callable[[_T], object]
+) -> Iterator[_T]: ...
+def rstrip(
+ iterable: Iterable[_T], pred: Callable[[_T], object]
+) -> Iterator[_T]: ...
+def strip(
+ iterable: Iterable[_T], pred: Callable[[_T], object]
+) -> Iterator[_T]: ...
+
+class islice_extended(Generic[_T], Iterator[_T]):
+ def __init__(
+ self, iterable: Iterable[_T], *args: Optional[int]
+ ) -> None: ...
+ def __iter__(self) -> islice_extended[_T]: ...
+ def __next__(self) -> _T: ...
+ def __getitem__(self, index: slice) -> islice_extended[_T]: ...
+
+def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ...
+def consecutive_groups(
+ iterable: Iterable[_T], ordering: Callable[[_T], int] = ...
+) -> Iterator[Iterator[_T]]: ...
+@overload
+def difference(
+ iterable: Iterable[_T],
+ func: Callable[[_T, _T], _U] = ...,
+ *,
+ initial: None = ...
+) -> Iterator[Union[_T, _U]]: ...
+@overload
+def difference(
+ iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
+) -> Iterator[_U]: ...
+
+class SequenceView(Generic[_T], Sequence[_T]):
+ def __init__(self, target: Sequence[_T]) -> None: ...
+ @overload
+ def __getitem__(self, index: int) -> _T: ...
+ @overload
+ def __getitem__(self, index: slice) -> Sequence[_T]: ...
+ def __len__(self) -> int: ...
+
+class seekable(Generic[_T], Iterator[_T]):
+ def __init__(
+ self, iterable: Iterable[_T], maxlen: Optional[int] = ...
+ ) -> None: ...
+ def __iter__(self) -> seekable[_T]: ...
+ def __next__(self) -> _T: ...
+ def __bool__(self) -> bool: ...
+ @overload
+ def peek(self) -> _T: ...
+ @overload
+ def peek(self, default: _U) -> Union[_T, _U]: ...
+ def elements(self) -> SequenceView[_T]: ...
+ def seek(self, index: int) -> None: ...
+
+class run_length:
+ @staticmethod
+ def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ...
+ @staticmethod
+ def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ...
+
+def exactly_n(
+ iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
+) -> bool: ...
+def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ...
+def make_decorator(
+ wrapping_func: Callable[..., _U], result_index: int = ...
+) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
+@overload
+def map_reduce(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: None = ...,
+ reducefunc: None = ...,
+) -> Dict[_U, List[_T]]: ...
+@overload
+def map_reduce(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: Callable[[_T], _V],
+ reducefunc: None = ...,
+) -> Dict[_U, List[_V]]: ...
+@overload
+def map_reduce(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: None = ...,
+ reducefunc: Callable[[List[_T]], _W] = ...,
+) -> Dict[_U, _W]: ...
+@overload
+def map_reduce(
+ iterable: Iterable[_T],
+ keyfunc: Callable[[_T], _U],
+ valuefunc: Callable[[_T], _V],
+ reducefunc: Callable[[List[_V]], _W],
+) -> Dict[_U, _W]: ...
+def rlocate(
+ iterable: Iterable[_T],
+ pred: Callable[..., object] = ...,
+ window_size: Optional[int] = ...,
+) -> Iterator[int]: ...
+def replace(
+ iterable: Iterable[_T],
+ pred: Callable[..., object],
+ substitutes: Iterable[_U],
+ count: Optional[int] = ...,
+ window_size: int = ...,
+) -> Iterator[Union[_T, _U]]: ...
+def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ...
+def set_partitions(
+ iterable: Iterable[_T], k: Optional[int] = ...
+) -> Iterator[List[List[_T]]]: ...
+
+class time_limited(Generic[_T], Iterator[_T]):
+ def __init__(
+ self, limit_seconds: float, iterable: Iterable[_T]
+ ) -> None: ...
+    def __iter__(self) -> time_limited[_T]: ...
+ def __next__(self) -> _T: ...
+
+@overload
+def only(
+ iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ...
+) -> Optional[_T]: ...
+@overload
+def only(
+ iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ...
+) -> Union[_T, _U]: ...
+def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
+def distinct_combinations(
+ iterable: Iterable[_T], r: int
+) -> Iterator[Tuple[_T, ...]]: ...
+def filter_except(
+ validator: Callable[[Any], object],
+ iterable: Iterable[_T],
+ *exceptions: Type[BaseException]
+) -> Iterator[_T]: ...
+def map_except(
+ function: Callable[[Any], _U],
+ iterable: Iterable[_T],
+ *exceptions: Type[BaseException]
+) -> Iterator[_U]: ...
+def map_if(
+ iterable: Iterable[Any],
+ pred: Callable[[Any], bool],
+ func: Callable[[Any], Any],
+ func_else: Optional[Callable[[Any], Any]] = ...,
+) -> Iterator[Any]: ...
+def sample(
+ iterable: Iterable[_T],
+ k: int,
+ weights: Optional[Iterable[float]] = ...,
+) -> List[_T]: ...
+def is_sorted(
+ iterable: Iterable[_T],
+ key: Optional[Callable[[_T], _U]] = ...,
+ reverse: bool = False,
+ strict: bool = False,
+) -> bool: ...
+
+class AbortThread(BaseException):
+ pass
+
+class callback_iter(Generic[_T], Iterator[_T]):
+ def __init__(
+ self,
+ func: Callable[..., Any],
+ callback_kwd: str = ...,
+ wait_seconds: float = ...,
+ ) -> None: ...
+ def __enter__(self) -> callback_iter[_T]: ...
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> Optional[bool]: ...
+ def __iter__(self) -> callback_iter[_T]: ...
+ def __next__(self) -> _T: ...
+ def _reader(self) -> Iterator[_T]: ...
+ @property
+ def done(self) -> bool: ...
+ @property
+ def result(self) -> Any: ...
+
+def windowed_complete(
+ iterable: Iterable[_T], n: int
+) -> Iterator[Tuple[_T, ...]]: ...
+def all_unique(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
+) -> bool: ...
+def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ...
+def nth_permutation(
+ iterable: Iterable[_T], r: int, index: int
+) -> Tuple[_T, ...]: ...
+def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ...
+def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
+def combination_index(
+ element: Iterable[_T], iterable: Iterable[_T]
+) -> int: ...
+def permutation_index(
+ element: Iterable[_T], iterable: Iterable[_T]
+) -> int: ...
+def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...
+
+class countable(Generic[_T], Iterator[_T]):
+ def __init__(self, iterable: Iterable[_T]) -> None: ...
+ def __iter__(self) -> countable[_T]: ...
+ def __next__(self) -> _T: ...
+
+def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[List[_T]]: ...
+def zip_broadcast(
+ *objects: Union[_T, Iterable[_T]],
+ scalar_types: Union[
+ type, Tuple[Union[type, Tuple[Any, ...]], ...], None
+ ] = ...,
+ strict: bool = ...
+) -> Iterable[Tuple[_T, ...]]: ...
+def unique_in_window(
+ iterable: Iterable[_T], n: int, key: Optional[Callable[[_T], _U]] = ...
+) -> Iterator[_T]: ...
+def duplicates_everseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
+) -> Iterator[_T]: ...
+def duplicates_justseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
+) -> Iterator[_T]: ...
+
+class _SupportsLessThan(Protocol):
+ def __lt__(self, __other: Any) -> bool: ...
+
+_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)
+
+@overload
+def minmax(
+ iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
+) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
+@overload
+def minmax(
+ iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
+) -> Tuple[_T, _T]: ...
+@overload
+def minmax(
+ iterable_or_value: Iterable[_SupportsLessThanT],
+ *,
+ key: None = None,
+ default: _U
+) -> Union[_U, Tuple[_SupportsLessThanT, _SupportsLessThanT]]: ...
+@overload
+def minmax(
+ iterable_or_value: Iterable[_T],
+ *,
+ key: Callable[[_T], _SupportsLessThan],
+ default: _U,
+) -> Union[_U, Tuple[_T, _T]]: ...
+@overload
+def minmax(
+ iterable_or_value: _SupportsLessThanT,
+ __other: _SupportsLessThanT,
+ *others: _SupportsLessThanT
+) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
+@overload
+def minmax(
+ iterable_or_value: _T,
+ __other: _T,
+ *others: _T,
+ key: Callable[[_T], _SupportsLessThan]
+) -> Tuple[_T, _T]: ...
diff --git a/contrib/python/more-itertools/py3/more_itertools/py.typed b/contrib/python/more-itertools/py3/more_itertools/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/py.typed
diff --git a/contrib/python/more-itertools/py3/more_itertools/recipes.py b/contrib/python/more-itertools/py3/more_itertools/recipes.py
new file mode 100644
index 0000000000..a2596423a4
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/recipes.py
@@ -0,0 +1,698 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+import warnings
+from collections import deque
+from itertools import (
+ chain,
+ combinations,
+ count,
+ cycle,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ tee,
+ zip_longest,
+)
+import operator
+from random import randrange, sample, choice
+
+__all__ = [
+ 'all_equal',
+ 'before_and_after',
+ 'consume',
+ 'convolve',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pad_none',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'sliding_window',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'triplewise',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
+def take(n, iterable):
+ """Return first *n* items of the iterable as a list.
+
+ >>> take(3, range(10))
+ [0, 1, 2]
+
+ If there are fewer than *n* items in the iterable, all of them are
+ returned.
+
+ >>> take(10, range(3))
+ [0, 1, 2]
+
+ """
+ return list(islice(iterable, n))
+
+
+def tabulate(function, start=0):
+ """Return an iterator over the results of ``func(start)``,
+ ``func(start + 1)``, ``func(start + 2)``...
+
+ *func* should be a function that accepts one integer argument.
+
+ If *start* is not specified it defaults to 0. It will be incremented each
+ time the iterator is advanced.
+
+ >>> square = lambda x: x ** 2
+ >>> iterator = tabulate(square, -3)
+ >>> take(4, iterator)
+ [9, 4, 1, 0]
+
+ """
+ return map(function, count(start))
+
+
+def tail(n, iterable):
+ """Return an iterator over the last *n* items of *iterable*.
+
+ >>> t = tail(3, 'ABCDEFG')
+ >>> list(t)
+ ['E', 'F', 'G']
+
+ """
+ return iter(deque(iterable, maxlen=n))
+
+
+def consume(iterator, n=None):
+ """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
+ entirely.
+
+ Efficiently exhausts an iterator without returning values. Defaults to
+ consuming the whole iterator, but an optional second argument may be
+ provided to limit consumption.
+
+ >>> i = (x for x in range(10))
+ >>> next(i)
+ 0
+ >>> consume(i, 3)
+ >>> next(i)
+ 4
+ >>> consume(i)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ If the iterator has fewer items remaining than the provided limit, the
+ whole iterator will be consumed.
+
+ >>> i = (x for x in range(3))
+ >>> consume(i, 5)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ """
+ # Use functions that consume iterators at C speed.
+ if n is None:
+ # feed the entire iterator into a zero-length deque
+ deque(iterator, maxlen=0)
+ else:
+ # advance to the empty slice starting at position n
+ next(islice(iterator, n, n), None)
+
+
+def nth(iterable, n, default=None):
+ """Returns the nth item or a default value.
+
+ >>> l = range(10)
+ >>> nth(l, 3)
+ 3
+ >>> nth(l, 20, "zebra")
+ 'zebra'
+
+ """
+ return next(islice(iterable, n, None), default)
+
+
+def all_equal(iterable):
+ """
+ Returns ``True`` if all the elements are equal to each other.
+
+ >>> all_equal('aaaa')
+ True
+ >>> all_equal('aaab')
+ False
+
+ """
+ g = groupby(iterable)
+ return next(g, True) and not next(g, False)
+
+
+def quantify(iterable, pred=bool):
+ """Return the how many times the predicate is true.
+
+ >>> quantify([True, False, True])
+ 2
+
+ """
+ return sum(map(pred, iterable))
+
+
+def pad_none(iterable):
+ """Returns the sequence of elements and then returns ``None`` indefinitely.
+
+ >>> take(5, pad_none(range(3)))
+ [0, 1, 2, None, None]
+
+ Useful for emulating the behavior of the built-in :func:`map` function.
+
+ See also :func:`padded`.
+
+ """
+ return chain(iterable, repeat(None))
+
+
+padnone = pad_none
+
+
+def ncycles(iterable, n):
+ """Returns the sequence elements *n* times
+
+ >>> list(ncycles(["a", "b"], 3))
+ ['a', 'b', 'a', 'b', 'a', 'b']
+
+ """
+ return chain.from_iterable(repeat(tuple(iterable), n))
+
+
+def dotproduct(vec1, vec2):
+ """Returns the dot product of the two iterables.
+
+ >>> dotproduct([10, 10], [20, 20])
+ 400
+
+ """
+ return sum(map(operator.mul, vec1, vec2))
+
+
+def flatten(listOfLists):
+ """Return an iterator flattening one level of nesting in a list of lists.
+
+ >>> list(flatten([[0, 1], [2, 3]]))
+ [0, 1, 2, 3]
+
+ See also :func:`collapse`, which can flatten multiple levels of nesting.
+
+ """
+ return chain.from_iterable(listOfLists)
+
+
+def repeatfunc(func, times=None, *args):
+ """Call *func* with *args* repeatedly, returning an iterable over the
+ results.
+
+ If *times* is specified, the iterable will terminate after that many
+ repetitions:
+
+ >>> from operator import add
+ >>> times = 4
+ >>> args = 3, 5
+ >>> list(repeatfunc(add, times, *args))
+ [8, 8, 8, 8]
+
+ If *times* is ``None`` the iterable will not terminate:
+
+ >>> from random import randrange
+ >>> times = None
+ >>> args = 1, 11
+ >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
+ [2, 4, 8, 1, 8, 4]
+
+ """
+ if times is None:
+ return starmap(func, repeat(args))
+ return starmap(func, repeat(args, times))
+
+
+def _pairwise(iterable):
+ """Returns an iterator of paired items, overlapping, from the original
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
+
+ """
+ a, b = tee(iterable)
+ next(b, None)
+ yield from zip(a, b)
+
+
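+# itertools.pairwise exists on Python 3.10 and newer; prefer it, but keep
+# the pure-Python fallback (and its docstring) for older interpreters.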
+try:
+ from itertools import pairwise as itertools_pairwise
+except ImportError:
+ pairwise = _pairwise
+else:
+
+ def pairwise(iterable):
+ yield from itertools_pairwise(iterable)
+
+ pairwise.__doc__ = _pairwise.__doc__
+
+
+def grouper(iterable, n, fillvalue=None):
+ """Collect data into fixed-length chunks or blocks.
+
+ >>> list(grouper('ABCDEFG', 3, 'x'))
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
+
+ """
+ if isinstance(iterable, int):
+ warnings.warn(
+ "grouper expects iterable as first parameter", DeprecationWarning
+ )
+ n, iterable = iterable, n
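+    # n references to the same iterator: zip_longest pulls one item from it
+    # for each position, which produces consecutive chunks of size n.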
+ args = [iter(iterable)] * n
+ return zip_longest(fillvalue=fillvalue, *args)
+
+
+def roundrobin(*iterables):
+ """Yields an item from each iterable, alternating between them.
+
+ >>> list(roundrobin('ABC', 'D', 'EF'))
+ ['A', 'D', 'E', 'B', 'F', 'C']
+
+ This function produces the same output as :func:`interleave_longest`, but
+ may perform better for some inputs (in particular when the number of
+ iterables is small).
+
+ """
+ # Recipe credited to George Sakkis
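+    # Cycle over one bound __next__ per iterable; when one iterable is
+    # exhausted, rebuild the cycle with only the iterators still pending.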
+ pending = len(iterables)
+ nexts = cycle(iter(it).__next__ for it in iterables)
+ while pending:
+ try:
+ for next in nexts:
+ yield next()
+ except StopIteration:
+ pending -= 1
+ nexts = cycle(islice(nexts, pending))
+
+
+def partition(pred, iterable):
+ """
+ Returns a 2-tuple of iterables derived from the input iterable.
+ The first yields the items that have ``pred(item) == False``.
+ The second yields the items that have ``pred(item) == True``.
+
+ >>> is_odd = lambda x: x % 2 != 0
+ >>> iterable = range(10)
+ >>> even_items, odd_items = partition(is_odd, iterable)
+ >>> list(even_items), list(odd_items)
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+
+ If *pred* is None, :func:`bool` is used.
+
+ >>> iterable = [0, 1, False, True, '', ' ']
+ >>> false_items, true_items = partition(None, iterable)
+ >>> list(false_items), list(true_items)
+ ([0, False, ''], [1, True, ' '])
+
+ """
+ if pred is None:
+ pred = bool
+
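+    # Evaluate the predicate once per item, then tee the (result, item)
+    # pairs so each output iterator filters independently.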
+ evaluations = ((pred(x), x) for x in iterable)
+ t1, t2 = tee(evaluations)
+ return (
+ (x for (cond, x) in t1 if not cond),
+ (x for (cond, x) in t2 if cond),
+ )
+
+
+def powerset(iterable):
+ """Yields all possible subsets of the iterable.
+
+ >>> list(powerset([1, 2, 3]))
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+
+ :func:`powerset` will operate on iterables that aren't :class:`set`
+ instances, so repeated elements in the input will produce repeated elements
+ in the output. Use :func:`unique_everseen` on the input to avoid generating
+ duplicates:
+
+ >>> seq = [1, 1, 0]
+ >>> list(powerset(seq))
+ [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
+ >>> from more_itertools import unique_everseen
+ >>> list(powerset(unique_everseen(seq)))
+ [(), (1,), (0,), (1, 0)]
+
+ """
+ s = list(iterable)
+ return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
+
+
+def unique_everseen(iterable, key=None):
+ """
+ Yield unique elements, preserving order.
+
+ >>> list(unique_everseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D']
+ >>> list(unique_everseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'D']
+
+ Sequences with a mix of hashable and unhashable items can be used.
+ The function will be slower (i.e., `O(n^2)`) for unhashable items.
+
+ Remember that ``list`` objects are unhashable - you can use the *key*
+ parameter to transform the list to a tuple (which is hashable) to
+ avoid a slowdown.
+
+ >>> iterable = ([1, 2], [2, 3], [1, 2])
+ >>> list(unique_everseen(iterable)) # Slow
+ [[1, 2], [2, 3]]
+ >>> list(unique_everseen(iterable, key=tuple)) # Faster
+ [[1, 2], [2, 3]]
+
+    Similarly, you may want to convert unhashable ``set`` objects with
+ ``key=frozenset``. For ``dict`` objects,
+ ``key=lambda x: frozenset(x.items())`` can be used.
+
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ use_key = key is not None
+
+ for element in iterable:
+ k = key(element) if use_key else element
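+        # Hashable items are tracked in a set for O(1) membership tests;
+        # unhashable items raise TypeError and fall back to a list scan.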
+ try:
+ if k not in seenset:
+ seenset_add(k)
+ yield element
+ except TypeError:
+ if k not in seenlist:
+ seenlist_add(k)
+ yield element
+
+
+def unique_justseen(iterable, key=None):
+ """Yields elements in order, ignoring serial duplicates
+
+ >>> list(unique_justseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D', 'A', 'B']
+ >>> list(unique_justseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'A', 'D']
+
+ """
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ Multiple exceptions can be specified as a stopping condition:
+
+ >>> l = [1, 2, 3, '...', 4, 5, 6]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ [7, 6, 5]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ [4, 3, 2]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ []
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
+def first_true(iterable, default=None, pred=None):
+ """
+ Returns the first true value in the iterable.
+
+    If no true value is found, returns *default*.
+
+    If *pred* is not None, returns the first item for which
+    ``pred(item) == True``.
+
+ >>> first_true(range(10))
+ 1
+ >>> first_true(range(10), pred=lambda x: x > 5)
+ 6
+ >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
+ 'missing'
+
+ """
+ return next(filter(pred, iterable), default)
+
+
+def random_product(*args, repeat=1):
+ """Draw an item at random from each of the input iterables.
+
+ >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
+ ('c', 3, 'Z')
+
+ If *repeat* is provided as a keyword argument, that many items will be
+ drawn from each iterable.
+
+ >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
+ ('a', 2, 'd', 3)
+
+    This is equivalent to taking a random selection from
+    ``itertools.product(*args, **kwargs)``.
+
+ """
+ pools = [tuple(pool) for pool in args] * repeat
+ return tuple(choice(pool) for pool in pools)
+
+
+def random_permutation(iterable, r=None):
+ """Return a random *r* length permutation of the elements in *iterable*.
+
+ If *r* is not specified or is ``None``, then *r* defaults to the length of
+ *iterable*.
+
+ >>> random_permutation(range(5)) # doctest:+SKIP
+ (3, 4, 0, 1, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.permutations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ return tuple(sample(pool, r))
+
+
+def random_combination(iterable, r):
+ """Return a random *r* length subsequence of the elements in *iterable*.
+
+ >>> random_combination(range(5), 3) # doctest:+SKIP
+ (2, 3, 4)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(sample(range(n), r))
+ return tuple(pool[i] for i in indices)
+
+
+def random_combination_with_replacement(iterable, r):
+ """Return a random *r* length subsequence of elements in *iterable*,
+ allowing individual elements to be repeated.
+
+ >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
+ (0, 0, 1, 2, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations_with_replacement(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(randrange(n) for i in range(r))
+ return tuple(pool[i] for i in indices)
+
+
+def nth_combination(iterable, r, index):
+ """Equivalent to ``list(combinations(iterable, r))[index]``.
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`nth_combination` computes the subsequence at
+ sort position *index* directly, without computing the previous
+ subsequences.
+
+ >>> nth_combination(range(5), 3, 5)
+ (0, 3, 4)
+
+    ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
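+    # Compute c = C(n, r), the total number of r-combinations, via the
+    # symmetric value k = min(r, n - r).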
+ c = 1
+ k = min(r, n - r)
+ for i in range(1, k + 1):
+ c = c * (n - k + i) // i
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
+ result = []
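+    # Combinatorial number system: at each step, c counts the combinations
+    # that include the current candidate element.  Skip candidates whose
+    # blocks *index* passes over, then select the candidate and continue
+    # with the remaining positions.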
+ while r:
+ c, n, r = c * r // n, n - 1, r - 1
+ while index >= c:
+ index -= c
+ c, n = c * (n - r) // n, n - 1
+ result.append(pool[-1 - n])
+
+ return tuple(result)
+
+
+def prepend(value, iterator):
+ """Yield *value*, followed by the elements in *iterator*.
+
+ >>> value = '0'
+ >>> iterator = ['1', '2', '3']
+ >>> list(prepend(value, iterator))
+ ['0', '1', '2', '3']
+
+ To prepend multiple values, see :func:`itertools.chain`
+ or :func:`value_chain`.
+
+ """
+ return chain([value], iterator)
+
+
+def convolve(signal, kernel):
+ """Convolve the iterable *signal* with the iterable *kernel*.
+
+ >>> signal = (1, 2, 3, 4, 5)
+ >>> kernel = [3, 2, 1]
+ >>> list(convolve(signal, kernel))
+ [3, 8, 14, 20, 26, 14, 5]
+
+ Note: the input arguments are not interchangeable, as the *kernel*
+ is immediately consumed and stored.
+
+ """
+ kernel = tuple(kernel)[::-1]
+ n = len(kernel)
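+    # Start with a window of n zeros; appending signal values slides the
+    # kernel onto the signal, and the trailing zeros let it slide off.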
+ window = deque([0], maxlen=n) * n
+ for x in chain(signal, repeat(0, n - 1)):
+ window.append(x)
+ yield sum(map(operator.mul, kernel, window))
+
+
+def before_and_after(predicate, it):
+ """A variant of :func:`takewhile` that allows complete access to the
+ remainder of the iterator.
+
+ >>> it = iter('ABCdEfGhI')
+ >>> all_upper, remainder = before_and_after(str.isupper, it)
+ >>> ''.join(all_upper)
+ 'ABC'
+ >>> ''.join(remainder) # takewhile() would lose the 'd'
+ 'dEfGhI'
+
+ Note that the first iterator must be fully consumed before the second
+ iterator can generate valid results.
+ """
+ it = iter(it)
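+    # Holds the first element that fails the predicate, so that
+    # remainder_iterator() can re-emit it before the rest of *it*.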
+ transition = []
+
+ def true_iterator():
+ for elem in it:
+ if predicate(elem):
+ yield elem
+ else:
+ transition.append(elem)
+ return
+
+ def remainder_iterator():
+ yield from transition
+ yield from it
+
+ return true_iterator(), remainder_iterator()
+
+
+def triplewise(iterable):
+ """Return overlapping triplets from *iterable*.
+
+ >>> list(triplewise('ABCDE'))
+ [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
+
+ """
+ for (a, _), (b, c) in pairwise(pairwise(iterable)):
+ yield a, b, c
+
+
+def sliding_window(iterable, n):
+ """Return a sliding window of width *n* over *iterable*.
+
+ >>> list(sliding_window(range(6), 4))
+ [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]
+
+ If *iterable* has fewer than *n* items, then nothing is yielded:
+
+ >>> list(sliding_window(range(3), 4))
+ []
+
+ For a variant with more features, see :func:`windowed`.
+ """
+ it = iter(iterable)
+ window = deque(islice(it, n), maxlen=n)
+ if len(window) == n:
+ yield tuple(window)
+ for x in it:
+ window.append(x)
+ yield tuple(window)
diff --git a/contrib/python/more-itertools/py3/more_itertools/recipes.pyi b/contrib/python/more-itertools/py3/more_itertools/recipes.pyi
new file mode 100644
index 0000000000..4648a41b5e
--- /dev/null
+++ b/contrib/python/more-itertools/py3/more_itertools/recipes.pyi
@@ -0,0 +1,112 @@
+"""Stubs for more_itertools.recipes"""
+from typing import (
+ Any,
+ Callable,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+)
+from typing_extensions import overload, Type
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_U = TypeVar('_U')
+
+def take(n: int, iterable: Iterable[_T]) -> List[_T]: ...
+def tabulate(
+ function: Callable[[int], _T], start: int = ...
+) -> Iterator[_T]: ...
+def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
+def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ...
+@overload
+def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ...
+@overload
+def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ...
+def all_equal(iterable: Iterable[object]) -> bool: ...
+def quantify(
+ iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
+) -> int: ...
+def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
+def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
+def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
+def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
+def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
+def repeatfunc(
+ func: Callable[..., _U], times: Optional[int] = ..., *args: Any
+) -> Iterator[_U]: ...
+def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ...
+@overload
+def grouper(
+ iterable: Iterable[_T], n: int
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def grouper(
+ iterable: Iterable[_T], n: int, fillvalue: _U
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+@overload
+def grouper( # Deprecated interface
+ iterable: int, n: Iterable[_T]
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def grouper( # Deprecated interface
+ iterable: int, n: Iterable[_T], fillvalue: _U
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+def partition(
+ pred: Optional[Callable[[_T], object]], iterable: Iterable[_T]
+) -> Tuple[Iterator[_T], Iterator[_T]]: ...
+def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
+def unique_everseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
+) -> Iterator[_T]: ...
+def unique_justseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ...
+) -> Iterator[_T]: ...
+@overload
+def iter_except(
+ func: Callable[[], _T],
+ exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+ first: None = ...,
+) -> Iterator[_T]: ...
+@overload
+def iter_except(
+ func: Callable[[], _T],
+ exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+ first: Callable[[], _U],
+) -> Iterator[Union[_T, _U]]: ...
+@overload
+def first_true(
+ iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ...
+) -> Optional[_T]: ...
+@overload
+def first_true(
+ iterable: Iterable[_T],
+ default: _U,
+ pred: Optional[Callable[[_T], object]] = ...,
+) -> Union[_T, _U]: ...
+def random_product(
+ *args: Iterable[_T], repeat: int = ...
+) -> Tuple[_T, ...]: ...
+def random_permutation(
+ iterable: Iterable[_T], r: Optional[int] = ...
+) -> Tuple[_T, ...]: ...
+def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ...
+def random_combination_with_replacement(
+ iterable: Iterable[_T], r: int
+) -> Tuple[_T, ...]: ...
+def nth_combination(
+ iterable: Iterable[_T], r: int, index: int
+) -> Tuple[_T, ...]: ...
+def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ...
+def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
+def before_and_after(
+ predicate: Callable[[_T], bool], it: Iterable[_T]
+) -> Tuple[Iterator[_T], Iterator[_T]]: ...
+def triplewise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T, _T]]: ...
+def sliding_window(
+ iterable: Iterable[_T], n: int
+) -> Iterator[Tuple[_T, ...]]: ...
diff --git a/contrib/python/more-itertools/py3/patches/01-fix-tests.patch b/contrib/python/more-itertools/py3/patches/01-fix-tests.patch
new file mode 100644
index 0000000000..497d4d8da4
--- /dev/null
+++ b/contrib/python/more-itertools/py3/patches/01-fix-tests.patch
@@ -0,0 +1,17 @@
+--- contrib/python/more-itertools/py3/tests/test_more.py (index)
++++ contrib/python/more-itertools/py3/tests/test_more.py (working tree)
+@@ -177,13 +177,13 @@ class IterOnlyRange:
+ """User-defined iterable class which only support __iter__.
+
+ >>> r = IterOnlyRange(5)
+- >>> r[0]
++ >>> r[0] # doctest: +SKIP
+ AttributeError: IterOnlyRange instance has no attribute '__getitem__'
+
+ Note: In Python 3, ``TypeError`` will be raised because ``object`` is
+ inherited implicitly by default.
+
+- >>> r[0]
++ >>> r[0] # doctest: +SKIP
+ TypeError: 'IterOnlyRange' object does not support indexing
+ """
diff --git a/contrib/python/more-itertools/py3/tests/__init__.py b/contrib/python/more-itertools/py3/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/more-itertools/py3/tests/__init__.py
diff --git a/contrib/python/more-itertools/py3/tests/test_more.py b/contrib/python/more-itertools/py3/tests/test_more.py
new file mode 100644
index 0000000000..9a15025899
--- /dev/null
+++ b/contrib/python/more-itertools/py3/tests/test_more.py
@@ -0,0 +1,5033 @@
+import warnings
+
+from collections import Counter, abc
+from collections.abc import Set
+from datetime import datetime, timedelta
+from decimal import Decimal
+from doctest import DocTestSuite
+from fractions import Fraction
+from functools import partial, reduce
+from heapq import merge
+from io import StringIO
+from itertools import (
+ accumulate,
+ chain,
+ combinations,
+ count,
+ cycle,
+ groupby,
+ islice,
+ permutations,
+ product,
+ repeat,
+)
+from operator import add, mul, itemgetter
+from pickle import loads, dumps
+from random import seed, Random
+from statistics import mean
+from string import ascii_letters
+from sys import version_info
+from time import sleep
+from traceback import format_exc
+from unittest import skipIf, TestCase
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.more'))
+ return tests
+
+
+class CollateTests(TestCase):
+ """Unit tests for ``collate()``"""
+
+ # Also accidentally tests peekable, though that could use its own tests
+
+ def test_default(self):
+ """Test with the default `key` function."""
+ iterables = [range(4), range(7), range(3, 6)]
+ self.assertEqual(
+ sorted(reduce(list.__add__, [list(it) for it in iterables])),
+ list(mi.collate(*iterables)),
+ )
+
+ def test_key(self):
+ """Test using a custom `key` function."""
+ iterables = [range(5, 0, -1), range(4, 0, -1)]
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, key=lambda x: -x))
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Be nice if passed an empty list of iterables."""
+ self.assertEqual([], list(mi.collate()))
+
+ def test_one(self):
+ """Work when only 1 iterable is passed."""
+ self.assertEqual([0, 1], list(mi.collate(range(2))))
+
+ def test_reverse(self):
+ """Test the `reverse` kwarg."""
+ iterables = [range(4, 0, -1), range(7, 0, -1), range(3, 6, -1)]
+
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, reverse=True))
+ self.assertEqual(actual, expected)
+
+ def test_alias(self):
+ self.assertNotEqual(merge.__doc__, mi.collate.__doc__)
+ self.assertNotEqual(partial.__doc__, mi.collate.__doc__)
+
+
+class ChunkedTests(TestCase):
+ """Tests for ``chunked()``"""
+
+ def test_even(self):
+ """Test when ``n`` divides evenly into the length of the iterable."""
+ self.assertEqual(
+ list(mi.chunked('ABCDEF', 3)), [['A', 'B', 'C'], ['D', 'E', 'F']]
+ )
+
+ def test_odd(self):
+ """Test when ``n`` does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.chunked('ABCDE', 3)), [['A', 'B', 'C'], ['D', 'E']]
+ )
+
+ def test_none(self):
+ """Test when ``n`` has the value ``None``."""
+ self.assertEqual(
+ list(mi.chunked('ABCDE', None)), [['A', 'B', 'C', 'D', 'E']]
+ )
+
+ def test_strict_false(self):
+ """Test when ``n`` does not divide evenly into the length of the
+ iterable and strict is false.
+
+ """
+ self.assertEqual(
+ list(mi.chunked('ABCDE', 3, strict=False)),
+ [['A', 'B', 'C'], ['D', 'E']],
+ )
+
+ def test_strict_being_true(self):
+ """Test when ``n`` does not divide evenly into the length of the
+ iterable and strict is True (raising an exception).
+
+ """
+
+ def f():
+ return list(mi.chunked('ABCDE', 3, strict=True))
+
+ self.assertRaisesRegex(ValueError, "iterable is not divisible by n", f)
+ self.assertEqual(
+ list(mi.chunked('ABCDEF', 3, strict=True)),
+ [['A', 'B', 'C'], ['D', 'E', 'F']],
+ )
+
+ def test_strict_being_true_with_size_none(self):
+ """Test when ``n`` has value ``None`` and the keyword strict is True
+ (raising an exception).
+
+ """
+
+ def f():
+ return list(mi.chunked('ABCDE', None, strict=True))
+
+ self.assertRaisesRegex(
+ ValueError, "n must not be None when using strict mode.", f
+ )
+
+
+class FirstTests(TestCase):
+ def test_many(self):
+ # Also try it on a generator expression to make sure it works on
+ # whatever those return, across Python versions.
+ self.assertEqual(mi.first(x for x in range(4)), 0)
+
+ def test_one(self):
+ self.assertEqual(mi.first([3]), 3)
+
+ def test_empty_stop_iteration(self):
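+        # first() on an empty iterable should raise ValueError chained from
+        # the internal StopIteration (hence the 'direct cause' message).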
+ try:
+ mi.first([])
+ except ValueError:
+ formatted_exc = format_exc()
+ self.assertIn('StopIteration', formatted_exc)
+ self.assertIn(
+ 'The above exception was the direct cause', formatted_exc
+ )
+ else:
+ self.fail()
+
+ def test_default(self):
+ self.assertEqual(mi.first([], 'boo'), 'boo')
+
+
+class IterOnlyRange:
+ """User-defined iterable class which only support __iter__.
+
+ >>> r = IterOnlyRange(5)
+ >>> r[0] # doctest: +SKIP
+ AttributeError: IterOnlyRange instance has no attribute '__getitem__'
+
+ Note: In Python 3, ``TypeError`` will be raised because ``object`` is
+ inherited implicitly by default.
+
+ >>> r[0] # doctest: +SKIP
+ TypeError: 'IterOnlyRange' object does not support indexing
+ """
+
+ def __init__(self, n):
+ """Set the length of the range."""
+ self.n = n
+
+ def __iter__(self):
+ """Works same as range()."""
+ return iter(range(self.n))
+
+
+class LastTests(TestCase):
+ def test_basic(self):
+ cases = [
+ (range(4), 3),
+ (iter(range(4)), 3),
+ (range(1), 0),
+ (iter(range(1)), 0),
+ (IterOnlyRange(5), 4),
+ ({n: str(n) for n in range(5)}, 4),
+ ]
+ # Versions below 3.6.0 don't have ordered dicts
+ if version_info >= (3, 6, 0):
+ cases.append(({0: '0', -1: '-1', 2: '-2'}, 2))
+
+ for iterable, expected in cases:
+ with self.subTest(iterable=iterable):
+ self.assertEqual(mi.last(iterable), expected)
+
+ def test_default(self):
+ for iterable, default, expected in [
+ (range(1), None, 0),
+ ([], None, None),
+ ({}, None, None),
+ (iter([]), None, None),
+ ]:
+ with self.subTest(args=(iterable, default)):
+ self.assertEqual(mi.last(iterable, default=default), expected)
+
+ def test_empty(self):
+ for iterable in ([], iter(range(0))):
+ with self.subTest(iterable=iterable):
+ with self.assertRaises(ValueError):
+ mi.last(iterable)
+
+
+class NthOrLastTests(TestCase):
+ """Tests for ``nth_or_last()``"""
+
+ def test_basic(self):
+ self.assertEqual(mi.nth_or_last(range(3), 1), 1)
+ self.assertEqual(mi.nth_or_last(range(3), 3), 2)
+
+ def test_default_value(self):
+ default = 42
+ self.assertEqual(mi.nth_or_last(range(0), 3, default), default)
+
+ def test_empty_iterable_no_default(self):
+ self.assertRaises(ValueError, lambda: mi.nth_or_last(range(0), 0))
+
+
+class PeekableMixinTests:
+ """Common tests for ``peekable()`` and ``seekable()`` behavior"""
+
+ cls = None
+
+ def test_passthrough(self):
+ """Iterating a peekable without using ``peek()`` or ``prepend()``
+ should just give the underlying iterable's elements (a trivial test but
+ useful to set a baseline in case something goes wrong)"""
+ expected = [1, 2, 3, 4, 5]
+ actual = list(self.cls(expected))
+ self.assertEqual(actual, expected)
+
+ def test_peek_default(self):
+ """Make sure passing a default into ``peek()`` works."""
+ p = self.cls([])
+ self.assertEqual(p.peek(7), 7)
+
+ def test_truthiness(self):
+ """Make sure a ``peekable`` tests true iff there are items remaining in
+ the iterable.
+
+ """
+ p = self.cls([])
+ self.assertFalse(p)
+
+ p = self.cls(range(3))
+ self.assertTrue(p)
+
+ def test_simple_peeking(self):
+ """Make sure ``next`` and ``peek`` advance and don't advance the
+ iterator, respectively.
+
+ """
+ p = self.cls(range(10))
+ self.assertEqual(next(p), 0)
+ self.assertEqual(p.peek(), 1)
+ self.assertEqual(p.peek(), 1)
+ self.assertEqual(next(p), 1)
+
+
+class PeekableTests(PeekableMixinTests, TestCase):
+ """Tests for ``peekable()`` behavior not incidentally covered by testing
+ ``collate()``
+
+ """
+
+ cls = mi.peekable
+
+ def test_indexing(self):
+ """
+ Indexing into the peekable shouldn't advance the iterator.
+ """
+ p = mi.peekable('abcdefghijkl')
+
+ # The 0th index is what ``next()`` will return
+ self.assertEqual(p[0], 'a')
+ self.assertEqual(next(p), 'a')
+
+        # Indexing further into the peekable shouldn't advance the iterator
+ self.assertEqual(p[2], 'd')
+ self.assertEqual(next(p), 'b')
+
+ # The 0th index moves up with the iterator; the last index follows
+ self.assertEqual(p[0], 'c')
+ self.assertEqual(p[9], 'l')
+
+ self.assertEqual(next(p), 'c')
+ self.assertEqual(p[8], 'l')
+
+ # Negative indexing should work too
+ self.assertEqual(p[-2], 'k')
+ self.assertEqual(p[-9], 'd')
+ self.assertRaises(IndexError, lambda: p[-10])
+
+ def test_slicing(self):
+ """Slicing the peekable shouldn't advance the iterator."""
+ seq = list('abcdefghijkl')
+ p = mi.peekable(seq)
+
+ # Slicing the peekable should just be like slicing a re-iterable
+ self.assertEqual(p[1:4], seq[1:4])
+
+ # Advancing the iterator moves the slices up also
+ self.assertEqual(next(p), 'a')
+ self.assertEqual(p[1:4], seq[1:][1:4])
+
+ # Implicit starts and stop should work
+ self.assertEqual(p[:5], seq[1:][:5])
+ self.assertEqual(p[:], seq[1:][:])
+
+ # Indexing past the end should work
+ self.assertEqual(p[:100], seq[1:][:100])
+
+ # Steps should work, including negative
+ self.assertEqual(p[::2], seq[1:][::2])
+ self.assertEqual(p[::-1], seq[1:][::-1])
+
+ def test_slicing_reset(self):
+ """Test slicing on a fresh iterable each time"""
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, 4]
+ for slice_args in product(indexes, indexes, steps):
+ it = iter(iterable)
+ p = mi.peekable(it)
+ next(p)
+ index = slice(*slice_args)
+ actual = p[index]
+ expected = iterable[1:][index]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_slicing_error(self):
+ iterable = '01234567'
+ p = mi.peekable(iter(iterable))
+
+ # Prime the cache
+ p.peek()
+ old_cache = list(p._cache)
+
+ # Illegal slice
+ with self.assertRaises(ValueError):
+ p[1:-1:0]
+
+ # Neither the cache nor the iteration should be affected
+ self.assertEqual(old_cache, list(p._cache))
+ self.assertEqual(list(p), list(iterable))
+
+ # prepend() behavior tests
+
+ def test_prepend(self):
+ """Tests intersperesed ``prepend()`` and ``next()`` calls"""
+ it = mi.peekable(range(2))
+ actual = []
+
+ # Test prepend() before next()
+ it.prepend(10)
+ actual += [next(it), next(it)]
+
+ # Test prepend() between next()s
+ it.prepend(11)
+ actual += [next(it), next(it)]
+
+ # Test prepend() after source iterable is consumed
+ it.prepend(12)
+ actual += [next(it)]
+
+ expected = [10, 0, 11, 1, 12]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend(self):
+ """Tests prepending multiple items and getting them in proper order"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ it.prepend(10, 11, 12)
+ it.prepend(20, 21)
+ actual += list(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Tests prepending in front of an empty iterable"""
+ it = mi.peekable([])
+ it.prepend(10)
+ actual = list(it)
+ expected = [10]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_truthiness(self):
+ """Tests that ``__bool__()`` or ``__nonzero__()`` works properly
+ with ``prepend()``"""
+ it = mi.peekable(range(5))
+ self.assertTrue(it)
+ actual = list(it)
+ self.assertFalse(it)
+ it.prepend(10)
+ self.assertTrue(it)
+ actual += [next(it)]
+ self.assertFalse(it)
+ expected = [0, 1, 2, 3, 4, 10]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend_peek(self):
+ """Tests prepending multiple elements and getting them in reverse order
+ while peeking"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ self.assertEqual(it.peek(), 2)
+ it.prepend(10, 11, 12)
+ self.assertEqual(it.peek(), 10)
+ it.prepend(20, 21)
+ self.assertEqual(it.peek(), 20)
+ actual += list(it)
+ self.assertFalse(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_after_stop(self):
+ """Test resuming iteration after a previous exhaustion"""
+ it = mi.peekable(range(3))
+ self.assertEqual(list(it), [0, 1, 2])
+ self.assertRaises(StopIteration, lambda: next(it))
+ it.prepend(10)
+ self.assertEqual(next(it), 10)
+ self.assertRaises(StopIteration, lambda: next(it))
+
+ def test_prepend_slicing(self):
+ """Tests interaction between prepending and slicing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+ pseq = [30, 40, 50] + seq # pseq for prepended_seq
+
+ # adapt the specific tests from test_slicing
+ self.assertEqual(p[0], 30)
+ self.assertEqual(p[1:8], pseq[1:8])
+ self.assertEqual(p[1:], pseq[1:])
+ self.assertEqual(p[:5], pseq[:5])
+ self.assertEqual(p[:], pseq[:])
+ self.assertEqual(p[:100], pseq[:100])
+ self.assertEqual(p[::2], pseq[::2])
+ self.assertEqual(p[::-1], pseq[::-1])
+
+ def test_prepend_indexing(self):
+ """Tests interaction between prepending and indexing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+
+ self.assertEqual(p[0], 30)
+ self.assertEqual(next(p), 30)
+ self.assertEqual(p[2], 0)
+ self.assertEqual(next(p), 40)
+ self.assertEqual(p[0], 50)
+ self.assertEqual(p[9], 8)
+ self.assertEqual(next(p), 50)
+ self.assertEqual(p[8], 8)
+ self.assertEqual(p[-2], 18)
+ self.assertEqual(p[-9], 11)
+ self.assertRaises(IndexError, lambda: p[-21])
+
+ def test_prepend_iterable(self):
+ """Tests prepending from an iterable"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(5)))
+ actual = list(it)
+ expected = list(chain(range(5), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_many(self):
+ """Tests that prepending a huge number of elements works"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(20000)))
+ actual = list(it)
+ expected = list(chain(range(20000), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_reversed(self):
+ """Tests prepending from a reversed iterable"""
+ it = mi.peekable(range(3))
+ it.prepend(*reversed((10, 11, 12)))
+ actual = list(it)
+ expected = [12, 11, 10, 0, 1, 2]
+ self.assertEqual(actual, expected)
+
+
+class ConsumerTests(TestCase):
+ """Tests for ``consumer()``"""
+
+ def test_consumer(self):
+ @mi.consumer
+ def eater():
+ while True:
+ x = yield # noqa
+
+ e = eater()
+ e.send('hi') # without @consumer, would raise TypeError
+
+
+class DistinctPermutationsTests(TestCase):
+ def test_distinct_permutations(self):
+ """Make sure the output for ``distinct_permutations()`` is the same as
+ set(permutations(it)).
+
+ """
+ iterable = ['z', 'a', 'a', 'q', 'q', 'q', 'y']
+ test_output = sorted(mi.distinct_permutations(iterable))
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ def test_other_iterables(self):
+ """Make sure ``distinct_permutations()`` accepts a different type of
+ iterables.
+
+ """
+ # a generator
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ # an iterator
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ def test_r(self):
+ for iterable, r in (
+ ('mississippi', 0),
+ ('mississippi', 1),
+ ('mississippi', 6),
+ ('mississippi', 7),
+ ('mississippi', 12),
+ ([0, 1, 1, 0], 0),
+ ([0, 1, 1, 0], 1),
+ ([0, 1, 1, 0], 2),
+ ([0, 1, 1, 0], 3),
+ ([0, 1, 1, 0], 4),
+ (['a'], 0),
+ (['a'], 1),
+ (['a'], 5),
+ ([], 0),
+ ([], 1),
+ ([], 4),
+ ):
+ with self.subTest(iterable=iterable, r=r):
+ expected = sorted(set(permutations(iterable, r)))
+ actual = sorted(mi.distinct_permutations(iter(iterable), r))
+ self.assertEqual(actual, expected)
+
+
+class IlenTests(TestCase):
+ def test_ilen(self):
+ """Sanity-checks for ``ilen()``."""
+ # Non-empty
+ self.assertEqual(
+ mi.ilen(filter(lambda x: x % 10 == 0, range(101))), 11
+ )
+
+ # Empty
+ self.assertEqual(mi.ilen(x for x in range(0)), 0)
+
+ # Iterable with __len__
+ self.assertEqual(mi.ilen(list(range(6))), 6)
+
+
+class MinMaxTests(TestCase):
+ def test_basic(self):
+ for iterable, expected in (
+ # easy case
+ ([0, 1, 2, 3], (0, 3)),
+ # min and max are not in the extremes + we have `int`s and `float`s
+ ([3, 5.5, -1, 2], (-1, 5.5)),
+ # unordered collection
+ ({3, 5.5, -1, 2}, (-1, 5.5)),
+ # with repetitions
+ ([3, 5.5, float('-Inf'), 5.5], (float('-Inf'), 5.5)),
+ # other collections
+ ('banana', ('a', 'n')),
+ ({0: 1, 2: 100, 1: 10}, (0, 2)),
+ (range(3, 14), (3, 13)),
+ ):
+ with self.subTest(iterable=iterable, expected=expected):
+ # check for expected results
+ self.assertTupleEqual(mi.minmax(iterable), expected)
+ # check for equality with built-in `min` and `max`
+ self.assertTupleEqual(
+ mi.minmax(iterable), (min(iterable), max(iterable))
+ )
+
+ def test_unpacked(self):
+ self.assertTupleEqual(mi.minmax(2, 3, 1), (1, 3))
+ self.assertTupleEqual(mi.minmax(12, 3, 4, key=str), (12, 4))
+
+ def test_iterables(self):
+ self.assertTupleEqual(mi.minmax(x for x in [0, 1, 2, 3]), (0, 3))
+ self.assertTupleEqual(
+ mi.minmax(map(str, [3, 5.5, 'a', 2])), ('2', 'a')
+ )
+ self.assertTupleEqual(
+ mi.minmax(filter(None, [0, 3, '', None, 10])), (3, 10)
+ )
+
+ def test_key(self):
+ self.assertTupleEqual(
+ mi.minmax({(), (1, 4, 2), 'abcde', range(4)}, key=len),
+ ((), 'abcde'),
+ )
+ self.assertTupleEqual(
+ mi.minmax((x for x in [10, 3, 25]), key=str), (10, 3)
+ )
+
+ def test_default(self):
+ with self.assertRaises(ValueError):
+ mi.minmax([])
+
+ self.assertIs(mi.minmax([], default=None), None)
+ self.assertListEqual(mi.minmax([], default=[1, 'a']), [1, 'a'])
+
+
+class WithIterTests(TestCase):
+ def test_with_iter(self):
+ s = StringIO('One fish\nTwo fish')
+ initial_words = [line.split()[0] for line in mi.with_iter(s)]
+
+ # Iterable's items should be faithfully represented
+ self.assertEqual(initial_words, ['One', 'Two'])
+ # The file object should be closed
+ self.assertTrue(s.closed)
+
+
+class OneTests(TestCase):
+ def test_basic(self):
+ it = iter(['item'])
+ self.assertEqual(mi.one(it), 'item')
+
+ def test_too_short(self):
+ it = iter([])
+ for too_short, exc_type in [
+ (None, ValueError),
+ (IndexError, IndexError),
+ ]:
+ with self.subTest(too_short=too_short):
+ try:
+ mi.one(it, too_short=too_short)
+ except exc_type:
+ formatted_exc = format_exc()
+ self.assertIn('StopIteration', formatted_exc)
+ self.assertIn(
+ 'The above exception was the direct cause',
+ formatted_exc,
+ )
+ else:
+ self.fail()
+
+ def test_too_long(self):
+ it = count()
+ self.assertRaises(ValueError, lambda: mi.one(it)) # burn 0 and 1
+ self.assertEqual(next(it), 2)
+ self.assertRaises(
+ OverflowError, lambda: mi.one(it, too_long=OverflowError)
+ )
+
+ def test_too_long_default_message(self):
+ it = count()
+ self.assertRaisesRegex(
+ ValueError,
+ "Expected exactly one item in "
+ "iterable, but got 0, 1, and "
+ "perhaps more.",
+ lambda: mi.one(it),
+ )
+
+
+class IntersperseTest(TestCase):
+ """Tests for intersperse()"""
+
+ def test_even(self):
+ iterable = (x for x in '01')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1']
+ )
+
+ def test_odd(self):
+ iterable = (x for x in '012')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1', None, '2']
+ )
+
+ def test_nested(self):
+ element = ('a', 'b')
+ iterable = (x for x in '012')
+ actual = list(mi.intersperse(element, iterable))
+ expected = ['0', ('a', 'b'), '1', ('a', 'b'), '2']
+ self.assertEqual(actual, expected)
+
+ def test_not_iterable(self):
+ self.assertRaises(TypeError, lambda: mi.intersperse('x', 1))
+
+ def test_n(self):
+ for n, element, expected in [
+ (1, '_', ['0', '_', '1', '_', '2', '_', '3', '_', '4', '_', '5']),
+ (2, '_', ['0', '1', '_', '2', '3', '_', '4', '5']),
+ (3, '_', ['0', '1', '2', '_', '3', '4', '5']),
+ (4, '_', ['0', '1', '2', '3', '_', '4', '5']),
+ (5, '_', ['0', '1', '2', '3', '4', '_', '5']),
+ (6, '_', ['0', '1', '2', '3', '4', '5']),
+ (7, '_', ['0', '1', '2', '3', '4', '5']),
+ (3, ['a', 'b'], ['0', '1', '2', ['a', 'b'], '3', '4', '5']),
+ ]:
+ iterable = (x for x in '012345')
+ actual = list(mi.intersperse(element, iterable, n=n))
+ self.assertEqual(actual, expected)
+
+ def test_n_zero(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.intersperse('x', '012', n=0))
+ )
+
+
+class UniqueToEachTests(TestCase):
+ """Tests for ``unique_to_each()``"""
+
+ def test_all_unique(self):
+ """When all the input iterables are unique the output should match
+ the input."""
+ iterables = [[1, 2], [3, 4, 5], [6, 7, 8]]
+ self.assertEqual(mi.unique_to_each(*iterables), iterables)
+
+ def test_duplicates(self):
+ """When there are duplicates in any of the input iterables that aren't
+ in the rest, those duplicates should be emitted."""
+ iterables = ["mississippi", "missouri"]
+ self.assertEqual(
+ mi.unique_to_each(*iterables), [['p', 'p'], ['o', 'u', 'r']]
+ )
+
+ def test_mixed(self):
+ """When the input iterables contain different types the function should
+ still behave properly"""
+ iterables = ['x', (i for i in range(3)), [1, 2, 3], tuple()]
+ self.assertEqual(mi.unique_to_each(*iterables), [['x'], [0], [3], []])
+
+
+class WindowedTests(TestCase):
+ """Tests for ``windowed()``"""
+
+ def test_basic(self):
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 3))
+ expected = [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_large_size(self):
+ """
+ When the window size is larger than the iterable, and no fill value is
+        given, ``None`` should be filled in.
+ """
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 6))
+ expected = [(1, 2, 3, 4, 5, None)]
+ self.assertEqual(actual, expected)
+
+ def test_fillvalue(self):
+ """
+ When sizes don't match evenly, the given fill value should be used.
+ """
+ iterable = [1, 2, 3, 4, 5]
+
+ for n, kwargs, expected in [
+ (6, {}, [(1, 2, 3, 4, 5, '!')]), # n > len(iterable)
+ (3, {'step': 3}, [(1, 2, 3), (4, 5, '!')]), # using ``step``
+ ]:
+ actual = list(mi.windowed(iterable, n, fillvalue='!', **kwargs))
+ self.assertEqual(actual, expected)
+
+ def test_zero(self):
+ """When the window size is zero, an empty tuple should be emitted."""
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 0))
+ expected = [tuple()]
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ """When the window size is negative, ValueError should be raised."""
+ with self.assertRaises(ValueError):
+ list(mi.windowed([1, 2, 3, 4, 5], -1))
+
+ def test_step(self):
+ """The window should advance by the number of steps provided"""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ for n, step, expected in [
+ (3, 2, [(1, 2, 3), (3, 4, 5), (5, 6, 7)]), # n > step
+ (3, 3, [(1, 2, 3), (4, 5, 6), (7, None, None)]), # n == step
+ (3, 4, [(1, 2, 3), (5, 6, 7)]), # line up nicely
+ (3, 5, [(1, 2, 3), (6, 7, None)]), # off by one
+ (3, 6, [(1, 2, 3), (7, None, None)]), # off by two
+ (3, 7, [(1, 2, 3)]), # step past the end
+ (7, 8, [(1, 2, 3, 4, 5, 6, 7)]), # step > len(iterable)
+ ]:
+ actual = list(mi.windowed(iterable, n, step=step))
+ self.assertEqual(actual, expected)
+
+ # Step must be greater than or equal to 1
+ with self.assertRaises(ValueError):
+ list(mi.windowed(iterable, 3, step=0))
+
+
+class SubstringsTests(TestCase):
+ def test_basic(self):
+ iterable = (x for x in range(4))
+ actual = list(mi.substrings(iterable))
+ expected = [
+ (0,),
+ (1,),
+ (2,),
+ (3,),
+ (0, 1),
+ (1, 2),
+ (2, 3),
+ (0, 1, 2),
+ (1, 2, 3),
+ (0, 1, 2, 3),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_strings(self):
+ iterable = 'abc'
+ actual = list(mi.substrings(iterable))
+ expected = [
+ ('a',),
+ ('b',),
+ ('c',),
+ ('a', 'b'),
+ ('b', 'c'),
+ ('a', 'b', 'c'),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ iterable = iter([])
+ actual = list(mi.substrings(iterable))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_order(self):
+ iterable = [2, 0, 1]
+ actual = list(mi.substrings(iterable))
+ expected = [(2,), (0,), (1,), (2, 0), (0, 1), (2, 0, 1)]
+ self.assertEqual(actual, expected)
+
+
+class SubstringsIndexesTests(TestCase):
+ def test_basic(self):
+ sequence = [x for x in range(4)]
+ actual = list(mi.substrings_indexes(sequence))
+ expected = [
+ ([0], 0, 1),
+ ([1], 1, 2),
+ ([2], 2, 3),
+ ([3], 3, 4),
+ ([0, 1], 0, 2),
+ ([1, 2], 1, 3),
+ ([2, 3], 2, 4),
+ ([0, 1, 2], 0, 3),
+ ([1, 2, 3], 1, 4),
+ ([0, 1, 2, 3], 0, 4),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_strings(self):
+ sequence = 'abc'
+ actual = list(mi.substrings_indexes(sequence))
+ expected = [
+ ('a', 0, 1),
+ ('b', 1, 2),
+ ('c', 2, 3),
+ ('ab', 0, 2),
+ ('bc', 1, 3),
+ ('abc', 0, 3),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ sequence = []
+ actual = list(mi.substrings_indexes(sequence))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_order(self):
+ sequence = [2, 0, 1]
+ actual = list(mi.substrings_indexes(sequence))
+ expected = [
+ ([2], 0, 1),
+ ([0], 1, 2),
+ ([1], 2, 3),
+ ([2, 0], 0, 2),
+ ([0, 1], 1, 3),
+ ([2, 0, 1], 0, 3),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_reverse(self):
+ sequence = [2, 0, 1]
+ actual = list(mi.substrings_indexes(sequence, reverse=True))
+ expected = [
+ ([2, 0, 1], 0, 3),
+ ([2, 0], 0, 2),
+ ([0, 1], 1, 3),
+ ([2], 0, 1),
+ ([0], 1, 2),
+ ([1], 2, 3),
+ ]
+ self.assertEqual(actual, expected)
+
+
+class BucketTests(TestCase):
+ def test_basic(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ # In-order access
+ self.assertEqual(list(D[10]), [10, 11, 12])
+
+ # Out of order access
+ self.assertEqual(list(D[30]), [30, 31, 33])
+ self.assertEqual(list(D[20]), [20, 21, 22, 23])
+
+ self.assertEqual(list(D[40]), []) # Nothing in here!
+
+ def test_in(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ self.assertIn(10, D)
+ self.assertNotIn(40, D)
+ self.assertIn(20, D)
+ self.assertNotIn(21, D)
+
+ # Checking in-ness shouldn't advance the iterator
+ self.assertEqual(next(D[10]), 10)
+
+ def test_validator(self):
+ iterable = count(0)
+ key = lambda x: int(str(x)[0]) # First digit of each number
+ validator = lambda x: 0 < x < 10 # No leading zeros
+ D = mi.bucket(iterable, key, validator=validator)
+ self.assertEqual(mi.take(3, D[1]), [1, 10, 11])
+ self.assertNotIn(0, D) # Non-valid entries don't return True
+ self.assertNotIn(0, D._cache) # Don't store non-valid entries
+ self.assertEqual(list(D[0]), [])
+
+ def test_list(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+ self.assertEqual(list(D[10]), [10, 11, 12])
+ self.assertEqual(list(D[20]), [20, 21, 22, 23])
+ self.assertEqual(list(D[30]), [30, 31, 33])
+ self.assertEqual(set(D), {10, 20, 30})
+
+ def test_list_validator(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ key = lambda x: 10 * (x // 10)
+ validator = lambda x: x != 20
+ D = mi.bucket(iterable, key, validator=validator)
+ self.assertEqual(set(D), {10, 30})
+ self.assertEqual(list(D[10]), [10, 11, 12])
+ self.assertEqual(list(D[20]), [])
+ self.assertEqual(list(D[30]), [30, 31, 33])
+
+
+class SpyTests(TestCase):
+ """Tests for ``spy()``"""
+
+ def test_basic(self):
+ original_iterable = iter('abcdefg')
+ head, new_iterable = mi.spy(original_iterable)
+ self.assertEqual(head, ['a'])
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_unpacking(self):
+ original_iterable = iter('abcdefg')
+ (first, second, third), new_iterable = mi.spy(original_iterable, 3)
+ self.assertEqual(first, 'a')
+ self.assertEqual(second, 'b')
+ self.assertEqual(third, 'c')
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_too_many(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 4)
+ self.assertEqual(head, ['a', 'b', 'c'])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+ def test_zero(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 0)
+ self.assertEqual(head, [])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+ def test_immutable(self):
+ original_iterable = iter('abcdefg')
+ head, new_iterable = mi.spy(original_iterable, 3)
+ head[0] = 'A'
+ self.assertEqual(head, ['A', 'b', 'c'])
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+
+class InterleaveTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_inf = count()
+ actual = list(mi.interleave(it_list, it_str, it_inf))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', 3]
+ self.assertEqual(actual, expected)
+
+
+class InterleaveLongestTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave_longest([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave_longest([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_gen = (x for x in range(3))
+ actual = list(mi.interleave_longest(it_list, it_str, it_gen))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', '5']
+ self.assertEqual(actual, expected)
+
+
+class InterleaveEvenlyTests(TestCase):
+ def test_equal_lengths(self):
+ # when lengths are equal, the relative order shouldn't change
+ a = [1, 2, 3]
+ b = [5, 6, 7]
+ actual = list(mi.interleave_evenly([a, b]))
+ expected = [1, 5, 2, 6, 3, 7]
+ self.assertEqual(actual, expected)
+
+ def test_proportional(self):
+ # easy case where the iterables have proportional length
+ a = [1, 2, 3, 4]
+ b = [5, 6]
+ actual = list(mi.interleave_evenly([a, b]))
+ expected = [1, 2, 5, 3, 4, 6]
+ self.assertEqual(actual, expected)
+
+ # swapping a and b should yield the same result
+ actual_swapped = list(mi.interleave_evenly([b, a]))
+ self.assertEqual(actual_swapped, expected)
+
+ def test_not_proportional(self):
+ a = [1, 2, 3, 4, 5, 6, 7]
+ b = [8, 9, 10]
+ expected = [1, 2, 8, 3, 4, 9, 5, 6, 10, 7]
+ actual = list(mi.interleave_evenly([a, b]))
+ self.assertEqual(actual, expected)
+
+ def test_degenerate_one(self):
+ a = [0, 1, 2, 3, 4]
+ b = [5]
+ expected = [0, 1, 2, 5, 3, 4]
+ actual = list(mi.interleave_evenly([a, b]))
+ self.assertEqual(actual, expected)
+
+ def test_degenerate_empty(self):
+ a = [1, 2, 3]
+ b = []
+ expected = [1, 2, 3]
+ actual = list(mi.interleave_evenly([a, b]))
+ self.assertEqual(actual, expected)
+
+ def test_three_iters(self):
+ a = ["a1", "a2", "a3", "a4", "a5"]
+ b = ["b1", "b2", "b3"]
+ c = ["c1"]
+ actual = list(mi.interleave_evenly([a, b, c]))
+ expected = ["a1", "b1", "a2", "c1", "a3", "b2", "a4", "b3", "a5"]
+ self.assertEqual(actual, expected)
+
+ def test_many_iters(self):
+ # smoke test with many iterables: create iterables with a random
+ # number of elements starting with a character ("a0", "a1", ...)
+ rng = Random(0)
+ iterables = []
+ for ch in ascii_letters:
+ length = rng.randint(0, 100)
+ iterable = [f"{ch}{i}" for i in range(length)]
+ iterables.append(iterable)
+
+ interleaved = list(mi.interleave_evenly(iterables))
+
+ # for each iterable, check that the result contains all its items
+ for iterable, ch_expect in zip(iterables, ascii_letters):
+ interleaved_actual = [
+ e for e in interleaved if e.startswith(ch_expect)
+ ]
+ assert len(set(interleaved_actual)) == len(iterable)
+
+ def test_manual_lengths(self):
+ a = combinations(range(4), 2)
+ len_a = 4 * (4 - 1) // 2 # == 6
+ b = combinations(range(4), 3)
+ len_b = 4
+
+ expected = [
+ (0, 1),
+ (0, 1, 2),
+ (0, 2),
+ (0, 3),
+ (0, 1, 3),
+ (1, 2),
+ (0, 2, 3),
+ (1, 3),
+ (2, 3),
+ (1, 2, 3),
+ ]
+ actual = list(mi.interleave_evenly([a, b], lengths=[len_a, len_b]))
+ self.assertEqual(expected, actual)
+
+ def test_no_length_raises(self):
+ # combinations doesn't have __len__, should trigger ValueError
+ iterables = [range(5), combinations(range(5), 2)]
+ with self.assertRaises(ValueError):
+ list(mi.interleave_evenly(iterables))
+
+ def test_argument_mismatch_raises(self):
+ # pass mismatching number of iterables and lengths
+ iterables = [range(3)]
+ lengths = [3, 4]
+ with self.assertRaises(ValueError):
+ list(mi.interleave_evenly(iterables, lengths=lengths))
+
+
+class TestCollapse(TestCase):
+ """Tests for ``collapse()``"""
+
+ def test_collapse(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l)), [1, 2, 3, 4, 5])
+
+ def test_collapse_to_string(self):
+ l = [["s1"], "s2", [["s3"], "s4"], [[["s5"]]]]
+ self.assertEqual(list(mi.collapse(l)), ["s1", "s2", "s3", "s4", "s5"])
+
+ def test_collapse_to_bytes(self):
+ l = [[b"s1"], b"s2", [[b"s3"], b"s4"], [[[b"s5"]]]]
+ self.assertEqual(
+ list(mi.collapse(l)), [b"s1", b"s2", b"s3", b"s4", b"s5"]
+ )
+
+ def test_collapse_flatten(self):
+ l = [[1], [2], [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=1)), list(mi.flatten(l)))
+
+ def test_collapse_to_level(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=2)), [1, 2, 3, 4, [5]])
+ self.assertEqual(
+ list(mi.collapse(mi.collapse(l, levels=1), levels=1)),
+ list(mi.collapse(l, levels=2)),
+ )
+
+ def test_collapse_to_list(self):
+ l = (1, [2], (3, [4, (5,)], 'ab'))
+ actual = list(mi.collapse(l, base_type=list))
+ expected = [1, [2], 3, [4, (5,)], 'ab']
+ self.assertEqual(actual, expected)
+
+
+class SideEffectTests(TestCase):
+ """Tests for ``side_effect()``"""
+
+ def test_individual(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10)))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 10)
+
+ def test_chunked(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10), 2))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 5)
+
+ def test_before_after(self):
+ f = StringIO()
+ collector = []
+
+ def func(item):
+ print(item, file=f)
+ collector.append(f.getvalue())
+
+ def it():
+ yield 'a'
+ yield 'b'
+ raise RuntimeError('kaboom')
+
+ before = lambda: print('HEADER', file=f)
+ after = f.close
+
+ try:
+ mi.consume(mi.side_effect(func, it(), before=before, after=after))
+ except RuntimeError:
+ pass
+
+ # The iterable should have been written to the file
+ self.assertEqual(collector, ['HEADER\na\n', 'HEADER\na\nb\n'])
+
+ # The file should be closed even though something bad happened
+ self.assertTrue(f.closed)
+
+ def test_before_fails(self):
+ f = StringIO()
+ func = lambda x: print(x, file=f)
+
+ def before():
+ raise RuntimeError('ouch')
+
+ try:
+ mi.consume(
+ mi.side_effect(func, 'abc', before=before, after=f.close)
+ )
+ except RuntimeError:
+ pass
+
+ # The file should be closed even though something bad happened in the
+ # before function
+ self.assertTrue(f.closed)
+
+
+class SlicedTests(TestCase):
+ """Tests for ``sliced()``"""
+
+ def test_even(self):
+ """Test when the length of the sequence is divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 3)), ['ABC', 'DEF', 'GHI'])
+
+ def test_odd(self):
+ """Test when the length of the sequence is not divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 4)), ['ABCD', 'EFGH', 'I'])
+
+ def test_not_sliceable(self):
+ seq = (x for x in 'ABCDEFGHI')
+
+ with self.assertRaises(TypeError):
+ list(mi.sliced(seq, 3))
+
+ def test_odd_and_strict(self):
+ seq = [x for x in 'ABCDEFGHI']
+
+ with self.assertRaises(ValueError):
+ list(mi.sliced(seq, 4, strict=True))
+
+ def test_numpy_like_array(self):
+        # NumPy arrays don't behave like Python lists: calling bool()
+        # on them doesn't return False when empty and True when non-empty.
+        # Emulate that behavior.
+ class FalseList(list):
+ def __getitem__(self, key):
+ ret = super().__getitem__(key)
+ if isinstance(key, slice):
+ return FalseList(ret)
+
+ return ret
+
+ def __bool__(self):
+ return False
+
+ seq = FalseList(range(9))
+ actual = list(mi.sliced(seq, 3))
+ expected = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+ self.assertEqual(actual, expected)
+
+
+class SplitAtTests(TestCase):
+ def test_basic(self):
+ for iterable, separator in [
+ ('a,bb,ccc,dddd', ','),
+ (',a,bb,ccc,dddd', ','),
+ ('a,bb,ccc,dddd,', ','),
+ ('a,bb,ccc,,dddd', ','),
+ ('', ','),
+ (',', ','),
+ ('a,bb,ccc,dddd', ';'),
+ ]:
+ with self.subTest(iterable=iterable, separator=separator):
+ it = iter(iterable)
+ pred = lambda x: x == separator
+ actual = [''.join(x) for x in mi.split_at(it, pred)]
+ expected = iterable.split(separator)
+ self.assertEqual(actual, expected)
+
+ def test_maxsplit(self):
+ iterable = 'a,bb,ccc,dddd'
+ separator = ','
+ pred = lambda x: x == separator
+
+ for maxsplit in range(-1, 4):
+ with self.subTest(maxsplit=maxsplit):
+ it = iter(iterable)
+ result = mi.split_at(it, pred, maxsplit=maxsplit)
+ actual = [''.join(x) for x in result]
+ expected = iterable.split(separator, maxsplit)
+ self.assertEqual(actual, expected)
+
+ def test_keep_separator(self):
+ separator = ','
+ pred = lambda x: x == separator
+
+ for iterable, expected in [
+ ('a,bb,ccc', ['a', ',', 'bb', ',', 'ccc']),
+ (',a,bb,ccc', ['', ',', 'a', ',', 'bb', ',', 'ccc']),
+ ('a,bb,ccc,', ['a', ',', 'bb', ',', 'ccc', ',', '']),
+ ]:
+ with self.subTest(iterable=iterable):
+ it = iter(iterable)
+ result = mi.split_at(it, pred, keep_separator=True)
+ actual = [''.join(x) for x in result]
+ self.assertEqual(actual, expected)
+
+ def test_combination(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ pred = lambda x: x % 3 == 0
+ actual = list(
+ mi.split_at(iterable, pred, maxsplit=2, keep_separator=True)
+ )
+ expected = [[1, 2], [3], [4, 5], [6], [7, 8, 9, 10]]
+ self.assertEqual(actual, expected)
+
+
+class SplitBeforeTest(TestCase):
+ """Tests for ``split_before()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_before('xooxoo', lambda c: c == 'x'))
+ expected = [['x', 'o', 'o'], ['x', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_before('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o'], ['x', 'o', 'o'], ['x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_before('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_empty_collection(self):
+ actual = list(mi.split_before([], lambda c: bool(c)))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_max_split(self):
+ for args, expected in [
+ (
+ ('a,b,c,d', lambda c: c == ',', -1),
+ [['a'], [',', 'b'], [',', 'c'], [',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 0),
+ [['a', ',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 1),
+ [['a'], [',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 2),
+ [['a'], [',', 'b'], [',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 10),
+ [['a'], [',', 'b'], [',', 'c'], [',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == '@', 2),
+ [['a', ',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c != ',', 2),
+ [['a', ','], ['b', ','], ['c', ',', 'd']],
+ ),
+ ]:
+ actual = list(mi.split_before(*args))
+ self.assertEqual(actual, expected)
+
+
+class SplitAfterTest(TestCase):
+ """Tests for ``split_after()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_after('xooxoo', lambda c: c == 'x'))
+ expected = [['x'], ['o', 'o', 'x'], ['o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_after('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o', 'x'], ['o', 'o', 'x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_after('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_max_split(self):
+ for args, expected in [
+ (
+ ('a,b,c,d', lambda c: c == ',', -1),
+ [['a', ','], ['b', ','], ['c', ','], ['d']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 0),
+ [['a', ',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 1),
+ [['a', ','], ['b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 2),
+ [['a', ','], ['b', ','], ['c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == ',', 10),
+ [['a', ','], ['b', ','], ['c', ','], ['d']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c == '@', 2),
+ [['a', ',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda c: c != ',', 2),
+ [['a'], [',', 'b'], [',', 'c', ',', 'd']],
+ ),
+ ]:
+ actual = list(mi.split_after(*args))
+ self.assertEqual(actual, expected)
+
+
+class SplitWhenTests(TestCase):
+ """Tests for ``split_when()``"""
+
+ @staticmethod
+ def _split_when_before(iterable, pred):
+ return mi.split_when(iterable, lambda _, c: pred(c))
+
+ @staticmethod
+ def _split_when_after(iterable, pred):
+ return mi.split_when(iterable, lambda c, _: pred(c))
+
+ # split_before emulation
+ def test_before_emulation_starts_with_sep(self):
+ actual = list(self._split_when_before('xooxoo', lambda c: c == 'x'))
+ expected = [['x', 'o', 'o'], ['x', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_before_emulation_ends_with_sep(self):
+ actual = list(self._split_when_before('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o'], ['x', 'o', 'o'], ['x']]
+ self.assertEqual(actual, expected)
+
+ def test_before_emulation_no_sep(self):
+ actual = list(self._split_when_before('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ # split_after emulation
+ def test_after_emulation_starts_with_sep(self):
+ actual = list(self._split_when_after('xooxoo', lambda c: c == 'x'))
+ expected = [['x'], ['o', 'o', 'x'], ['o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_after_emulation_ends_with_sep(self):
+ actual = list(self._split_when_after('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o', 'x'], ['o', 'o', 'x']]
+ self.assertEqual(actual, expected)
+
+ def test_after_emulation_no_sep(self):
+ actual = list(self._split_when_after('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ # edge cases
+ def test_empty_iterable(self):
+ actual = list(mi.split_when('', lambda a, b: a != b))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_one_element(self):
+ actual = list(mi.split_when('o', lambda a, b: a == b))
+ expected = [['o']]
+ self.assertEqual(actual, expected)
+
+ def test_one_element_is_second_item(self):
+ actual = list(self._split_when_before('x', lambda c: c == 'x'))
+ expected = [['x']]
+ self.assertEqual(actual, expected)
+
+ def test_one_element_is_first_item(self):
+ actual = list(self._split_when_after('x', lambda c: c == 'x'))
+ expected = [['x']]
+ self.assertEqual(actual, expected)
+
+ def test_max_split(self):
+ for args, expected in [
+ (
+ ('a,b,c,d', lambda a, _: a == ',', -1),
+ [['a', ','], ['b', ','], ['c', ','], ['d']],
+ ),
+ (
+ ('a,b,c,d', lambda a, _: a == ',', 0),
+ [['a', ',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda _, b: b == ',', 1),
+ [['a'], [',', 'b', ',', 'c', ',', 'd']],
+ ),
+ (
+ ('a,b,c,d', lambda a, _: a == ',', 2),
+ [['a', ','], ['b', ','], ['c', ',', 'd']],
+ ),
+ (
+ ('0124376', lambda a, b: a > b, -1),
+ [['0', '1', '2', '4'], ['3', '7'], ['6']],
+ ),
+ (
+ ('0124376', lambda a, b: a > b, 0),
+ [['0', '1', '2', '4', '3', '7', '6']],
+ ),
+ (
+ ('0124376', lambda a, b: a > b, 1),
+ [['0', '1', '2', '4'], ['3', '7', '6']],
+ ),
+ (
+ ('0124376', lambda a, b: a > b, 2),
+ [['0', '1', '2', '4'], ['3', '7'], ['6']],
+ ),
+ ]:
+ actual = list(mi.split_when(*args))
+ self.assertEqual(actual, expected, str(args))
+
+
+class SplitIntoTests(TestCase):
+ """Tests for ``split_into()``"""
+
+ def test_iterable_just_right(self):
+ """Size of ``iterable`` equals the sum of ``sizes``."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, 4]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_too_small(self):
+        """Size of ``iterable`` is smaller than the sum of ``sizes``. The last
+        returned list is shorter as a result."""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ sizes = [2, 3, 4]
+ expected = [[1, 2], [3, 4, 5], [6, 7]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_too_small_extra(self):
+        """Size of ``iterable`` is smaller than the sum of ``sizes``. The
+        second-to-last returned list is shorter and the last one is empty."""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ sizes = [2, 3, 4, 5]
+ expected = [[1, 2], [3, 4, 5], [6, 7], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_too_large(self):
+ """Size of ``iterable`` is larger than sum of ``sizes``. Not all
+ items of iterable are returned."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, 2]
+ expected = [[1, 2], [3, 4, 5], [6, 7]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_using_none_with_leftover(self):
+ """Last item of ``sizes`` is None when items still remain in
+ ``iterable``. Last list returned stretches to fit all remaining items
+ of ``iterable``."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, None]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_using_none_without_leftover(self):
+ """Last item of ``sizes`` is None when no items remain in
+ ``iterable``. Last list returned is empty."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, 4, None]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_using_none_mid_sizes(self):
+ """None is present in ``sizes`` but is not the last item. Last list
+ returned stretches to fit all remaining items of ``iterable`` but
+ all items in ``sizes`` after None are ignored."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [2, 3, None, 4]
+ expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_empty(self):
+ """``iterable`` argument is empty but ``sizes`` is not. An empty
+ list is returned for each item in ``sizes``."""
+ iterable = []
+ sizes = [2, 4, 2]
+ expected = [[], [], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_iterable_empty_using_none(self):
+ """``iterable`` argument is empty but ``sizes`` is not. An empty
+ list is returned for each item in ``sizes`` that is not after a
+ None item."""
+ iterable = []
+ sizes = [2, 4, None, 2]
+ expected = [[], [], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_sizes_empty(self):
+ """``sizes`` argument is empty but ``iterable`` is not. An empty
+ generator is returned."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = []
+ expected = []
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_both_empty(self):
+ """Both ``sizes`` and ``iterable`` arguments are empty. An empty
+ generator is returned."""
+ iterable = []
+ sizes = []
+ expected = []
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_bool_in_sizes(self):
+        """A bool object present in ``sizes`` is treated as 1 or 0 for
+        ``True`` or ``False`` because bool is a subclass of int."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [3, True, 2, False]
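+        # bool is a subclass of int, so this is equivalent to [3, 1, 2, 0]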
+ expected = [[1, 2, 3], [4], [5, 6], []]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_invalid_in_sizes(self):
+        """A ValueError is raised if an object in ``sizes`` is neither ``None``
+        nor an integer."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [1, [], 3]
+ with self.assertRaises(ValueError):
+ list(mi.split_into(iterable, sizes))
+
+ def test_invalid_in_sizes_after_none(self):
+        """An invalid item in ``sizes`` does not raise an error if it
+        comes after a ``None`` item."""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = [3, 4, None, []]
+ expected = [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ def test_generator_iterable_integrity(self):
+        """Check that if ``iterable`` is an iterator, only as many items as the
+        sum of ``sizes`` are consumed from it."""
+ iterable = (i for i in range(10))
+ sizes = [2, 3]
+
+ expected = [[0, 1], [2, 3, 4]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ iterable_expected = [5, 6, 7, 8, 9]
+ iterable_actual = list(iterable)
+ self.assertEqual(iterable_actual, iterable_expected)
+
+ def test_generator_sizes_integrity(self):
+ """Check that if ``sizes`` is an iterator, it is consumed only until a
+ ``None`` item is reached"""
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ sizes = (i for i in [1, 2, None, 3, 4])
+
+ expected = [[1], [2, 3], [4, 5, 6, 7, 8, 9]]
+ actual = list(mi.split_into(iterable, sizes))
+ self.assertEqual(actual, expected)
+
+ sizes_expected = [3, 4]
+ sizes_actual = list(sizes)
+ self.assertEqual(sizes_actual, sizes_expected)
+
+
+class PaddedTest(TestCase):
+ """Tests for ``padded()``"""
+
+ def test_no_n(self):
+ seq = [1, 2, 3]
+
+ # No fillvalue
+ self.assertEqual(mi.take(5, mi.padded(seq)), [1, 2, 3, None, None])
+
+ # With fillvalue
+ self.assertEqual(
+ mi.take(5, mi.padded(seq, fillvalue='')), [1, 2, 3, '', '']
+ )
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=-1)))
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=0)))
+
+ def test_valid_n(self):
+ seq = [1, 2, 3, 4, 5]
+
+        # No need for padding: len(seq) >= n
+ self.assertEqual(list(mi.padded(seq, n=4)), [1, 2, 3, 4, 5])
+ self.assertEqual(list(mi.padded(seq, n=5)), [1, 2, 3, 4, 5])
+
+ # No fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, n=7)), [1, 2, 3, 4, 5, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=7)), [1, 2, 3, 4, 5, '', '']
+ )
+
+ def test_next_multiple(self):
+ seq = [1, 2, 3, 4, 5, 6]
+
+ # No need for padding: len(seq) % n == 0
+ self.assertEqual(
+ list(mi.padded(seq, n=3, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) < n
+ self.assertEqual(
+ list(mi.padded(seq, n=8, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None],
+ )
+
+ # No padding needed: len(seq) == n
+ self.assertEqual(
+ list(mi.padded(seq, n=6, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) > n
+ self.assertEqual(
+ list(mi.padded(seq, n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None],
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, '', ''],
+ )
+
+
+class RepeatEachTests(TestCase):
+ """Tests for repeat_each()"""
+
+ def test_default(self):
+ actual = list(mi.repeat_each('ABC'))
+ expected = ['A', 'A', 'B', 'B', 'C', 'C']
+ self.assertEqual(actual, expected)
+
+ def test_basic(self):
+ actual = list(mi.repeat_each('ABC', 3))
+ expected = ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C']
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ actual = list(mi.repeat_each(''))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_no_repeat(self):
+ actual = list(mi.repeat_each('ABC', 0))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_negative_repeat(self):
+ actual = list(mi.repeat_each('ABC', -1))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_infinite_input(self):
+ repeater = mi.repeat_each(cycle('AB'))
+ actual = mi.take(6, repeater)
+ expected = ['A', 'A', 'B', 'B', 'A', 'A']
+ self.assertEqual(actual, expected)
+
+
+class RepeatLastTests(TestCase):
+ def test_empty_iterable(self):
+ slice_length = 3
+ iterable = iter([])
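+        # with no default given, an empty iterable repeats None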
+ actual = mi.take(slice_length, mi.repeat_last(iterable))
+ expected = [None] * slice_length
+ self.assertEqual(actual, expected)
+
+ def test_default_value(self):
+ slice_length = 3
+ iterable = iter([])
+ default = '3'
+ actual = mi.take(slice_length, mi.repeat_last(iterable, default))
+ expected = ['3'] * slice_length
+ self.assertEqual(actual, expected)
+
+ def test_basic(self):
+ slice_length = 10
+ iterable = (str(x) for x in range(5))
+ actual = mi.take(slice_length, mi.repeat_last(iterable))
+ expected = ['0', '1', '2', '3', '4', '4', '4', '4', '4', '4']
+ self.assertEqual(actual, expected)
+
+
+class DistributeTest(TestCase):
+ """Tests for distribute()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.distribute(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.distribute(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 3, 5, 7, 9], [2, 4, 6, 8, 10]]),
+ (3, [[1, 4, 7, 10], [2, 5, 8], [3, 6, 9]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.distribute(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.distribute(6, iterable)],
+ [[1], [2], [3], [4], [], []],
+ )
+
+
+class StaggerTest(TestCase):
+ """Tests for ``stagger()``"""
+
+ def test_default(self):
+ iterable = [0, 1, 2, 3]
+ actual = list(mi.stagger(iterable))
+ expected = [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ self.assertEqual(actual, expected)
+
+ def test_offsets(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ ((-2, 0, 2), [('', 0, 2), ('', 1, 3)]),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3)]),
+ ((1, 2), [(1, 2), (2, 3)]),
+ ]:
+ all_groups = mi.stagger(iterable, offsets=offsets, fillvalue='')
+ self.assertEqual(list(all_groups), expected)
+
+ def test_longest(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ (
+ (-1, 0, 1),
+ [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, ''), (3, '', '')],
+ ),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3), (3, '')]),
+ ((1, 2), [(1, 2), (2, 3), (3, '')]),
+ ]:
+ all_groups = mi.stagger(
+ iterable, offsets=offsets, fillvalue='', longest=True
+ )
+ self.assertEqual(list(all_groups), expected)
+
+
+class ZipEqualTest(TestCase):
+ @skipIf(version_info[:2] < (3, 10), 'zip_equal deprecated for 3.10+')
+ def test_deprecation(self):
+ with warnings.catch_warnings(record=True) as caught:
+ warnings.simplefilter('always')
+ self.assertEqual(
+ list(mi.zip_equal([1, 2], [3, 4])), [(1, 3), (2, 4)]
+ )
+
+ (warning,) = caught
+ assert warning.category == DeprecationWarning
+
+ def test_equal(self):
+ lists = [0, 1, 2], [2, 3, 4]
+
+ for iterables in [lists, map(iter, lists)]:
+ actual = list(mi.zip_equal(*iterables))
+ expected = [(0, 2), (1, 3), (2, 4)]
+ self.assertEqual(actual, expected)
+
+ def test_unequal_lists(self):
+ two_items = [0, 1]
+ three_items = [2, 3, 4]
+ four_items = [5, 6, 7, 8]
+
+ # the mismatch is at index 1
+ try:
+ list(mi.zip_equal(two_items, three_items, four_items))
+ except mi.UnequalIterablesError as e:
+ self.assertEqual(
+ e.args[0],
+ (
+ 'Iterables have different lengths: '
+ 'index 0 has length 2; index 1 has length 3'
+ ),
+ )
+
+ # the mismatch is at index 2
+ try:
+ list(mi.zip_equal(two_items, two_items, four_items, four_items))
+ except mi.UnequalIterablesError as e:
+ self.assertEqual(
+ e.args[0],
+ (
+ 'Iterables have different lengths: '
+ 'index 0 has length 2; index 2 has length 4'
+ ),
+ )
+
+ # One without length: delegate to _zip_equal_generator
+ try:
+ list(mi.zip_equal(two_items, iter(two_items), three_items))
+ except mi.UnequalIterablesError as e:
+ self.assertEqual(e.args[0], 'Iterables have different lengths')
+
+
+class ZipOffsetTest(TestCase):
+ """Tests for ``zip_offset()``"""
+
+ def test_shortest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), fillvalue='')
+ )
+ expected = [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_longest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), longest=True)
+ )
+ expected = [
+ (None, 0, 1),
+ (0, 1, 2),
+ (1, 2, 3),
+ (2, 3, 4),
+ (3, 4, 5),
+ (None, 5, 6),
+ (None, None, 7),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_mismatch(self):
+ iterables = [0, 1, 2], [2, 3, 4]
+ offsets = (-1, 0, 1)
+ self.assertRaises(
+ ValueError,
+ lambda: list(mi.zip_offset(*iterables, offsets=offsets)),
+ )
+
+
+class UnzipTests(TestCase):
+ """Tests for unzip()"""
+
+ def test_empty_iterable(self):
+ self.assertEqual(list(mi.unzip([])), [])
+ # in reality zip([], [], []) is equivalent to iter([])
+ # but it doesn't hurt to test both
+ self.assertEqual(list(mi.unzip(zip([], [], []))), [])
+
+ def test_length_one_iterable(self):
+ xs, ys, zs = mi.unzip(zip([1], [2], [3]))
+ self.assertEqual(list(xs), [1])
+ self.assertEqual(list(ys), [2])
+ self.assertEqual(list(zs), [3])
+
+ def test_normal_case(self):
+ xs, ys, zs = range(10), range(1, 11), range(2, 12)
+ zipped = zip(xs, ys, zs)
+ xs, ys, zs = mi.unzip(zipped)
+ self.assertEqual(list(xs), list(range(10)))
+ self.assertEqual(list(ys), list(range(1, 11)))
+ self.assertEqual(list(zs), list(range(2, 12)))
+
+ def test_improperly_zipped(self):
+ zipped = iter([(1, 2, 3), (4, 5), (6,)])
+ xs, ys, zs = mi.unzip(zipped)
+ self.assertEqual(list(xs), [1, 4, 6])
+ self.assertEqual(list(ys), [2, 5])
+ self.assertEqual(list(zs), [3])
+
+ def test_increasingly_zipped(self):
+ zipped = iter([(1, 2), (3, 4, 5), (6, 7, 8, 9)])
+ unzipped = mi.unzip(zipped)
+ # from the docstring:
+ # len(first tuple) is the number of iterables zipped
+ self.assertEqual(len(unzipped), 2)
+ xs, ys = unzipped
+ self.assertEqual(list(xs), [1, 3, 6])
+ self.assertEqual(list(ys), [2, 4, 7])
+
+
+class SortTogetherTest(TestCase):
+ """Tests for sort_together()"""
+
+ def test_key_list(self):
+        """tests `key_list`, including the default; iterables include duplicates"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20],
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100),
+ ],
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (100, 20, 70, 20, 97, 100),
+ ],
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (20, 100, 70, 20, 97, 100),
+ ],
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(2,)),
+ [
+ ('GA', 'CT', 'CT', 'GA', 'GA', 'CT'),
+ ('Aug.', 'July', 'June', 'May', 'May', 'July'),
+ (20, 20, 70, 97, 100, 100),
+ ],
+ )
+
+ def test_invalid_key_list(self):
+ """tests `key_list` for indexes not available in `iterables`"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20],
+ ]
+
+ self.assertRaises(
+ IndexError, lambda: mi.sort_together(iterables, key_list=(5,))
+ )
+
+ def test_key_function(self):
+ """tests `key` function, including interaction with `key_list`"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20],
+ ]
+ self.assertEqual(
+ mi.sort_together(iterables, key=lambda x: x),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100),
+ ],
+ )
+ self.assertEqual(
+ mi.sort_together(iterables, key=lambda x: x[::-1]),
+ [
+ ('GA', 'GA', 'GA', 'CT', 'CT', 'CT'),
+ ('May', 'Aug.', 'May', 'June', 'July', 'July'),
+ (97, 20, 100, 70, 100, 20),
+ ],
+ )
+ self.assertEqual(
+ mi.sort_together(
+ iterables,
+ key_list=(0, 2),
+ key=lambda state, number: number
+ if state == 'CT'
+ else 2 * number,
+ ),
+ [
+ ('CT', 'GA', 'CT', 'CT', 'GA', 'GA'),
+ ('July', 'Aug.', 'June', 'July', 'May', 'May'),
+ (20, 20, 70, 100, 97, 100),
+ ],
+ )
+
+ def test_reverse(self):
+ """tests `reverse` to ensure a reverse sort for `key_list` iterables"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20],
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2), reverse=True),
+ [
+ ('GA', 'GA', 'GA', 'CT', 'CT', 'CT'),
+ ('May', 'May', 'Aug.', 'June', 'July', 'July'),
+ (100, 97, 20, 70, 100, 20),
+ ],
+ )
+
+ def test_uneven_iterables(self):
+ """tests trimming of iterables to the shortest length before sorting"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT', 'MA'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20, 0],
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100),
+ ],
+ )
+
+
+class DivideTest(TestCase):
+ """Tests for divide()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.divide(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.divide(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]),
+ (3, [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.divide(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ self.assertEqual(
+ [list(x) for x in mi.divide(6, iter(range(1, 4 + 1)))],
+ [[1], [2], [3], [4], [], []],
+ )
+
+
+class TestAlwaysIterable(TestCase):
+ """Tests for always_iterable()"""
+
+ def test_single(self):
+ self.assertEqual(list(mi.always_iterable(1)), [1])
+
+ def test_strings(self):
+ for obj in ['foo', b'bar', 'baz']:
+ actual = list(mi.always_iterable(obj))
+ expected = [obj]
+ self.assertEqual(actual, expected)
+
+ def test_base_type(self):
+ dict_obj = {'a': 1, 'b': 2}
+ str_obj = '123'
+
+ # Default: dicts are iterable like they normally are
+ default_actual = list(mi.always_iterable(dict_obj))
+ default_expected = list(dict_obj)
+ self.assertEqual(default_actual, default_expected)
+
+        # With base_type=dict, dicts are treated as single items
+ custom_actual = list(mi.always_iterable(dict_obj, base_type=dict))
+ custom_expected = [dict_obj]
+ self.assertEqual(custom_actual, custom_expected)
+
+        # With base_type=None, strings are iterable again
+ str_actual = list(mi.always_iterable(str_obj, base_type=None))
+ str_expected = list(str_obj)
+ self.assertEqual(str_actual, str_expected)
+
+ # base_type handles nested tuple (via isinstance).
+ base_type = ((dict,),)
+ custom_actual = list(mi.always_iterable(dict_obj, base_type=base_type))
+ custom_expected = [dict_obj]
+ self.assertEqual(custom_actual, custom_expected)
+
+ def test_iterables(self):
+ self.assertEqual(list(mi.always_iterable([0, 1])), [0, 1])
+ self.assertEqual(
+ list(mi.always_iterable([0, 1], base_type=list)), [[0, 1]]
+ )
+ self.assertEqual(
+ list(mi.always_iterable(iter('foo'))), ['f', 'o', 'o']
+ )
+ self.assertEqual(list(mi.always_iterable([])), [])
+
+ def test_none(self):
+ self.assertEqual(list(mi.always_iterable(None)), [])
+
+ def test_generator(self):
+ def _gen():
+ yield 0
+ yield 1
+
+ self.assertEqual(list(mi.always_iterable(_gen())), [0, 1])
+
+
+class AdjacentTests(TestCase):
+ def test_typical(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10)))
+ expected = [
+ (True, 0),
+ (True, 1),
+ (False, 2),
+ (False, 3),
+ (True, 4),
+ (True, 5),
+ (True, 6),
+ (False, 7),
+ (False, 8),
+ (False, 9),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_empty_iterable(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, []))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_length_one(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [0]))
+ expected = [(True, 0)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [1]))
+ expected = [(False, 1)]
+ self.assertEqual(actual, expected)
+
+ def test_consecutive_true(self):
+ """Test that when the predicate matches multiple consecutive elements
+ it doesn't repeat elements in the output"""
+ actual = list(mi.adjacent(lambda x: x % 5 < 2, range(10)))
+ expected = [
+ (True, 0),
+ (True, 1),
+ (True, 2),
+ (False, 3),
+ (True, 4),
+ (True, 5),
+ (True, 6),
+ (True, 7),
+ (False, 8),
+ (False, 9),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_distance(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=2))
+ expected = [
+ (True, 0),
+ (True, 1),
+ (True, 2),
+ (True, 3),
+ (True, 4),
+ (True, 5),
+ (True, 6),
+ (True, 7),
+ (False, 8),
+ (False, 9),
+ ]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=3))
+ expected = [
+ (True, 0),
+ (True, 1),
+ (True, 2),
+ (True, 3),
+ (True, 4),
+ (True, 5),
+ (True, 6),
+ (True, 7),
+ (True, 8),
+ (False, 9),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_large_distance(self):
+ """Test distance larger than the length of the iterable"""
+ iterable = range(10)
+ actual = list(mi.adjacent(lambda x: x % 5 == 4, iterable, distance=20))
+ expected = list(zip(repeat(True), iterable))
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: False, iterable, distance=20))
+ expected = list(zip(repeat(False), iterable))
+ self.assertEqual(actual, expected)
+
+ def test_zero_distance(self):
+ """Test that adjacent() reduces to zip+map when distance is 0"""
+ iterable = range(1000)
+ predicate = lambda x: x % 4 == 2
+ actual = mi.adjacent(predicate, iterable, 0)
+ expected = zip(map(predicate, iterable), iterable)
+ self.assertTrue(all(a == e for a, e in zip(actual, expected)))
+
+ def test_negative_distance(self):
+ """Test that adjacent() raises an error with negative distance"""
+ pred = lambda x: x
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(1000), -1)
+ )
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(10), -10)
+ )
+
+ def test_grouping(self):
+ """Test interaction of adjacent() with groupby_transform()"""
+ iterable = mi.adjacent(lambda x: x % 5 == 0, range(10))
+ grouper = mi.groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [
+ (True, [0, 1]),
+ (False, [2, 3]),
+ (True, [4, 5, 6]),
+ (False, [7, 8, 9]),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_call_once(self):
+ """Test that the predicate is only called once per item."""
+ already_seen = set()
+ iterable = range(10)
+
+ def predicate(item):
+ self.assertNotIn(item, already_seen)
+ already_seen.add(item)
+ return True
+
+ actual = list(mi.adjacent(predicate, iterable))
+ expected = [(True, x) for x in iterable]
+ self.assertEqual(actual, expected)
+
+
+class GroupByTransformTests(TestCase):
+ def assertAllGroupsEqual(self, groupby1, groupby2):
+ for a, b in zip(groupby1, groupby2):
+ key1, group1 = a
+ key2, group2 = b
+ self.assertEqual(key1, key2)
+ self.assertListEqual(list(group1), list(group2))
+ self.assertRaises(StopIteration, lambda: next(groupby1))
+ self.assertRaises(StopIteration, lambda: next(groupby2))
+
+ def test_default_funcs(self):
+ iterable = [(x // 5, x) for x in range(1000)]
+ actual = mi.groupby_transform(iterable)
+ expected = groupby(iterable)
+ self.assertAllGroupsEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = [(int(x / 5), int(x / 3), x) for x in range(10)]
+
+ # Test the standard usage of grouping one iterable using another's keys
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(0), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2, 3, 4]), (1, [5, 6, 7, 8, 9])]
+ self.assertEqual(actual, expected)
+
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(1), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2]), (1, [3, 4, 5]), (2, [6, 7, 8]), (3, [9])]
+ self.assertEqual(actual, expected)
+
+ # and now for something a little different
+ d = dict(zip(range(10), 'abcdefghij'))
+ grouper = mi.groupby_transform(
+ range(10), keyfunc=lambda x: x // 5, valuefunc=d.get
+ )
+ actual = [(k, ''.join(g)) for k, g in grouper]
+ expected = [(0, 'abcde'), (1, 'fghij')]
+ self.assertEqual(actual, expected)
+
+ def test_no_valuefunc(self):
+ iterable = range(1000)
+
+ def key(x):
+ return x // 5
+
+ actual = mi.groupby_transform(iterable, key, valuefunc=None)
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+ actual = mi.groupby_transform(iterable, key) # default valuefunc
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+ def test_reducefunc(self):
+ iterable = range(50)
+ keyfunc = lambda k: 10 * (k // 10)
+ valuefunc = lambda v: v + 1
+ reducefunc = sum
+ actual = list(
+ mi.groupby_transform(
+ iterable,
+ keyfunc=keyfunc,
+ valuefunc=valuefunc,
+ reducefunc=reducefunc,
+ )
+ )
+ expected = [(0, 55), (10, 155), (20, 255), (30, 355), (40, 455)]
+ self.assertEqual(actual, expected)
+
+
+class NumericRangeTests(TestCase):
+ def test_basic(self):
+ for args, expected in [
+ ((4,), [0, 1, 2, 3]),
+ ((4.0,), [0.0, 1.0, 2.0, 3.0]),
+ ((1.0, 4), [1.0, 2.0, 3.0]),
+ ((1, 4.0), [1.0, 2.0, 3.0]),
+ ((1.0, 5), [1.0, 2.0, 3.0, 4.0]),
+ ((0, 20, 5), [0, 5, 10, 15]),
+ ((0, 20, 5.0), [0.0, 5.0, 10.0, 15.0]),
+ ((0, 10, 3), [0, 3, 6, 9]),
+ ((0, 10, 3.0), [0.0, 3.0, 6.0, 9.0]),
+ ((0, -5, -1), [0, -1, -2, -3, -4]),
+ ((0.0, -5, -1), [0.0, -1.0, -2.0, -3.0, -4.0]),
+ ((1, 2, Fraction(1, 2)), [Fraction(1, 1), Fraction(3, 2)]),
+ ((0,), []),
+ ((0.0,), []),
+ ((1, 0), []),
+ ((1.0, 0.0), []),
+            ((0.1, 0.30000000000000001, 0.2), [0.1]),  # IEEE 754!
+ (
+ (
+ Decimal("0.1"),
+ Decimal("0.30000000000000001"),
+ Decimal("0.2"),
+ ),
+ [Decimal("0.1"), Decimal("0.3")],
+ ), # okay with Decimal
+ (
+ (
+ Fraction(1, 10),
+ Fraction(30000000000000001, 100000000000000000),
+ Fraction(2, 10),
+ ),
+ [Fraction(1, 10), Fraction(3, 10)],
+ ), # okay with Fraction
+ ((Fraction(2, 1),), [Fraction(0, 1), Fraction(1, 1)]),
+ ((Decimal('2.0'),), [Decimal('0.0'), Decimal('1.0')]),
+ (
+ (
+ datetime(2019, 3, 29, 12, 34, 56),
+ datetime(2019, 3, 29, 12, 37, 55),
+ timedelta(minutes=1),
+ ),
+ [
+ datetime(2019, 3, 29, 12, 34, 56),
+ datetime(2019, 3, 29, 12, 35, 56),
+ datetime(2019, 3, 29, 12, 36, 56),
+ ],
+ ),
+ ]:
+ actual = list(mi.numeric_range(*args))
+ self.assertEqual(expected, actual)
+ self.assertTrue(
+ all(type(a) == type(e) for a, e in zip(actual, expected))
+ )
+
+ def test_arg_count(self):
+ for args, message in [
+ ((), 'numeric_range expected at least 1 argument, got 0'),
+ (
+ (0, 1, 2, 3),
+ 'numeric_range expected at most 3 arguments, got 4',
+ ),
+ ]:
+ with self.assertRaisesRegex(TypeError, message):
+ mi.numeric_range(*args)
+
+ def test_zero_step(self):
+ for args in [
+ (1, 2, 0),
+ (
+ datetime(2019, 3, 29, 12, 34, 56),
+ datetime(2019, 3, 29, 12, 37, 55),
+ timedelta(minutes=0),
+ ),
+ (1.0, 2.0, 0.0),
+ (Decimal("1.0"), Decimal("2.0"), Decimal("0.0")),
+ (Fraction(2, 2), Fraction(4, 2), Fraction(0, 2)),
+ ]:
+ with self.assertRaises(ValueError):
+ list(mi.numeric_range(*args))
+
+ def test_bool(self):
+ for args, expected in [
+ ((1.0, 3.0, 1.5), True),
+ ((1.0, 2.0, 1.5), True),
+ ((1.0, 1.0, 1.5), False),
+ ((1.0, 0.0, 1.5), False),
+ ((3.0, 1.0, -1.5), True),
+ ((2.0, 1.0, -1.5), True),
+ ((1.0, 1.0, -1.5), False),
+ ((0.0, 1.0, -1.5), False),
+ ((Decimal("1.0"), Decimal("2.0"), Decimal("1.5")), True),
+ ((Decimal("1.0"), Decimal("0.0"), Decimal("1.5")), False),
+ ((Fraction(2, 2), Fraction(4, 2), Fraction(3, 2)), True),
+ ((Fraction(2, 2), Fraction(0, 2), Fraction(3, 2)), False),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=1),
+ ),
+ True,
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 28),
+ timedelta(hours=1),
+ ),
+ False,
+ ),
+ ]:
+ self.assertEqual(expected, bool(mi.numeric_range(*args)))
+
+ def test_contains(self):
+ for args, expected_in, expected_not_in in [
+ ((10,), range(10), (0.5,)),
+ ((1.0, 9.9, 1.5), (1.0, 2.5, 4.0, 5.5, 7.0, 8.5), (0.9,)),
+ ((9.0, 1.0, -1.5), (1.5, 3.0, 4.5, 6.0, 7.5, 9.0), (0.0, 0.9)),
+ (
+ (Decimal("1.0"), Decimal("9.9"), Decimal("1.5")),
+ (
+ Decimal("1.0"),
+ Decimal("2.5"),
+ Decimal("4.0"),
+ Decimal("5.5"),
+ Decimal("7.0"),
+ Decimal("8.5"),
+ ),
+ (Decimal("0.9"),),
+ ),
+ (
+ (Fraction(0, 1), Fraction(5, 1), Fraction(1, 2)),
+ (Fraction(0, 1), Fraction(1, 2), Fraction(9, 2)),
+ (Fraction(10, 2),),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=1),
+ ),
+ (datetime(2019, 3, 29, 15),),
+ (datetime(2019, 3, 29, 15, 30),),
+ ),
+ ]:
+ r = mi.numeric_range(*args)
+ for v in expected_in:
+ self.assertTrue(v in r)
+ self.assertFalse(v not in r)
+
+ for v in expected_not_in:
+ self.assertFalse(v in r)
+ self.assertTrue(v not in r)
+
+ def test_eq(self):
+ for args1, args2 in [
+ ((0, 5, 2), (0, 6, 2)),
+ ((1.0, 9.9, 1.5), (1.0, 8.6, 1.5)),
+ ((8.5, 0.0, -1.5), (8.5, 0.7, -1.5)),
+ ((7.0, 0.0, 1.0), (17.0, 7.0, 0.5)),
+ (
+ (Decimal("1.0"), Decimal("9.9"), Decimal("1.5")),
+ (Decimal("1.0"), Decimal("8.6"), Decimal("1.5")),
+ ),
+ (
+ (Fraction(1, 1), Fraction(10, 1), Fraction(3, 2)),
+ (Fraction(1, 1), Fraction(9, 1), Fraction(3, 2)),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30, 1),
+ timedelta(hours=10),
+ ),
+ ),
+ ]:
+ self.assertEqual(
+ mi.numeric_range(*args1), mi.numeric_range(*args2)
+ )
+
+ for args1, args2 in [
+ ((0, 5, 2), (0, 7, 2)),
+ ((1.0, 9.9, 1.5), (1.2, 9.9, 1.5)),
+ ((1.0, 9.9, 1.5), (1.0, 10.3, 1.5)),
+ ((1.0, 9.9, 1.5), (1.0, 9.9, 1.4)),
+ ((8.5, 0.0, -1.5), (8.4, 0.0, -1.5)),
+ ((8.5, 0.0, -1.5), (8.5, -0.7, -1.5)),
+ ((8.5, 0.0, -1.5), (8.5, 0.0, -1.4)),
+ ((0.0, 7.0, 1.0), (7.0, 0.0, 1.0)),
+ (
+ (Decimal("1.0"), Decimal("10.0"), Decimal("1.5")),
+ (Decimal("1.0"), Decimal("10.5"), Decimal("1.5")),
+ ),
+ (
+ (Fraction(1, 1), Fraction(10, 1), Fraction(3, 2)),
+ (Fraction(1, 1), Fraction(21, 2), Fraction(3, 2)),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30, 15),
+ timedelta(hours=10),
+ ),
+ ),
+ ]:
+ self.assertNotEqual(
+ mi.numeric_range(*args1), mi.numeric_range(*args2)
+ )
+
+ self.assertNotEqual(mi.numeric_range(7.0), 1)
+ self.assertNotEqual(mi.numeric_range(7.0), "abc")
+
+ def test_get_item_by_index(self):
+ for args, index, expected in [
+ ((1, 6), 2, 3),
+ ((1.0, 6.0, 1.5), 0, 1.0),
+ ((1.0, 6.0, 1.5), 1, 2.5),
+ ((1.0, 6.0, 1.5), 2, 4.0),
+ ((1.0, 6.0, 1.5), 3, 5.5),
+ ((1.0, 6.0, 1.5), -1, 5.5),
+ ((1.0, 6.0, 1.5), -2, 4.0),
+ (
+ (Decimal("1.0"), Decimal("9.0"), Decimal("1.5")),
+ -1,
+ Decimal("8.5"),
+ ),
+ (
+ (Fraction(1, 1), Fraction(10, 1), Fraction(3, 2)),
+ 2,
+ Fraction(4, 1),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ 1,
+ datetime(2019, 3, 29, 10),
+ ),
+ ]:
+ self.assertEqual(expected, mi.numeric_range(*args)[index])
+
+ for args, index in [
+ ((1.0, 6.0, 1.5), 4),
+ ((1.0, 6.0, 1.5), -5),
+ ((6.0, 1.0, 1.5), 0),
+ ((6.0, 1.0, 1.5), -1),
+ ((Decimal("1.0"), Decimal("9.0"), Decimal("-1.5")), -1),
+ ((Fraction(1, 1), Fraction(2, 1), Fraction(3, 2)), 2),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ 8,
+ ),
+ ]:
+ with self.assertRaises(IndexError):
+ mi.numeric_range(*args)[index]
+
+ def test_get_item_by_slice(self):
+ for args, sl, expected_args in [
+ ((1.0, 9.0, 1.5), slice(None, None, None), (1.0, 9.0, 1.5)),
+ ((1.0, 9.0, 1.5), slice(None, 1, None), (1.0, 2.5, 1.5)),
+ ((1.0, 9.0, 1.5), slice(None, None, 2), (1.0, 9.0, 3.0)),
+ ((1.0, 9.0, 1.5), slice(None, 2, None), (1.0, 4.0, 1.5)),
+ ((1.0, 9.0, 1.5), slice(1, 2, None), (2.5, 4.0, 1.5)),
+ ((1.0, 9.0, 1.5), slice(1, -1, None), (2.5, 8.5, 1.5)),
+ ((1.0, 9.0, 1.5), slice(10, None, 3), (9.0, 9.0, 4.5)),
+ ((1.0, 9.0, 1.5), slice(-10, None, 3), (1.0, 9.0, 4.5)),
+ ((1.0, 9.0, 1.5), slice(None, -10, 3), (1.0, 1.0, 4.5)),
+ ((1.0, 9.0, 1.5), slice(None, 10, 3), (1.0, 9.0, 4.5)),
+ (
+ (Decimal("1.0"), Decimal("9.0"), Decimal("1.5")),
+ slice(1, -1, None),
+ (Decimal("2.5"), Decimal("8.5"), Decimal("1.5")),
+ ),
+ (
+ (Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)),
+ slice(1, -1, None),
+ (Fraction(5, 2), Fraction(4, 1), Fraction(3, 2)),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ slice(1, -1, None),
+ (
+ datetime(2019, 3, 29, 10),
+ datetime(2019, 3, 29, 20),
+ timedelta(hours=10),
+ ),
+ ),
+ ]:
+ self.assertEqual(
+ mi.numeric_range(*expected_args), mi.numeric_range(*args)[sl]
+ )
+
+ def test_hash(self):
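+        # the expected hashes below reflect normalization to
+        # (start, last element, step); empty ranges hash like an empty range()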
+ for args, expected in [
+ ((1.0, 6.0, 1.5), hash((1.0, 5.5, 1.5))),
+ ((1.0, 7.0, 1.5), hash((1.0, 5.5, 1.5))),
+ ((1.0, 7.5, 1.5), hash((1.0, 7.0, 1.5))),
+ ((1.0, 1.5, 1.5), hash((1.0, 1.0, 1.5))),
+ ((1.5, 1.0, 1.5), hash(range(0, 0))),
+ ((1.5, 1.5, 1.5), hash(range(0, 0))),
+ (
+ (Decimal("1.0"), Decimal("9.0"), Decimal("1.5")),
+ hash((Decimal("1.0"), Decimal("8.5"), Decimal("1.5"))),
+ ),
+ (
+ (Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)),
+ hash((Fraction(1, 1), Fraction(4, 1), Fraction(3, 2))),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ hash(
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 29, 20),
+ timedelta(hours=10),
+ )
+ ),
+ ),
+ ]:
+ self.assertEqual(expected, hash(mi.numeric_range(*args)))
+
+ def test_iter_twice(self):
+ r1 = mi.numeric_range(1.0, 9.9, 1.5)
+ r2 = mi.numeric_range(8.5, 0.0, -1.5)
+ self.assertEqual([1.0, 2.5, 4.0, 5.5, 7.0, 8.5], list(r1))
+ self.assertEqual([1.0, 2.5, 4.0, 5.5, 7.0, 8.5], list(r1))
+ self.assertEqual([8.5, 7.0, 5.5, 4.0, 2.5, 1.0], list(r2))
+ self.assertEqual([8.5, 7.0, 5.5, 4.0, 2.5, 1.0], list(r2))
+
+ def test_len(self):
+ for args, expected in [
+ ((1.0, 7.0, 1.5), 4),
+ ((1.0, 7.01, 1.5), 5),
+ ((7.0, 1.0, -1.5), 4),
+ ((7.01, 1.0, -1.5), 5),
+            ((0.1, 0.30000000000000001, 0.2), 1),  # IEEE 754!
+ (
+ (
+ Decimal("0.1"),
+ Decimal("0.30000000000000001"),
+ Decimal("0.2"),
+ ),
+ 2,
+ ), # works with Decimal
+ ((Decimal("1.0"), Decimal("9.0"), Decimal("1.5")), 6),
+ ((Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)), 3),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ 3,
+ ),
+ ]:
+ self.assertEqual(expected, len(mi.numeric_range(*args)))
+
+ def test_repr(self):
+ for args, *expected in [
+ ((7.0,), "numeric_range(0.0, 7.0)"),
+ ((1.0, 7.0), "numeric_range(1.0, 7.0)"),
+ ((7.0, 1.0, -1.5), "numeric_range(7.0, 1.0, -1.5)"),
+ (
+ (Decimal("1.0"), Decimal("9.0"), Decimal("1.5")),
+ (
+ "numeric_range(Decimal('1.0'), Decimal('9.0'), "
+ "Decimal('1.5'))"
+ ),
+ ),
+ (
+ (Fraction(7, 7), Fraction(10, 2), Fraction(3, 2)),
+ (
+ "numeric_range(Fraction(1, 1), Fraction(5, 1), "
+ "Fraction(3, 2))"
+ ),
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ "numeric_range(datetime.datetime(2019, 3, 29, 0, 0), "
+ "datetime.datetime(2019, 3, 30, 0, 0), "
+ "datetime.timedelta(seconds=36000))",
+ "numeric_range(datetime.datetime(2019, 3, 29, 0, 0), "
+ "datetime.datetime(2019, 3, 30, 0, 0), "
+ "datetime.timedelta(0, 36000))",
+ ),
+ ]:
+ with self.subTest(args=args):
+ self.assertIn(repr(mi.numeric_range(*args)), expected)
+
+ def test_reversed(self):
+ for args, expected in [
+ ((7.0,), [6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0]),
+ ((1.0, 7.0), [6.0, 5.0, 4.0, 3.0, 2.0, 1.0]),
+ ((7.0, 1.0, -1.5), [2.5, 4.0, 5.5, 7.0]),
+ ((7.0, 0.9, -1.5), [1.0, 2.5, 4.0, 5.5, 7.0]),
+ (
+ (Decimal("1.0"), Decimal("5.0"), Decimal("1.5")),
+ [Decimal('4.0'), Decimal('2.5'), Decimal('1.0')],
+ ),
+ (
+ (Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)),
+ [Fraction(4, 1), Fraction(5, 2), Fraction(1, 1)],
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ [
+ datetime(2019, 3, 29, 20),
+ datetime(2019, 3, 29, 10),
+ datetime(2019, 3, 29),
+ ],
+ ),
+ ]:
+ self.assertEqual(expected, list(reversed(mi.numeric_range(*args))))
+
+ def test_count(self):
+ for args, v, c in [
+ ((7.0,), 0.0, 1),
+ ((7.0,), 0.5, 0),
+ ((7.0,), 6.0, 1),
+ ((7.0,), 7.0, 0),
+ ((7.0,), 10.0, 0),
+ (
+ (Decimal("1.0"), Decimal("5.0"), Decimal("1.5")),
+ Decimal('4.0'),
+ 1,
+ ),
+ (
+ (Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)),
+ Fraction(5, 2),
+ 1,
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ datetime(2019, 3, 29, 20),
+ 1,
+ ),
+ ]:
+ self.assertEqual(c, mi.numeric_range(*args).count(v))
+
+ def test_index(self):
+ for args, v, i in [
+ ((7.0,), 0.0, 0),
+ ((7.0,), 6.0, 6),
+ ((7.0, 0.0, -1.0), 7.0, 0),
+ ((7.0, 0.0, -1.0), 1.0, 6),
+ (
+ (Decimal("1.0"), Decimal("5.0"), Decimal("1.5")),
+ Decimal('4.0'),
+ 2,
+ ),
+ (
+ (Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)),
+ Fraction(5, 2),
+ 1,
+ ),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ datetime(2019, 3, 29, 20),
+ 2,
+ ),
+ ]:
+ self.assertEqual(i, mi.numeric_range(*args).index(v))
+
+ for args, v in [
+ ((0.7,), 0.5),
+ ((0.7,), 7.0),
+ ((0.7,), 10.0),
+ ((7.0, 0.0, -1.0), 0.5),
+ ((7.0, 0.0, -1.0), 0.0),
+ ((7.0, 0.0, -1.0), 10.0),
+ ((7.0, 0.0), 5.0),
+ ((Decimal("1.0"), Decimal("5.0"), Decimal("1.5")), Decimal('4.5')),
+ ((Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)), Fraction(5, 3)),
+ (
+ (
+ datetime(2019, 3, 29),
+ datetime(2019, 3, 30),
+ timedelta(hours=10),
+ ),
+ datetime(2019, 3, 30),
+ ),
+ ]:
+ with self.assertRaises(ValueError):
+ mi.numeric_range(*args).index(v)
+
+ def test_parent_classes(self):
+ r = mi.numeric_range(7.0)
+ self.assertTrue(isinstance(r, abc.Iterable))
+ self.assertFalse(isinstance(r, abc.Iterator))
+ self.assertTrue(isinstance(r, abc.Sequence))
+ self.assertTrue(isinstance(r, abc.Hashable))
+
+ def test_bad_key(self):
+ r = mi.numeric_range(7.0)
+ for arg, message in [
+ ('a', 'numeric range indices must be integers or slices, not str'),
+ (
+ (),
+ 'numeric range indices must be integers or slices, not tuple',
+ ),
+ ]:
+ with self.assertRaisesRegex(TypeError, message):
+ r[arg]
+
+ def test_pickle(self):
+ for args in [
+ (7.0,),
+ (5.0, 7.0),
+ (5.0, 7.0, 3.0),
+ (7.0, 5.0),
+ (7.0, 5.0, 4.0),
+ (7.0, 5.0, -1.0),
+ (Decimal("1.0"), Decimal("5.0"), Decimal("1.5")),
+ (Fraction(1, 1), Fraction(5, 1), Fraction(3, 2)),
+ (datetime(2019, 3, 29), datetime(2019, 3, 30)),
+ ]:
+ r = mi.numeric_range(*args)
+ self.assertTrue(dumps(r)) # assert not empty
+ self.assertEqual(r, loads(dumps(r)))
+
+
+class CountCycleTests(TestCase):
+ def test_basic(self):
+ expected = [
+ (0, 'a'),
+ (0, 'b'),
+ (0, 'c'),
+ (1, 'a'),
+ (1, 'b'),
+ (1, 'c'),
+ (2, 'a'),
+ (2, 'b'),
+ (2, 'c'),
+ ]
+ for actual in [
+ mi.take(9, mi.count_cycle('abc')), # n=None
+ list(mi.count_cycle('abc', 3)), # n=3
+ ]:
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ self.assertEqual(list(mi.count_cycle('')), [])
+ self.assertEqual(list(mi.count_cycle('', 2)), [])
+
+ def test_negative(self):
+ self.assertEqual(list(mi.count_cycle('abc', -3)), [])
+
+
+class MarkEndsTests(TestCase):
+ def test_basic(self):
+ for size, expected in [
+ (0, []),
+ (1, [(True, True, '0')]),
+ (2, [(True, False, '0'), (False, True, '1')]),
+ (3, [(True, False, '0'), (False, False, '1'), (False, True, '2')]),
+ (
+ 4,
+ [
+ (True, False, '0'),
+ (False, False, '1'),
+ (False, False, '2'),
+ (False, True, '3'),
+ ],
+ ),
+ ]:
+ with self.subTest(size=size):
+ iterable = map(str, range(size))
+ actual = list(mi.mark_ends(iterable))
+ self.assertEqual(actual, expected)
+
+
+class LocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = [1, 2, 4]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ actual = list(mi.locate(iterable, pred))
+ expected = [0, 3, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_window_size(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda *args: args == ('0', 1)
+ actual = list(mi.locate(iterable, pred, window_size=2))
+ expected = [0, 3]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda a, b, c, d, e: True
+ actual = list(mi.locate(iterable, pred, window_size=5))
+ expected = [0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda: True
+ with self.assertRaises(ValueError):
+ list(mi.locate(iterable, pred, window_size=0))
+
+
+class StripFunctionTests(TestCase):
+ def test_hashable(self):
+ iterable = list('www.example.com')
+ pred = lambda x: x in set('cmowz.')
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), list('example.com'))
+ self.assertEqual(list(mi.rstrip(iterable, pred)), list('www.example'))
+ self.assertEqual(list(mi.strip(iterable, pred)), list('example'))
+
+ def test_not_hashable(self):
+ iterable = [
+ list('http://'),
+ list('www'),
+ list('.example'),
+ list('.com'),
+ ]
+ pred = lambda x: x in [list('http://'), list('www'), list('.com')]
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[2:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[2:3])
+
+ def test_math(self):
+ iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]
+ pred = lambda x: x <= 2
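+        # the leading and trailing runs of 0, 1, 2 satisfy the predicate
+        # and are stripped; the interior values are kept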
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[3:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:-3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[3:-3])
+
+
+class IsliceExtendedTests(TestCase):
+ def test_all(self):
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, 4]
+ for slice_args in product(indexes, indexes, steps):
+ with self.subTest(slice_args=slice_args):
+ actual = list(mi.islice_extended(iterable, *slice_args))
+ expected = iterable[slice(*slice_args)]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_zero_step(self):
+ with self.assertRaises(ValueError):
+ list(mi.islice_extended([1, 2, 3], 0, 1, 0))
+
+ def test_slicing(self):
+ iterable = map(str, count())
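+        # chained slices compose lazily, so this works on an infinite iterator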
+ first_slice = mi.islice_extended(iterable)[10:]
+ second_slice = mi.islice_extended(first_slice)[:10]
+ third_slice = mi.islice_extended(second_slice)[::2]
+ self.assertEqual(list(third_slice), ['10', '12', '14', '16', '18'])
+
+ def test_slicing_extensive(self):
+ iterable = range(10)
+ options = (None, 1, 2, 7, -1)
+ for start, stop, step in product(options, options, options):
+ with self.subTest(slice_args=(start, stop, step)):
+ sliced_tuple_0 = tuple(
+ mi.islice_extended(iterable)[start:stop:step]
+ )
+ sliced_tuple_1 = tuple(
+ mi.islice_extended(iterable, start, stop, step)
+ )
+ sliced_range = tuple(iterable[start:stop:step])
+ self.assertEqual(sliced_tuple_0, sliced_range)
+ self.assertEqual(sliced_tuple_1, sliced_range)
+
+ def test_invalid_slice(self):
+ with self.assertRaises(TypeError):
+ mi.islice_extended(count())[13]
+
+
+class ConsecutiveGroupsTest(TestCase):
+ def test_numbers(self):
+ iterable = [-10, -8, -7, -6, 1, 2, 4, 5, -1, 7]
+ actual = [list(g) for g in mi.consecutive_groups(iterable)]
+ expected = [[-10], [-8, -7, -6], [1, 2], [4, 5], [-1], [7]]
+ self.assertEqual(actual, expected)
+
+ def test_custom_ordering(self):
+ iterable = ['1', '10', '11', '20', '21', '22', '30', '31']
+ ordering = lambda x: int(x)
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [['1'], ['10', '11'], ['20', '21', '22'], ['30', '31']]
+ self.assertEqual(actual, expected)
+
+ def test_exotic_ordering(self):
+ iterable = [
+ ('a', 'b', 'c', 'd'),
+ ('a', 'c', 'b', 'd'),
+ ('a', 'c', 'd', 'b'),
+ ('a', 'd', 'b', 'c'),
+ ('d', 'b', 'c', 'a'),
+ ('d', 'c', 'a', 'b'),
+ ]
+ ordering = list(permutations('abcd')).index
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [
+ [('a', 'b', 'c', 'd')],
+ [('a', 'c', 'b', 'd'), ('a', 'c', 'd', 'b'), ('a', 'd', 'b', 'c')],
+ [('d', 'b', 'c', 'a'), ('d', 'c', 'a', 'b')],
+ ]
+ self.assertEqual(actual, expected)
+
+
+class DifferenceTest(TestCase):
+ def test_normal(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable))
+ expected = [10, 10, 10, 10, 10]
+ self.assertEqual(actual, expected)
+
+ def test_custom(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable, add))
+ expected = [10, 30, 50, 70, 90]
+ self.assertEqual(actual, expected)
+
+ def test_roundtrip(self):
+ original = list(range(100))
+ accumulated = accumulate(original)
+ actual = list(mi.difference(accumulated))
+ self.assertEqual(actual, original)
+
+ def test_one(self):
+ self.assertEqual(list(mi.difference([0])), [0])
+
+ def test_empty(self):
+ self.assertEqual(list(mi.difference([])), [])
+
+ @skipIf(version_info[:2] < (3, 8), 'accumulate with initial needs 3.8+')
+ def test_initial(self):
+ original = list(range(100))
+ accumulated = accumulate(original, initial=100)
+ actual = list(mi.difference(accumulated, initial=100))
+ self.assertEqual(actual, original)
+
+
+class SeekableTest(PeekableMixinTests, TestCase):
+ cls = mi.seekable
+
+ def test_exhaustion_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(list(s), iterable) # Normal iteration
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0)
+ self.assertEqual(list(s), iterable) # Back in action
+
+ def test_partial_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(5, s), iterable[:5]) # Normal iteration
+
+ s.seek(1)
+ self.assertEqual(list(s), iterable[1:]) # Get the rest of the iterable
+
+ def test_forward(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(3) # Skip over index 2
+ self.assertEqual(list(s), iterable[3:]) # Result is similar to slicing
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_past_end(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(20)
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_elements(self):
+ iterable = map(str, count())
+
+ s = mi.seekable(iterable)
+ mi.take(10, s)
+
+ elements = s.elements()
+ self.assertEqual(
+ [elements[i] for i in range(10)], [str(n) for n in range(10)]
+ )
+ self.assertEqual(len(elements), 10)
+
+ mi.take(10, s)
+ self.assertEqual(list(elements), [str(n) for n in range(20)])
+
+ def test_maxlen(self):
+ iterable = map(str, count())
+
+ s = mi.seekable(iterable, maxlen=4)
+ self.assertEqual(mi.take(10, s), [str(n) for n in range(10)])
+ self.assertEqual(list(s.elements()), ['6', '7', '8', '9'])
+
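+        # Only the last maxlen=4 items were cached, so seeking back to 0
+        # resumes from '6' rather than the true start of the iterable.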
+ s.seek(0)
+ self.assertEqual(mi.take(14, s), [str(n) for n in range(6, 20)])
+ self.assertEqual(list(s.elements()), ['16', '17', '18', '19'])
+
+ def test_maxlen_zero(self):
+ iterable = [str(x) for x in range(5)]
+ s = mi.seekable(iterable, maxlen=0)
+ self.assertEqual(list(s), iterable)
+ self.assertEqual(list(s.elements()), [])
+
+
+class SequenceViewTests(TestCase):
+ def test_init(self):
+ view = mi.SequenceView((1, 2, 3))
+ self.assertEqual(repr(view), "SequenceView((1, 2, 3))")
+ self.assertRaises(TypeError, lambda: mi.SequenceView({}))
+
+ def test_update(self):
+ seq = [1, 2, 3]
+ view = mi.SequenceView(seq)
+ self.assertEqual(len(view), 3)
+ self.assertEqual(repr(view), "SequenceView([1, 2, 3])")
+
+ seq.pop()
+ self.assertEqual(len(view), 2)
+ self.assertEqual(repr(view), "SequenceView([1, 2])")
+
+ def test_indexing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ for i in range(-len(seq), len(seq)):
+ self.assertEqual(view[i], seq[i])
+
+ def test_slicing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ n = len(seq)
+ indexes = list(range(-n - 1, n + 1)) + [None]
+ steps = list(range(-n, n + 1))
+ steps.remove(0)
+ for slice_args in product(indexes, indexes, steps):
+ i = slice(*slice_args)
+ self.assertEqual(view[i], seq[i])
+
+ def test_abc_methods(self):
+        # collections.abc.Sequence should provide all of this functionality
+ seq = ('a', 'b', 'c', 'd', 'e', 'f', 'f')
+ view = mi.SequenceView(seq)
+
+ # __contains__
+ self.assertIn('b', view)
+ self.assertNotIn('g', view)
+
+ # __iter__
+ self.assertEqual(list(iter(view)), list(seq))
+
+ # __reversed__
+ self.assertEqual(list(reversed(view)), list(reversed(seq)))
+
+ # index
+ self.assertEqual(view.index('b'), 1)
+
+ # count
+        self.assertEqual(view.count('f'), 2)
+
+
+class RunLengthTest(TestCase):
+ def test_encode(self):
+ iterable = (int(str(n)[0]) for n in count(800))
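+        # Leading digits of 800, 801, ...: 8 and 9 each repeat 100 times
+        # (800-999), then 1 and 2 each repeat 1000 times (1000-2999).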
+ actual = mi.take(4, mi.run_length.encode(iterable))
+ expected = [(8, 100), (9, 100), (1, 1000), (2, 1000)]
+ self.assertEqual(actual, expected)
+
+ def test_decode(self):
+ iterable = [('d', 4), ('c', 3), ('b', 2), ('a', 1)]
+ actual = ''.join(mi.run_length.decode(iterable))
+ expected = 'ddddcccbba'
+ self.assertEqual(actual, expected)
+
+
+class ExactlyNTests(TestCase):
+ """Tests for ``exactly_n()``"""
+
+ def test_true(self):
+ """Iterable has ``n`` ``True`` elements"""
+ self.assertTrue(mi.exactly_n([True, False, True], 2))
+ self.assertTrue(mi.exactly_n([1, 1, 1, 0], 3))
+ self.assertTrue(mi.exactly_n([False, False], 0))
+ self.assertTrue(mi.exactly_n(range(100), 10, lambda x: x < 10))
+
+ def test_false(self):
+ """Iterable does not have ``n`` ``True`` elements"""
+ self.assertFalse(mi.exactly_n([True, False, False], 2))
+ self.assertFalse(mi.exactly_n([True, True, False], 1))
+ self.assertFalse(mi.exactly_n([False], 1))
+ self.assertFalse(mi.exactly_n([True], -1))
+ self.assertFalse(mi.exactly_n(repeat(True), 100))
+
+ def test_empty(self):
+ """Return ``True`` if the iterable is empty and ``n`` is 0"""
+ self.assertTrue(mi.exactly_n([], 0))
+ self.assertFalse(mi.exactly_n([], 1))
+
+
+class AlwaysReversibleTests(TestCase):
+ """Tests for ``always_reversible()``"""
+
+ def test_regular_reversed(self):
+ self.assertEqual(
+ list(reversed(range(10))), list(mi.always_reversible(range(10)))
+ )
+ self.assertEqual(
+ list(reversed([1, 2, 3])), list(mi.always_reversible([1, 2, 3]))
+ )
+ self.assertEqual(
+ reversed([1, 2, 3]).__class__,
+ mi.always_reversible([1, 2, 3]).__class__,
+ )
+
+ def test_nonseq_reversed(self):
+ # Create a non-reversible generator from a sequence
+ with self.assertRaises(TypeError):
+ reversed(x for x in range(10))
+
+ self.assertEqual(
+ list(reversed(range(10))),
+ list(mi.always_reversible(x for x in range(10))),
+ )
+ self.assertEqual(
+ list(reversed([1, 2, 3])),
+ list(mi.always_reversible(x for x in [1, 2, 3])),
+ )
+ self.assertNotEqual(
+ reversed((1, 2)).__class__,
+ mi.always_reversible(x for x in (1, 2)).__class__,
+ )
+
+
+class CircularShiftsTests(TestCase):
+ def test_empty(self):
+ # empty iterable -> empty list
+ self.assertEqual(list(mi.circular_shifts([])), [])
+
+ def test_simple_circular_shifts(self):
+        # test a simple iterator case
+ self.assertEqual(
+ mi.circular_shifts(range(4)),
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)],
+ )
+
+ def test_duplicates(self):
+ # test non-distinct entries
+ self.assertEqual(
+ mi.circular_shifts([0, 1, 0, 1]),
+ [(0, 1, 0, 1), (1, 0, 1, 0), (0, 1, 0, 1), (1, 0, 1, 0)],
+ )
+
+
+class MakeDecoratorTests(TestCase):
+ def test_basic(self):
+ slicer = mi.make_decorator(islice)
+
+ @slicer(1, 10, 2)
+ def user_function(arg_1, arg_2, kwarg_1=None):
+ self.assertEqual(arg_1, 'arg_1')
+ self.assertEqual(arg_2, 'arg_2')
+ self.assertEqual(kwarg_1, 'kwarg_1')
+ return map(str, count())
+
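+        # slicer(1, 10, 2) wraps the function so that its return value is
+        # passed through islice(result, 1, 10, 2): items 1, 3, 5, 7, 9.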
+ it = user_function('arg_1', 'arg_2', kwarg_1='kwarg_1')
+ actual = list(it)
+ expected = ['1', '3', '5', '7', '9']
+ self.assertEqual(actual, expected)
+
+ def test_result_index(self):
+ def stringify(*args, **kwargs):
+ self.assertEqual(args[0], 'arg_0')
+ iterable = args[1]
+ self.assertEqual(args[2], 'arg_2')
+ self.assertEqual(kwargs['kwarg_1'], 'kwarg_1')
+ return map(str, iterable)
+
+ stringifier = mi.make_decorator(stringify, result_index=1)
+
+ @stringifier('arg_0', 'arg_2', kwarg_1='kwarg_1')
+ def user_function(n):
+ return count(n)
+
+ it = user_function(1)
+ actual = mi.take(5, it)
+ expected = ['1', '2', '3', '4', '5']
+ self.assertEqual(actual, expected)
+
+ def test_wrap_class(self):
+ seeker = mi.make_decorator(mi.seekable)
+
+ @seeker()
+ def user_function(n):
+ return map(str, range(n))
+
+ it = user_function(5)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+ it.seek(0)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+
+class MapReduceTests(TestCase):
+ def test_default(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ actual = sorted(mi.map_reduce(iterable, keyfunc).items())
+ expected = [(0, ['0', '1']), (1, ['2', '3']), (2, ['4'])]
+ self.assertEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ actual = sorted(mi.map_reduce(iterable, keyfunc, valuefunc).items())
+ expected = [(0, [0, 1]), (1, [2, 3]), (2, [4])]
+ self.assertEqual(actual, expected)
+
+ def test_reducefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ reducefunc = lambda value_list: reduce(mul, value_list, 1)
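+        # keyfunc groups the items as {0: [0, 1], 1: [2, 3], 2: [4]};
+        # reducefunc multiplies each group, giving 0, 6, and 4.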
+ actual = sorted(
+ mi.map_reduce(iterable, keyfunc, valuefunc, reducefunc).items()
+ )
+ expected = [(0, 0), (1, 6), (2, 4)]
+ self.assertEqual(actual, expected)
+
+ def test_ret(self):
+ d = mi.map_reduce([1, 0, 2, 0, 1, 0], bool)
+ self.assertEqual(d, {False: [0, 0, 0], True: [1, 2, 1]})
+ self.assertRaises(KeyError, lambda: d[None].append(1))
+
+
+class RlocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it))
+ expected = [4, 2, 1]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it, pred))
+ expected = [6, 5, 3, 0]
+ self.assertEqual(actual, expected)
+
+ def test_efficient_reversal(self):
+ iterable = range(9 ** 9) # Is efficiently reversible
+ target = 9 ** 9 - 2
+ pred = lambda x: x == target # Find-able from the right
+ actual = next(mi.rlocate(iterable, pred))
+ self.assertEqual(actual, target)
+
+ def test_window_size(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda *args: args == ('0', 1)
+ for it in (iterable, iter(iterable)):
+ actual = list(mi.rlocate(it, pred, window_size=2))
+ expected = [3, 0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda a, b, c, d, e: True
+ for it in (iterable, iter(iterable)):
+            actual = list(mi.rlocate(it, pred, window_size=5))
+ expected = [0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda: True
+ for it in (iterable, iter(iterable)):
+ with self.assertRaises(ValueError):
+                list(mi.rlocate(it, pred, window_size=0))
+
+
+class ReplaceTests(TestCase):
+ def test_basic(self):
+ iterable = range(10)
+ pred = lambda x: x % 2 == 0
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes))
+ expected = [1, 3, 5, 7, 9]
+ self.assertEqual(actual, expected)
+
+ def test_count(self):
+ iterable = range(10)
+ pred = lambda x: x % 2 == 0
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, count=4))
+ expected = [1, 3, 5, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size(self):
+ iterable = range(10)
+ pred = lambda *args: args == (0, 1, 2)
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=3))
+ expected = [3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_end(self):
+ iterable = range(10)
+ pred = lambda *args: args == (7, 8, 9)
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=3))
+ expected = [0, 1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_count(self):
+ iterable = range(10)
+ pred = lambda *args: (args == (0, 1, 2)) or (args == (7, 8, 9))
+ substitutes = []
+ actual = list(
+ mi.replace(iterable, pred, substitutes, count=1, window_size=3)
+ )
+ expected = [3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = range(4)
+ pred = lambda a, b, c, d, e: True
+ substitutes = [5, 6, 7]
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=5))
+ expected = [5, 6, 7]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = range(10)
+ pred = lambda *args: True
+ substitutes = []
+ with self.assertRaises(ValueError):
+ list(mi.replace(iterable, pred, substitutes, window_size=0))
+
+ def test_iterable_substitutes(self):
+ iterable = range(5)
+ pred = lambda x: x % 2 == 0
+ substitutes = iter('__')
+ actual = list(mi.replace(iterable, pred, substitutes))
+ expected = ['_', '_', 1, '_', '_', 3, '_', '_']
+ self.assertEqual(actual, expected)
+
+
+class PartitionsTest(TestCase):
+ def test_types(self):
+ for iterable in ['abcd', ['a', 'b', 'c', 'd'], ('a', 'b', 'c', 'd')]:
+ with self.subTest(iterable=iterable):
+ actual = list(mi.partitions(iterable))
+ expected = [
+ [['a', 'b', 'c', 'd']],
+ [['a'], ['b', 'c', 'd']],
+ [['a', 'b'], ['c', 'd']],
+ [['a', 'b', 'c'], ['d']],
+ [['a'], ['b'], ['c', 'd']],
+ [['a'], ['b', 'c'], ['d']],
+ [['a', 'b'], ['c'], ['d']],
+ [['a'], ['b'], ['c'], ['d']],
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ iterable = []
+ actual = list(mi.partitions(iterable))
+ expected = [[[]]]
+ self.assertEqual(actual, expected)
+
+ def test_order(self):
+ iterable = iter([3, 2, 1])
+ actual = list(mi.partitions(iterable))
+ expected = [[[3, 2, 1]], [[3], [2, 1]], [[3, 2], [1]], [[3], [2], [1]]]
+ self.assertEqual(actual, expected)
+
+ def test_duplicates(self):
+ iterable = [1, 1, 1]
+ actual = list(mi.partitions(iterable))
+ expected = [[[1, 1, 1]], [[1], [1, 1]], [[1, 1], [1]], [[1], [1], [1]]]
+ self.assertEqual(actual, expected)
+
+
+class _FrozenMultiset(Set):
+ """
+    A helper class for comparing two lists without regard to the order of
+    their elements.
+
+    _FrozenMultiset represents a hashable set that allows duplicate elements.
+ """
+
+ def __init__(self, iterable):
+ self._collection = frozenset(Counter(iterable).items())
+
+ def __contains__(self, y):
+ """
+ >>> (0, 1) in _FrozenMultiset([(0, 1), (2,), (0, 1)])
+ True
+ """
+ return any(y == x for x, _ in self._collection)
+
+ def __iter__(self):
+ """
+ >>> sorted(_FrozenMultiset([(0, 1), (2,), (0, 1)]))
+ [(0, 1), (0, 1), (2,)]
+ """
+ return (x for x, c in self._collection for _ in range(c))
+
+ def __len__(self):
+ """
+ >>> len(_FrozenMultiset([(0, 1), (2,), (0, 1)]))
+ 3
+ """
+ return sum(c for x, c in self._collection)
+
+ def has_duplicates(self):
+ """
+ >>> _FrozenMultiset([(0, 1), (2,), (0, 1)]).has_duplicates()
+ True
+ """
+ return any(c != 1 for _, c in self._collection)
+
+ def __hash__(self):
+ return hash(self._collection)
+
+ def __repr__(self):
+ return "FrozenSet([{}]".format(", ".join(repr(x) for x in iter(self)))
+
+
+class SetPartitionsTests(TestCase):
+ @staticmethod
+ def _normalize_partition(p):
+ """
+ Return a normalized, hashable, version of a partition using
+ _FrozenMultiset
+ """
+ return _FrozenMultiset(_FrozenMultiset(g) for g in p)
+
+ @staticmethod
+ def _normalize_partitions(ps):
+ """
+ Return a normalized set of all normalized partitions using
+ _FrozenMultiset
+ """
+ return _FrozenMultiset(
+ SetPartitionsTests._normalize_partition(p) for p in ps
+ )
+
+ def test_repeated(self):
+ it = 'aaa'
+ actual = mi.set_partitions(it, 2)
+ expected = [['a', 'aa'], ['a', 'aa'], ['a', 'aa']]
+ self.assertEqual(
+ self._normalize_partitions(expected),
+ self._normalize_partitions(actual),
+ )
+
+ def test_each_correct(self):
+ a = set(range(6))
+ for p in mi.set_partitions(a):
+ total = {e for g in p for e in g}
+ self.assertEqual(a, total)
+
+ def test_duplicates(self):
+ a = set(range(6))
+ for p in mi.set_partitions(a):
+ self.assertFalse(self._normalize_partition(p).has_duplicates())
+
+ def test_found_all(self):
+ """small example, hand-checked"""
+ expected = [
+ [[0], [1], [2, 3, 4]],
+ [[0], [1, 2], [3, 4]],
+ [[0], [2], [1, 3, 4]],
+ [[0], [3], [1, 2, 4]],
+ [[0], [4], [1, 2, 3]],
+ [[0], [1, 3], [2, 4]],
+ [[0], [1, 4], [2, 3]],
+ [[1], [2], [0, 3, 4]],
+ [[1], [3], [0, 2, 4]],
+ [[1], [4], [0, 2, 3]],
+ [[1], [0, 2], [3, 4]],
+ [[1], [0, 3], [2, 4]],
+ [[1], [0, 4], [2, 3]],
+ [[2], [3], [0, 1, 4]],
+ [[2], [4], [0, 1, 3]],
+ [[2], [0, 1], [3, 4]],
+ [[2], [0, 3], [1, 4]],
+ [[2], [0, 4], [1, 3]],
+ [[3], [4], [0, 1, 2]],
+ [[3], [0, 1], [2, 4]],
+ [[3], [0, 2], [1, 4]],
+ [[3], [0, 4], [1, 2]],
+ [[4], [0, 1], [2, 3]],
+ [[4], [0, 2], [1, 3]],
+ [[4], [0, 3], [1, 2]],
+ ]
+ actual = mi.set_partitions(range(5), 3)
+ self.assertEqual(
+ self._normalize_partitions(expected),
+ self._normalize_partitions(actual),
+ )
+
+ def test_stirling_numbers(self):
+ """Check against https://en.wikipedia.org/wiki/
+ Stirling_numbers_of_the_second_kind#Table_of_values"""
+ cardinality_by_k_by_n = [
+ [1],
+ [1, 1],
+ [1, 3, 1],
+ [1, 7, 6, 1],
+ [1, 15, 25, 10, 1],
+ [1, 31, 90, 65, 15, 1],
+ ]
+ for n, cardinality_by_k in enumerate(cardinality_by_k_by_n, 1):
+ for k, cardinality in enumerate(cardinality_by_k, 1):
+ self.assertEqual(
+ cardinality, len(list(mi.set_partitions(range(n), k)))
+ )
+
+ def test_no_group(self):
+ def helper():
+ list(mi.set_partitions(range(4), -1))
+
+ self.assertRaises(ValueError, helper)
+
+    def test_too_many_groups(self):
+ self.assertEqual([], list(mi.set_partitions(range(4), 5)))
+
+
+class TimeLimitedTests(TestCase):
+ def test_basic(self):
+ def generator():
+ yield 1
+ yield 2
+ sleep(0.2)
+ yield 3
+
+ iterable = mi.time_limited(0.1, generator())
+ actual = list(iterable)
+ expected = [1, 2]
+ self.assertEqual(actual, expected)
+ self.assertTrue(iterable.timed_out)
+
+ def test_complete(self):
+ iterable = mi.time_limited(2, iter(range(10)))
+ actual = list(iterable)
+ expected = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+ self.assertFalse(iterable.timed_out)
+
+ def test_zero_limit(self):
+ iterable = mi.time_limited(0, count())
+ actual = list(iterable)
+ expected = []
+ self.assertEqual(actual, expected)
+ self.assertTrue(iterable.timed_out)
+
+ def test_invalid_limit(self):
+ with self.assertRaises(ValueError):
+ list(mi.time_limited(-0.1, count()))
+
+
+class OnlyTests(TestCase):
+ def test_defaults(self):
+ self.assertEqual(mi.only([]), None)
+ self.assertEqual(mi.only([1]), 1)
+ self.assertRaises(ValueError, lambda: mi.only([1, 2]))
+
+ def test_custom_value(self):
+ self.assertEqual(mi.only([], default='!'), '!')
+ self.assertEqual(mi.only([1], default='!'), 1)
+ self.assertRaises(ValueError, lambda: mi.only([1, 2], default='!'))
+
+ def test_custom_exception(self):
+ self.assertEqual(mi.only([], too_long=RuntimeError), None)
+ self.assertEqual(mi.only([1], too_long=RuntimeError), 1)
+ self.assertRaises(
+ RuntimeError, lambda: mi.only([1, 2], too_long=RuntimeError)
+ )
+
+ def test_default_exception_message(self):
+ self.assertRaisesRegex(
+ ValueError,
+ "Expected exactly one item in iterable, "
+ "but got 'foo', 'bar', and perhaps more",
+ lambda: mi.only(['foo', 'bar', 'baz']),
+ )
+
+
+class IchunkedTests(TestCase):
+ def test_even(self):
+ iterable = (str(x) for x in range(10))
+ actual = [''.join(c) for c in mi.ichunked(iterable, 5)]
+ expected = ['01234', '56789']
+ self.assertEqual(actual, expected)
+
+ def test_odd(self):
+ iterable = (str(x) for x in range(10))
+ actual = [''.join(c) for c in mi.ichunked(iterable, 4)]
+ expected = ['0123', '4567', '89']
+ self.assertEqual(actual, expected)
+
+ def test_zero(self):
+ iterable = []
+ actual = [list(c) for c in mi.ichunked(iterable, 0)]
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ iterable = count()
+ with self.assertRaises(ValueError):
+ [list(c) for c in mi.ichunked(iterable, -1)]
+
+ def test_out_of_order(self):
+ iterable = map(str, count())
+ it = mi.ichunked(iterable, 4)
+ chunk_1 = next(it)
+ chunk_2 = next(it)
+ self.assertEqual(''.join(chunk_2), '4567')
+ self.assertEqual(''.join(chunk_1), '0123')
+
+ def test_laziness(self):
+ def gen():
+ yield 0
+ raise RuntimeError
+ yield from count(1)
+
+ it = mi.ichunked(gen(), 4)
+ chunk = next(it)
+ self.assertEqual(next(chunk), 0)
+ self.assertRaises(RuntimeError, next, it)
+
+
+class DistinctCombinationsTests(TestCase):
+ def test_basic(self):
+ for iterable in [
+ (1, 2, 2, 3, 3, 3), # In order
+ range(6), # All distinct
+ 'abbccc', # Not numbers
+ 'cccbba', # Backward
+ 'mississippi', # No particular order
+ ]:
+ for r in range(len(iterable)):
+ with self.subTest(iterable=iterable, r=r):
+ actual = list(mi.distinct_combinations(iterable, r))
+ expected = list(
+ mi.unique_everseen(combinations(iterable, r))
+ )
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ with self.assertRaises(ValueError):
+ list(mi.distinct_combinations([], -1))
+
+ def test_empty(self):
+ self.assertEqual(list(mi.distinct_combinations([], 2)), [])
+
+
+class FilterExceptTests(TestCase):
+ def test_no_exceptions_pass(self):
+ iterable = '0123'
+ actual = list(mi.filter_except(int, iterable))
+ expected = ['0', '1', '2', '3']
+ self.assertEqual(actual, expected)
+
+ def test_no_exceptions_raise(self):
+ iterable = ['0', '1', 'two', '3']
+ with self.assertRaises(ValueError):
+ list(mi.filter_except(int, iterable))
+
+ def test_raise(self):
+        iterable = ['0', '1', '2', 'three', None]
+ with self.assertRaises(TypeError):
+ list(mi.filter_except(int, iterable, ValueError))
+
+ def test_false(self):
+ # Even if the validator returns false, we pass through
+ validator = lambda x: False
+ iterable = ['0', '1', '2', 'three', None]
+ actual = list(mi.filter_except(validator, iterable, Exception))
+ expected = ['0', '1', '2', 'three', None]
+ self.assertEqual(actual, expected)
+
+ def test_multiple(self):
+ iterable = ['0', '1', '2', 'three', None, '4']
+ actual = list(mi.filter_except(int, iterable, ValueError, TypeError))
+ expected = ['0', '1', '2', '4']
+ self.assertEqual(actual, expected)
+
+
+class MapExceptTests(TestCase):
+ def test_no_exceptions_pass(self):
+ iterable = '0123'
+ actual = list(mi.map_except(int, iterable))
+ expected = [0, 1, 2, 3]
+ self.assertEqual(actual, expected)
+
+ def test_no_exceptions_raise(self):
+ iterable = ['0', '1', 'two', '3']
+ with self.assertRaises(ValueError):
+ list(mi.map_except(int, iterable))
+
+ def test_raise(self):
+        iterable = ['0', '1', '2', 'three', None]
+ with self.assertRaises(TypeError):
+ list(mi.map_except(int, iterable, ValueError))
+
+ def test_multiple(self):
+ iterable = ['0', '1', '2', 'three', None, '4']
+ actual = list(mi.map_except(int, iterable, ValueError, TypeError))
+ expected = [0, 1, 2, 4]
+ self.assertEqual(actual, expected)
+
+
+class MapIfTests(TestCase):
+ def test_without_func_else(self):
+ iterable = list(range(-5, 5))
+ actual = list(mi.map_if(iterable, lambda x: x > 3, lambda x: 'toobig'))
+ expected = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
+ self.assertEqual(actual, expected)
+
+ def test_with_func_else(self):
+ iterable = list(range(-5, 5))
+ actual = list(
+ mi.map_if(
+ iterable, lambda x: x >= 0, lambda x: 'notneg', lambda x: 'neg'
+ )
+ )
+ expected = ['neg'] * 5 + ['notneg'] * 5
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ actual = list(mi.map_if([], lambda x: len(x) > 5, lambda x: None))
+ expected = []
+ self.assertEqual(actual, expected)
+
+
+class SampleTests(TestCase):
+ def test_unit_case(self):
+ """Test against a fixed case by seeding the random module."""
+ # Beware that this test really just verifies random.random() behavior.
+ # If the algorithm is changed (e.g. to a more naive implementation)
+ # this test will fail, but the algorithm might be correct.
+ # Also, this test can pass and the algorithm can be completely wrong.
+ data = "abcdef"
+ weights = list(range(1, len(data) + 1))
+ seed(123)
+ actual = mi.sample(data, k=2, weights=weights)
+ expected = ['f', 'e']
+ self.assertEqual(actual, expected)
+
+ def test_length(self):
+ """Check that *k* elements are sampled."""
+ data = [1, 2, 3, 4, 5]
+ for k in [0, 3, 5, 7]:
+ sampled = mi.sample(data, k=k)
+ actual = len(sampled)
+ expected = min(k, len(data))
+ self.assertEqual(actual, expected)
+
+    def test_sampling_entire_iterable(self):
+ """If k=len(iterable), the sample contains the original elements."""
+ data = ["a", 2, "a", 4, (1, 2, 3)]
+ actual = set(mi.sample(data, k=len(data)))
+ expected = set(data)
+ self.assertEqual(actual, expected)
+
+ def test_scale_invariance_of_weights(self):
+ """The probabilit of chosing element a_i is w_i / sum(weights).
+ Scaling weights should not change the probability or outcome."""
+ data = "abcdef"
+
+ weights = list(range(1, len(data) + 1))
+ seed(123)
+ first_sample = mi.sample(data, k=2, weights=weights)
+
+ # Scale the weights and sample again
+ weights_scaled = [w / 1e10 for w in weights]
+ seed(123)
+ second_sample = mi.sample(data, k=2, weights=weights_scaled)
+
+ self.assertEqual(first_sample, second_sample)
+
+ def test_invariance_under_permutations_unweighted(self):
+ """The order of the data should not matter. This is a stochastic test,
+ but it will fail in less than 1 / 10_000 cases."""
+
+ # Create a data set and a reversed data set
+ data = list(range(100))
+ data_rev = list(reversed(data))
+
+ # Sample each data set 10 times
+ data_means = [mean(mi.sample(data, k=50)) for _ in range(10)]
+ data_rev_means = [mean(mi.sample(data_rev, k=50)) for _ in range(10)]
+
+ # The difference in the means should be low, i.e. little bias
+ difference_in_means = abs(mean(data_means) - mean(data_rev_means))
+
+ # The observed largest difference in 10,000 simulations was 5.09599
+ self.assertTrue(difference_in_means < 5.1)
+
+ def test_invariance_under_permutations_weighted(self):
+ """The order of the data should not matter. This is a stochastic test,
+ but it will fail in less than 1 / 10_000 cases."""
+
+ # Create a data set and a reversed data set
+ data = list(range(1, 101))
+ data_rev = list(reversed(data))
+
+ # Sample each data set 10 times
+ data_means = [
+ mean(mi.sample(data, k=50, weights=data)) for _ in range(10)
+ ]
+ data_rev_means = [
+ mean(mi.sample(data_rev, k=50, weights=data_rev))
+ for _ in range(10)
+ ]
+
+ # The difference in the means should be low, i.e. little bias
+ difference_in_means = abs(mean(data_means) - mean(data_rev_means))
+
+ # The observed largest difference in 10,000 simulations was 4.337999
+ self.assertTrue(difference_in_means < 4.4)
+
+
+class IsSortedTests(TestCase):
+ def test_basic(self):
+ for iterable, kwargs, expected in [
+ ([], {}, True),
+ ([1], {}, True),
+ ([1, 2, 3], {}, True),
+ ([1, 1, 2, 3], {}, True),
+ ([1, 10, 2, 3], {}, False),
+ (['1', '10', '2', '3'], {}, True),
+ (['1', '10', '2', '3'], {'key': int}, False),
+ ([1, 2, 3], {'reverse': True}, False),
+ ([1, 1, 2, 3], {'reverse': True}, False),
+ ([1, 10, 2, 3], {'reverse': True}, False),
+ (['3', '2', '10', '1'], {'reverse': True}, True),
+ (['3', '2', '10', '1'], {'key': int, 'reverse': True}, False),
+ # strict
+ ([], {'strict': True}, True),
+ ([1], {'strict': True}, True),
+ ([1, 1], {'strict': True}, False),
+ ([1, 2, 3], {'strict': True}, True),
+ ([1, 1, 2, 3], {'strict': True}, False),
+ ([1, 10, 2, 3], {'strict': True}, False),
+ (['1', '10', '2', '3'], {'strict': True}, True),
+ (['1', '10', '2', '3', '3'], {'strict': True}, False),
+ (['1', '10', '2', '3'], {'strict': True, 'key': int}, False),
+ ([1, 2, 3], {'strict': True, 'reverse': True}, False),
+ ([1, 1, 2, 3], {'strict': True, 'reverse': True}, False),
+ ([1, 10, 2, 3], {'strict': True, 'reverse': True}, False),
+ (['3', '2', '10', '1'], {'strict': True, 'reverse': True}, True),
+ (
+ ['3', '2', '10', '10', '1'],
+ {'strict': True, 'reverse': True},
+ False,
+ ),
+ (
+ ['3', '2', '10', '1'],
+ {'strict': True, 'key': int, 'reverse': True},
+ False,
+ ),
+ # We'll do the same weird thing as Python here
+ (['nan', 0, 'nan', 0], {'key': float}, True),
+ ([0, 'nan', 0, 'nan'], {'key': float}, True),
+ (['nan', 0, 'nan', 0], {'key': float, 'reverse': True}, True),
+ ([0, 'nan', 0, 'nan'], {'key': float, 'reverse': True}, True),
+ ([0, 'nan', 0, 'nan'], {'strict': True, 'key': float}, True),
+ (
+ ['nan', 0, 'nan', 0],
+ {'strict': True, 'key': float, 'reverse': True},
+ True,
+ ),
+ ]:
+ key = kwargs.get('key', None)
+ reverse = kwargs.get('reverse', False)
+ strict = kwargs.get('strict', False)
+
+ with self.subTest(
+ iterable=iterable, key=key, reverse=reverse, strict=strict
+ ):
+ mi_result = mi.is_sorted(
+ iter(iterable), key=key, reverse=reverse, strict=strict
+ )
+
+ sorted_iterable = sorted(iterable, key=key, reverse=reverse)
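+                # With strict=True equal neighbors are not allowed, so drop
+                # them before comparing against the sorted reference.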
+ if strict:
+ sorted_iterable = list(mi.unique_justseen(sorted_iterable))
+
+ py_result = iterable == sorted_iterable
+
+ self.assertEqual(mi_result, expected)
+ self.assertEqual(mi_result, py_result)
+
+
+class CallbackIterTests(TestCase):
+ def _target(self, cb=None, exc=None, wait=0):
+ total = 0
+ for i, c in enumerate('abc', 1):
+ total += i
+ if wait:
+ sleep(wait)
+ if cb:
+ cb(i, c, intermediate_total=total)
+ if exc:
+ raise exc('error in target')
+
+ return total
+
+ def test_basic(self):
+ func = lambda callback=None: self._target(cb=callback, wait=0.02)
+ with mi.callback_iter(func, wait_seconds=0.01) as it:
+ # Execution doesn't start until we begin iterating
+ self.assertFalse(it.done)
+
+ # Consume everything
+ self.assertEqual(
+ list(it),
+ [
+ ((1, 'a'), {'intermediate_total': 1}),
+ ((2, 'b'), {'intermediate_total': 3}),
+ ((3, 'c'), {'intermediate_total': 6}),
+ ],
+ )
+
+ # After consuming everything the future is done and the
+ # result is available.
+ self.assertTrue(it.done)
+ self.assertEqual(it.result, 6)
+
+ # This examines the internal state of the ThreadPoolExecutor. This
+ # isn't documented, so may break in future Python versions.
+ self.assertTrue(it._executor._shutdown)
+
+ def test_callback_kwd(self):
+ with mi.callback_iter(self._target, callback_kwd='cb') as it:
+ self.assertEqual(
+ list(it),
+ [
+ ((1, 'a'), {'intermediate_total': 1}),
+ ((2, 'b'), {'intermediate_total': 3}),
+ ((3, 'c'), {'intermediate_total': 6}),
+ ],
+ )
+
+ def test_partial_consumption(self):
+ func = lambda callback=None: self._target(cb=callback)
+ with mi.callback_iter(func) as it:
+ self.assertEqual(next(it), ((1, 'a'), {'intermediate_total': 1}))
+
+ self.assertTrue(it._executor._shutdown)
+
+ def test_abort(self):
+ func = lambda callback=None: self._target(cb=callback, wait=0.1)
+ with mi.callback_iter(func) as it:
+ self.assertEqual(next(it), ((1, 'a'), {'intermediate_total': 1}))
+
+ with self.assertRaises(mi.AbortThread):
+ it.result
+
+ def test_no_result(self):
+ func = lambda callback=None: self._target(cb=callback)
+ with mi.callback_iter(func) as it:
+ with self.assertRaises(RuntimeError):
+ it.result
+
+ def test_exception(self):
+ func = lambda callback=None: self._target(cb=callback, exc=ValueError)
+ with mi.callback_iter(func) as it:
+ self.assertEqual(
+ next(it),
+ ((1, 'a'), {'intermediate_total': 1}),
+ )
+
+ with self.assertRaises(ValueError):
+ it.result
+
+
+class WindowedCompleteTests(TestCase):
+ """Tests for ``windowed_complete()``"""
+
+ def test_basic(self):
+ actual = list(mi.windowed_complete([1, 2, 3, 4, 5], 3))
+ expected = [
+ ((), (1, 2, 3), (4, 5)),
+ ((1,), (2, 3, 4), (5,)),
+ ((1, 2), (3, 4, 5), ()),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_zero_length(self):
+ actual = list(mi.windowed_complete([1, 2, 3], 0))
+ expected = [
+ ((), (), (1, 2, 3)),
+ ((1,), (), (2, 3)),
+ ((1, 2), (), (3,)),
+ ((1, 2, 3), (), ()),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_wrong_length(self):
+ seq = [1, 2, 3, 4, 5]
+ for n in (-10, -1, len(seq) + 1, len(seq) + 10):
+ with self.subTest(n=n):
+ with self.assertRaises(ValueError):
+ list(mi.windowed_complete(seq, n))
+
+ def test_every_partition(self):
+ every_partition = lambda seq: chain(
+ *map(partial(mi.windowed_complete, seq), range(len(seq)))
+ )
+
+ seq = 'ABC'
+ actual = list(every_partition(seq))
+ expected = [
+ ((), (), ('A', 'B', 'C')),
+ (('A',), (), ('B', 'C')),
+ (('A', 'B'), (), ('C',)),
+ (('A', 'B', 'C'), (), ()),
+ ((), ('A',), ('B', 'C')),
+ (('A',), ('B',), ('C',)),
+ (('A', 'B'), ('C',), ()),
+ ((), ('A', 'B'), ('C',)),
+ (('A',), ('B', 'C'), ()),
+ ]
+ self.assertEqual(actual, expected)
+
+
+class AllUniqueTests(TestCase):
+ def test_basic(self):
+ for iterable, expected in [
+ ([], True),
+ ([1, 2, 3], True),
+ ([1, 1], False),
+ ([1, 2, 3, 1], False),
+ ([1, 2, 3, '1'], True),
+ ]:
+ with self.subTest(args=(iterable,)):
+ self.assertEqual(mi.all_unique(iterable), expected)
+
+ def test_non_hashable(self):
+ self.assertEqual(mi.all_unique([[1, 2], [3, 4]]), True)
+ self.assertEqual(mi.all_unique([[1, 2], [3, 4], [1, 2]]), False)
+
+ def test_partially_hashable(self):
+ self.assertEqual(mi.all_unique([[1, 2], [3, 4], (5, 6)]), True)
+ self.assertEqual(
+ mi.all_unique([[1, 2], [3, 4], (5, 6), [1, 2]]), False
+ )
+ self.assertEqual(
+ mi.all_unique([[1, 2], [3, 4], (5, 6), (5, 6)]), False
+ )
+
+ def test_key(self):
+ iterable = ['A', 'B', 'C', 'b']
+ self.assertEqual(mi.all_unique(iterable, lambda x: x), True)
+ self.assertEqual(mi.all_unique(iterable, str.lower), False)
+
+ def test_infinite(self):
+ self.assertEqual(mi.all_unique(mi.prepend(3, count())), False)
+
+
+class NthProductTests(TestCase):
+ def test_basic(self):
+ iterables = ['ab', 'cdef', 'ghi']
+ for index, expected in enumerate(product(*iterables)):
+ actual = mi.nth_product(index, *iterables)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.nth_product(1337, range(101), range(22), range(53))
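+        # 1337 = 1 * (22 * 53) + 3 * 53 + 12, i.e. (1, 3, 12) in the mixed
+        # radix defined by the iterable lengths.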
+ expected = (1, 3, 12)
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ iterables = ['abc', 'de', 'fghi']
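+        # There are 3 * 2 * 4 = 24 products, so index - 24 selects the same
+        # element counted from the end.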
+ for index, expected in enumerate(product(*iterables)):
+ actual = mi.nth_product(index - 24, *iterables)
+ self.assertEqual(actual, expected)
+
+ def test_invalid_index(self):
+ with self.assertRaises(IndexError):
+ mi.nth_product(24, 'ab', 'cde', 'fghi')
+
+
+class ValueChainTests(TestCase):
+ def test_empty(self):
+ actual = list(mi.value_chain())
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_simple(self):
+ actual = list(mi.value_chain(1, 2.71828, False, 'foo'))
+ expected = [1, 2.71828, False, 'foo']
+ self.assertEqual(actual, expected)
+
+ def test_more(self):
+ actual = list(mi.value_chain(b'bar', [1, 2, 3], 4, {'key': 1}))
+ expected = [b'bar', 1, 2, 3, 4, 'key']
+ self.assertEqual(actual, expected)
+
+ def test_empty_lists(self):
+ actual = list(mi.value_chain(1, 2, [], [3, 4]))
+ expected = [1, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_complex(self):
+ obj = object()
+ actual = list(
+ mi.value_chain(
+ (1, (2, (3,))),
+ ['foo', ['bar', ['baz']], 'tic'],
+ {'key': {'foo': 1}},
+ obj,
+ )
+ )
+ expected = [1, (2, (3,)), 'foo', ['bar', ['baz']], 'tic', 'key', obj]
+ self.assertEqual(actual, expected)
+
+
+class ProductIndexTests(TestCase):
+ def test_basic(self):
+ iterables = ['ab', 'cdef', 'ghi']
+ first_index = {}
+ for index, element in enumerate(product(*iterables)):
+ actual = mi.product_index(element, *iterables)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_multiplicity(self):
+ iterables = ['ab', 'bab', 'cab']
+ first_index = {}
+ for index, element in enumerate(product(*iterables)):
+ actual = mi.product_index(element, *iterables)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.product_index((1, 3, 12), range(101), range(22), range(53))
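+        # Inverse of nth_product: 1 * (22 * 53) + 3 * 53 + 12 = 1337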
+ expected = 1337
+ self.assertEqual(actual, expected)
+
+ def test_invalid_empty(self):
+ with self.assertRaises(ValueError):
+ mi.product_index('', 'ab', 'cde', 'fghi')
+
+ def test_invalid_small(self):
+ with self.assertRaises(ValueError):
+ mi.product_index('ac', 'ab', 'cde', 'fghi')
+
+ def test_invalid_large(self):
+ with self.assertRaises(ValueError):
+ mi.product_index('achi', 'ab', 'cde', 'fghi')
+
+ def test_invalid_match(self):
+ with self.assertRaises(ValueError):
+ mi.product_index('axf', 'ab', 'cde', 'fghi')
+
+
+class CombinationIndexTests(TestCase):
+ def test_r_less_than_n(self):
+ iterable = 'abcdefg'
+ r = 4
+ first_index = {}
+ for index, element in enumerate(combinations(iterable, r)):
+ actual = mi.combination_index(element, iterable)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_r_equal_to_n(self):
+ iterable = 'abcd'
+ r = len(iterable)
+ first_index = {}
+ for index, element in enumerate(combinations(iterable, r=r)):
+ actual = mi.combination_index(element, iterable)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_multiplicity(self):
+ iterable = 'abacba'
+ r = 3
+ first_index = {}
+ for index, element in enumerate(combinations(iterable, r)):
+ actual = mi.combination_index(element, iterable)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_null(self):
+ actual = mi.combination_index(tuple(), [])
+ expected = 0
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.combination_index((2, 12, 35, 126), range(180))
+ expected = 2000000
+ self.assertEqual(actual, expected)
+
+ def test_invalid_order(self):
+ with self.assertRaises(ValueError):
+ mi.combination_index(tuple('acb'), 'abcde')
+
+ def test_invalid_large(self):
+ with self.assertRaises(ValueError):
+ mi.combination_index(tuple('abcdefg'), 'abcdef')
+
+ def test_invalid_match(self):
+ with self.assertRaises(ValueError):
+ mi.combination_index(tuple('axe'), 'abcde')
+
+
+class PermutationIndexTests(TestCase):
+ def test_r_less_than_n(self):
+ iterable = 'abcdefg'
+ r = 4
+ first_index = {}
+ for index, element in enumerate(permutations(iterable, r)):
+ actual = mi.permutation_index(element, iterable)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_r_equal_to_n(self):
+ iterable = 'abcd'
+ first_index = {}
+ for index, element in enumerate(permutations(iterable)):
+ actual = mi.permutation_index(element, iterable)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_multiplicity(self):
+ iterable = 'abacba'
+ r = 3
+ first_index = {}
+ for index, element in enumerate(permutations(iterable, r)):
+ actual = mi.permutation_index(element, iterable)
+ expected = first_index.setdefault(element, index)
+ self.assertEqual(actual, expected)
+
+ def test_null(self):
+ actual = mi.permutation_index(tuple(), [])
+ expected = 0
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.permutation_index((2, 12, 35, 126), range(180))
+ expected = 11631678
+ self.assertEqual(actual, expected)
+
+ def test_invalid_large(self):
+ with self.assertRaises(ValueError):
+ mi.permutation_index(tuple('abcdefg'), 'abcdef')
+
+ def test_invalid_match(self):
+ with self.assertRaises(ValueError):
+ mi.permutation_index(tuple('axe'), 'abcde')
+
+
+class CountableTests(TestCase):
+ def test_empty(self):
+ iterable = []
+ it = mi.countable(iterable)
+ self.assertEqual(it.items_seen, 0)
+ self.assertEqual(list(it), [])
+
+ def test_basic(self):
+ iterable = '0123456789'
+ it = mi.countable(iterable)
+ self.assertEqual(it.items_seen, 0)
+ self.assertEqual(next(it), '0')
+ self.assertEqual(it.items_seen, 1)
+ self.assertEqual(''.join(it), '123456789')
+ self.assertEqual(it.items_seen, 10)
+
+
+class ChunkedEvenTests(TestCase):
+ """Tests for ``chunked_even()``"""
+
+ def test_0(self):
+ self._test_finite('', 3, [])
+
+ def test_1(self):
+ self._test_finite('A', 1, [['A']])
+
+ def test_4(self):
+ self._test_finite('ABCD', 3, [['A', 'B'], ['C', 'D']])
+
+ def test_5(self):
+ self._test_finite('ABCDE', 3, [['A', 'B', 'C'], ['D', 'E']])
+
+ def test_6(self):
+ self._test_finite('ABCDEF', 3, [['A', 'B', 'C'], ['D', 'E', 'F']])
+
+ def test_7(self):
+ self._test_finite(
+ 'ABCDEFG', 3, [['A', 'B', 'C'], ['D', 'E'], ['F', 'G']]
+ )
+
+ def _test_finite(self, seq, n, expected):
+ # Check with and without `len()`
+ self.assertEqual(list(mi.chunked_even(seq, n)), expected)
+ self.assertEqual(list(mi.chunked_even(iter(seq), n)), expected)
+
+ def test_infinite(self):
+ for n in range(1, 5):
+ k = 0
+
+ def count_with_assert():
+ for i in count():
+ # Look-ahead should be less than n^2
+ self.assertLessEqual(i, n * k + n * n)
+ yield i
+
+ ls = mi.chunked_even(count_with_assert(), n)
+ while k < 2:
+ self.assertEqual(next(ls), list(range(k * n, (k + 1) * n)))
+ k += 1
+
+ def test_evenness(self):
+ for N in range(1, 50):
+ for n in range(1, N + 2):
+ lengths = []
+ items = []
+ for l in mi.chunked_even(range(N), n):
+ L = len(l)
+ self.assertLessEqual(L, n)
+ self.assertGreaterEqual(L, 1)
+ lengths.append(L)
+ items.extend(l)
+ self.assertEqual(items, list(range(N)))
+ self.assertLessEqual(max(lengths) - min(lengths), 1)
+
+
+class ZipBroadcastTests(TestCase):
+ def test_basic(self):
+ for objects, expected in [
+ # All scalar
+ ([1, 2], [(1, 2)]),
+ # Scalar, iterable
+ ([1, [2]], [(1, 2)]),
+ # Iterable, scalar
+ ([[1], 2], [(1, 2)]),
+ # Mixed length
+ ([1, [2, 3]], [(1, 2), (1, 3)]),
+ # All iterable
+ ([[1, 2], [3, 4]], [(1, 3), (2, 4)]),
+ # Infinite
+ ([count(), 1, [2]], [(0, 1, 2)]),
+ ([count(), 1, [2, 3]], [(0, 1, 2), (1, 1, 3)]),
+ ]:
+ with self.subTest(expected=expected):
+ actual = list(mi.zip_broadcast(*objects))
+ self.assertEqual(actual, expected)
+
+ def test_scalar_types(self):
+ # Default: str and bytes are treated as scalar
+ self.assertEqual(
+ list(mi.zip_broadcast('ab', [1, 2, 3])),
+ [('ab', 1), ('ab', 2), ('ab', 3)],
+ )
+ self.assertEqual(
+ list(mi.zip_broadcast(b'ab', [1, 2, 3])),
+ [(b'ab', 1), (b'ab', 2), (b'ab', 3)],
+ )
+ # scalar_types=None allows str and bytes to be treated as iterable
+ self.assertEqual(
+ list(mi.zip_broadcast('abc', [1, 2, 3], scalar_types=None)),
+ [('a', 1), ('b', 2), ('c', 3)],
+ )
+ # Use a custom type
+ self.assertEqual(
+ list(mi.zip_broadcast({'a': 'b'}, [1, 2, 3], scalar_types=dict)),
+ [({'a': 'b'}, 1), ({'a': 'b'}, 2), ({'a': 'b'}, 3)],
+ )
+
+ def test_strict(self):
+ for objects, zipped in [
+ ([[], [1]], []),
+ ([[1], []], []),
+ ([[1], [2, 3]], [(1, 2)]),
+ ([[1, 2], [3]], [(1, 3)]),
+ ([[1, 2], [3], [4]], [(1, 3, 4)]),
+ ([[1], [2, 3], [4]], [(1, 2, 4)]),
+ ([[1], [2], [3, 4]], [(1, 2, 3)]),
+ ([[1], [2, 3], [4, 5]], [(1, 2, 4)]),
+ ([[1, 2], [3], [4, 5]], [(1, 3, 4)]),
+ ([[1, 2], [3, 4], [5]], [(1, 3, 5)]),
+ (['a', [1, 2], [3, 4, 5]], [('a', 1, 3), ('a', 2, 4)]),
+ ]:
+ # Truncate by default
+ with self.subTest(objects=objects, strict=False, zipped=zipped):
+ self.assertEqual(list(mi.zip_broadcast(*objects)), zipped)
+
+ # Raise an exception for strict=True
+ with self.subTest(objects=objects, strict=True):
+ with self.assertRaises(ValueError):
+ list(mi.zip_broadcast(*objects, strict=True))
+
+ def test_empty(self):
+ self.assertEqual(list(mi.zip_broadcast()), [])
+
+
+class UniqueInWindowTests(TestCase):
+ def test_invalid_n(self):
+ with self.assertRaises(ValueError):
+ list(mi.unique_in_window([], 0))
+
+ def test_basic(self):
+ for iterable, n, expected in [
+ (range(9), 10, list(range(9))),
+ (range(20), 10, list(range(20))),
+ ([1, 2, 3, 4, 4, 4], 1, [1, 2, 3, 4]),
+ ([1, 2, 3, 4, 4, 4], 2, [1, 2, 3, 4]),
+ ([1, 2, 3, 4, 4, 4], 3, [1, 2, 3, 4]),
+ ([1, 2, 3, 4, 4, 4], 4, [1, 2, 3, 4]),
+ ([1, 2, 3, 4, 4, 4], 5, [1, 2, 3, 4]),
+ ]:
+ with self.subTest(expected=expected):
+ actual = list(mi.unique_in_window(iterable, n))
+ self.assertEqual(actual, expected)
+
+ def test_key(self):
+ iterable = [0, 1, 3, 4, 5, 6, 7, 8, 9]
+ n = 3
+ key = lambda x: x // 3
+ actual = list(mi.unique_in_window(iterable, n, key=key))
+ expected = [0, 3, 6, 9]
+ self.assertEqual(actual, expected)
+
+
+class StrictlyNTests(TestCase):
+ def test_basic(self):
+ iterable = ['a', 'b', 'c', 'd']
+ n = 4
+ actual = list(mi.strictly_n(iter(iterable), n))
+ expected = iterable
+ self.assertEqual(actual, expected)
+
+ def test_too_short_default(self):
+ iterable = ['a', 'b', 'c', 'd']
+ n = 5
+ with self.assertRaises(ValueError) as exc:
+ list(mi.strictly_n(iter(iterable), n))
+
+ self.assertEqual(
+ 'Too few items in iterable (got 4)', exc.exception.args[0]
+ )
+
+ def test_too_long_default(self):
+ iterable = ['a', 'b', 'c', 'd']
+ n = 3
+ with self.assertRaises(ValueError) as cm:
+ list(mi.strictly_n(iter(iterable), n))
+
+ self.assertEqual(
+ 'Too many items in iterable (got at least 4)',
+ cm.exception.args[0],
+ )
+
+ def test_too_short_custom(self):
+ call_count = 0
+
+ def too_short(item_count):
+ nonlocal call_count
+ call_count += 1
+
+ iterable = ['a', 'b', 'c', 'd']
+ n = 6
+ actual = []
+ for item in mi.strictly_n(iter(iterable), n, too_short=too_short):
+ actual.append(item)
+ expected = ['a', 'b', 'c', 'd']
+ self.assertEqual(actual, expected)
+ self.assertEqual(call_count, 1)
+
+ def test_too_long_custom(self):
+ import logging
+
+ iterable = ['a', 'b', 'c', 'd']
+ n = 2
+ too_long = lambda item_count: logging.warning(
+ 'Picked the first %s items', n
+ )
+
+ with self.assertLogs(level='WARNING') as cm:
+ actual = list(mi.strictly_n(iter(iterable), n, too_long=too_long))
+
+ self.assertEqual(actual, ['a', 'b'])
+ self.assertIn('Picked the first 2 items', cm.output[0])
+
+
+class DuplicatesEverSeenTests(TestCase):
+ def test_basic(self):
+ for iterable, expected in [
+ ([], []),
+ ([1, 2, 3], []),
+ ([1, 1], [1]),
+ ([1, 2, 1, 2], [1, 2]),
+ ([1, 2, 3, '1'], []),
+ ]:
+ with self.subTest(args=(iterable,)):
+ self.assertEqual(
+ list(mi.duplicates_everseen(iterable)), expected
+ )
+
+ def test_non_hashable(self):
+ self.assertEqual(list(mi.duplicates_everseen([[1, 2], [3, 4]])), [])
+ self.assertEqual(
+ list(mi.duplicates_everseen([[1, 2], [3, 4], [1, 2]])), [[1, 2]]
+ )
+
+ def test_partially_hashable(self):
+ self.assertEqual(
+ list(mi.duplicates_everseen([[1, 2], [3, 4], (5, 6)])), []
+ )
+ self.assertEqual(
+ list(mi.duplicates_everseen([[1, 2], [3, 4], (5, 6), [1, 2]])),
+ [[1, 2]],
+ )
+ self.assertEqual(
+ list(mi.duplicates_everseen([[1, 2], [3, 4], (5, 6), (5, 6)])),
+ [(5, 6)],
+ )
+
+ def test_key_hashable(self):
+ iterable = 'HEheHEhe'
+ self.assertEqual(list(mi.duplicates_everseen(iterable)), list('HEhe'))
+ self.assertEqual(
+ list(mi.duplicates_everseen(iterable, str.lower)),
+ list('heHEhe'),
+ )
+
+ def test_key_non_hashable(self):
+ iterable = [[1, 2], [3, 0], [5, -2], [5, 6]]
+ self.assertEqual(
+ list(mi.duplicates_everseen(iterable, lambda x: x)), []
+ )
+ self.assertEqual(
+ list(mi.duplicates_everseen(iterable, sum)), [[3, 0], [5, -2]]
+ )
+
+ def test_key_partially_hashable(self):
+ iterable = [[1, 2], (1, 2), [1, 2], [5, 6]]
+ self.assertEqual(
+ list(mi.duplicates_everseen(iterable, lambda x: x)), [[1, 2]]
+ )
+ self.assertEqual(
+ list(mi.duplicates_everseen(iterable, list)), [(1, 2), [1, 2]]
+ )
+
+
+class DuplicatesJustSeenTests(TestCase):
+ def test_basic(self):
+ for iterable, expected in [
+ ([], []),
+ ([1, 2, 3, 3, 2, 2], [3, 2]),
+ ([1, 1], [1]),
+ ([1, 2, 1, 2], []),
+ ([1, 2, 3, '1'], []),
+ ]:
+ with self.subTest(args=(iterable,)):
+ self.assertEqual(
+ list(mi.duplicates_justseen(iterable)), expected
+ )
+
+ def test_non_hashable(self):
+ self.assertEqual(list(mi.duplicates_justseen([[1, 2], [3, 4]])), [])
+ self.assertEqual(
+ list(
+ mi.duplicates_justseen(
+ [[1, 2], [3, 4], [3, 4], [3, 4], [1, 2]]
+ )
+ ),
+ [[3, 4], [3, 4]],
+ )
+
+ def test_partially_hashable(self):
+ self.assertEqual(
+ list(mi.duplicates_justseen([[1, 2], [3, 4], (5, 6)])), []
+ )
+ self.assertEqual(
+ list(
+ mi.duplicates_justseen(
+ [[1, 2], [3, 4], (5, 6), [1, 2], [1, 2]]
+ )
+ ),
+ [[1, 2]],
+ )
+ self.assertEqual(
+ list(
+ mi.duplicates_justseen(
+ [[1, 2], [3, 4], (5, 6), (5, 6), (5, 6)]
+ )
+ ),
+ [(5, 6), (5, 6)],
+ )
+
+ def test_key_hashable(self):
+ iterable = 'HEheHHHhEheeEe'
+ self.assertEqual(list(mi.duplicates_justseen(iterable)), list('HHe'))
+ self.assertEqual(
+ list(mi.duplicates_justseen(iterable, str.lower)),
+ list('HHheEe'),
+ )
+
+ def test_key_non_hashable(self):
+ iterable = [[1, 2], [3, 0], [5, -2], [5, 6], [1, 2]]
+ self.assertEqual(
+ list(mi.duplicates_justseen(iterable, lambda x: x)), []
+ )
+ self.assertEqual(
+ list(mi.duplicates_justseen(iterable, sum)), [[3, 0], [5, -2]]
+ )
+
+ def test_key_partially_hashable(self):
+ iterable = [[1, 2], (1, 2), [1, 2], [5, 6], [1, 2]]
+ self.assertEqual(
+ list(mi.duplicates_justseen(iterable, lambda x: x)), []
+ )
+ self.assertEqual(
+ list(mi.duplicates_justseen(iterable, list)), [(1, 2), [1, 2]]
+ )
+
+ def test_nested(self):
+ iterable = [[[1, 2], [1, 2]], [5, 6], [5, 6]]
+ self.assertEqual(list(mi.duplicates_justseen(iterable)), [[5, 6]])
diff --git a/contrib/python/more-itertools/py3/tests/test_recipes.py b/contrib/python/more-itertools/py3/tests/test_recipes.py
new file mode 100644
index 0000000000..be40995749
--- /dev/null
+++ b/contrib/python/more-itertools/py3/tests/test_recipes.py
@@ -0,0 +1,765 @@
+import warnings
+
+from doctest import DocTestSuite
+from itertools import combinations, count, permutations
+from math import factorial
+from unittest import TestCase
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.recipes'))
+ return tests
+
+
+class TakeTests(TestCase):
+ """Tests for ``take()``"""
+
+ def test_simple_take(self):
+ """Test basic usage"""
+ t = mi.take(5, range(10))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+ def test_null_take(self):
+ """Check the null case"""
+ t = mi.take(0, range(10))
+ self.assertEqual(t, [])
+
+ def test_negative_take(self):
+ """Make sure taking negative items results in a ValueError"""
+ self.assertRaises(ValueError, lambda: mi.take(-3, range(10)))
+
+ def test_take_too_much(self):
+ """Taking more than an iterator has remaining should return what the
+ iterator has remaining.
+
+ """
+ t = mi.take(10, range(5))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+
+class TabulateTests(TestCase):
+ """Tests for ``tabulate()``"""
+
+ def test_simple_tabulate(self):
+ """Test the happy path"""
+ t = mi.tabulate(lambda x: x)
+ f = tuple([next(t) for _ in range(3)])
+ self.assertEqual(f, (0, 1, 2))
+
+ def test_count(self):
+ """Ensure tabulate accepts specific count"""
+ t = mi.tabulate(lambda x: 2 * x, -1)
+ f = (next(t), next(t), next(t))
+ self.assertEqual(f, (-2, 0, 2))
+
+
+class TailTests(TestCase):
+ """Tests for ``tail()``"""
+
+ def test_greater(self):
+ """Length of iterable is greater than requested tail"""
+ self.assertEqual(list(mi.tail(3, 'ABCDEFG')), ['E', 'F', 'G'])
+
+ def test_equal(self):
+ """Length of iterable is equal to the requested tail"""
+ self.assertEqual(
+ list(mi.tail(7, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+ def test_less(self):
+ """Length of iterable is less than requested tail"""
+ self.assertEqual(
+ list(mi.tail(8, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+
+class ConsumeTests(TestCase):
+ """Tests for ``consume()``"""
+
+ def test_sanity(self):
+ """Test basic functionality"""
+ r = (x for x in range(10))
+ mi.consume(r, 3)
+ self.assertEqual(3, next(r))
+
+ def test_null_consume(self):
+ """Check the null case"""
+ r = (x for x in range(10))
+ mi.consume(r, 0)
+ self.assertEqual(0, next(r))
+
+ def test_negative_consume(self):
+ """Check that negative consumsion throws an error"""
+ r = (x for x in range(10))
+ self.assertRaises(ValueError, lambda: mi.consume(r, -1))
+
+ def test_total_consume(self):
+ """Check that iterator is totally consumed by default"""
+ r = (x for x in range(10))
+ mi.consume(r)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class NthTests(TestCase):
+ """Tests for ``nth()``"""
+
+ def test_basic(self):
+ """Make sure the nth item is returned"""
+ l = range(10)
+ for i, v in enumerate(l):
+ self.assertEqual(mi.nth(l, i), v)
+
+ def test_default(self):
+ """Ensure a default value is returned when nth item not found"""
+ l = range(3)
+ self.assertEqual(mi.nth(l, 100, "zebra"), "zebra")
+
+ def test_negative_item_raises(self):
+ """Ensure asking for a negative item raises an exception"""
+ self.assertRaises(ValueError, lambda: mi.nth(range(10), -3))
+
+
+class AllEqualTests(TestCase):
+ """Tests for ``all_equal()``"""
+
+ def test_true(self):
+ """Everything is equal"""
+ self.assertTrue(mi.all_equal('aaaaaa'))
+ self.assertTrue(mi.all_equal([0, 0, 0, 0]))
+
+ def test_false(self):
+ """Not everything is equal"""
+ self.assertFalse(mi.all_equal('aaaaab'))
+ self.assertFalse(mi.all_equal([0, 0, 0, 1]))
+
+ def test_tricky(self):
+ """Not everything is identical, but everything is equal"""
+ items = [1, complex(1, 0), 1.0]
+ self.assertTrue(mi.all_equal(items))
+
+ def test_empty(self):
+ """Return True if the iterable is empty"""
+ self.assertTrue(mi.all_equal(''))
+ self.assertTrue(mi.all_equal([]))
+
+ def test_one(self):
+ """Return True if the iterable is singular"""
+ self.assertTrue(mi.all_equal('0'))
+ self.assertTrue(mi.all_equal([0]))
+
+
+class QuantifyTests(TestCase):
+ """Tests for ``quantify()``"""
+
+ def test_happy_path(self):
+ """Make sure True count is returned"""
+ q = [True, False, True]
+ self.assertEqual(mi.quantify(q), 2)
+
+ def test_custom_predicate(self):
+ """Ensure non-default predicates return as expected"""
+ q = range(10)
+ self.assertEqual(mi.quantify(q, lambda x: x % 2 == 0), 5)
+
+
+class PadnoneTests(TestCase):
+ def test_basic(self):
+ iterable = range(2)
+ for func in (mi.pad_none, mi.padnone):
+ with self.subTest(func=func):
+ p = func(iterable)
+ self.assertEqual(
+ [0, 1, None, None], [next(p) for _ in range(4)]
+ )
+
+
+class NcyclesTests(TestCase):
+ """Tests for ``nyclces()``"""
+
+ def test_happy_path(self):
+ """cycle a sequence three times"""
+ r = ["a", "b", "c"]
+ n = mi.ncycles(r, 3)
+ self.assertEqual(
+ ["a", "b", "c", "a", "b", "c", "a", "b", "c"], list(n)
+ )
+
+ def test_null_case(self):
+ """asking for 0 cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), 0)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+    def test_pathological_case(self):
+ """asking for negative cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), -10)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+
+class DotproductTests(TestCase):
+ """Tests for ``dotproduct()``'"""
+
+ def test_happy_path(self):
+ """simple dotproduct example"""
+ self.assertEqual(400, mi.dotproduct([10, 10], [20, 20]))
+
+
+class FlattenTests(TestCase):
+ """Tests for ``flatten()``"""
+
+ def test_basic_usage(self):
+ """ensure list of lists is flattened one level"""
+ f = [[0, 1, 2], [3, 4, 5]]
+ self.assertEqual(list(range(6)), list(mi.flatten(f)))
+
+ def test_single_level(self):
+ """ensure list of lists is flattened only one level"""
+ f = [[0, [1, 2]], [[3, 4], 5]]
+ self.assertEqual([0, [1, 2], [3, 4], 5], list(mi.flatten(f)))
+
+
+class RepeatfuncTests(TestCase):
+ """Tests for ``repeatfunc()``"""
+
+ def test_simple_repeat(self):
+ """test simple repeated functions"""
+ r = mi.repeatfunc(lambda: 5)
+ self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)])
+
+ def test_finite_repeat(self):
+ """ensure limited repeat when times is provided"""
+ r = mi.repeatfunc(lambda: 5, times=5)
+ self.assertEqual([5, 5, 5, 5, 5], list(r))
+
+ def test_added_arguments(self):
+ """ensure arguments are applied to the function"""
+ r = mi.repeatfunc(lambda x: x, 2, 3)
+ self.assertEqual([3, 3], list(r))
+
+ def test_null_times(self):
+ """repeat 0 should return an empty iterator"""
+ r = mi.repeatfunc(range, 0, 3)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class PairwiseTests(TestCase):
+ """Tests for ``pairwise()``"""
+
+ def test_base_case(self):
+ """ensure an iterable will return pairwise"""
+ p = mi.pairwise([1, 2, 3])
+ self.assertEqual([(1, 2), (2, 3)], list(p))
+
+ def test_short_case(self):
+ """ensure an empty iterator if there's not enough values to pair"""
+ p = mi.pairwise("a")
+ self.assertRaises(StopIteration, lambda: next(p))
+
+
+class GrouperTests(TestCase):
+ """Tests for ``grouper()``"""
+
+ def test_even(self):
+ """Test when group size divides evenly into the length of
+ the iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper('ABCDEF', 3)), [('A', 'B', 'C'), ('D', 'E', 'F')]
+ )
+
+ def test_odd(self):
+ """Test when group size does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper('ABCDE', 3)), [('A', 'B', 'C'), ('D', 'E', None)]
+ )
+
+ def test_fill_value(self):
+ """Test that the fill value is used to pad the final group"""
+ self.assertEqual(
+ list(mi.grouper('ABCDE', 3, 'x')),
+ [('A', 'B', 'C'), ('D', 'E', 'x')],
+ )
+
+ def test_legacy_order(self):
+ """Historically, grouper expected the n as the first parameter"""
+ with warnings.catch_warnings(record=True) as caught:
+ warnings.simplefilter('always')
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDEF')),
+ [('A', 'B', 'C'), ('D', 'E', 'F')],
+ )
+
+ (warning,) = caught
+ assert warning.category == DeprecationWarning
+
+
+class RoundrobinTests(TestCase):
+ """Tests for ``roundrobin()``"""
+
+ def test_even_groups(self):
+ """Ensure ordered output from evenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABC', [1, 2, 3], range(3))),
+ ['A', 1, 0, 'B', 2, 1, 'C', 3, 2],
+ )
+
+ def test_uneven_groups(self):
+ """Ensure ordered output from unevenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABCD', [1, 2], range(0))),
+ ['A', 1, 'B', 2, 'C', 'D'],
+ )
+
+
+class PartitionTests(TestCase):
+ """Tests for ``partition()``"""
+
+ def test_bool(self):
+ lesser, greater = mi.partition(lambda x: x > 5, range(10))
+ self.assertEqual(list(lesser), [0, 1, 2, 3, 4, 5])
+ self.assertEqual(list(greater), [6, 7, 8, 9])
+
+ def test_arbitrary(self):
+ divisibles, remainders = mi.partition(lambda x: x % 3, range(10))
+ self.assertEqual(list(divisibles), [0, 3, 6, 9])
+ self.assertEqual(list(remainders), [1, 2, 4, 5, 7, 8])
+
+ def test_pred_is_none(self):
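+ # partition() treats a predicate of None as bool(), so 0 lands in the
+ # false group and 1 and 2 in the true group.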
+ falses, trues = mi.partition(None, range(3))
+ self.assertEqual(list(falses), [0])
+ self.assertEqual(list(trues), [1, 2])
+
+
+class PowersetTests(TestCase):
+ """Tests for ``powerset()``"""
+
+ def test_combinatorics(self):
+ """Ensure a proper enumeration"""
+ p = mi.powerset([1, 2, 3])
+ self.assertEqual(
+ list(p), [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+ )
+
+
+class UniqueEverseenTests(TestCase):
+ """Tests for ``unique_everseen()``"""
+
+ def test_everseen(self):
+ """ensure duplicate elements are ignored"""
+ u = mi.unique_everseen('AAAABBBBCCDAABBB')
+ self.assertEqual(['A', 'B', 'C', 'D'], list(u))
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_everseen('aAbACCc', key=str.lower)
+ self.assertEqual(list('abC'), list(u))
+
+ def test_unhashable(self):
+ """ensure things work for unhashable items"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+ def test_unhashable_key(self):
+ """ensure things work for unhashable items with a custom key"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable, key=lambda x: x)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+
+class UniqueJustseenTests(TestCase):
+ """Tests for ``unique_justseen()``"""
+
+ def test_justseen(self):
+ """ensure only last item is remembered"""
+ u = mi.unique_justseen('AAAABBBCCDABB')
+ self.assertEqual(list('ABCDAB'), list(u))
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_justseen('AABCcAD', str.lower)
+ self.assertEqual(list('ABCAD'), list(u))
+
+
+class IterExceptTests(TestCase):
+ """Tests for ``iter_except()``"""
+
+ def test_exact_exception(self):
+ """ensure the exact specified exception is caught"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, IndexError)
+ self.assertEqual(list(i), [3, 2, 1])
+
+ def test_generic_exception(self):
+ """ensure the generic exception can be caught"""
+ l = [1, 2]
+ i = mi.iter_except(l.pop, Exception)
+ self.assertEqual(list(i), [2, 1])
+
+ def test_uncaught_exception_is_raised(self):
+ """ensure a non-specified exception is raised"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, KeyError)
+ self.assertRaises(IndexError, lambda: list(i))
+
+ def test_first(self):
+ """ensure first is run before the function"""
+ l = [1, 2, 3]
+ f = lambda: 25
+ i = mi.iter_except(l.pop, IndexError, f)
+ self.assertEqual(list(i), [25, 3, 2, 1])
+
+ def test_multiple(self):
+ """ensure can catch multiple exceptions"""
+
+ class Fiz(Exception):
+ pass
+
+ class Buzz(Exception):
+ pass
+
+ i = 0
+
+ def fizbuzz():
+ nonlocal i
+ i += 1
+ if i % 3 == 0:
+ raise Fiz
+ if i % 5 == 0:
+ raise Buzz
+ return i
+
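+ # The counter takes the values 1, 2, 3 (Fiz), 4, 5 (Buzz), 6 (Fiz),
+ # 7, 8, 9 (Fiz), 10 (Buzz); each exception ends one list() call below,
+ # which is why the expected groups shrink and grow as they do.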
+ expected = ([1, 2], [4], [], [7, 8], [])
+ for x in expected:
+ self.assertEqual(list(mi.iter_except(fizbuzz, (Fiz, Buzz))), x)
+
+
+class FirstTrueTests(TestCase):
+ """Tests for ``first_true()``"""
+
+ def test_something_true(self):
+ """Test with no keywords"""
+ self.assertEqual(mi.first_true(range(10)), 1)
+
+ def test_nothing_true(self):
+ """Test default return value."""
+ self.assertIsNone(mi.first_true([0, 0, 0]))
+
+ def test_default(self):
+ """Test with a default keyword"""
+ self.assertEqual(mi.first_true([0, 0, 0], default='!'), '!')
+
+ def test_pred(self):
+ """Test with a custom predicate"""
+ self.assertEqual(
+ mi.first_true([2, 4, 6], pred=lambda x: x % 3 == 0), 6
+ )
+
+
+class RandomProductTests(TestCase):
+ """Tests for ``random_product()``
+
+ Since random.choice() has different results with the same seed across
+ Python versions 2.x and 3.x, these tests use highly probable events to
+ create predictable outcomes across platforms.
+ """
+
+ def test_simple_lists(self):
+ """Ensure that one item is chosen from each list in each pair.
+ Also ensure that each item from each list eventually appears in
+ the chosen combinations.
+
+ The odds are roughly 1 in 7.1 * 10 ** 16 that some item from either
+ list will never be chosen after 100 samplings of one item from each
+ list. To be extra safe, a known random seed could be used as well.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ n, m = zip(*[mi.random_product(nums, lets) for _ in range(100)])
+ n, m = set(n), set(m)
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+ def test_list_with_repeat(self):
+ """ensure multiple items are chosen, and that they appear to be chosen
+ from one list then the next, in proper order.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ r = list(mi.random_product(nums, lets, repeat=100))
+ self.assertEqual(2 * 100, len(r))
+ n, m = set(r[::2]), set(r[1::2])
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+
+class RandomPermutationTests(TestCase):
+ """Tests for ``random_permutation()``"""
+
+ def test_full_permutation(self):
+ """ensure every item from the iterable is returned in a new ordering
+
+ 15 elements have roughly a 1 in 1.3 * 10 ** 12 chance of appearing in
+ sorted order, so a spurious failure of the check below is effectively
+ impossible.
+
+ """
+ i = range(15)
+ r = mi.random_permutation(i)
+ self.assertEqual(set(i), set(r))
+ if tuple(i) == r:
+ raise AssertionError("Values were not permuted")
+
+ def test_partial_permutation(self):
+ """ensure all returned items are from the iterable, that the returned
+ permutation is of the desired length, and that all items eventually
+ get returned.
+
+ Sampling 100 permutations of length 5 from a set of 15 leaves a
+ (2/3)^100 chance that an item will not be chosen. Multiplied by 15
+ items, there is a 1 in 2.6e16 chance that at least 1 item will not
+ show up in the resulting output; a known random seed would remove
+ even that small risk.
+
+ """
+ items = range(15)
+ item_set = set(items)
+ all_items = set()
+ for _ in range(100):
+ permutation = mi.random_permutation(items, 5)
+ self.assertEqual(len(permutation), 5)
+ permutation_set = set(permutation)
+ self.assertLessEqual(permutation_set, item_set)
+ all_items |= permutation_set
+ self.assertEqual(all_items, item_set)
+
+
+class RandomCombinationTests(TestCase):
+ """Tests for ``random_combination()``"""
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+ def test_no_replacement(self):
+ """ensure that elements are sampled without replacement"""
+ items = range(15)
+ for _ in range(50):
+ combination = mi.random_combination(items, len(items))
+ self.assertEqual(len(combination), len(set(combination)))
+ self.assertRaises(
+ ValueError, lambda: mi.random_combination(items, len(items) + 1)
+ )
+
+
+class RandomCombinationWithReplacementTests(TestCase):
+ """Tests for ``random_combination_with_replacement()``"""
+
+ def test_replacement(self):
+ """ensure that elements are sampled with replacement"""
+ items = range(5)
+ combo = mi.random_combination_with_replacement(items, len(items) * 2)
+ self.assertEqual(2 * len(items), len(combo))
+ if len(set(combo)) == len(combo):
+ raise AssertionError("Combination contained no duplicates")
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination_with_replacement(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+
+class NthCombinationTests(TestCase):
+ def test_basic(self):
+ iterable = 'abcdefg'
+ r = 4
+ for index, expected in enumerate(combinations(iterable, r)):
+ actual = mi.nth_combination(iterable, r, index)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.nth_combination(range(180), 4, 2000000)
+ expected = (2, 12, 35, 126)
+ self.assertEqual(actual, expected)
+
+ def test_invalid_r(self):
+ for r in (-1, 3):
+ with self.assertRaises(ValueError):
+ mi.nth_combination([], r, 0)
+
+ def test_invalid_index(self):
+ with self.assertRaises(IndexError):
+ mi.nth_combination('abcdefg', 3, -36)
+
+
+class NthPermutationTests(TestCase):
+ def test_r_less_than_n(self):
+ iterable = 'abcde'
+ r = 4
+ for index, expected in enumerate(permutations(iterable, r)):
+ actual = mi.nth_permutation(iterable, r, index)
+ self.assertEqual(actual, expected)
+
+ def test_r_equal_to_n(self):
+ iterable = 'abcde'
+ for index, expected in enumerate(permutations(iterable)):
+ actual = mi.nth_permutation(iterable, None, index)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ iterable = tuple(range(180))
+ r = 4
+ index = 1000000
+ actual = mi.nth_permutation(iterable, r, index)
+ expected = mi.nth(permutations(iterable, r), index)
+ self.assertEqual(actual, expected)
+
+ def test_null(self):
+ actual = mi.nth_permutation([], 0, 0)
+ expected = tuple()
+ self.assertEqual(actual, expected)
+
+ def test_negative_index(self):
+ iterable = 'abcde'
+ r = 4
+ n = factorial(len(iterable)) // factorial(len(iterable) - r)
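+ # Negative indices count back from the end of the sequence of
+ # permutations, so index - n selects the same permutation as index.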
+ for index, expected in enumerate(permutations(iterable, r)):
+ actual = mi.nth_permutation(iterable, r, index - n)
+ self.assertEqual(actual, expected)
+
+ def test_invalid_index(self):
+ iterable = 'abcde'
+ r = 4
+ n = factorial(len(iterable)) // factorial(len(iterable) - r)
+ for index in [-1 - n, n + 1]:
+ with self.assertRaises(IndexError):
+ mi.nth_permutation(iterable, r, index)
+
+ def test_invalid_r(self):
+ iterable = 'abcde'
+ r = 4
+ n = factorial(len(iterable)) // factorial(len(iterable) - r)
+ for r in [-1, n + 1]:
+ with self.assertRaises(ValueError):
+ mi.nth_permutation(iterable, r, 0)
+
+
+class PrependTests(TestCase):
+ def test_basic(self):
+ value = 'a'
+ iterator = iter('bcdefg')
+ actual = list(mi.prepend(value, iterator))
+ expected = list('abcdefg')
+ self.assertEqual(actual, expected)
+
+ def test_multiple(self):
+ value = 'ab'
+ iterator = iter('cdefg')
+ actual = tuple(mi.prepend(value, iterator))
+ expected = ('ab',) + tuple('cdefg')
+ self.assertEqual(actual, expected)
+
+
+class ConvolveTests(TestCase):
+ def test_moving_average(self):
+ signal = iter([10, 20, 30, 40, 50])
+ kernel = [0.5, 0.5]
+ actual = list(mi.convolve(signal, kernel))
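+ # convolve() zero-pads the signal at both ends, yielding
+ # len(signal) + len(kernel) - 1 values (a "full" convolution); hence
+ # the leading (10 + 0) / 2 and trailing (0 + 50) / 2 terms below.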
+ expected = [
+ (10 + 0) / 2,
+ (20 + 10) / 2,
+ (30 + 20) / 2,
+ (40 + 30) / 2,
+ (50 + 40) / 2,
+ (0 + 50) / 2,
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_derivative(self):
+ signal = iter([10, 20, 30, 40, 50])
+ kernel = [1, -1]
+ actual = list(mi.convolve(signal, kernel))
+ expected = [10 - 0, 20 - 10, 30 - 20, 40 - 30, 50 - 40, 0 - 50]
+ self.assertEqual(actual, expected)
+
+ def test_infinite_signal(self):
+ signal = count()
+ kernel = [1, -1]
+ actual = mi.take(5, mi.convolve(signal, kernel))
+ expected = [0, 1, 1, 1, 1]
+ self.assertEqual(actual, expected)
+
+
+class BeforeAndAfterTests(TestCase):
+ def test_empty(self):
+ before, after = mi.before_and_after(bool, [])
+ self.assertEqual(list(before), [])
+ self.assertEqual(list(after), [])
+
+ def test_never_true(self):
+ before, after = mi.before_and_after(bool, [0, False, None, ''])
+ self.assertEqual(list(before), [])
+ self.assertEqual(list(after), [0, False, None, ''])
+
+ def test_never_false(self):
+ before, after = mi.before_and_after(bool, [1, True, Ellipsis, ' '])
+ self.assertEqual(list(before), [1, True, Ellipsis, ' '])
+ self.assertEqual(list(after), [])
+
+ def test_some_true(self):
+ before, after = mi.before_and_after(bool, [1, True, 0, False])
+ self.assertEqual(list(before), [1, True])
+ self.assertEqual(list(after), [0, False])
+
+
+class TriplewiseTests(TestCase):
+ def test_basic(self):
+ for iterable, expected in [
+ ([0], []),
+ ([0, 1], []),
+ ([0, 1, 2], [(0, 1, 2)]),
+ ([0, 1, 2, 3], [(0, 1, 2), (1, 2, 3)]),
+ ([0, 1, 2, 3, 4], [(0, 1, 2), (1, 2, 3), (2, 3, 4)]),
+ ]:
+ with self.subTest(expected=expected):
+ actual = list(mi.triplewise(iterable))
+ self.assertEqual(actual, expected)
+
+
+class SlidingWindowTests(TestCase):
+ def test_basic(self):
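+ # Note the degenerate case below: a width-0 window over an empty
+ # iterable yields a single empty tuple.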
+ for iterable, n, expected in [
+ ([], 0, [()]),
+ ([], 1, []),
+ ([0], 1, [(0,)]),
+ ([0, 1], 1, [(0,), (1,)]),
+ ([0, 1, 2], 2, [(0, 1), (1, 2)]),
+ ([0, 1, 2], 3, [(0, 1, 2)]),
+ ([0, 1, 2], 4, []),
+ ([0, 1, 2, 3], 4, [(0, 1, 2, 3)]),
+ ([0, 1, 2, 3, 4], 4, [(0, 1, 2, 3), (1, 2, 3, 4)]),
+ ]:
+ with self.subTest(expected=expected):
+ actual = list(mi.sliding_window(iterable, n))
+ self.assertEqual(actual, expected)
diff --git a/contrib/python/more-itertools/py3/tests/ya.make b/contrib/python/more-itertools/py3/tests/ya.make
new file mode 100644
index 0000000000..8d3caffc22
--- /dev/null
+++ b/contrib/python/more-itertools/py3/tests/ya.make
@@ -0,0 +1,16 @@
+PY3TEST()
+
+OWNER(g:python-contrib)
+
+PEERDIR(
+ contrib/python/more-itertools
+)
+
+TEST_SRCS(
+ test_more.py
+ test_recipes.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/more-itertools/py3/ya.make b/contrib/python/more-itertools/py3/ya.make
new file mode 100644
index 0000000000..3573378d83
--- /dev/null
+++ b/contrib/python/more-itertools/py3/ya.make
@@ -0,0 +1,34 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+OWNER(g:python-contrib)
+
+VERSION(8.12.0)
+
+LICENSE(MIT)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ more_itertools/__init__.py
+ more_itertools/__init__.pyi
+ more_itertools/more.py
+ more_itertools/more.pyi
+ more_itertools/recipes.py
+ more_itertools/recipes.pyi
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/more-itertools/py3/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ more_itertools/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/more-itertools/ya.make b/contrib/python/more-itertools/ya.make
new file mode 100644
index 0000000000..2caa580ba5
--- /dev/null
+++ b/contrib/python/more-itertools/ya.make
@@ -0,0 +1,20 @@
+PY23_LIBRARY()
+
+LICENSE(Service-Py23-Proxy)
+
+OWNER(g:python-contrib)
+
+IF (PYTHON2)
+ PEERDIR(contrib/python/more-itertools/py2)
+ELSE()
+ PEERDIR(contrib/python/more-itertools/py3)
+ENDIF()
+
+NO_LINT()
+
+END()
+
+RECURSE(
+ py2
+ py3
+)