aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python/pyrsistent/py3
diff options
context:
space:
mode:
authorshmel1k <shmel1k@ydb.tech>2023-11-26 18:16:14 +0300
committershmel1k <shmel1k@ydb.tech>2023-11-26 18:43:30 +0300
commitb8cf9e88f4c5c64d9406af533d8948deb050d695 (patch)
tree218eb61fb3c3b96ec08b4d8cdfef383104a87d63 /contrib/python/pyrsistent/py3
parent523f645a83a0ec97a0332dbc3863bb354c92a328 (diff)
downloadydb-b8cf9e88f4c5c64d9406af533d8948deb050d695.tar.gz
add kikimr_configure
Diffstat (limited to 'contrib/python/pyrsistent/py3')
-rw-r--r--contrib/python/pyrsistent/py3/.dist-info/METADATA789
-rw-r--r--contrib/python/pyrsistent/py3/.dist-info/top_level.txt3
-rw-r--r--contrib/python/pyrsistent/py3/LICENSE.mit22
-rw-r--r--contrib/python/pyrsistent/py3/README.rst767
-rw-r--r--contrib/python/pyrsistent/py3/_pyrsistent_version.py1
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/__init__.py47
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_checked_types.py547
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_field_common.py332
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_helpers.py101
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_immutable.py97
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_pbag.py270
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_pclass.py262
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_pdeque.py379
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_plist.py316
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_pmap.py583
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_precord.py167
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_pset.py230
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_pvector.py715
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_toolz.py83
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/_transformations.py143
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/py.typed0
-rw-r--r--contrib/python/pyrsistent/py3/pyrsistent/typing.py82
-rw-r--r--contrib/python/pyrsistent/py3/tests/bag_test.py150
-rw-r--r--contrib/python/pyrsistent/py3/tests/checked_map_test.py152
-rw-r--r--contrib/python/pyrsistent/py3/tests/checked_set_test.py85
-rw-r--r--contrib/python/pyrsistent/py3/tests/checked_vector_test.py213
-rw-r--r--contrib/python/pyrsistent/py3/tests/class_test.py474
-rw-r--r--contrib/python/pyrsistent/py3/tests/deque_test.py293
-rw-r--r--contrib/python/pyrsistent/py3/tests/field_test.py23
-rw-r--r--contrib/python/pyrsistent/py3/tests/freeze_test.py174
-rw-r--r--contrib/python/pyrsistent/py3/tests/hypothesis_vector_test.py304
-rw-r--r--contrib/python/pyrsistent/py3/tests/immutable_object_test.py67
-rw-r--r--contrib/python/pyrsistent/py3/tests/list_test.py209
-rw-r--r--contrib/python/pyrsistent/py3/tests/map_test.py551
-rw-r--r--contrib/python/pyrsistent/py3/tests/memory_profiling.py48
-rw-r--r--contrib/python/pyrsistent/py3/tests/record_test.py878
-rw-r--r--contrib/python/pyrsistent/py3/tests/regression_test.py30
-rw-r--r--contrib/python/pyrsistent/py3/tests/set_test.py181
-rw-r--r--contrib/python/pyrsistent/py3/tests/toolz_test.py6
-rw-r--r--contrib/python/pyrsistent/py3/tests/transform_test.py122
-rw-r--r--contrib/python/pyrsistent/py3/tests/vector_test.py934
-rw-r--r--contrib/python/pyrsistent/py3/tests/ya.make27
-rw-r--r--contrib/python/pyrsistent/py3/ya.make47
43 files changed, 10904 insertions, 0 deletions
diff --git a/contrib/python/pyrsistent/py3/.dist-info/METADATA b/contrib/python/pyrsistent/py3/.dist-info/METADATA
new file mode 100644
index 0000000000..1ce3d375fd
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/.dist-info/METADATA
@@ -0,0 +1,789 @@
+Metadata-Version: 2.1
+Name: pyrsistent
+Version: 0.20.0
+Summary: Persistent/Functional/Immutable data structures
+Home-page: https://github.com/tobgu/pyrsistent/
+Author: Tobias Gustafsson
+Author-email: tobias.l.gustafsson@gmail.com
+License: MIT
+Project-URL: Changelog, https://pyrsistent.readthedocs.io/en/latest/changes.html
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE.mit
+
+Pyrsistent
+==========
+.. image:: https://github.com/tobgu/pyrsistent/actions/workflows/tests.yaml/badge.svg
+ :target: https://github.com/tobgu/pyrsistent/actions/workflows/tests.yaml
+
+
+.. _Pyrthon: https://www.github.com/tobgu/pyrthon
+.. _Pyrsistent_extras: https://github.com/mingmingrr/pyrsistent-extras
+
+Pyrsistent is a number of persistent collections (by some referred to as functional data structures). Persistent in
+the sense that they are immutable.
+
+All methods on a data structure that would normally mutate it instead return a new copy of the structure containing the
+requested updates. The original structure is left untouched.
+
+This will simplify the reasoning about what a program does since no hidden side effects ever can take place to these
+data structures. You can rest assured that the object you hold a reference to will remain the same throughout its
+lifetime and need not worry that somewhere five stack levels below you in the darkest corner of your application
+someone has decided to remove that element that you expected to be there.
+
+Pyrsistent is influenced by persistent data structures such as those found in the standard library of Clojure. The
+data structures are designed to share common elements through path copying.
+It aims at taking these concepts and making them as pythonic as possible so that they can be easily integrated into any python
+program without hassle.
+
+If you want to use literal syntax to define them in your code rather
+than function calls check out Pyrthon_. Be aware, that one is experimental, unmaintained and alpha software.
+
+If you cannot find the persistent data structure you're looking for here you may want to take a look at
+Pyrsistent_extras_ which is maintained by @mingmingrr. If you still don't find what you're looking for please
+open an issue for discussion. If we agree that functionality is missing you may want to go ahead and create
+a Pull Request implementing the missing functionality.
+
+Examples
+--------
+.. _Sequence: collections_
+.. _Hashable: collections_
+.. _Mapping: collections_
+.. _Mappings: collections_
+.. _Set: collections_
+.. _collections: https://docs.python.org/3/library/collections.abc.html
+.. _documentation: http://pyrsistent.readthedocs.org/
+
+The collection types and key features currently implemented are:
+
+* PVector_, similar to a python list
+* PMap_, similar to dict
+* PSet_, similar to set
+* PRecord_, a PMap on steroids with fixed fields, optional type and invariant checking and much more
+* PClass_, a Python class with fixed fields, optional type and invariant checking and much more
+* `Checked collections`_, PVector, PMap and PSet with optional type and invariance checks and more
+* PBag, similar to collections.Counter
+* PList, a classic singly linked list
+* PDeque, similar to collections.deque
+* Immutable object type (immutable) built on the named tuple
+* freeze_ and thaw_ functions to convert between pythons standard collections and pyrsistent collections.
+* Flexible transformations_ of arbitrarily complex structures built from PMaps and PVectors.
+
+Below are examples of common usage patterns for some of the structures and features. More information and
+full documentation for all data structures is available in the documentation_.
+
+.. _PVector:
+
+PVector
+~~~~~~~
+With full support for the Sequence_ protocol PVector is meant as a drop in replacement to the built in list from a readers
+point of view. Write operations of course differ since no in place mutation is done but naming should be in line
+with corresponding operations on the built in list.
+
+Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+Appends are amortized O(1). Random access and insert is log32(n) where n is the size of the vector.
+
+.. code:: python
+
+ >>> from pyrsistent import v, pvector
+
+ # No mutation of vectors once created, instead they
+ # are "evolved" leaving the original untouched
+ >>> v1 = v(1, 2, 3)
+ >>> v2 = v1.append(4)
+ >>> v3 = v2.set(1, 5)
+ >>> v1
+ pvector([1, 2, 3])
+ >>> v2
+ pvector([1, 2, 3, 4])
+ >>> v3
+ pvector([1, 5, 3, 4])
+
+ # Random access and slicing
+ >>> v3[1]
+ 5
+ >>> v3[1:3]
+ pvector([5, 3])
+
+ # Iteration
+ >>> list(x + 1 for x in v3)
+ [2, 6, 4, 5]
+ >>> pvector(2 * x for x in range(3))
+ pvector([0, 2, 4])
+
+.. _PMap:
+
+PMap
+~~~~
+With full support for the Mapping_ protocol PMap is meant as a drop in replacement to the built in dict from a readers point
+of view. Support for the Hashable_ protocol also means that it can be used as key in other Mappings_.
+
+Random access and insert is log32(n) where n is the size of the map.
+
+.. code:: python
+
+ >>> from pyrsistent import m, pmap, v
+
+ # No mutation of maps once created, instead they are
+ # "evolved" leaving the original untouched
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.set('a', 5)
+ >>> m1
+ pmap({'a': 1, 'b': 2})
+ >>> m2
+ pmap({'a': 1, 'c': 3, 'b': 2})
+ >>> m3
+ pmap({'a': 5, 'c': 3, 'b': 2})
+ >>> m3['a']
+ 5
+
+ # Evolution of nested persistent structures
+ >>> m4 = m(a=5, b=6, c=v(1, 2))
+ >>> m4.transform(('c', 1), 17)
+ pmap({'a': 5, 'c': pvector([1, 17]), 'b': 6})
+ >>> m5 = m(a=1, b=2)
+
+ # Evolve by merging with other mappings
+ >>> m5.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'a': 17, 'c': 3, 'b': 2, 'd': 35})
+ >>> pmap({'x': 1, 'y': 2}) + pmap({'y': 3, 'z': 4})
+ pmap({'y': 3, 'x': 1, 'z': 4})
+
+ # Dict-like methods to convert to list and iterate
+ >>> m3.items()
+ pvector([('a', 5), ('c', 3), ('b', 2)])
+ >>> list(m3)
+ ['a', 'c', 'b']
+
+.. _PSet:
+
+PSet
+~~~~
+With full support for the Set_ protocol PSet is meant as a drop in replacement to the built in set from a readers point
+of view. Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+Random access and insert is log32(n) where n is the size of the set.
+
+.. code:: python
+
+ >>> from pyrsistent import s
+
+ # No mutation of sets once created, you know the story...
+ >>> s1 = s(1, 2, 3, 2)
+ >>> s2 = s1.add(4)
+ >>> s3 = s1.remove(1)
+ >>> s1
+ pset([1, 2, 3])
+ >>> s2
+ pset([1, 2, 3, 4])
+ >>> s3
+ pset([2, 3])
+
+ # Full support for set operations
+ >>> s1 | s(3, 4, 5)
+ pset([1, 2, 3, 4, 5])
+ >>> s1 & s(3, 4, 5)
+ pset([3])
+ >>> s1 < s2
+ True
+ >>> s1 < s(3, 4, 5)
+ False
+
+.. _PRecord:
+
+PRecord
+~~~~~~~
+A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
+from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
+access using subscript notation.
+
+.. code:: python
+
+ >>> from pyrsistent import PRecord, field
+ >>> class ARecord(PRecord):
+ ... x = field()
+ ...
+ >>> r = ARecord(x=3)
+ >>> r
+ ARecord(x=3)
+ >>> r.x
+ 3
+ >>> r.set(x=2)
+ ARecord(x=2)
+ >>> r.set(y=2)
+ Traceback (most recent call last):
+ AttributeError: 'y' is not among the specified fields for ARecord
+
+Type information
+****************
+It is possible to add type information to the record to enforce type checks. Multiple allowed types can be specified
+by providing an iterable of types.
+
+.. code:: python
+
+ >>> class BRecord(PRecord):
+ ... x = field(type=int)
+ ... y = field(type=(int, type(None)))
+ ...
+ >>> BRecord(x=3, y=None)
+ BRecord(y=None, x=3)
+ >>> BRecord(x=3.0)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field BRecord.x, was float
+
+
+Custom types (classes) that are iterable should be wrapped in a tuple to prevent their
+members being added to the set of valid types. Although Enums in particular are now
+supported without wrapping, see #83 for more information.
+
+Mandatory fields
+****************
+Fields are not mandatory by default but can be specified as such. If fields are missing an
+*InvariantException* will be thrown which contains information about the missing fields.
+
+.. code:: python
+
+ >>> from pyrsistent import InvariantException
+ >>> class CRecord(PRecord):
+ ... x = field(mandatory=True)
+ ...
+ >>> r = CRecord(x=3)
+ >>> try:
+ ... r.discard('x')
+ ... except InvariantException as e:
+ ... print(e.missing_fields)
+ ...
+ ('CRecord.x',)
+
+Invariants
+**********
+It is possible to add invariants that must hold when evolving the record. Invariants can be
+specified on both field and record level. If invariants fail an *InvariantException* will be
+thrown which contains information about the failing invariants. An invariant function should
+return a tuple consisting of a boolean that tells if the invariant holds or not and an object
+describing the invariant. This object can later be used to identify which invariant that failed.
+
+The global invariant function is only executed if all field invariants hold.
+
+Global invariants are inherited to subclasses.
+
+.. code:: python
+
+ >>> class RestrictedVector(PRecord):
+ ... __invariant__ = lambda r: (r.y >= r.x, 'x larger than y')
+ ... x = field(invariant=lambda x: (x > 0, 'x negative'))
+ ... y = field(invariant=lambda y: (y > 0, 'y negative'))
+ ...
+ >>> r = RestrictedVector(y=3, x=2)
+ >>> try:
+ ... r.set(x=-1, y=-2)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('y negative', 'x negative')
+ >>> try:
+ ... r.set(x=2, y=1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('x larger than y',)
+
+Invariants may also contain multiple assertions. For those cases the invariant function should
+return a tuple of invariant tuples as described above. This structure is reflected in the
+invariant_errors attribute of the exception which will contain tuples with data from all failed
+invariants. Eg:
+
+.. code:: python
+
+ >>> class EvenX(PRecord):
+ ... x = field(invariant=lambda x: ((x > 0, 'x negative'), (x % 2 == 0, 'x odd')))
+ ...
+ >>> try:
+ ... EvenX(x=-1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ (('x negative', 'x odd'),)
+
+
+Factories
+*********
+It's possible to specify factory functions for fields. The factory function receives whatever
+is supplied as field value and the actual value returned by the factory is assigned to the field
+given that any type and invariant checks hold.
+PRecords have a default factory specified as a static function on the class, create(). It takes
+a *Mapping* as argument and returns an instance of the specific record.
+If a record has fields of type PRecord the create() method of that record will
+be called to create the "sub record" if no factory has explicitly been specified to override
+this behaviour.
+
+.. code:: python
+
+ >>> class DRecord(PRecord):
+ ... x = field(factory=int)
+ ...
+ >>> class ERecord(PRecord):
+ ... d = field(type=DRecord)
+ ...
+ >>> ERecord.create({'d': {'x': '1'}})
+ ERecord(d=DRecord(x=1))
+
+Collection fields
+*****************
+It is also possible to have fields with ``pyrsistent`` collections.
+
+.. code:: python
+
+ >>> from pyrsistent import pset_field, pmap_field, pvector_field
+ >>> class MultiRecord(PRecord):
+ ... set_of_ints = pset_field(int)
+ ... map_int_to_str = pmap_field(int, str)
+ ... vector_of_strs = pvector_field(str)
+ ...
+
+Serialization
+*************
+PRecords support serialization back to dicts. Default serialization will take keys and values
+"as is" and output them into a dict. It is possible to specify custom serialization functions
+to take care of fields that require special treatment.
+
+.. code:: python
+
+ >>> from datetime import date
+ >>> class Person(PRecord):
+ ... name = field(type=unicode)
+ ... birth_date = field(type=date,
+ ... serializer=lambda format, d: d.strftime(format['date']))
+ ...
+ >>> john = Person(name=u'John', birth_date=date(1985, 10, 21))
+ >>> john.serialize({'date': '%Y-%m-%d'})
+ {'birth_date': '1985-10-21', 'name': u'John'}
+
+
+.. _instar: https://github.com/boxed/instar/
+
+.. _PClass:
+
+PClass
+~~~~~~
+A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
+from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
+is not a PMap and hence not a collection but rather a plain Python object.
+
+.. code:: python
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=3)
+ >>> a
+ AClass(x=3)
+ >>> a.x
+ 3
+
+
+Checked collections
+~~~~~~~~~~~~~~~~~~~
+Checked collections currently come in three flavors: CheckedPVector, CheckedPMap and CheckedPSet.
+
+.. code:: python
+
+ >>> from pyrsistent import CheckedPVector, CheckedPMap, CheckedPSet, thaw
+ >>> class Positives(CheckedPSet):
+ ... __type__ = (long, int)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> class Lottery(PRecord):
+ ... name = field(type=str)
+ ... numbers = field(type=Positives, invariant=lambda p: (len(p) > 0, 'No numbers'))
+ ...
+ >>> class Lotteries(CheckedPVector):
+ ... __type__ = Lottery
+ ...
+ >>> class LotteriesByDate(CheckedPMap):
+ ... __key_type__ = date
+ ... __value_type__ = Lotteries
+ ...
+ >>> lotteries = LotteriesByDate.create({date(2015, 2, 15): [{'name': 'SuperLotto', 'numbers': {1, 2, 3}},
+ ... {'name': 'MegaLotto', 'numbers': {4, 5, 6}}],
+ ... date(2015, 2, 16): [{'name': 'SuperLotto', 'numbers': {3, 2, 1}},
+ ... {'name': 'MegaLotto', 'numbers': {6, 5, 4}}]})
+ >>> lotteries
+ LotteriesByDate({datetime.date(2015, 2, 15): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')]), datetime.date(2015, 2, 16): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])})
+
+ # The checked versions support all operations that the corresponding
+ # unchecked types do
+ >>> lottery_0215 = lotteries[date(2015, 2, 15)]
+ >>> lottery_0215.transform([0, 'name'], 'SuperDuperLotto')
+ Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperDuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])
+
+ # But also makes asserts that types and invariants hold
+ >>> lottery_0215.transform([0, 'name'], 999)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field Lottery.name, was int
+
+ >>> lottery_0215.transform([0, 'numbers'], set())
+ Traceback (most recent call last):
+ InvariantException: Field invariant failed
+
+ # They can be converted back to python built ins with either thaw()
+ # or serialize() (which provides possibilities to customize serialization)
+ >>> thaw(lottery_0215)
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+ >>> lottery_0215.serialize()
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+
+.. _transformations:
+
+Transformations
+~~~~~~~~~~~~~~~
+Transformations are inspired by the cool library instar_ for Clojure. They let you evolve PMaps and PVectors
+with arbitrarily deep/complex nesting using simple syntax and flexible matching syntax.
+
+The first argument to transformation is the path that points out the value to transform. The
+second is the transformation to perform. If the transformation is callable it will be applied
+to the value(s) matching the path. The path may also contain callables. In that case they are
+treated as matchers. If the matcher returns True for a specific key it is considered for transformation.
+
+.. code:: python
+
+ # Basic examples
+ >>> from pyrsistent import inc, freeze, thaw, rex, ny, discard
+ >>> v1 = freeze([1, 2, 3, 4, 5])
+ >>> v1.transform([2], inc)
+ pvector([1, 2, 4, 4, 5])
+ >>> v1.transform([lambda ix: 0 < ix < 4], 8)
+ pvector([1, 8, 8, 8, 5])
+ >>> v1.transform([lambda ix, v: ix == 0 or v == 5], 0)
+ pvector([0, 2, 3, 4, 0])
+
+ # The (a)ny matcher can be used to match anything
+ >>> v1.transform([ny], 8)
+ pvector([8, 8, 8, 8, 8])
+
+ # Regular expressions can be used for matching
+ >>> scores = freeze({'John': 12, 'Joseph': 34, 'Sara': 23})
+ >>> scores.transform([rex('^Jo')], 0)
+ pmap({'Joseph': 0, 'Sara': 23, 'John': 0})
+
+ # Transformations can be done on arbitrarily deep structures
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ # When nothing has been transformed the original data structure is kept
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+
+ # There is a special transformation that can be used to discard elements. Also
+ # multiple transformations can be applied in one call
+ >>> thaw(news_paper.transform(['weather'], discard, ['articles', ny, 'content'], discard))
+ {'articles': [{'author': 'Sara'}, {'author': 'Steve'}]}
+
+Evolvers
+~~~~~~~~
+PVector, PMap and PSet all have support for a concept dubbed *evolvers*. An evolver acts like a mutable
+view of the underlying persistent data structure with "transaction like" semantics. No updates of the original
+data structure is ever performed, it is still fully immutable.
+
+The evolvers have a very limited API by design to discourage excessive, and inappropriate, usage as that would
+take us down the mutable road. In principle only basic mutation and element access functions are supported.
+Check out the documentation_ of each data structure for specific examples.
+
+Examples of when you may want to use an evolver instead of working directly with the data structure include:
+
+* Multiple updates are done to the same data structure and the intermediate results are of no
+  interest. In this case using an evolver may be more efficient and easier to work with.
+* You need to pass a vector into a legacy function or a function that you have no control
+ over which performs in place mutations. In this case pass an evolver instance
+ instead and then create a new pvector from the evolver once the function returns.
+
+.. code:: python
+
+ >>> from pyrsistent import v
+
+ # In place mutation as when working with the built in counterpart
+ >>> v1 = v(1, 2, 3)
+ >>> e = v1.evolver()
+ >>> e[1] = 22
+ >>> e = e.append(4)
+ >>> e = e.extend([5, 6])
+ >>> e[5] += 1
+ >>> len(e)
+ 6
+
+ # The evolver is considered *dirty* when it contains changes compared to the underlying vector
+ >>> e.is_dirty()
+ True
+
+ # But the underlying pvector still remains untouched
+ >>> v1
+ pvector([1, 2, 3])
+
+ # Once satisfied with the updates you can produce a new pvector containing the updates.
+ # The new pvector will share data with the original pvector in the same way that would have
+ # been done if only using operations on the pvector.
+ >>> v2 = e.persistent()
+ >>> v2
+ pvector([1, 22, 3, 4, 5, 7])
+
+ # The evolver is now no longer considered *dirty* as it contains no differences compared to the
+ # pvector just produced.
+ >>> e.is_dirty()
+ False
+
+ # You may continue to work with the same evolver without affecting the content of v2
+ >>> e[0] = 11
+
+ # Or create a new evolver from v2. The two evolvers can be updated independently but will both
+ # share data with v2 where possible.
+ >>> e2 = v2.evolver()
+ >>> e2[0] = 1111
+ >>> e.persistent()
+ pvector([11, 22, 3, 4, 5, 7])
+ >>> e2.persistent()
+ pvector([1111, 22, 3, 4, 5, 7])
+
+.. _freeze:
+.. _thaw:
+
+freeze and thaw
+~~~~~~~~~~~~~~~
+These functions are great when your cozy immutable world has to interact with the evil mutable world outside.
+
+.. code:: python
+
+ >>> from pyrsistent import freeze, thaw, v, m
+ >>> freeze([1, {'a': 3}])
+ pvector([1, pmap({'a': 3})])
+ >>> thaw(v(1, m(a=3)))
+ [1, {'a': 3}]
+
+By default, freeze will also recursively convert values inside PVectors and PMaps. This behaviour can be changed by providing freeze with the flag strict=False.
+
+.. code:: python
+
+ >>> from pyrsistent import freeze, v, m
+ >>> freeze(v(1, v(2, [3])))
+ pvector([1, pvector([2, pvector([3])])])
+ >>> freeze(v(1, v(2, [3])), strict=False)
+ pvector([1, pvector([2, [3]])])
+ >>> freeze(m(a=m(b={'c': 1})))
+ pmap({'a': pmap({'b': pmap({'c': 1})})})
+ >>> freeze(m(a=m(b={'c': 1})), strict=False)
+ pmap({'a': pmap({'b': {'c': 1}})})
+
+In this regard, thaw operates as the inverse of freeze so will thaw values inside native data structures unless passed the strict=False flag.
+
+
+Compatibility
+-------------
+
+Pyrsistent is developed and tested on Python 3.8+ and PyPy3.
+
+Performance
+-----------
+
+Pyrsistent is developed with performance in mind. Still, while some operations are nearly on par with their built in,
+mutable, counterparts in terms of speed, other operations are slower. In the cases where attempts at
+optimizations have been done, speed has generally been valued over space.
+
+Pyrsistent comes with two API compatible flavors of PVector (on which PMap and PSet are based), one pure Python
+implementation and one implemented as a C extension. The latter generally being 2 - 20 times faster than the former.
+The C extension will be used automatically when possible.
+
+The pure python implementation is fully PyPy compatible. Running it under PyPy speeds operations up considerably if
+the structures are used heavily (if JITed), for some cases the performance is almost on par with the built in counterparts.
+
+Type hints
+----------
+
+PEP 561 style type hints for use with mypy and various editors are available for most types and functions in pyrsistent.
+
+Type classes for annotating your own code with pyrsistent types are also available under pyrsistent.typing.
+
+Installation
+------------
+
+pip install pyrsistent
+
+Documentation
+-------------
+
+Available at http://pyrsistent.readthedocs.org/
+
+Brief presentation available at http://slides.com/tobiasgustafsson/immutability-and-python/
+
+Contributors
+------------
+
+Tobias Gustafsson https://github.com/tobgu
+
+Christopher Armstrong https://github.com/radix
+
+Anders Hovmöller https://github.com/boxed
+
+Itamar Turner-Trauring https://github.com/itamarst
+
+Jonathan Lange https://github.com/jml
+
+Richard Futrell https://github.com/Futrell
+
+Jakob Hollenstein https://github.com/jkbjh
+
+David Honour https://github.com/foolswood
+
+David R. MacIver https://github.com/DRMacIver
+
+Marcus Ewert https://github.com/sarum90
+
+Jean-Paul Calderone https://github.com/exarkun
+
+Douglas Treadwell https://github.com/douglas-treadwell
+
+Travis Parker https://github.com/teepark
+
+Julian Berman https://github.com/Julian
+
+Dennis Tomas https://github.com/dtomas
+
+Neil Vyas https://github.com/neilvyas
+
+doozr https://github.com/doozr
+
+Kamil Galuszka https://github.com/galuszkak
+
+Tsuyoshi Hombashi https://github.com/thombashi
+
+nattofriends https://github.com/nattofriends
+
+agberk https://github.com/agberk
+
+Waleed Khan https://github.com/arxanas
+
+Jean-Louis Fuchs https://github.com/ganwell
+
+Carlos Corbacho https://github.com/ccorbacho
+
+Felix Yan https://github.com/felixonmars
+
+benrg https://github.com/benrg
+
+Jere Lahelma https://github.com/je-l
+
+Max Taggart https://github.com/MaxTaggart
+
+Vincent Philippon https://github.com/vphilippon
+
+Semen Zhydenko https://github.com/ss18
+
+Till Varoquaux https://github.com/till-varoquaux
+
+Michal Kowalik https://github.com/michalvi
+
+ossdev07 https://github.com/ossdev07
+
+Kerry Olesen https://github.com/qhesz
+
+johnthagen https://github.com/johnthagen
+
+Bastien Vallet https://github.com/djailla
+
+Ram Rachum https://github.com/cool-RR
+
+Vincent Philippon https://github.com/vphilippon
+
+Andrey Bienkowski https://github.com/hexagonrecursion
+
+Ethan McCue https://github.com/bowbahdoe
+
+Jason R. Coombs https://github.com/jaraco
+
+Nathan https://github.com/ndowens
+
+Geert Barentsen https://github.com/barentsen
+
+phil-arh https://github.com/phil-arh
+
+Tamás Nepusz https://github.com/ntamas
+
+Hugo van Kemenade https://github.com/hugovk
+
+Ben Beasley https://github.com/musicinmybrain
+
+Noah C. Benson https://github.com/noahbenson
+
+dscrofts https://github.com/dscrofts
+
+Andy Reagan https://github.com/andyreagan
+
+Aaron Durant https://github.com/Aaron-Durant
+
+Joshua Munn https://github.com/jams2
+
+Lukas https://github.com/lukasK9999
+
+Arshad https://github.com/arshad-ml
+
+Contributing
+------------
+
+Want to contribute? That's great! If you experience problems please log them on GitHub. If you want to contribute code,
+please fork the repository and submit a pull request.
+
+Run tests
+~~~~~~~~~
+.. _tox: https://tox.readthedocs.io/en/latest/
+
+Tests can be executed using tox_.
+
+Install tox: ``pip install tox``
+
+Run test for Python 3.8: ``tox -e py38``
+
+Release
+~~~~~~~
+* `pip install -r requirements.txt`
+* Update CHANGES.txt
+* Update README.rst with any new contributors and potential info needed.
+* Update _pyrsistent_version.py
+* Commit and tag with new version: `git add -u . && git commit -m 'Prepare version vX.Y.Z' && git tag -a vX.Y.Z -m 'vX.Y.Z'`
+* Push commit and tags: `git push --follow-tags`
+* Build new release using Github actions
+
+Project status
+--------------
+Pyrsistent can be considered stable and mature (who knows, there may even be a 1.0 some day :-)). The project is
+maintained, bugs fixed, PRs reviewed and merged and new releases made. I currently do not have time for development
+of new features or functionality which I don't have use for myself. I'm more than happy to take PRs for new
+functionality though!
+
+There are a bunch of issues marked with ``enhancement`` and ``help wanted`` that contain requests for new functionality
+that would be nice to include. The level of difficulty and extent of the issues varies, please reach out to me if you're
+interested in working on any of them.
+
+If you feel that you have a grand master plan for where you would like Pyrsistent to go and have the time to put into
+it please don't hesitate to discuss this with me and submit PRs for it. If all goes well I'd be more than happy to add
+additional maintainers to the project!
diff --git a/contrib/python/pyrsistent/py3/.dist-info/top_level.txt b/contrib/python/pyrsistent/py3/.dist-info/top_level.txt
new file mode 100644
index 0000000000..f2460728a9
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+_pyrsistent_version
+pvectorc
+pyrsistent
diff --git a/contrib/python/pyrsistent/py3/LICENSE.mit b/contrib/python/pyrsistent/py3/LICENSE.mit
new file mode 100644
index 0000000000..8a32be2449
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/LICENSE.mit
@@ -0,0 +1,22 @@
+Copyright (c) 2023 Tobias Gustafsson
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/contrib/python/pyrsistent/py3/README.rst b/contrib/python/pyrsistent/py3/README.rst
new file mode 100644
index 0000000000..64bb5854ca
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/README.rst
@@ -0,0 +1,767 @@
+Pyrsistent
+==========
+.. image:: https://github.com/tobgu/pyrsistent/actions/workflows/tests.yaml/badge.svg
+ :target: https://github.com/tobgu/pyrsistent/actions/workflows/tests.yaml
+
+
+.. _Pyrthon: https://www.github.com/tobgu/pyrthon
+.. _Pyrsistent_extras: https://github.com/mingmingrr/pyrsistent-extras
+
+Pyrsistent is a number of persistent collections (by some referred to as functional data structures). Persistent in
+the sense that they are immutable.
+
+All methods on a data structure that would normally mutate it instead return a new copy of the structure containing the
+requested updates. The original structure is left untouched.
+
+This will simplify the reasoning about what a program does since no hidden side effects ever can take place to these
+data structures. You can rest assured that the object you hold a reference to will remain the same throughout its
+lifetime and need not worry that somewhere five stack levels below you in the darkest corner of your application
+someone has decided to remove that element that you expected to be there.
+
+Pyrsistent is influenced by persistent data structures such as those found in the standard library of Clojure. The
+data structures are designed to share common elements through path copying.
+It aims at taking these concepts and making them as pythonic as possible so that they can be easily integrated into any Python
+program without hassle.
+
+If you want to use literal syntax to define them in your code rather
+than function calls check out Pyrthon_. Be aware, that one is experimental, unmaintained and alpha software.
+
+If you cannot find the persistent data structure you're looking for here you may want to take a look at
+Pyrsistent_extras_ which is maintained by @mingmingrr. If you still don't find what you're looking for please
+open an issue for discussion. If we agree that functionality is missing you may want to go ahead and create
+a Pull Request implementing the missing functionality.
+
+Examples
+--------
+.. _Sequence: collections_
+.. _Hashable: collections_
+.. _Mapping: collections_
+.. _Mappings: collections_
+.. _Set: collections_
+.. _collections: https://docs.python.org/3/library/collections.abc.html
+.. _documentation: http://pyrsistent.readthedocs.org/
+
+The collection types and key features currently implemented are:
+
+* PVector_, similar to a python list
+* PMap_, similar to dict
+* PSet_, similar to set
+* PRecord_, a PMap on steroids with fixed fields, optional type and invariant checking and much more
+* PClass_, a Python class with fixed fields, optional type and invariant checking and much more
+* `Checked collections`_, PVector, PMap and PSet with optional type and invariance checks and more
+* PBag, similar to collections.Counter
+* PList, a classic singly linked list
+* PDeque, similar to collections.deque
+* Immutable object type (immutable) built on the named tuple
+* freeze_ and thaw_ functions to convert between pythons standard collections and pyrsistent collections.
+* Flexible transformations_ of arbitrarily complex structures built from PMaps and PVectors.
+
+Below are examples of common usage patterns for some of the structures and features. More information and
+full documentation for all data structures is available in the documentation_.
+
+.. _PVector:
+
+PVector
+~~~~~~~
+With full support for the Sequence_ protocol PVector is meant as a drop in replacement to the built in list from a readers
+point of view. Write operations of course differ since no in place mutation is done but naming should be in line
+with corresponding operations on the built in list.
+
+Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+Appends are amortized O(1). Random access and insert is log32(n) where n is the size of the vector.
+
+.. code:: python
+
+ >>> from pyrsistent import v, pvector
+
+ # No mutation of vectors once created, instead they
+ # are "evolved" leaving the original untouched
+ >>> v1 = v(1, 2, 3)
+ >>> v2 = v1.append(4)
+ >>> v3 = v2.set(1, 5)
+ >>> v1
+ pvector([1, 2, 3])
+ >>> v2
+ pvector([1, 2, 3, 4])
+ >>> v3
+ pvector([1, 5, 3, 4])
+
+ # Random access and slicing
+ >>> v3[1]
+ 5
+ >>> v3[1:3]
+ pvector([5, 3])
+
+ # Iteration
+ >>> list(x + 1 for x in v3)
+ [2, 6, 4, 5]
+ >>> pvector(2 * x for x in range(3))
+ pvector([0, 2, 4])
+
+.. _PMap:
+
+PMap
+~~~~
+With full support for the Mapping_ protocol PMap is meant as a drop in replacement to the built in dict from a readers point
+of view. Support for the Hashable_ protocol also means that it can be used as key in other Mappings_.
+
+Random access and insert is log32(n) where n is the size of the map.
+
+.. code:: python
+
+ >>> from pyrsistent import m, pmap, v
+
+ # No mutation of maps once created, instead they are
+ # "evolved" leaving the original untouched
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.set('a', 5)
+ >>> m1
+ pmap({'a': 1, 'b': 2})
+ >>> m2
+ pmap({'a': 1, 'c': 3, 'b': 2})
+ >>> m3
+ pmap({'a': 5, 'c': 3, 'b': 2})
+ >>> m3['a']
+ 5
+
+ # Evolution of nested persistent structures
+ >>> m4 = m(a=5, b=6, c=v(1, 2))
+ >>> m4.transform(('c', 1), 17)
+ pmap({'a': 5, 'c': pvector([1, 17]), 'b': 6})
+ >>> m5 = m(a=1, b=2)
+
+ # Evolve by merging with other mappings
+ >>> m5.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'a': 17, 'c': 3, 'b': 2, 'd': 35})
+ >>> pmap({'x': 1, 'y': 2}) + pmap({'y': 3, 'z': 4})
+ pmap({'y': 3, 'x': 1, 'z': 4})
+
+ # Dict-like methods to convert to list and iterate
+ >>> m3.items()
+ pvector([('a', 5), ('c', 3), ('b', 2)])
+ >>> list(m3)
+ ['a', 'c', 'b']
+
+.. _PSet:
+
+PSet
+~~~~
+With full support for the Set_ protocol PSet is meant as a drop in replacement to the built in set from a readers point
+of view. Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+Random access and insert is log32(n) where n is the size of the set.
+
+.. code:: python
+
+ >>> from pyrsistent import s
+
+ # No mutation of sets once created, you know the story...
+ >>> s1 = s(1, 2, 3, 2)
+ >>> s2 = s1.add(4)
+ >>> s3 = s1.remove(1)
+ >>> s1
+ pset([1, 2, 3])
+ >>> s2
+ pset([1, 2, 3, 4])
+ >>> s3
+ pset([2, 3])
+
+ # Full support for set operations
+ >>> s1 | s(3, 4, 5)
+ pset([1, 2, 3, 4, 5])
+ >>> s1 & s(3, 4, 5)
+ pset([3])
+ >>> s1 < s2
+ True
+ >>> s1 < s(3, 4, 5)
+ False
+
+.. _PRecord:
+
+PRecord
+~~~~~~~
+A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
+from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
+access using subscript notation.
+
+.. code:: python
+
+ >>> from pyrsistent import PRecord, field
+ >>> class ARecord(PRecord):
+ ... x = field()
+ ...
+ >>> r = ARecord(x=3)
+ >>> r
+ ARecord(x=3)
+ >>> r.x
+ 3
+ >>> r.set(x=2)
+ ARecord(x=2)
+ >>> r.set(y=2)
+ Traceback (most recent call last):
+ AttributeError: 'y' is not among the specified fields for ARecord
+
+Type information
+****************
+It is possible to add type information to the record to enforce type checks. Multiple allowed types can be specified
+by providing an iterable of types.
+
+.. code:: python
+
+ >>> class BRecord(PRecord):
+ ... x = field(type=int)
+ ... y = field(type=(int, type(None)))
+ ...
+ >>> BRecord(x=3, y=None)
+ BRecord(y=None, x=3)
+ >>> BRecord(x=3.0)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field BRecord.x, was float
+
+
+Custom types (classes) that are iterable should be wrapped in a tuple to prevent their
+members being added to the set of valid types. Although Enums in particular are now
+supported without wrapping, see #83 for more information.
+
+Mandatory fields
+****************
+Fields are not mandatory by default but can be specified as such. If fields are missing an
+*InvariantException* will be thrown which contains information about the missing fields.
+
+.. code:: python
+
+ >>> from pyrsistent import InvariantException
+ >>> class CRecord(PRecord):
+ ... x = field(mandatory=True)
+ ...
+ >>> r = CRecord(x=3)
+ >>> try:
+ ... r.discard('x')
+ ... except InvariantException as e:
+ ... print(e.missing_fields)
+ ...
+ ('CRecord.x',)
+
+Invariants
+**********
+It is possible to add invariants that must hold when evolving the record. Invariants can be
+specified on both field and record level. If invariants fail an *InvariantException* will be
+thrown which contains information about the failing invariants. An invariant function should
+return a tuple consisting of a boolean that tells if the invariant holds or not and an object
+describing the invariant. This object can later be used to identify which invariant that failed.
+
+The global invariant function is only executed if all field invariants hold.
+
+Global invariants are inherited to subclasses.
+
+.. code:: python
+
+ >>> class RestrictedVector(PRecord):
+ ... __invariant__ = lambda r: (r.y >= r.x, 'x larger than y')
+ ... x = field(invariant=lambda x: (x > 0, 'x negative'))
+ ... y = field(invariant=lambda y: (y > 0, 'y negative'))
+ ...
+ >>> r = RestrictedVector(y=3, x=2)
+ >>> try:
+ ... r.set(x=-1, y=-2)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('y negative', 'x negative')
+ >>> try:
+ ... r.set(x=2, y=1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('x larger than y',)
+
+Invariants may also contain multiple assertions. For those cases the invariant function should
+return a tuple of invariant tuples as described above. This structure is reflected in the
+invariant_errors attribute of the exception which will contain tuples with data from all failed
+invariants. Eg:
+
+.. code:: python
+
+ >>> class EvenX(PRecord):
+ ... x = field(invariant=lambda x: ((x > 0, 'x negative'), (x % 2 == 0, 'x odd')))
+ ...
+ >>> try:
+ ... EvenX(x=-1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ (('x negative', 'x odd'),)
+
+
+Factories
+*********
+It's possible to specify factory functions for fields. The factory function receives whatever
+is supplied as field value and the actual value returned by the factory is assigned to the field
+given that any type and invariant checks hold.
+PRecords have a default factory specified as a static function on the class, create(). It takes
+a *Mapping* as argument and returns an instance of the specific record.
+If a record has fields of type PRecord the create() method of that record will
+be called to create the "sub record" if no factory has explicitly been specified to override
+this behaviour.
+
+.. code:: python
+
+ >>> class DRecord(PRecord):
+ ... x = field(factory=int)
+ ...
+ >>> class ERecord(PRecord):
+ ... d = field(type=DRecord)
+ ...
+ >>> ERecord.create({'d': {'x': '1'}})
+ ERecord(d=DRecord(x=1))
+
+Collection fields
+*****************
+It is also possible to have fields with ``pyrsistent`` collections.
+
+.. code:: python
+
+ >>> from pyrsistent import pset_field, pmap_field, pvector_field
+ >>> class MultiRecord(PRecord):
+ ... set_of_ints = pset_field(int)
+ ... map_int_to_str = pmap_field(int, str)
+ ... vector_of_strs = pvector_field(str)
+ ...
+
+Serialization
+*************
+PRecords support serialization back to dicts. Default serialization will take keys and values
+"as is" and output them into a dict. It is possible to specify custom serialization functions
+to take care of fields that require special treatment.
+
+.. code:: python
+
+ >>> from datetime import date
+ >>> class Person(PRecord):
+ ... name = field(type=unicode)
+ ... birth_date = field(type=date,
+ ... serializer=lambda format, d: d.strftime(format['date']))
+ ...
+ >>> john = Person(name=u'John', birth_date=date(1985, 10, 21))
+ >>> john.serialize({'date': '%Y-%m-%d'})
+ {'birth_date': '1985-10-21', 'name': u'John'}
+
+
+.. _instar: https://github.com/boxed/instar/
+
+.. _PClass:
+
+PClass
+~~~~~~
+A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
+from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
+is not a PMap and hence not a collection but rather a plain Python object.
+
+.. code:: python
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=3)
+ >>> a
+ AClass(x=3)
+ >>> a.x
+ 3
+
+
+Checked collections
+~~~~~~~~~~~~~~~~~~~
+Checked collections currently come in three flavors: CheckedPVector, CheckedPMap and CheckedPSet.
+
+.. code:: python
+
+ >>> from pyrsistent import CheckedPVector, CheckedPMap, CheckedPSet, thaw
+ >>> class Positives(CheckedPSet):
+ ... __type__ = (long, int)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> class Lottery(PRecord):
+ ... name = field(type=str)
+ ... numbers = field(type=Positives, invariant=lambda p: (len(p) > 0, 'No numbers'))
+ ...
+ >>> class Lotteries(CheckedPVector):
+ ... __type__ = Lottery
+ ...
+ >>> class LotteriesByDate(CheckedPMap):
+ ... __key_type__ = date
+ ... __value_type__ = Lotteries
+ ...
+ >>> lotteries = LotteriesByDate.create({date(2015, 2, 15): [{'name': 'SuperLotto', 'numbers': {1, 2, 3}},
+ ... {'name': 'MegaLotto', 'numbers': {4, 5, 6}}],
+ ... date(2015, 2, 16): [{'name': 'SuperLotto', 'numbers': {3, 2, 1}},
+ ... {'name': 'MegaLotto', 'numbers': {6, 5, 4}}]})
+ >>> lotteries
+ LotteriesByDate({datetime.date(2015, 2, 15): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')]), datetime.date(2015, 2, 16): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])})
+
+ # The checked versions support all operations that the corresponding
+ # unchecked types do
+ >>> lottery_0215 = lotteries[date(2015, 2, 15)]
+ >>> lottery_0215.transform([0, 'name'], 'SuperDuperLotto')
+ Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperDuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])
+
+ # But also makes asserts that types and invariants hold
+ >>> lottery_0215.transform([0, 'name'], 999)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field Lottery.name, was int
+
+ >>> lottery_0215.transform([0, 'numbers'], set())
+ Traceback (most recent call last):
+ InvariantException: Field invariant failed
+
+ # They can be converted back to python built ins with either thaw()
+ # or serialize() (which provides possibilities to customize serialization)
+ >>> thaw(lottery_0215)
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+ >>> lottery_0215.serialize()
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+
+.. _transformations:
+
+Transformations
+~~~~~~~~~~~~~~~
+Transformations are inspired by the cool library instar_ for Clojure. They let you evolve PMaps and PVectors
+with arbitrarily deep/complex nesting using simple syntax and flexible matching syntax.
+
+The first argument to transformation is the path that points out the value to transform. The
+second is the transformation to perform. If the transformation is callable it will be applied
+to the value(s) matching the path. The path may also contain callables. In that case they are
+treated as matchers. If the matcher returns True for a specific key it is considered for transformation.
+
+.. code:: python
+
+ # Basic examples
+ >>> from pyrsistent import inc, freeze, thaw, rex, ny, discard
+ >>> v1 = freeze([1, 2, 3, 4, 5])
+ >>> v1.transform([2], inc)
+ pvector([1, 2, 4, 4, 5])
+ >>> v1.transform([lambda ix: 0 < ix < 4], 8)
+ pvector([1, 8, 8, 8, 5])
+ >>> v1.transform([lambda ix, v: ix == 0 or v == 5], 0)
+ pvector([0, 2, 3, 4, 0])
+
+ # The (a)ny matcher can be used to match anything
+ >>> v1.transform([ny], 8)
+ pvector([8, 8, 8, 8, 8])
+
+ # Regular expressions can be used for matching
+ >>> scores = freeze({'John': 12, 'Joseph': 34, 'Sara': 23})
+ >>> scores.transform([rex('^Jo')], 0)
+ pmap({'Joseph': 0, 'Sara': 23, 'John': 0})
+
+ # Transformations can be done on arbitrarily deep structures
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ # When nothing has been transformed the original data structure is kept
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+
+ # There is a special transformation that can be used to discard elements. Also
+ # multiple transformations can be applied in one call
+ >>> thaw(news_paper.transform(['weather'], discard, ['articles', ny, 'content'], discard))
+ {'articles': [{'author': 'Sara'}, {'author': 'Steve'}]}
+
+Evolvers
+~~~~~~~~
+PVector, PMap and PSet all have support for a concept dubbed *evolvers*. An evolver acts like a mutable
+view of the underlying persistent data structure with "transaction like" semantics. No updates of the original
+data structure is ever performed, it is still fully immutable.
+
+The evolvers have a very limited API by design to discourage excessive, and inappropriate, usage as that would
+take us down the mutable road. In principle only basic mutation and element access functions are supported.
+Check out the documentation_ of each data structure for specific examples.
+
+Examples of when you may want to use an evolver instead of working directly with the data structure include:
+
+* Multiple updates are done to the same data structure and the intermediate results are of no
+  interest. In this case using an evolver may be more efficient and easier to work with.
+* You need to pass a vector into a legacy function or a function that you have no control
+ over which performs in place mutations. In this case pass an evolver instance
+ instead and then create a new pvector from the evolver once the function returns.
+
+.. code:: python
+
+ >>> from pyrsistent import v
+
+ # In place mutation as when working with the built in counterpart
+ >>> v1 = v(1, 2, 3)
+ >>> e = v1.evolver()
+ >>> e[1] = 22
+ >>> e = e.append(4)
+ >>> e = e.extend([5, 6])
+ >>> e[5] += 1
+ >>> len(e)
+ 6
+
+ # The evolver is considered *dirty* when it contains changes compared to the underlying vector
+ >>> e.is_dirty()
+ True
+
+ # But the underlying pvector still remains untouched
+ >>> v1
+ pvector([1, 2, 3])
+
+ # Once satisfied with the updates you can produce a new pvector containing the updates.
+ # The new pvector will share data with the original pvector in the same way that would have
+ # been done if only using operations on the pvector.
+ >>> v2 = e.persistent()
+ >>> v2
+ pvector([1, 22, 3, 4, 5, 7])
+
+ # The evolver is now no longer considered *dirty* as it contains no differences compared to the
+ # pvector just produced.
+ >>> e.is_dirty()
+ False
+
+ # You may continue to work with the same evolver without affecting the content of v2
+ >>> e[0] = 11
+
+ # Or create a new evolver from v2. The two evolvers can be updated independently but will both
+ # share data with v2 where possible.
+ >>> e2 = v2.evolver()
+ >>> e2[0] = 1111
+ >>> e.persistent()
+ pvector([11, 22, 3, 4, 5, 7])
+ >>> e2.persistent()
+ pvector([1111, 22, 3, 4, 5, 7])
+
+.. _freeze:
+.. _thaw:
+
+freeze and thaw
+~~~~~~~~~~~~~~~
+These functions are great when your cozy immutable world has to interact with the evil mutable world outside.
+
+.. code:: python
+
+ >>> from pyrsistent import freeze, thaw, v, m
+ >>> freeze([1, {'a': 3}])
+ pvector([1, pmap({'a': 3})])
+ >>> thaw(v(1, m(a=3)))
+ [1, {'a': 3}]
+
+By default, freeze will also recursively convert values inside PVectors and PMaps. This behaviour can be changed by providing freeze with the flag strict=False.
+
+.. code:: python
+
+ >>> from pyrsistent import freeze, v, m
+ >>> freeze(v(1, v(2, [3])))
+ pvector([1, pvector([2, pvector([3])])])
+ >>> freeze(v(1, v(2, [3])), strict=False)
+ pvector([1, pvector([2, [3]])])
+ >>> freeze(m(a=m(b={'c': 1})))
+ pmap({'a': pmap({'b': pmap({'c': 1})})})
+ >>> freeze(m(a=m(b={'c': 1})), strict=False)
+ pmap({'a': pmap({'b': {'c': 1}})})
+
+In this regard, thaw operates as the inverse of freeze so will thaw values inside native data structures unless passed the strict=False flag.
+
+
+Compatibility
+-------------
+
+Pyrsistent is developed and tested on Python 3.8+ and PyPy3.
+
+Performance
+-----------
+
+Pyrsistent is developed with performance in mind. Still, while some operations are nearly on par with their built in,
+mutable, counterparts in terms of speed, other operations are slower. In the cases where attempts at
+optimizations have been done, speed has generally been valued over space.
+
+Pyrsistent comes with two API compatible flavors of PVector (on which PMap and PSet are based), one pure Python
+implementation and one implemented as a C extension. The latter generally being 2 - 20 times faster than the former.
+The C extension will be used automatically when possible.
+
+The pure python implementation is fully PyPy compatible. Running it under PyPy speeds operations up considerably if
+the structures are used heavily (if JITed), for some cases the performance is almost on par with the built in counterparts.
+
+Type hints
+----------
+
+PEP 561 style type hints for use with mypy and various editors are available for most types and functions in pyrsistent.
+
+Type classes for annotating your own code with pyrsistent types are also available under pyrsistent.typing.
+
+Installation
+------------
+
+pip install pyrsistent
+
+Documentation
+-------------
+
+Available at http://pyrsistent.readthedocs.org/
+
+Brief presentation available at http://slides.com/tobiasgustafsson/immutability-and-python/
+
+Contributors
+------------
+
+Tobias Gustafsson https://github.com/tobgu
+
+Christopher Armstrong https://github.com/radix
+
+Anders Hovmöller https://github.com/boxed
+
+Itamar Turner-Trauring https://github.com/itamarst
+
+Jonathan Lange https://github.com/jml
+
+Richard Futrell https://github.com/Futrell
+
+Jakob Hollenstein https://github.com/jkbjh
+
+David Honour https://github.com/foolswood
+
+David R. MacIver https://github.com/DRMacIver
+
+Marcus Ewert https://github.com/sarum90
+
+Jean-Paul Calderone https://github.com/exarkun
+
+Douglas Treadwell https://github.com/douglas-treadwell
+
+Travis Parker https://github.com/teepark
+
+Julian Berman https://github.com/Julian
+
+Dennis Tomas https://github.com/dtomas
+
+Neil Vyas https://github.com/neilvyas
+
+doozr https://github.com/doozr
+
+Kamil Galuszka https://github.com/galuszkak
+
+Tsuyoshi Hombashi https://github.com/thombashi
+
+nattofriends https://github.com/nattofriends
+
+agberk https://github.com/agberk
+
+Waleed Khan https://github.com/arxanas
+
+Jean-Louis Fuchs https://github.com/ganwell
+
+Carlos Corbacho https://github.com/ccorbacho
+
+Felix Yan https://github.com/felixonmars
+
+benrg https://github.com/benrg
+
+Jere Lahelma https://github.com/je-l
+
+Max Taggart https://github.com/MaxTaggart
+
+Vincent Philippon https://github.com/vphilippon
+
+Semen Zhydenko https://github.com/ss18
+
+Till Varoquaux https://github.com/till-varoquaux
+
+Michal Kowalik https://github.com/michalvi
+
+ossdev07 https://github.com/ossdev07
+
+Kerry Olesen https://github.com/qhesz
+
+johnthagen https://github.com/johnthagen
+
+Bastien Vallet https://github.com/djailla
+
+Ram Rachum https://github.com/cool-RR
+
+Vincent Philippon https://github.com/vphilippon
+
+Andrey Bienkowski https://github.com/hexagonrecursion
+
+Ethan McCue https://github.com/bowbahdoe
+
+Jason R. Coombs https://github.com/jaraco
+
+Nathan https://github.com/ndowens
+
+Geert Barentsen https://github.com/barentsen
+
+phil-arh https://github.com/phil-arh
+
+Tamás Nepusz https://github.com/ntamas
+
+Hugo van Kemenade https://github.com/hugovk
+
+Ben Beasley https://github.com/musicinmybrain
+
+Noah C. Benson https://github.com/noahbenson
+
+dscrofts https://github.com/dscrofts
+
+Andy Reagan https://github.com/andyreagan
+
+Aaron Durant https://github.com/Aaron-Durant
+
+Joshua Munn https://github.com/jams2
+
+Lukas https://github.com/lukasK9999
+
+Arshad https://github.com/arshad-ml
+
+Contributing
+------------
+
+Want to contribute? That's great! If you experience problems please log them on GitHub. If you want to contribute code,
+please fork the repository and submit a pull request.
+
+Run tests
+~~~~~~~~~
+.. _tox: https://tox.readthedocs.io/en/latest/
+
+Tests can be executed using tox_.
+
+Install tox: ``pip install tox``
+
+Run test for Python 3.8: ``tox -e py38``
+
+Release
+~~~~~~~
+* `pip install -r requirements.txt`
+* Update CHANGES.txt
+* Update README.rst with any new contributors and potential info needed.
+* Update _pyrsistent_version.py
+* Commit and tag with new version: `git add -u . && git commit -m 'Prepare version vX.Y.Z' && git tag -a vX.Y.Z -m 'vX.Y.Z'`
+* Push commit and tags: `git push --follow-tags`
+* Build new release using Github actions
+
+Project status
+--------------
+Pyrsistent can be considered stable and mature (who knows, there may even be a 1.0 some day :-)). The project is
+maintained, bugs fixed, PRs reviewed and merged and new releases made. I currently do not have time for development
+of new features or functionality which I don't have use for myself. I'm more than happy to take PRs for new
+functionality though!
+
+There are a bunch of issues marked with ``enhancement`` and ``help wanted`` that contain requests for new functionality
+that would be nice to include. The level of difficulty and extent of the issues varies, please reach out to me if you're
+interested in working on any of them.
+
+If you feel that you have a grand master plan for where you would like Pyrsistent to go and have the time to put into
+it please don't hesitate to discuss this with me and submit PRs for it. If all goes well I'd be more than happy to add
+additional maintainers to the project!
diff --git a/contrib/python/pyrsistent/py3/_pyrsistent_version.py b/contrib/python/pyrsistent/py3/_pyrsistent_version.py
new file mode 100644
index 0000000000..2f15b8cd37
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/_pyrsistent_version.py
@@ -0,0 +1 @@
+__version__ = '0.20.0'
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/__init__.py b/contrib/python/pyrsistent/py3/pyrsistent/__init__.py
new file mode 100644
index 0000000000..be299658f3
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/__init__.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+from pyrsistent._pmap import pmap, m, PMap
+
+from pyrsistent._pvector import pvector, v, PVector
+
+from pyrsistent._pset import pset, s, PSet
+
+from pyrsistent._pbag import pbag, b, PBag
+
+from pyrsistent._plist import plist, l, PList
+
+from pyrsistent._pdeque import pdeque, dq, PDeque
+
+from pyrsistent._checked_types import (
+ CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError,
+ CheckedValueTypeError, CheckedType, optional)
+
+from pyrsistent._field_common import (
+ field, PTypeError, pset_field, pmap_field, pvector_field)
+
+from pyrsistent._precord import PRecord
+
+from pyrsistent._pclass import PClass, PClassMeta
+
+from pyrsistent._immutable import immutable
+
+from pyrsistent._helpers import freeze, thaw, mutant
+
+from pyrsistent._transformations import inc, discard, rex, ny
+
+from pyrsistent._toolz import get_in
+
+
+__all__ = ('pmap', 'm', 'PMap',
+ 'pvector', 'v', 'PVector',
+ 'pset', 's', 'PSet',
+ 'pbag', 'b', 'PBag',
+ 'plist', 'l', 'PList',
+ 'pdeque', 'dq', 'PDeque',
+ 'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional',
+ 'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field',
+ 'PClass', 'PClassMeta',
+ 'immutable',
+ 'freeze', 'thaw', 'mutant',
+ 'get_in',
+ 'inc', 'discard', 'rex', 'ny')
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_checked_types.py b/contrib/python/pyrsistent/py3/pyrsistent/_checked_types.py
new file mode 100644
index 0000000000..48446e5168
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_checked_types.py
@@ -0,0 +1,547 @@
+from enum import Enum
+
+from abc import abstractmethod, ABCMeta
+from collections.abc import Iterable
+from typing import TypeVar, Generic
+
+from pyrsistent._pmap import PMap, pmap
+from pyrsistent._pset import PSet, pset
+from pyrsistent._pvector import PythonPVector, python_pvector
+
+T_co = TypeVar('T_co', covariant=True)
+KT = TypeVar('KT')
+VT_co = TypeVar('VT_co', covariant=True)
+
+
class CheckedType(object):
    """
    Marker class to enable creation and serialization of checked object graphs.
    """
    __slots__ = ()

    @classmethod
    @abstractmethod
    def create(cls, source_data, _factory_fields=None):
        # Build an instance of ``cls`` from plain (unchecked) source data.
        raise NotImplementedError()

    @abstractmethod
    def serialize(self, format=None):
        # Return a plain-data representation of this object.
        raise NotImplementedError()
+
+
def _restore_pickle(cls, data):
    # Module-level unpickling helper used by the __reduce__ implementations
    # below. ``_factory_fields=set()`` means no factory fields are re-applied,
    # since ``data`` is already fully-built serialized content.
    return cls.create(data, _factory_fields=set())
+
+
class InvariantException(Exception):
    """
    Exception raised from a :py:class:`CheckedType` when invariant tests fail or when a mandatory
    field is missing.

    Contains two fields of interest:
    invariant_errors, a tuple of error data for the failing invariants
    missing_fields, a tuple of strings specifying the missing names
    """

    def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs):
        # Error codes may be given lazily as callables; resolve them eagerly.
        self.invariant_errors = tuple(
            code() if callable(code) else code for code in error_codes)
        self.missing_fields = missing_fields
        super(InvariantException, self).__init__(*args, **kwargs)

    def __str__(self):
        base = super(InvariantException, self).__str__()
        errors = ', '.join(str(err) for err in self.invariant_errors)
        missing = ', '.join(self.missing_fields)
        return "{0}, invariant_errors=[{1}], missing_fields=[{2}]".format(
            base, errors, missing)
+
+
+_preserved_iterable_types = (
+ Enum,
+)
+"""Some types are themselves iterable, but we want to use the type itself and
+not its members for the type specification. This defines a set of such types
+that we explicitly preserve.
+
+Note that strings are not such types because the string inputs we pass in are
+values, not types.
+"""
+
+
def maybe_parse_user_type(t):
    """Try to coerce a user-supplied type directive into a list of types.

    This function should be used in all places where a user specifies a type,
    for consistency.

    The policy for what defines valid user input should be clear from the implementation.
    """
    if isinstance(t, type):
        # Some iterable types (e.g. Enum) must be preserved as-is rather than
        # expanded into their members; plain non-iterable types pass through too.
        if issubclass(t, _preserved_iterable_types) or not isinstance(t, Iterable):
            return [t]
    elif isinstance(t, str):
        # A dotted type name, resolved lazily elsewhere.
        return [t]

    if isinstance(t, Iterable):
        # Flatten nested iterables, validating every contained entry.
        return tuple(entry for item in t for entry in maybe_parse_user_type(item))

    # If this raises because `t` cannot be formatted, so be it.
    raise TypeError(
        'Type specifications must be types or strings. Input: {}'.format(t)
    )
+
+
def maybe_parse_many_user_types(ts):
    """Parse an iterable of user-supplied type directives into a flat tuple of types.

    Just a different name to communicate that you're parsing multiple user
    inputs. `maybe_parse_user_type` handles the iterable case anyway.
    """
    return maybe_parse_user_type(ts)
+
+
def _store_types(dct, bases, destination_name, source_name):
    # Gather the type spec declared in the class namespace itself and in each
    # base class, parse them with the shared user-type parser, and stash the
    # result under ``destination_name`` for the metaclass machinery.
    namespaces = [dct] + [base.__dict__ for base in bases]
    declared = [ns[source_name] for ns in namespaces if source_name in ns]
    dct[destination_name] = maybe_parse_many_user_types(declared)
+
+
+def _merge_invariant_results(result):
+ verdict = True
+ data = []
+ for verd, dat in result:
+ if not verd:
+ verdict = False
+ data.append(dat)
+
+ return verdict, tuple(data)
+
+
def wrap_invariant(invariant):
    """Wrap an invariant so it always yields a single (verdict, data) pair.

    Invariant functions may return the outcome of several tests; in those
    cases the results have to be merged before being passed back to the client.
    """
    def wrapped(*args, **kwargs):
        outcome = invariant(*args, **kwargs)
        if isinstance(outcome[0], bool):
            # Already a single (verdict, data) pair - pass through untouched.
            return outcome
        # A collection of pairs - merge them into one.
        return _merge_invariant_results(outcome)

    return wrapped
+
+
+def _all_dicts(bases, seen=None):
+ """
+ Yield each class in ``bases`` and each of their base classes.
+ """
+ if seen is None:
+ seen = set()
+ for cls in bases:
+ if cls in seen:
+ continue
+ seen.add(cls)
+ yield cls.__dict__
+ for b in _all_dicts(cls.__bases__, seen):
+ yield b
+
+
def store_invariants(dct, bases, destination_name, source_name):
    """Collect invariants from the class namespace and all bases (invariants
    are inherited), wrap each one, and store the tuple in ``destination_name``."""
    namespaces = [dct] + list(_all_dicts(bases))
    invariants = [ns[source_name] for ns in namespaces if source_name in ns]

    if not all(callable(invariant) for invariant in invariants):
        raise TypeError('Invariants must be callable')
    dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants)
+
+
class _CheckedTypeMeta(ABCMeta):
    # Metaclass that wires up the declared type spec (``__type__``), the
    # invariants (``__invariant__``) and a default serializer for checked
    # collection classes such as CheckedPVector/CheckedPSet.
    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_types', '__type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')

        def default_serializer(self, _, value):
            # Delegate to nested checked types; plain values pass through.
            if isinstance(value, CheckedType):
                return value.serialize()
            return value

        # Respect a user-provided __serializer__ if one was declared.
        dct.setdefault('__serializer__', default_serializer)

        # Checked collections are slotted to keep instances lightweight.
        dct['__slots__'] = ()

        return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct)
+
+
class CheckedTypeError(TypeError):
    """
    Base class for type errors raised by checked collections.

    Records the collection class, the allowed types, and the offending
    value together with its actual type.
    """
    def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs):
        super(CheckedTypeError, self).__init__(*args, **kwargs)
        self.source_class = source_class
        self.expected_types = expected_types
        self.actual_type = actual_type
        self.actual_value = actual_value
+
+
class CheckedKeyTypeError(CheckedTypeError):
    """
    Raised when trying to set a value using a key with a type that doesn't match the declared key type.

    Attributes:
    source_class -- The class of the collection
    expected_types -- Allowed types
    actual_type -- The non matching type
    actual_value -- Value of the variable with the non matching type
    """
    pass
+
+
class CheckedValueTypeError(CheckedTypeError):
    """
    Raised when trying to set a value with a type that doesn't match the declared value type.

    Attributes:
    source_class -- The class of the collection
    expected_types -- Allowed types
    actual_type -- The non matching type
    actual_value -- Value of the variable with the non matching type
    """
    pass
+
+
+def _get_class(type_name):
+ module_name, class_name = type_name.rsplit('.', 1)
+ module = __import__(module_name, fromlist=[class_name])
+ return getattr(module, class_name)
+
+
def get_type(typ):
    # Accept either a type object or a dotted-name string; strings are
    # resolved lazily so declarations can avoid import cycles.
    return typ if isinstance(typ, type) else _get_class(typ)
+
+
def get_types(typs):
    """Resolve an iterable of types or dotted-name strings into a list of types."""
    return [get_type(typ) for typ in typs]
+
+
def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError):
    """Raise ``exception_type`` for the first element of ``it`` whose type is
    not among ``expected_types``. A no-op when no types are declared."""
    if not expected_types:
        return
    for elem in it:
        if any(isinstance(elem, get_type(t)) for t in expected_types):
            continue
        actual_type = type(elem)
        msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format(
            source_class=source_class.__name__,
            expected_types=tuple(get_type(et).__name__ for et in expected_types),
            actual_type=actual_type.__name__)
        raise exception_type(source_class, expected_types, actual_type, elem, msg)
+
+
+def _invariant_errors(elem, invariants):
+ return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid]
+
+
def _invariant_errors_iterable(it, invariants):
    # Flatten the per-element invariant errors across every element of ``it``.
    return [error for elem in it for error in _invariant_errors(elem, invariants)]
+
+
def optional(*typs):
    """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """
    return (*typs, type(None))
+
+
def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False):
    # Shared ``create`` classmethod body for CheckedPVector/CheckedPSet.
    if isinstance(source_data, cls):
        # Already the right checked type - reuse it (checked types are immutable).
        return source_data

    # Recursively apply create methods of checked types if the types of the supplied data
    # does not match any of the valid types.
    types = get_types(cls._checked_types)
    # Only the first declared CheckedType is used for recursive creation.
    checked_type = next((t for t in types if issubclass(t, CheckedType)), None)
    if checked_type:
        return cls([checked_type.create(data, ignore_extra=ignore_extra)
                    if not any(isinstance(data, t) for t in types) else data
                    for data in source_data])

    return cls(source_data)
+
class CheckedPVector(Generic[T_co], PythonPVector, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPVector is a PVector which allows specifying type and invariant checks.

    >>> class Positives(CheckedPVector):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """

    __slots__ = ()

    def __new__(cls, initial=()):
        # Identity comparison is the correct (and cheaper) exact-type check
        # here; this also matches the style used by CheckedPSet.__new__.
        if type(initial) is PythonPVector:
            # Adopt the internal structure of an existing plain vector directly
            # instead of rebuilding it element by element.
            return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail)

        return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent()

    def set(self, key, value):
        return self.evolver().set(key, value).persistent()

    def append(self, val):
        return self.evolver().append(val).persistent()

    def extend(self, it):
        return self.evolver().extend(it).persistent()

    create = classmethod(_checked_type_create)

    def serialize(self, format=None):
        # Serialize each element; the default serializer installed by the
        # metaclass delegates to nested CheckedType values.
        serializer = self.__serializer__
        return list(serializer(format, v) for v in self)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)

    class Evolver(PythonPVector.Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, vector):
            super(CheckedPVector.Evolver, self).__init__(vector)
            self._destination_class = destination_class
            self._invariant_errors = []

        def _check(self, it):
            # Type violations raise immediately; invariant failures are
            # accumulated and raised together in persistent().
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def __setitem__(self, key, value):
            self._check([value])
            return super(CheckedPVector.Evolver, self).__setitem__(key, value)

        def append(self, elem):
            self._check([elem])
            return super(CheckedPVector.Evolver, self).append(elem)

        def extend(self, it):
            # Materialize the iterable: it is consumed twice (check + extend).
            it = list(it)
            self._check(it)
            return super(CheckedPVector.Evolver, self).extend(it)

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            result = self._orig_pvector
            if self.is_dirty() or (self._destination_class != type(self._orig_pvector)):
                pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail)
                result = self._destination_class(pv)
                self._reset(result)

            return result

    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(self.tolist())

    __str__ = __repr__

    def evolver(self):
        return CheckedPVector.Evolver(self.__class__, self)
+
+
class CheckedPSet(PSet[T_co], CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPSet is a PSet which allows specifying type and invariant checks.

    >>> class Positives(CheckedPSet):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """

    __slots__ = ()

    def __new__(cls, initial=()):
        # A PMap argument is the already-checked backing map - this is the
        # path taken by Evolver.persistent() below, which skips re-validation.
        if type(initial) is PMap:
            return super(CheckedPSet, cls).__new__(cls, initial)

        # Otherwise validate every element through the checking evolver.
        evolver = CheckedPSet.Evolver(cls, pset())
        for e in initial:
            evolver.add(e)

        return evolver.persistent()

    def __repr__(self):
        # Reuse the base repr but substitute the subclass name.
        # NOTE(review): assumes the base repr starts with a 4-char prefix
        # ("pset") - confirm against PSet.__repr__.
        return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:]

    def __str__(self):
        return self.__repr__()

    def serialize(self, format=None):
        # Serialize each element; the default serializer delegates to nested
        # CheckedType values.
        serializer = self.__serializer__
        return set(serializer(format, v) for v in self)

    create = classmethod(_checked_type_create)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)

    def evolver(self):
        return CheckedPSet.Evolver(self.__class__, self)

    class Evolver(PSet._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_set):
            super(CheckedPSet.Evolver, self).__init__(original_set)
            self._destination_class = destination_class
            self._invariant_errors = []

        def _check(self, it):
            # Type violations raise immediately; invariant failures are
            # accumulated and raised together in persistent().
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def add(self, element):
            self._check([element])
            # Sets are represented as a map with True values.
            self._pmap_evolver[element] = True
            return self

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            if self.is_dirty() or self._destination_class != type(self._original_pset):
                return self._destination_class(self._pmap_evolver.persistent())

            return self._original_pset
+
+
class _CheckedMapTypeMeta(type):
    # Map counterpart of _CheckedTypeMeta: separate key/value type specs and a
    # default serializer that handles (key, value) pairs.
    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_key_types', '__key_type__')
        _store_types(dct, bases, '_checked_value_types', '__value_type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')

        def default_serializer(self, _, key, value):
            # Delegate to nested checked types; plain keys/values pass through.
            sk = key
            if isinstance(key, CheckedType):
                sk = key.serialize()

            sv = value
            if isinstance(value, CheckedType):
                sv = value.serialize()

            return sk, sv

        # Respect a user-provided __serializer__ if one was declared.
        dct.setdefault('__serializer__', default_serializer)

        dct['__slots__'] = ()

        return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct)
+
+# Marker object
+_UNDEFINED_CHECKED_PMAP_SIZE = object()
+
+
class CheckedPMap(PMap[KT, VT_co], CheckedType, metaclass=_CheckedMapTypeMeta):
    """
    A CheckedPMap is a PMap which allows specifying type and invariant checks.

    >>> class IntToFloatMap(CheckedPMap):
    ...     __key_type__ = int
    ...     __value_type__ = float
    ...     __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping')
    ...
    >>> IntToFloatMap({1: 1.5, 2: 2.25})
    IntToFloatMap({1: 1.5, 2: 2.25})
    """

    __slots__ = ()

    # NOTE: the mutable default ``{}`` is safe here - ``initial`` is only ever
    # iterated (``initial.items()``), never mutated.
    def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE):
        # When a size is supplied, ``initial`` is the already-built internal
        # bucket structure (the path taken by Evolver.persistent() below),
        # so element checking is skipped.
        if size is not _UNDEFINED_CHECKED_PMAP_SIZE:
            return super(CheckedPMap, cls).__new__(cls, size, initial)

        evolver = CheckedPMap.Evolver(cls, pmap())
        for k, v in initial.items():
            evolver.set(k, v)

        return evolver.persistent()

    def evolver(self):
        return CheckedPMap.Evolver(self.__class__, self)

    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(str(dict(self)))

    __str__ = __repr__

    def serialize(self, format=None):
        # Serialize each (key, value) pair; the default serializer delegates
        # to nested CheckedType keys/values.
        serializer = self.__serializer__
        return dict(serializer(format, k, v) for k, v in self.items())

    @classmethod
    def create(cls, source_data, _factory_fields=None):
        if isinstance(source_data, cls):
            return source_data

        # Recursively apply create methods of checked types if the types of the supplied data
        # does not match any of the valid types.
        key_types = get_types(cls._checked_key_types)
        checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None)
        value_types = get_types(cls._checked_value_types)
        checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None)

        if checked_key_type or checked_value_type:
            return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key,
                             checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value)
                            for key, value in source_data.items()))

        return cls(source_data)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)

    class Evolver(PMap._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_map):
            super(CheckedPMap.Evolver, self).__init__(original_map)
            self._destination_class = destination_class
            self._invariant_errors = []

        def set(self, key, value):
            # Key and value are type-checked separately (with distinct
            # exception types); invariant failures are accumulated and raised
            # together in persistent().
            _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError)
            _check_types([value], self._destination_class._checked_value_types, self._destination_class)
            self._invariant_errors.extend(data for valid, data in (invariant(key, value)
                                                                   for invariant in self._destination_class._checked_invariants)
                                          if not valid)

            return super(CheckedPMap.Evolver, self).set(key, value)

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            if self.is_dirty() or type(self._original_pmap) != self._destination_class:
                return self._destination_class(self._buckets_evolver.persistent(), self._size)

            return self._original_pmap
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_field_common.py b/contrib/python/pyrsistent/py3/pyrsistent/_field_common.py
new file mode 100644
index 0000000000..508dd2f799
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_field_common.py
@@ -0,0 +1,332 @@
+from pyrsistent._checked_types import (
+ CheckedPMap,
+ CheckedPSet,
+ CheckedPVector,
+ CheckedType,
+ InvariantException,
+ _restore_pickle,
+ get_type,
+ maybe_parse_user_type,
+ maybe_parse_many_user_types,
+)
+from pyrsistent._checked_types import optional as optional_type
+from pyrsistent._checked_types import wrap_invariant
+import inspect
+
+
def set_fields(dct, bases, name):
    """Collect ``_PField`` definitions from the base classes and from ``dct``
    into ``dct[name]``, removing the field objects from the class namespace."""
    # Inherited fields first; fields declared on the class itself override.
    inherited = {}
    for base in bases:
        inherited.update(base.__dict__.get(name, {}))
    dct[name] = inherited

    for key, value in list(dct.items()):
        if isinstance(value, _PField):
            dct[name][key] = value
            del dct[key]
+
+
def check_global_invariants(subject, invariants):
    """Run every global invariant against ``subject``; raise
    ``InvariantException`` listing all failing invariants' error codes."""
    failures = []
    for invariant in invariants:
        is_ok, error_code = invariant(subject)
        if not is_ok:
            failures.append(error_code)
    if failures:
        raise InvariantException(tuple(failures), (), 'Global invariant failed')
+
+
def serialize(serializer, format, value):
    # A CheckedType with no explicit serializer knows how to serialize itself;
    # otherwise defer to the provided serializer.
    use_own = isinstance(value, CheckedType) and serializer is PFIELD_NO_SERIALIZER
    return value.serialize(format) if use_own else serializer(format, value)
+
+
def check_type(destination_cls, field, name, value):
    """Raise ``PTypeError`` when ``value`` matches none of ``field.type``.

    A no-op when the field declares no types.
    """
    if not field.type:
        return
    if any(isinstance(value, get_type(t)) for t in field.type):
        return
    actual_type = type(value)
    message = "Invalid type for field {0}.{1}, was {2}".format(destination_cls.__name__, name, actual_type.__name__)
    raise PTypeError(destination_cls, name, field.type, actual_type, message)
+
+
def is_type_cls(type_cls, field_type):
    # A raw ``set`` of types short-circuits to True without inspecting members.
    if type(field_type) is set:
        return True
    candidates = tuple(field_type)
    # Otherwise only the first declared type is inspected.
    return bool(candidates) and issubclass(get_type(candidates[0]), type_cls)
+
+
def is_field_ignore_extra_complaint(type_cls, field, ignore_extra):
    """Return True when ``ignore_extra`` should be forwarded to the factory.

    ignore_extra defaults to False, so for speed the field's factory signature
    is only inspected when the flag is actually set and the field's type
    matches ``type_cls``.
    """
    if not ignore_extra or not is_type_cls(type_cls, field.type):
        return False
    return 'ignore_extra' in inspect.signature(field.factory).parameters
+
+
+
+class _PField(object):
+ __slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer')
+
+ def __init__(self, type, invariant, initial, mandatory, factory, serializer):
+ self.type = type
+ self.invariant = invariant
+ self.initial = initial
+ self.mandatory = mandatory
+ self._factory = factory
+ self.serializer = serializer
+
+ @property
+ def factory(self):
+ # If no factory is specified and the type is another CheckedType use the factory method of that CheckedType
+ if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1:
+ typ = get_type(tuple(self.type)[0])
+ if issubclass(typ, CheckedType):
+ return typ.create
+
+ return self._factory
+
# Sentinels marking "not specified" for each field() parameter.
PFIELD_NO_TYPE = ()
PFIELD_NO_INVARIANT = lambda _: (True, None)  # invariant that always passes
PFIELD_NO_FACTORY = lambda x: x  # identity factory
PFIELD_NO_INITIAL = object()  # unique marker: None is a valid initial value
PFIELD_NO_SERIALIZER = lambda _, value: value  # pass-through serializer
+
+
def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL,
          mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER):
    """
    Field specification factory for :py:class:`PRecord`.

    :param type: a type or iterable with types that are allowed for this field
    :param invariant: a function specifying an invariant that must hold for the field
    :param initial: value of field if not specified when instantiating the record
    :param mandatory: boolean specifying if the field is mandatory or not
    :param factory: function called when field is set.
    :param serializer: function that returns a serialized version of the field
    """

    # NB: We have to check this predicate separately from the predicates in
    # `maybe_parse_user_type` et al. because this one is related to supporting
    # the argspec for `field`, while those are related to supporting the valid
    # ways to specify types.

    # Multiple types must be passed in one of the following containers. Note
    # that a type that is a subclass of one of these containers, like a
    # `collections.namedtuple`, will work as expected, since we check
    # `isinstance` and not `issubclass`.
    parse = maybe_parse_many_user_types if isinstance(type, (list, set, tuple)) else maybe_parse_user_type
    types = set(parse(type))

    if invariant != PFIELD_NO_INVARIANT and callable(invariant):
        invariant_function = wrap_invariant(invariant)
    else:
        invariant_function = invariant

    spec = _PField(type=types, invariant=invariant_function, initial=initial,
                   mandatory=mandatory, factory=factory, serializer=serializer)

    _check_field_parameters(spec)

    return spec
+
+
+def _check_field_parameters(field):
+ for t in field.type:
+ if not isinstance(t, type) and not isinstance(t, str):
+ raise TypeError('Type parameter expected, not {0}'.format(type(t)))
+
+ if field.initial is not PFIELD_NO_INITIAL and \
+ not callable(field.initial) and \
+ field.type and not any(isinstance(field.initial, t) for t in field.type):
+ raise TypeError('Initial has invalid type {0}'.format(type(field.initial)))
+
+ if not callable(field.invariant):
+ raise TypeError('Invariant must be callable')
+
+ if not callable(field.factory):
+ raise TypeError('Factory must be callable')
+
+ if not callable(field.serializer):
+ raise TypeError('Serializer must be callable')
+
+
class PTypeError(TypeError):
    """
    Raised when trying to assign a value with a type that doesn't match the declared type.

    Attributes:
    source_class -- The class of the record
    field -- Field name
    expected_types -- Types allowed for the field
    actual_type -- The non matching type
    """
    def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs):
        # Forward any message args to TypeError, then record the check context.
        super(PTypeError, self).__init__(*args, **kwargs)
        self.source_class = source_class
        self.field = field
        self.expected_types = expected_types
        self.actual_type = actual_type
+
+
+SEQ_FIELD_TYPE_SUFFIXES = {
+ CheckedPVector: "PVector",
+ CheckedPSet: "PSet",
+}
+
+# Global dictionary to hold auto-generated field types: used for unpickling
+_seq_field_types = {}
+
def _restore_seq_field_pickle(checked_class, item_type, data):
    """Unpickling function for auto-generated PVec/PSet field types."""
    # Resolve the memoized generated type from the module-level registry.
    type_ = _seq_field_types[checked_class, item_type]
    return _restore_pickle(type_, data)
+
def _types_to_names(types):
    """Convert a tuple of types to a human-readable string."""
    # Used to synthesize generated class names, e.g. (int, float) -> "IntFloat".
    return "".join(get_type(typ).__name__.capitalize() for typ in types)
+
def _make_seq_field_type(checked_class, item_type, item_invariant):
    """Create a subclass of the given checked class with the given item type."""
    # Memoize generated types so unpickling (via _restore_seq_field_pickle)
    # resolves to the same class object.
    type_ = _seq_field_types.get((checked_class, item_type))
    if type_ is not None:
        return type_

    class TheType(checked_class):
        __type__ = item_type
        __invariant__ = item_invariant

        def __reduce__(self):
            # Pickle via the registry lookup, not the anonymous class itself.
            return (_restore_seq_field_pickle,
                    (checked_class, item_type, list(self)))

    suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class]
    TheType.__name__ = _types_to_names(TheType._checked_types) + suffix
    _seq_field_types[checked_class, item_type] = TheType
    return TheType
+
def _sequence_field(checked_class, item_type, optional, initial,
                    invariant=PFIELD_NO_INVARIANT,
                    item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked field for either ``PSet`` or ``PVector``.

    :param checked_class: ``CheckedPSet`` or ``CheckedPVector``.
    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory.

    :return: A ``field`` containing a checked class.
    """
    TheType = _make_seq_field_type(checked_class, item_type, item_invariant)

    if not optional:
        factory = TheType.create
    else:
        # ``None`` must pass through untouched for optional fields.
        def factory(argument, _factory_fields=None, ignore_extra=False):
            if argument is None:
                return None
            return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra)

    return field(type=optional_type(TheType) if optional else TheType,
                 factory=factory, mandatory=True,
                 invariant=invariant,
                 initial=factory(initial))
+
+
def pset_field(item_type, optional=False, initial=(),
               invariant=PFIELD_NO_INVARIANT,
               item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked ``PSet`` field.

    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.
    :param invariant: Invariant checked against the field value as a whole.
    :param item_invariant: Invariant checked against each item in the set.

    :return: A ``field`` containing a ``CheckedPSet`` of the given type.
    """
    return _sequence_field(CheckedPSet, item_type, optional, initial,
                           invariant=invariant,
                           item_invariant=item_invariant)
+
+
def pvector_field(item_type, optional=False, initial=(),
                  invariant=PFIELD_NO_INVARIANT,
                  item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked ``PVector`` field.

    :param item_type: The required type for the items in the vector.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.
    :param invariant: Invariant checked against the field value as a whole.
    :param item_invariant: Invariant checked against each item in the vector.

    :return: A ``field`` containing a ``CheckedPVector`` of the given type.
    """
    return _sequence_field(CheckedPVector, item_type, optional, initial,
                           invariant=invariant,
                           item_invariant=item_invariant)
+
+
+_valid = lambda item: (True, "")
+
+
+# Global dictionary to hold auto-generated field types: used for unpickling
+_pmap_field_types = {}
+
def _restore_pmap_field_pickle(key_type, value_type, data):
    """Unpickling function for auto-generated PMap field types."""
    # Resolve the memoized generated type from the module-level registry.
    type_ = _pmap_field_types[key_type, value_type]
    return _restore_pickle(type_, data)
+
def _make_pmap_field_type(key_type, value_type):
    """Create a subclass of CheckedPMap with the given key and value types."""
    # Memoize generated types so unpickling (via _restore_pmap_field_pickle)
    # resolves to the same class object.
    type_ = _pmap_field_types.get((key_type, value_type))
    if type_ is not None:
        return type_

    class TheMap(CheckedPMap):
        __key_type__ = key_type
        __value_type__ = value_type

        def __reduce__(self):
            # Pickle via the registry lookup, not the anonymous class itself.
            return (_restore_pmap_field_pickle,
                    (self.__key_type__, self.__value_type__, dict(self)))

    TheMap.__name__ = "{0}To{1}PMap".format(
        _types_to_names(TheMap._checked_key_types),
        _types_to_names(TheMap._checked_value_types))
    _pmap_field_types[key_type, value_type] = TheMap
    return TheMap
+
+
def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT):
    """
    Create a checked ``PMap`` field.

    :param key_type: The required type for the keys of the map.
    :param value_type: The required type for the values of the map.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param invariant: Pass-through to ``field``.

    :return: A ``field`` containing a ``CheckedPMap``.
    """
    TheMap = _make_pmap_field_type(key_type, value_type)

    if optional:
        def factory(argument):
            # ``None`` must pass through untouched for optional fields.
            if argument is None:
                return None
            else:
                return TheMap.create(argument)
    else:
        factory = TheMap.create

    return field(mandatory=True, initial=TheMap(),
                 type=optional_type(TheMap) if optional else TheMap,
                 factory=factory, invariant=invariant)
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_helpers.py b/contrib/python/pyrsistent/py3/pyrsistent/_helpers.py
new file mode 100644
index 0000000000..b44bfc5735
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_helpers.py
@@ -0,0 +1,101 @@
+import collections
+from functools import wraps
+from pyrsistent._pmap import PMap, pmap
+from pyrsistent._pset import PSet, pset
+from pyrsistent._pvector import PVector, pvector
+
def freeze(o, strict=True):
    """
    Recursively convert simple Python containers into pyrsistent versions
    of those containers.

    - list is converted to pvector, recursively
    - dict is converted to pmap, recursively on values (but not keys)
    - defaultdict is converted to pmap, recursively on values (but not keys)
    - set is converted to pset, but not recursively
    - tuple is converted to tuple, recursively.

    If strict == True (default):

    - freeze is called on elements of pvectors
    - freeze is called on values of pmaps

    Sets and dict keys are not recursively frozen because they do not contain
    mutable data by convention. The main exception to this rule is that
    dict keys and set elements are often instances of mutable objects that
    support hash-by-id, which this function can't convert anyway.

    >>> freeze(set([1, 2]))
    pset([1, 2])
    >>> freeze([1, {'a': 3}])
    pvector([1, pmap({'a': 3})])
    >>> freeze((1, []))
    (1, pvector([]))
    """
    typ = type(o)
    # dict and defaultdict previously had two separate branches whose PMap
    # condition was duplicated verbatim; they are merged here (same behavior).
    if typ is dict or typ is collections.defaultdict or (strict and isinstance(o, PMap)):
        return pmap({k: freeze(v, strict) for k, v in o.items()})
    if typ is list or (strict and isinstance(o, PVector)):
        return pvector(freeze(x, strict) for x in o)
    if typ is tuple:
        return tuple(freeze(x, strict) for x in o)
    if typ is set:
        # impossible to have anything that needs freezing inside a set or pset
        return pset(o)
    return o
+
+
def thaw(o, strict=True):
    """
    Recursively convert pyrsistent containers into simple Python containers.

    - pvector is converted to list, recursively
    - pmap is converted to dict, recursively on values (but not keys)
    - pset is converted to set, but not recursively
    - tuple is converted to tuple, recursively.

    If strict == True (the default):

    - thaw is called on elements of lists
    - thaw is called on values in dicts

    >>> from pyrsistent import s, m, v
    >>> thaw(s(1, 2))
    {1, 2}
    >>> thaw(v(1, m(a=3)))
    [1, {'a': 3}]
    >>> thaw((1, v()))
    (1, [])
    """
    typ = type(o)
    if isinstance(o, PVector) or (strict and typ is list):
        return [thaw(item, strict) for item in o]
    if isinstance(o, PMap) or (strict and typ is dict):
        return {k: thaw(v, strict) for k, v in o.items()}
    if typ is tuple:
        return tuple(thaw(item, strict) for item in o)
    if isinstance(o, PSet):
        # psets (and sets) cannot contain anything that itself needs thawing
        return set(o)
    return o
+
+
def mutant(fn):
    """
    Convenience decorator to isolate mutation to within the decorated function (with respect
    to the input arguments).

    All arguments to the decorated function will be frozen so that they are guaranteed not to change.
    The return value is also frozen.
    """
    @wraps(fn)
    def inner_f(*args, **kwargs):
        # Freeze every positional and keyword argument, call through, then
        # freeze whatever comes back.
        frozen_args = [freeze(arg) for arg in args]
        frozen_kwargs = dict(freeze(item) for item in kwargs.items())
        return freeze(fn(*frozen_args, **frozen_kwargs))

    return inner_f
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_immutable.py b/contrib/python/pyrsistent/py3/pyrsistent/_immutable.py
new file mode 100644
index 0000000000..d23deca774
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_immutable.py
@@ -0,0 +1,97 @@
+import sys
+
+
+def immutable(members='', name='Immutable', verbose=False):
+ """
+ Produces a class that either can be used standalone or as a base class for persistent classes.
+
+ This is a thin wrapper around a named tuple.
+
+ Constructing a type and using it to instantiate objects:
+
+ >>> Point = immutable('x, y', name='Point')
+ >>> p = Point(1, 2)
+ >>> p2 = p.set(x=3)
+ >>> p
+ Point(x=1, y=2)
+ >>> p2
+ Point(x=3, y=2)
+
+ Inheriting from a constructed type. In this case no type name needs to be supplied:
+
+ >>> class PositivePoint(immutable('x, y')):
+ ... __slots__ = tuple()
+ ... def __new__(cls, x, y):
+ ... if x > 0 and y > 0:
+ ... return super(PositivePoint, cls).__new__(cls, x, y)
+ ... raise Exception('Coordinates must be positive!')
+ ...
+ >>> p = PositivePoint(1, 2)
+ >>> p.set(x=3)
+ PositivePoint(x=3, y=2)
+ >>> p.set(y=-3)
+ Traceback (most recent call last):
+ Exception: Coordinates must be positive!
+
+ The persistent class also supports the notion of frozen members. The value of a frozen member
+ cannot be updated. For example it could be used to implement an ID that should remain the same
+ over time. A frozen member is denoted by a trailing underscore.
+
+ >>> Point = immutable('x, y, id_', name='Point')
+ >>> p = Point(1, 2, id_=17)
+ >>> p.set(x=3)
+ Point(x=3, y=2, id_=17)
+ >>> p.set(id_=18)
+ Traceback (most recent call last):
+ AttributeError: Cannot set frozen members id_
+ """
+
+ if isinstance(members, str):
+ members = members.replace(',', ' ').split()
+
+ def frozen_member_test():
+ frozen_members = ["'%s'" % f for f in members if f.endswith('_')]
+ if frozen_members:
+ return """
+ frozen_fields = fields_to_modify & set([{frozen_members}])
+ if frozen_fields:
+ raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields))
+ """.format(frozen_members=', '.join(frozen_members))
+
+ return ''
+
+ quoted_members = ', '.join("'%s'" % m for m in members)
+ template = """
+class {class_name}(namedtuple('ImmutableBase', [{quoted_members}])):
+ __slots__ = tuple()
+
+ def __repr__(self):
+ return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__)
+
+ def set(self, **kwargs):
+ if not kwargs:
+ return self
+
+ fields_to_modify = set(kwargs.keys())
+ if not fields_to_modify <= {member_set}:
+ raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set}))
+
+ {frozen_member_test}
+
+ return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self))
+""".format(quoted_members=quoted_members,
+ member_set="set([%s])" % quoted_members if quoted_members else 'set()',
+ frozen_member_test=frozen_member_test(),
+ class_name=name)
+
+ if verbose:
+ print(template)
+
+ from collections import namedtuple
+ namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable')
+ try:
+ exec(template, namespace)
+ except SyntaxError as e:
+ raise SyntaxError(str(e) + ':\n' + template) from e
+
+ return namespace[name]
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_pbag.py b/contrib/python/pyrsistent/py3/pyrsistent/_pbag.py
new file mode 100644
index 0000000000..50001f1919
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_pbag.py
@@ -0,0 +1,270 @@
+from collections.abc import Container, Iterable, Sized, Hashable
+from functools import reduce
+from typing import Generic, TypeVar
+from pyrsistent._pmap import pmap
+
+T_co = TypeVar('T_co', covariant=True)
+
+
+def _add_to_counters(counters, element):
+ return counters.set(element, counters.get(element, 0) + 1)
+
+
+class PBag(Generic[T_co]):
+ """
+ A persistent bag/multiset type.
+
+ Requires elements to be hashable, and allows duplicates, but has no
+ ordering. Bags are hashable.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`b`
+ or :py:func:`pbag` to create an instance.
+
+ Some examples:
+
+ >>> s = pbag([1, 2, 3, 1])
+ >>> s2 = s.add(4)
+ >>> s3 = s2.remove(1)
+ >>> s
+ pbag([1, 1, 2, 3])
+ >>> s2
+ pbag([1, 1, 2, 3, 4])
+ >>> s3
+ pbag([1, 2, 3, 4])
+ """
+
+ __slots__ = ('_counts', '__weakref__')
+
+ def __init__(self, counts):
+ self._counts = counts
+
+ def add(self, element):
+ """
+ Add an element to the bag.
+
+ >>> s = pbag([1])
+ >>> s2 = s.add(1)
+ >>> s3 = s.add(2)
+ >>> s2
+ pbag([1, 1])
+ >>> s3
+ pbag([1, 2])
+ """
+ return PBag(_add_to_counters(self._counts, element))
+
+ def update(self, iterable):
+ """
+ Update bag with all elements in iterable.
+
+ >>> s = pbag([1])
+ >>> s.update([1, 2])
+ pbag([1, 1, 2])
+ """
+ if iterable:
+ return PBag(reduce(_add_to_counters, iterable, self._counts))
+
+ return self
+
+ def remove(self, element):
+ """
+ Remove an element from the bag.
+
+ >>> s = pbag([1, 1, 2])
+ >>> s2 = s.remove(1)
+ >>> s3 = s.remove(2)
+ >>> s2
+ pbag([1, 2])
+ >>> s3
+ pbag([1, 1])
+ """
+ if element not in self._counts:
+ raise KeyError(element)
+ elif self._counts[element] == 1:
+ newc = self._counts.remove(element)
+ else:
+ newc = self._counts.set(element, self._counts[element] - 1)
+ return PBag(newc)
+
+ def count(self, element):
+ """
+ Return the number of times an element appears.
+
+
+ >>> pbag([]).count('non-existent')
+ 0
+ >>> pbag([1, 1, 2]).count(1)
+ 2
+ """
+ return self._counts.get(element, 0)
+
+ def __len__(self):
+ """
+ Return the length including duplicates.
+
+ >>> len(pbag([1, 1, 2]))
+ 3
+ """
+ return sum(self._counts.itervalues())
+
+ def __iter__(self):
+ """
+ Return an iterator of all elements, including duplicates.
+
+ >>> list(pbag([1, 1, 2]))
+ [1, 1, 2]
+ >>> list(pbag([1, 2]))
+ [1, 2]
+ """
+ for elt, count in self._counts.iteritems():
+ for i in range(count):
+ yield elt
+
+ def __contains__(self, elt):
+ """
+ Check if an element is in the bag.
+
+ >>> 1 in pbag([1, 1, 2])
+ True
+ >>> 0 in pbag([1, 2])
+ False
+ """
+ return elt in self._counts
+
+ def __repr__(self):
+ return "pbag({0})".format(list(self))
+
+ def __eq__(self, other):
+ """
+ Check if two bags are equivalent, honoring the number of duplicates,
+ and ignoring insertion order.
+
+ >>> pbag([1, 1, 2]) == pbag([1, 2])
+ False
+ >>> pbag([2, 1, 0]) == pbag([0, 1, 2])
+ True
+ """
+ if type(other) is not PBag:
+ raise TypeError("Can only compare PBag with PBags")
+ return self._counts == other._counts
+
+ def __lt__(self, other):
+ raise TypeError('PBags are not orderable')
+
+ __le__ = __lt__
+ __gt__ = __lt__
+ __ge__ = __lt__
+
+ # Multiset-style operations similar to collections.Counter
+
+ def __add__(self, other):
+ """
+ Combine elements from two PBags.
+
+ >>> pbag([1, 2, 2]) + pbag([2, 3, 3])
+ pbag([1, 2, 2, 2, 3, 3])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = self._counts.evolver()
+ for elem, other_count in other._counts.iteritems():
+ result[elem] = self.count(elem) + other_count
+ return PBag(result.persistent())
+
+ def __sub__(self, other):
+ """
+ Remove elements from one PBag that are present in another.
+
+ >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4])
+ pbag([1, 2, 2])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = self._counts.evolver()
+ for elem, other_count in other._counts.iteritems():
+ newcount = self.count(elem) - other_count
+ if newcount > 0:
+ result[elem] = newcount
+ elif elem in self:
+ result.remove(elem)
+ return PBag(result.persistent())
+
+ def __or__(self, other):
+ """
+ Union: Keep elements that are present in either of two PBags.
+
+ >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3])
+ pbag([1, 2, 2, 2, 3, 3])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = self._counts.evolver()
+ for elem, other_count in other._counts.iteritems():
+ count = self.count(elem)
+ newcount = max(count, other_count)
+ result[elem] = newcount
+ return PBag(result.persistent())
+
+ def __and__(self, other):
+ """
+ Intersection: Only keep elements that are present in both PBags.
+
+ >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3])
+ pbag([2])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = pmap().evolver()
+ for elem, count in self._counts.iteritems():
+ newcount = min(count, other.count(elem))
+ if newcount > 0:
+ result[elem] = newcount
+ return PBag(result.persistent())
+
+ def __hash__(self):
+ """
+ Hash based on value of elements.
+
+ >>> m = pmap({pbag([1, 2]): "it's here!"})
+ >>> m[pbag([2, 1])]
+ "it's here!"
+ >>> pbag([1, 1, 2]) in m
+ False
+ """
+ return hash(self._counts)
+
+
+Container.register(PBag)
+Iterable.register(PBag)
+Sized.register(PBag)
+Hashable.register(PBag)
+
+
+def b(*elements):
+ """
+ Construct a persistent bag.
+
+ Takes an arbitrary number of arguments to insert into the new persistent
+ bag.
+
+ >>> b(1, 2, 3, 2)
+ pbag([1, 2, 2, 3])
+ """
+ return pbag(elements)
+
+
+def pbag(elements):
+ """
+ Convert an iterable to a persistent bag.
+
+ Takes an iterable with elements to insert.
+
+ >>> pbag([1, 2, 3, 2])
+ pbag([1, 2, 2, 3])
+ """
+ if not elements:
+ return _EMPTY_PBAG
+ return PBag(reduce(_add_to_counters, elements, pmap()))
+
+
+_EMPTY_PBAG = PBag(pmap())
+
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_pclass.py b/contrib/python/pyrsistent/py3/pyrsistent/_pclass.py
new file mode 100644
index 0000000000..fd31a95d63
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_pclass.py
@@ -0,0 +1,262 @@
+from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
+from pyrsistent._field_common import (
+ set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
+)
+from pyrsistent._transformations import transform
+
+
+def _is_pclass(bases):
+ return len(bases) == 1 and bases[0] == CheckedType
+
+
+class PClassMeta(type):
+ def __new__(mcs, name, bases, dct):
+ set_fields(dct, bases, name='_pclass_fields')
+ store_invariants(dct, bases, '_pclass_invariants', '__invariant__')
+ dct['__slots__'] = ('_pclass_frozen',) + tuple(key for key in dct['_pclass_fields'])
+
+ # There must only be one __weakref__ entry in the inheritance hierarchy,
+ # lets put it on the top level class.
+ if _is_pclass(bases):
+ dct['__slots__'] += ('__weakref__',)
+
+ return super(PClassMeta, mcs).__new__(mcs, name, bases, dct)
+
+_MISSING_VALUE = object()
+
+
+def _check_and_set_attr(cls, field, name, value, result, invariant_errors):
+ check_type(cls, field, name, value)
+ is_ok, error_code = field.invariant(value)
+ if not is_ok:
+ invariant_errors.append(error_code)
+ else:
+ setattr(result, name, value)
+
+
+class PClass(CheckedType, metaclass=PClassMeta):
+ """
+ A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
+ from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
+ is not a PMap and hence not a collection but rather a plain Python object.
+
+
+ More documentation and examples of PClass usage is available at https://github.com/tobgu/pyrsistent
+ """
+ def __new__(cls, **kwargs): # Support *args?
+ result = super(PClass, cls).__new__(cls)
+ factory_fields = kwargs.pop('_factory_fields', None)
+ ignore_extra = kwargs.pop('ignore_extra', None)
+ missing_fields = []
+ invariant_errors = []
+ for name, field in cls._pclass_fields.items():
+ if name in kwargs:
+ if factory_fields is None or name in factory_fields:
+ if is_field_ignore_extra_complaint(PClass, field, ignore_extra):
+ value = field.factory(kwargs[name], ignore_extra=ignore_extra)
+ else:
+ value = field.factory(kwargs[name])
+ else:
+ value = kwargs[name]
+ _check_and_set_attr(cls, field, name, value, result, invariant_errors)
+ del kwargs[name]
+ elif field.initial is not PFIELD_NO_INITIAL:
+ initial = field.initial() if callable(field.initial) else field.initial
+ _check_and_set_attr(
+ cls, field, name, initial, result, invariant_errors)
+ elif field.mandatory:
+ missing_fields.append('{0}.{1}'.format(cls.__name__, name))
+
+ if invariant_errors or missing_fields:
+ raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed')
+
+ if kwargs:
+ raise AttributeError("'{0}' are not among the specified fields for {1}".format(
+ ', '.join(kwargs), cls.__name__))
+
+ check_global_invariants(result, cls._pclass_invariants)
+
+ result._pclass_frozen = True
+ return result
+
+ def set(self, *args, **kwargs):
+ """
+ Set a field in the instance. Returns a new instance with the updated value. The original instance remains
+ unmodified. Accepts key-value pairs or single string representing the field name and a value.
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=1)
+ >>> a2 = a.set(x=2)
+ >>> a3 = a.set('x', 3)
+ >>> a
+ AClass(x=1)
+ >>> a2
+ AClass(x=2)
+ >>> a3
+ AClass(x=3)
+ """
+ if args:
+ kwargs[args[0]] = args[1]
+
+ factory_fields = set(kwargs)
+
+ for key in self._pclass_fields:
+ if key not in kwargs:
+ value = getattr(self, key, _MISSING_VALUE)
+ if value is not _MISSING_VALUE:
+ kwargs[key] = value
+
+ return self.__class__(_factory_fields=factory_fields, **kwargs)
+
+ @classmethod
+ def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
+ """
+ Factory method. Will create a new PClass of the current type and assign the values
+ specified in kwargs.
+
+ :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
+ in the set of fields on the PClass.
+ """
+ if isinstance(kwargs, cls):
+ return kwargs
+
+ if ignore_extra:
+ kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs}
+
+ return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs)
+
+ def serialize(self, format=None):
+ """
+ Serialize the current PClass using custom serializer functions for fields where
+ such have been supplied.
+ """
+ result = {}
+ for name in self._pclass_fields:
+ value = getattr(self, name, _MISSING_VALUE)
+ if value is not _MISSING_VALUE:
+ result[name] = serialize(self._pclass_fields[name].serializer, format, value)
+
+ return result
+
+ def transform(self, *transformations):
+ """
+        Apply transformations to the current PClass. For more details on transformations see
+ the documentation for PMap. Transformations on PClasses do not support key matching
+ since the PClass is not a collection. Apart from that the transformations available
+ for other persistent types work as expected.
+ """
+ return transform(self, transformations)
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ for name in self._pclass_fields:
+ if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE):
+ return False
+
+ return True
+
+ return NotImplemented
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ # May want to optimize this by caching the hash somehow
+ return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields))
+
+ def __setattr__(self, key, value):
+ if getattr(self, '_pclass_frozen', False):
+ raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value))
+
+ super(PClass, self).__setattr__(key, value)
+
+ def __delattr__(self, key):
+ raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key))
+
+ def _to_dict(self):
+ result = {}
+ for key in self._pclass_fields:
+ value = getattr(self, key, _MISSING_VALUE)
+ if value is not _MISSING_VALUE:
+ result[key] = value
+
+ return result
+
+ def __repr__(self):
+ return "{0}({1})".format(self.__class__.__name__,
+ ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items()))
+
+ def __reduce__(self):
+ # Pickling support
+ data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key))
+ return _restore_pickle, (self.__class__, data,)
+
+ def evolver(self):
+ """
+ Returns an evolver for this object.
+ """
+ return _PClassEvolver(self, self._to_dict())
+
+ def remove(self, name):
+ """
+ Remove attribute given by name from the current instance. Raises AttributeError if the
+ attribute doesn't exist.
+ """
+ evolver = self.evolver()
+ del evolver[name]
+ return evolver.persistent()
+
+
+class _PClassEvolver(object):
+ __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields')
+
+ def __init__(self, original, initial_dict):
+ self._pclass_evolver_original = original
+ self._pclass_evolver_data = initial_dict
+ self._pclass_evolver_data_is_dirty = False
+ self._factory_fields = set()
+
+ def __getitem__(self, item):
+ return self._pclass_evolver_data[item]
+
+ def set(self, key, value):
+ if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value:
+ self._pclass_evolver_data[key] = value
+ self._factory_fields.add(key)
+ self._pclass_evolver_data_is_dirty = True
+
+ return self
+
+ def __setitem__(self, key, value):
+ self.set(key, value)
+
+ def remove(self, item):
+ if item in self._pclass_evolver_data:
+ del self._pclass_evolver_data[item]
+ self._factory_fields.discard(item)
+ self._pclass_evolver_data_is_dirty = True
+ return self
+
+ raise AttributeError(item)
+
+ def __delitem__(self, item):
+ self.remove(item)
+
+ def persistent(self):
+ if self._pclass_evolver_data_is_dirty:
+ return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields,
+ **self._pclass_evolver_data)
+
+ return self._pclass_evolver_original
+
+ def __setattr__(self, key, value):
+ if key not in self.__slots__:
+ self.set(key, value)
+ else:
+ super(_PClassEvolver, self).__setattr__(key, value)
+
+ def __getattr__(self, item):
+ return self[item]
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_pdeque.py b/contrib/python/pyrsistent/py3/pyrsistent/_pdeque.py
new file mode 100644
index 0000000000..0f25936af7
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_pdeque.py
@@ -0,0 +1,379 @@
+from collections.abc import Sequence, Hashable
+from itertools import islice, chain
+from numbers import Integral
+from typing import TypeVar, Generic
+from pyrsistent._plist import plist
+
+T_co = TypeVar('T_co', covariant=True)
+
+
+class PDeque(Generic[T_co]):
+ """
+ Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented
+ using two persistent lists.
+
+ A maximum length can be specified to create a bounded queue.
+
+ Fully supports the Sequence and Hashable protocols including indexing and slicing but
+ if you need fast random access go for the PVector instead.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to
+ create an instance.
+
+ Some examples:
+
+ >>> x = pdeque([1, 2, 3])
+ >>> x.left
+ 1
+ >>> x.right
+ 3
+ >>> x[0] == x.left
+ True
+ >>> x[-1] == x.right
+ True
+ >>> x.pop()
+ pdeque([1, 2])
+ >>> x.pop() == x[:-1]
+ True
+ >>> x.popleft()
+ pdeque([2, 3])
+ >>> x.append(4)
+ pdeque([1, 2, 3, 4])
+ >>> x.appendleft(4)
+ pdeque([4, 1, 2, 3])
+
+ >>> y = pdeque([1, 2, 3], maxlen=3)
+ >>> y.append(4)
+ pdeque([2, 3, 4], maxlen=3)
+ >>> y.appendleft(4)
+ pdeque([4, 1, 2], maxlen=3)
+ """
+ __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__')
+
+ def __new__(cls, left_list, right_list, length, maxlen=None):
+ instance = super(PDeque, cls).__new__(cls)
+ instance._left_list = left_list
+ instance._right_list = right_list
+ instance._length = length
+
+ if maxlen is not None:
+ if not isinstance(maxlen, Integral):
+ raise TypeError('An integer is required as maxlen')
+
+ if maxlen < 0:
+ raise ValueError("maxlen must be non-negative")
+
+ instance._maxlen = maxlen
+ return instance
+
+ @property
+ def right(self):
+ """
+        Rightmost element in the deque.
+ """
+ return PDeque._tip_from_lists(self._right_list, self._left_list)
+
+ @property
+ def left(self):
+ """
+        Leftmost element in the deque.
+ """
+ return PDeque._tip_from_lists(self._left_list, self._right_list)
+
+ @staticmethod
+ def _tip_from_lists(primary_list, secondary_list):
+ if primary_list:
+ return primary_list.first
+
+ if secondary_list:
+ return secondary_list[-1]
+
+ raise IndexError('No elements in empty deque')
+
+ def __iter__(self):
+ return chain(self._left_list, self._right_list.reverse())
+
+ def __repr__(self):
+ return "pdeque({0}{1})".format(list(self),
+ ', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '')
+ __str__ = __repr__
+
+ @property
+ def maxlen(self):
+ """
+ Maximum length of the queue.
+ """
+ return self._maxlen
+
+ def pop(self, count=1):
+ """
+ Return new deque with rightmost element removed. Popping the empty queue
+        will return the empty queue. An optional count can be given to indicate the
+ number of elements to pop. Popping with a negative index is the same as
+ popleft. Executes in amortized O(k) where k is the number of elements to pop.
+
+ >>> pdeque([1, 2]).pop()
+ pdeque([1])
+ >>> pdeque([1, 2]).pop(2)
+ pdeque([])
+ >>> pdeque([1, 2]).pop(-1)
+ pdeque([2])
+ """
+ if count < 0:
+ return self.popleft(-count)
+
+ new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count)
+ return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
+
+ def popleft(self, count=1):
+ """
+ Return new deque with leftmost element removed. Otherwise functionally
+ equivalent to pop().
+
+ >>> pdeque([1, 2]).popleft()
+ pdeque([2])
+ """
+ if count < 0:
+ return self.pop(-count)
+
+ new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count)
+ return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
+
+ @staticmethod
+ def _pop_lists(primary_list, secondary_list, count):
+ new_primary_list = primary_list
+ new_secondary_list = secondary_list
+
+ while count > 0 and (new_primary_list or new_secondary_list):
+ count -= 1
+ if new_primary_list.rest:
+ new_primary_list = new_primary_list.rest
+ elif new_primary_list:
+ new_primary_list = new_secondary_list.reverse()
+ new_secondary_list = plist()
+ else:
+ new_primary_list = new_secondary_list.reverse().rest
+ new_secondary_list = plist()
+
+ return new_primary_list, new_secondary_list
+
+ def _is_empty(self):
+ return not self._left_list and not self._right_list
+
+ def __lt__(self, other):
+ if not isinstance(other, PDeque):
+ return NotImplemented
+
+ return tuple(self) < tuple(other)
+
+ def __eq__(self, other):
+ if not isinstance(other, PDeque):
+ return NotImplemented
+
+ if tuple(self) == tuple(other):
+ # Sanity check of the length value since it is redundant (there for performance)
+ assert len(self) == len(other)
+ return True
+
+ return False
+
+ def __hash__(self):
+ return hash(tuple(self))
+
+ def __len__(self):
+ return self._length
+
+ def append(self, elem):
+ """
+ Return new deque with elem as the rightmost element.
+
+ >>> pdeque([1, 2]).append(3)
+ pdeque([1, 2, 3])
+ """
+ new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem)
+ return PDeque(new_left_list, new_right_list, new_length, self._maxlen)
+
+ def appendleft(self, elem):
+ """
+ Return new deque with elem as the leftmost element.
+
+ >>> pdeque([1, 2]).appendleft(3)
+ pdeque([3, 1, 2])
+ """
+ new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem)
+ return PDeque(new_left_list, new_right_list, new_length, self._maxlen)
+
+ def _append(self, primary_list, secondary_list, elem):
+ if self._maxlen is not None and self._length == self._maxlen:
+ if self._maxlen == 0:
+ return primary_list, secondary_list, 0
+ new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1)
+ return new_primary_list, new_secondary_list.cons(elem), self._length
+
+ return primary_list, secondary_list.cons(elem), self._length + 1
+
+ @staticmethod
+ def _extend_list(the_list, iterable):
+ count = 0
+ for elem in iterable:
+ the_list = the_list.cons(elem)
+ count += 1
+
+ return the_list, count
+
+ def _extend(self, primary_list, secondary_list, iterable):
+ new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable)
+ new_secondary_list = secondary_list
+ current_len = self._length + extend_count
+ if self._maxlen is not None and current_len > self._maxlen:
+ pop_len = current_len - self._maxlen
+ new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len)
+ extend_count -= pop_len
+
+ return new_primary_list, new_secondary_list, extend_count
+
+ def extend(self, iterable):
+ """
+ Return new deque with all elements of iterable appended to the right.
+
+ >>> pdeque([1, 2]).extend([3, 4])
+ pdeque([1, 2, 3, 4])
+ """
+ new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable)
+ return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)
+
+ def extendleft(self, iterable):
+ """
+ Return new deque with all elements of iterable appended to the left.
+
+ NB! The elements will be inserted in reverse order compared to the order in the iterable.
+
+ >>> pdeque([1, 2]).extendleft([3, 4])
+ pdeque([4, 3, 1, 2])
+ """
+ new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable)
+ return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)
+
+ def count(self, elem):
+ """
+ Return the number of elements equal to elem present in the queue
+
+ >>> pdeque([1, 2, 1]).count(1)
+ 2
+ """
+ return self._left_list.count(elem) + self._right_list.count(elem)
+
+ def remove(self, elem):
+ """
+ Return new deque with first element from left equal to elem removed. If no such element is found
+ a ValueError is raised.
+
+ >>> pdeque([2, 1, 2]).remove(2)
+ pdeque([1, 2])
+ """
+ try:
+ return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1)
+ except ValueError:
+ # Value not found in left list, try the right list
+ try:
+ # This is severely inefficient with a double reverse, should perhaps implement a remove_last()?
+ return PDeque(self._left_list,
+ self._right_list.reverse().remove(elem).reverse(), self._length - 1)
+ except ValueError as e:
+ raise ValueError('{0} not found in PDeque'.format(elem)) from e
+
+ def reverse(self):
+ """
+ Return reversed deque.
+
+ >>> pdeque([1, 2, 3]).reverse()
+ pdeque([3, 2, 1])
+
+ Also supports the standard python reverse function.
+
+ >>> reversed(pdeque([1, 2, 3]))
+ pdeque([3, 2, 1])
+ """
+ return PDeque(self._right_list, self._left_list, self._length)
+ __reversed__ = reverse
+
+ def rotate(self, steps):
+ """
+ Return deque with elements rotated steps steps.
+
+ >>> x = pdeque([1, 2, 3])
+ >>> x.rotate(1)
+ pdeque([3, 1, 2])
+ >>> x.rotate(-2)
+ pdeque([3, 1, 2])
+ """
+ popped_deque = self.pop(steps)
+ if steps >= 0:
+ return popped_deque.extendleft(islice(self.reverse(), steps))
+
+ return popped_deque.extend(islice(self, -steps))
+
+ def __reduce__(self):
+ # Pickling support
+ return pdeque, (list(self), self._maxlen)
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ if index.step is not None and index.step != 1:
+ # Too difficult, no structural sharing possible
+ return pdeque(tuple(self)[index], maxlen=self._maxlen)
+
+ result = self
+ if index.start is not None:
+ result = result.popleft(index.start % self._length)
+ if index.stop is not None:
+ result = result.pop(self._length - (index.stop % self._length))
+
+ return result
+
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index >= 0:
+ return self.popleft(index).left
+
+ shifted = len(self) + index
+ if shifted < 0:
+ raise IndexError(
+ "pdeque index {0} out of range {1}".format(index, len(self)),
+ )
+ return self.popleft(shifted).left
+
+ index = Sequence.index
+
+Sequence.register(PDeque)
+Hashable.register(PDeque)
+
+
+def pdeque(iterable=(), maxlen=None):
+ """
+ Return deque containing the elements of iterable. If maxlen is specified then
+    len(iterable) - maxlen elements are discarded from the left if len(iterable) > maxlen.
+
+ >>> pdeque([1, 2, 3])
+ pdeque([1, 2, 3])
+ >>> pdeque([1, 2, 3, 4], maxlen=2)
+ pdeque([3, 4], maxlen=2)
+ """
+ t = tuple(iterable)
+ if maxlen is not None:
+ t = t[-maxlen:]
+ length = len(t)
+ pivot = int(length / 2)
+ left = plist(t[:pivot])
+ right = plist(t[pivot:], reverse=True)
+ return PDeque(left, right, length, maxlen)
+
+def dq(*elements):
+ """
+ Return deque containing all arguments.
+
+ >>> dq(1, 2, 3)
+ pdeque([1, 2, 3])
+ """
+ return pdeque(elements)
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_plist.py b/contrib/python/pyrsistent/py3/pyrsistent/_plist.py
new file mode 100644
index 0000000000..322e15d649
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_plist.py
@@ -0,0 +1,316 @@
+from collections.abc import Sequence, Hashable
+from numbers import Integral
+from functools import reduce
+from typing import Generic, TypeVar
+
+T_co = TypeVar('T_co', covariant=True)
+
+
class _PListBuilder(object):
    """
    Internal helper that assembles a PList front-to-back, avoiding the
    final O(n) reversal that naive cons-based construction would need.

    It relies on mutating the not-yet-published tail node, which is safe
    because no reference to the list escapes before build() is called.
    """
    __slots__ = ('_head', '_tail')

    def __init__(self):
        self._head = _EMPTY_PLIST
        self._tail = _EMPTY_PLIST

    def _append(self, elem, constructor):
        node = constructor(elem)
        if self._tail:
            # Splice the new node onto the current tail in place.
            self._tail.rest = node
            self._tail = node
        else:
            # First element: head and tail are the same node.
            self._head = node
            self._tail = node

        return self._head

    def append_elem(self, elem):
        # Wrap the bare element in a terminal PList node.
        return self._append(elem, lambda value: PList(value, _EMPTY_PLIST))

    def append_plist(self, pl):
        # Attach an existing plist verbatim as the remainder of the result.
        return self._append(pl, lambda lst: lst)

    def build(self):
        return self._head
+
+
class _PListBase(object):
    """
    Shared implementation for PList nodes and the empty-list singleton.

    Subclasses provide ``first`` and ``rest`` attributes and a truth value:
    non-empty nodes are truthy, the empty list is falsy, which is what the
    traversal loops below rely on.
    """
    __slots__ = ('__weakref__',)

    # Selected implementations can be taken straight from the Sequence
    # class, other are less suitable. Especially those that work with
    # index lookups.
    count = Sequence.count
    index = Sequence.index

    def __reduce__(self):
        # Pickling support
        return plist, (list(self),)

    def __len__(self):
        """
        Return the length of the list, computed by traversing it.

        This is obviously O(n) but with the current implementation
        where a list is also a node the overhead of storing the length
        in every node would be quite significant.
        """
        return sum(1 for _ in self)

    def __repr__(self):
        return "plist({0})".format(list(self))
    __str__ = __repr__

    def cons(self, elem):
        """
        Return a new list with elem inserted as new head.

        >>> plist([1, 2]).cons(3)
        plist([3, 1, 2])
        """
        return PList(elem, self)

    def mcons(self, iterable):
        """
        Return a new list with all elements of iterable repeatedly cons:ed to the current list.
        NB! The elements will be inserted in the reverse order of the iterable.
        Runs in O(len(iterable)).

        >>> plist([1, 2]).mcons([3, 4])
        plist([4, 3, 1, 2])
        """
        head = self
        for elem in iterable:
            head = head.cons(elem)

        return head

    def reverse(self):
        """
        Return a reversed version of list. Runs in O(n) where n is the length of the list.

        >>> plist([1, 2, 3]).reverse()
        plist([3, 2, 1])

        Also supports the standard reversed function.

        >>> reversed(plist([1, 2, 3]))
        plist([3, 2, 1])
        """
        result = plist()
        head = self
        while head:
            result = result.cons(head.first)
            head = head.rest

        return result
    __reversed__ = reverse

    def split(self, index):
        """
        Split the list at position specified by index. Returns a tuple containing the
        list up until index and the list after the index. Runs in O(index).

        >>> plist([1, 2, 3, 4]).split(2)
        (plist([1, 2]), plist([3, 4]))
        """
        lb = _PListBuilder()
        right_list = self
        i = 0
        while right_list and i < index:
            lb.append_elem(right_list.first)
            right_list = right_list.rest
            i += 1

        if not right_list:
            # Just a small optimization in the cases where no split occurred
            return self, _EMPTY_PLIST

        return lb.build(), right_list

    def __iter__(self):
        li = self
        while li:
            yield li.first
            li = li.rest

    def __lt__(self, other):
        # Materializes both lists as tuples: O(n) time and memory.
        if not isinstance(other, _PListBase):
            return NotImplemented

        return tuple(self) < tuple(other)

    def __eq__(self, other):
        """
        Traverses the lists, checking equality of elements.

        This is an O(n) operation, but preserves the standard semantics of list equality.
        """
        if not isinstance(other, _PListBase):
            return NotImplemented

        self_head = self
        other_head = other
        while self_head and other_head:
            if not self_head.first == other_head.first:
                return False
            self_head = self_head.rest
            other_head = other_head.rest

        # Equal only when both lists are exhausted at the same time.
        return not self_head and not other_head

    def __getitem__(self, index):
        # Don't use this data structure if you plan to do a lot of indexing, it is
        # very inefficient! Use a PVector instead!

        if isinstance(index, slice):
            # Only a "drop from the front" slice can share structure.
            if index.start is not None and index.stop is None and (index.step is None or index.step == 1):
                return self._drop(index.start)

            # Take the easy way out for all other slicing cases, not much structural reuse possible anyway
            return plist(tuple(self)[index])

        if not isinstance(index, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

        if index < 0:
            # NB: O(n)!
            index += len(self)

        try:
            return self._drop(index).first
        except AttributeError as e:
            # Walked off the end onto the empty list, which has no `first`.
            raise IndexError("PList index out of range") from e

    def _drop(self, count):
        # Return the sublist starting `count` nodes in; O(count).
        if count < 0:
            raise IndexError("PList index out of range")

        head = self
        while count > 0:
            head = head.rest
            count -= 1

        return head

    def __hash__(self):
        return hash(tuple(self))

    def remove(self, elem):
        """
        Return new list with first element equal to elem removed. O(k) where k is the position
        of the element that is removed.

        Raises ValueError if no matching element is found.

        >>> plist([1, 2, 1]).remove(1)
        plist([2, 1])
        """

        builder = _PListBuilder()
        head = self
        while head:
            if head.first == elem:
                # Copied the prefix; share the untouched tail after elem.
                return builder.append_plist(head.rest)

            builder.append_elem(head.first)
            head = head.rest

        raise ValueError('{0} not found in PList'.format(elem))
+
+
class PList(Generic[T_co], _PListBase):
    """
    Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).
    Element access is O(k) where k is the position of the element in the list. Taking the
    length of the list is O(n).

    Fully supports the Sequence and Hashable protocols including indexing and slicing but
    if you need fast random access go for the PVector instead.

    Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to
    create an instance.

    Some examples:

    >>> x = plist([1, 2])
    >>> y = x.cons(3)
    >>> x
    plist([1, 2])
    >>> y
    plist([3, 1, 2])
    >>> y.first
    3
    >>> y.rest == x
    True
    >>> y[:2]
    plist([3, 1])
    """
    # Each instance is a single list node: its element plus the tail list.
    __slots__ = ('first', 'rest')

    def __new__(cls, first, rest):
        # Construction happens in __new__ so the node is complete on creation;
        # attributes live in __slots__ (no per-instance __dict__).
        instance = super(PList, cls).__new__(cls)
        instance.first = first
        instance.rest = rest
        return instance

    def __bool__(self):
        # A PList node always holds at least one element, hence truthy;
        # emptiness is represented by the _EmptyPList singleton instead.
        return True
    __nonzero__ = __bool__
+
+
+Sequence.register(PList)
+Hashable.register(PList)
+
+
class _EmptyPList(_PListBase):
    """Singleton sentinel type representing the empty persistent list."""
    __slots__ = ()

    def __bool__(self):
        # The empty list is falsy, mirroring built-in sequences.
        return False
    __nonzero__ = __bool__

    @property
    def first(self):
        raise AttributeError("Empty PList has no first")

    @property
    def rest(self):
        # The tail of the empty list is the empty list itself, which lets
        # traversal loops terminate without special-casing.
        return self
+
+
+Sequence.register(_EmptyPList)
+Hashable.register(_EmptyPList)
+
+_EMPTY_PLIST = _EmptyPList()
+
+
def plist(iterable=(), reverse=False):
    """
    Creates a new persistent list containing all elements of iterable.
    Optional parameter reverse specifies if the elements should be inserted in
    reverse order or not.

    >>> plist([1, 2, 3])
    plist([1, 2, 3])
    >>> plist([1, 2, 3], reverse=True)
    plist([3, 2, 1])
    """
    # Cons-ing builds the list back-to-front, so to preserve source order we
    # feed the elements in reversed order; with reverse=True they are fed as-is.
    items = iterable if reverse else reversed(list(iterable))
    result = _EMPTY_PLIST
    for item in items:
        result = result.cons(item)
    return result
+
+
def l(*elements):
    """
    Creates a new persistent list containing all arguments.

    >>> l(1, 2, 3)
    plist([1, 2, 3])
    """
    # The *elements tuple is already an iterable in argument order.
    return plist(iterable=elements)
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_pmap.py b/contrib/python/pyrsistent/py3/pyrsistent/_pmap.py
new file mode 100644
index 0000000000..0d82c4386a
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_pmap.py
@@ -0,0 +1,583 @@
+from collections.abc import Mapping, Hashable
+from itertools import chain
+from typing import Generic, TypeVar
+
+from pyrsistent._pvector import pvector
+from pyrsistent._transformations import transform
+
+KT = TypeVar('KT')
+VT_co = TypeVar('VT_co', covariant=True)
class PMapView:
    """View type for the persistent map/dict type `PMap`.

    Provides an equivalent of Python's built-in `dict_values` and `dict_items`
    types that result from expressions such as `{}.values()` and
    `{}.items()`. The equivalent for `{}.keys()` is absent because the keys are
    instead represented by a `PSet` object, which can be created in `O(1)` time.

    The `PMapView` class is overloaded by the `PMapValues` and `PMapItems`
    classes which handle the specific case of values and items, respectively

    Parameters
    ----------
    m : mapping
        The mapping/dict-like object of which a view is to be created. This
        should generally be a `PMap` object.
    """
    def __init__(self, m):
        # Make sure this is a persistent map
        if not isinstance(m, PMap):
            # We can convert mapping objects into pmap objects, I guess (but why?)
            if isinstance(m, Mapping):
                m = pmap(m)
            else:
                # Fixed: the message previously named a non-existent "PViewMap".
                raise TypeError("PMapView requires a Mapping object")
        # object.__setattr__ because __setattr__ below forbids mutation.
        object.__setattr__(self, '_map', m)

    def __len__(self):
        return len(self._map)

    def __setattr__(self, k, v):
        # Views are immutable, like the PMap they wrap.
        raise TypeError("%s is immutable" % (type(self),))

    def __reversed__(self):
        raise TypeError("Persistent maps are not reversible")
+
class PMapValues(PMapView):
    """View type for the values of the persistent map/dict type `PMap`.

    Provides an equivalent of Python's built-in `dict_values` type that result
    from expressions such as `{}.values()`. See also `PMapView`.

    Parameters
    ----------
    m : mapping
        The mapping/dict-like object of which a view is to be created. This
        should generally be a `PMap` object.
    """
    def __iter__(self):
        return self._map.itervalues()

    def __contains__(self, arg):
        # O(n): values are not indexed, so membership scans all of them.
        return arg in self._map.itervalues()

    # The str and repr methods imitate the dict_view style currently.
    def __str__(self):
        return f"pmap_values({list(iter(self))})"

    def __repr__(self):
        return f"pmap_values({list(iter(self))})"

    def __eq__(self, x):
        # For whatever reason, dict_values always seem to return False for ==
        # (probably it's not implemented), so we mimic that.
        if x is self: return True
        else: return False
+
class PMapItems(PMapView):
    """View type for the items of the persistent map/dict type `PMap`.

    Provides an equivalent of Python's built-in `dict_items` type that result
    from expressions such as `{}.items()`. See also `PMapView`.

    Parameters
    ----------
    m : mapping
        The mapping/dict-like object of which a view is to be created. This
        should generally be a `PMap` object.
    """
    def __iter__(self):
        return self._map.iteritems()

    def __contains__(self, arg):
        # Membership requires an unpackable (key, value) pair; anything else
        # is simply "not contained" rather than an error, like dict_items.
        try: (k,v) = arg
        except Exception: return False
        return k in self._map and self._map[k] == v

    # The str and repr methods imitate the dict_view style currently.
    def __str__(self):
        return f"pmap_items({list(iter(self))})"

    def __repr__(self):
        return f"pmap_items({list(iter(self))})"

    def __eq__(self, x):
        if x is self: return True
        elif not isinstance(x, type(self)): return False
        else: return self._map == x._map
+
class PMap(Generic[KT, VT_co]):
    """
    Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.

    Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
    create an instance.

    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to more
    closely resemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
    hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of
    the containing vectors size the map is reallocated to a vector of double the size. This is done to avoid
    excessive hash collisions.

    This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
    for example assignments and deletion of values.

    PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
    element access.

    Random access and insert is log32(n) where n is the size of the map.

    The following are examples of some common operations on persistent maps

    >>> m1 = m(a=1, b=3)
    >>> m2 = m1.set('c', 3)
    >>> m3 = m2.remove('a')
    >>> m1 == {'a': 1, 'b': 3}
    True
    >>> m2 == {'a': 1, 'b': 3, 'c': 3}
    True
    >>> m3 == {'b': 3, 'c': 3}
    True
    >>> m3['c']
    3
    >>> m3.c
    3
    """
    __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')

    def __new__(cls, size, buckets):
        # Construction happens in __new__; instances are complete on creation.
        self = super(PMap, cls).__new__(cls)
        self._size = size
        self._buckets = buckets
        return self

    @staticmethod
    def _get_bucket(buckets, key):
        # Open hashing: one (possibly None) list of (k, v) pairs per slot.
        index = hash(key) % len(buckets)
        bucket = buckets[index]
        return index, bucket

    @staticmethod
    def _getitem(buckets, key):
        # Static so the evolver can reuse it against its buckets evolver.
        _, bucket = PMap._get_bucket(buckets, key)
        if bucket:
            for k, v in bucket:
                if k == key:
                    return v

        raise KeyError(key)

    def __getitem__(self, key):
        return PMap._getitem(self._buckets, key)

    @staticmethod
    def _contains(buckets, key):
        _, bucket = PMap._get_bucket(buckets, key)
        if bucket:
            for k, _ in bucket:
                if k == key:
                    return True

            # Bucket exists but the key is not among its entries.
            return False

        # No bucket allocated for this slot at all.
        return False

    def __contains__(self, key):
        return self._contains(self._buckets, key)

    get = Mapping.get

    def __iter__(self):
        return self.iterkeys()

    # If this method is not defined, then reversed(pmap) will attempt to reverse
    # the map using len() and getitem, usually resulting in a mysterious
    # KeyError.
    def __reversed__(self):
        raise TypeError("Persistent maps are not reversible")

    def __getattr__(self, key):
        # Dot-notation access: m(a=1).a == 1. __getattr__ is only consulted for
        # missing attributes, so real attributes and methods take precedence.
        try:
            return self[key]
        except KeyError as e:
            raise AttributeError(
                "{0} has no attribute '{1}'".format(type(self).__name__, key)
            ) from e

    def iterkeys(self):
        for k, _ in self.iteritems():
            yield k

    # These are more efficient implementations compared to the original
    # methods that are based on the keys iterator and then calls the
    # accessor functions to access the value for the corresponding key
    def itervalues(self):
        for _, v in self.iteritems():
            yield v

    def iteritems(self):
        # Iterate buckets directly; skips unallocated (None) slots.
        for bucket in self._buckets:
            if bucket:
                for k, v in bucket:
                    yield k, v

    def values(self):
        return PMapValues(self)

    def keys(self):
        # Deferred import to avoid a circular dependency with _pset.
        from ._pset import PSet
        return PSet(self)

    def items(self):
        return PMapItems(self)

    def __len__(self):
        return self._size

    def __repr__(self):
        return 'pmap({0})'.format(str(dict(self)))

    def __eq__(self, other):
        if self is other:
            return True
        if not isinstance(other, Mapping):
            return NotImplemented
        if len(self) != len(other):
            return False
        if isinstance(other, PMap):
            # Cheap negative check first: differing memoized hashes mean the
            # maps cannot be equal.
            if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
                    and self._cached_hash != other._cached_hash):
                return False
            if self._buckets == other._buckets:
                return True
            return dict(self.iteritems()) == dict(other.iteritems())
        elif isinstance(other, dict):
            return dict(self.iteritems()) == other
        return dict(self.iteritems()) == dict(other.items())

    __ne__ = Mapping.__ne__

    def __lt__(self, other):
        raise TypeError('PMaps are not orderable')

    __le__ = __lt__
    __gt__ = __lt__
    __ge__ = __lt__

    def __str__(self):
        return self.__repr__()

    def __hash__(self):
        # Computed lazily and memoized; safe because the map is immutable.
        if not hasattr(self, '_cached_hash'):
            self._cached_hash = hash(frozenset(self.iteritems()))
        return self._cached_hash

    def set(self, key, val):
        """
        Return a new PMap with key and val inserted.

        >>> m1 = m(a=1, b=2)
        >>> m2 = m1.set('a', 3)
        >>> m3 = m1.set('c' ,4)
        >>> m1 == {'a': 1, 'b': 2}
        True
        >>> m2 == {'a': 3, 'b': 2}
        True
        >>> m3 == {'a': 1, 'b': 2, 'c': 4}
        True
        """
        return self.evolver().set(key, val).persistent()

    def remove(self, key):
        """
        Return a new PMap without the element specified by key. Raises KeyError if the element
        is not present.

        >>> m1 = m(a=1, b=2)
        >>> m1.remove('a')
        pmap({'b': 2})
        """
        return self.evolver().remove(key).persistent()

    def discard(self, key):
        """
        Return a new PMap without the element specified by key. Returns reference to itself
        if element is not present.

        >>> m1 = m(a=1, b=2)
        >>> m1.discard('a')
        pmap({'b': 2})
        >>> m1 is m1.discard('c')
        True
        """
        try:
            return self.remove(key)
        except KeyError:
            return self

    def update(self, *maps):
        """
        Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
        maps the rightmost (last) value is inserted.

        >>> m1 = m(a=1, b=2)
        >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35}) == {'a': 17, 'b': 2, 'c': 3, 'd': 35}
        True
        """
        return self.update_with(lambda l, r: r, *maps)

    def update_with(self, update_fn, *maps):
        """
        Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple
        maps the values will be merged using merge_fn going from left to right.

        >>> from operator import add
        >>> m1 = m(a=1, b=2)
        >>> m1.update_with(add, m(a=2)) == {'a': 3, 'b': 2}
        True

        The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.

        >>> m1 = m(a=1)
        >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
        pmap({'a': 1})
        """
        evolver = self.evolver()
        for map in maps:
            for key, value in map.items():
                # Merge only when the key already exists in the evolver.
                evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)

        return evolver.persistent()

    def __add__(self, other):
        return self.update(other)

    # PEP 584 style merge operator, same semantics as update().
    __or__ = __add__

    def __reduce__(self):
        # Pickling support
        return pmap, (dict(self),)

    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.

        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'

        When nothing has been transformed the original data structure is kept

        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """
        return transform(self, transformations)

    def copy(self):
        # Immutable, so a "copy" is the object itself.
        return self

    class _Evolver(object):
        # Mutable builder over a PMap: batches updates, then produces a new
        # persistent map via persistent().
        __slots__ = ('_buckets_evolver', '_size', '_original_pmap')

        def __init__(self, original_pmap):
            self._original_pmap = original_pmap
            self._buckets_evolver = original_pmap._buckets.evolver()
            self._size = original_pmap._size

        def __getitem__(self, key):
            return PMap._getitem(self._buckets_evolver, key)

        def __setitem__(self, key, val):
            self.set(key, val)

        def set(self, key, val):
            kv = (key, val)
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)
            # Load factor check (2/3); evaluated up front, applied below.
            reallocation_required = len(self._buckets_evolver) < 0.67 * self._size
            if bucket:
                for k, v in bucket:
                    if k == key:
                        if v is not val:
                            # Use `not (k2 == k)` rather than `!=` to avoid relying on a well implemented `__ne__`, see #268.
                            new_bucket = [(k2, v2) if not (k2 == k) else (k2, val) for k2, v2 in bucket]
                            self._buckets_evolver[index] = new_bucket

                        return self

                # Only check and perform reallocation if not replacing an existing value.
                # This is a performance tweak, see #247.
                if reallocation_required:
                    self._reallocate()
                    return self.set(key, val)

                new_bucket = [kv]
                new_bucket.extend(bucket)
                self._buckets_evolver[index] = new_bucket
                self._size += 1
            else:
                if reallocation_required:
                    self._reallocate()
                    return self.set(key, val)

                self._buckets_evolver[index] = [kv]
                self._size += 1

            return self

        def _reallocate(self):
            # Double the bucket vector and rehash every entry into it.
            new_size = 2 * len(self._buckets_evolver)
            new_list = new_size * [None]
            buckets = self._buckets_evolver.persistent()
            for k, v in chain.from_iterable(x for x in buckets if x):
                index = hash(k) % new_size
                if new_list[index]:
                    new_list[index].append((k, v))
                else:
                    new_list[index] = [(k, v)]

            # A reallocation should always result in a dirty buckets evolver to avoid
            # possible loss of elements when doing the reallocation.
            self._buckets_evolver = pvector().evolver()
            self._buckets_evolver.extend(new_list)

        def is_dirty(self):
            return self._buckets_evolver.is_dirty()

        def persistent(self):
            if self.is_dirty():
                self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())

            return self._original_pmap

        def __len__(self):
            return self._size

        def __contains__(self, key):
            return PMap._contains(self._buckets_evolver, key)

        def __delitem__(self, key):
            self.remove(key)

        def remove(self, key):
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)

            if bucket:
                # Use `not (k == key)` rather than `!=` to avoid relying on a well implemented `__ne__`, see #268.
                new_bucket = [(k, v) for (k, v) in bucket if not (k == key)]
                size_diff = len(bucket) - len(new_bucket)
                if size_diff > 0:
                    # Drop the bucket entirely when it becomes empty.
                    self._buckets_evolver[index] = new_bucket if new_bucket else None
                    self._size -= size_diff
                    return self

            raise KeyError('{0}'.format(key))

    def evolver(self):
        """
        Create a new evolver for this pmap. For a discussion on evolvers in general see the
        documentation for the pvector evolver.

        Create the evolver and perform various mutating updates to it:

        >>> m1 = m(a=1, b=2)
        >>> e = m1.evolver()
        >>> e['c'] = 3
        >>> len(e)
        3
        >>> del e['a']

        The underlying pmap remains the same:

        >>> m1 == {'a': 1, 'b': 2}
        True

        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.

        >>> m2 = e.persistent()
        >>> m2 == {'b': 2, 'c': 3}
        True

        The new pmap will share data with the original pmap in the same way that would have
        been done if only using operations on the pmap.
        """
        return self._Evolver(self)
+
+Mapping.register(PMap)
+Hashable.register(PMap)
+
+
def _turbo_mapping(initial, pre_size):
    """
    Construct a PMap from ``initial`` in a single pass, pre-sizing the bucket
    vector to ``pre_size`` (or to a guess derived from len(initial)).
    """
    if pre_size:
        size = pre_size
    else:
        try:
            size = 2 * len(initial) or 8
        except Exception:
            # Guess we can't figure out the length. Give up on length hinting,
            # we can always reallocate later.
            size = 8

    bucket_list = [None] * size

    if not isinstance(initial, Mapping):
        # Normalize to a dict first: that collapses duplicate keys up front,
        # so the bucket fill below never has to handle equal-key collisions.
        initial = dict(initial)

    for key, value in initial.items():
        slot = hash(key) % size
        existing = bucket_list[slot]
        if existing:
            existing.append((key, value))
        else:
            bucket_list[slot] = [(key, value)]

    return PMap(len(initial), pvector().extend(bucket_list))
+
+
+_EMPTY_PMAP = _turbo_mapping({}, 0)
+
+
def pmap(initial=None, pre_size=0):
    """
    Create new persistent map, inserts all elements in initial into the newly created map.
    The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
    may have a positive performance impact in the cases where you know beforehand that a large number of elements
    will be inserted into the map eventually since it will reduce the number of reallocations required.

    >>> pmap({'a': 13, 'b': 14}) == {'a': 13, 'b': 14}
    True
    """
    # None sentinel instead of a mutable `{}` default. The old default was
    # only ever read, but a shared mutable default is a latent hazard (B006);
    # behavior is unchanged for all callers.
    if initial is None:
        initial = {}

    if not initial and pre_size == 0:
        # Share the canonical empty map; safe since PMap is immutable.
        return _EMPTY_PMAP

    return _turbo_mapping(initial, pre_size)
+
+
def m(**kwargs):
    """
    Creates a new persistent map. Inserts all key value arguments into the newly created map.

    >>> m(a=13, b=14) == {'a': 13, 'b': 14}
    True
    """
    # Keyword arguments arrive as a plain dict, ready for the pmap factory.
    return pmap(initial=kwargs)
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_precord.py b/contrib/python/pyrsistent/py3/pyrsistent/_precord.py
new file mode 100644
index 0000000000..1ee8198a1a
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_precord.py
@@ -0,0 +1,167 @@
+from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
+from pyrsistent._field_common import (
+ set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
+)
+from pyrsistent._pmap import PMap, pmap
+
+
class _PRecordMeta(type):
    """
    Metaclass for PRecord subclasses: collects field and invariant
    declarations from the class body and caches derived data on the class.
    """
    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_precord_fields')
        store_invariants(dct, bases, '_precord_invariants', '__invariant__')

        # Names of all fields that must be present for an instance to be valid.
        dct['_precord_mandatory_fields'] = \
            set(name for name, field in dct['_precord_fields'].items() if field.mandatory)

        # Declared initial values; callables among them are invoked lazily
        # at instance creation time (see PRecord.__new__).
        dct['_precord_initial_values'] = \
            dict((k, field.initial) for k, field in dct['_precord_fields'].items() if field.initial is not PFIELD_NO_INITIAL)


        # Records store their data in PMap buckets, not per-instance dicts.
        dct['__slots__'] = ()

        return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct)
+
+
class PRecord(PMap, CheckedType, metaclass=_PRecordMeta):
    """
    A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
    from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
    access using subscript notation.

    More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent
    """
    def __new__(cls, **kwargs):
        # Hack total! If these two special attributes exist that means we can create
        # ourselves. Otherwise we need to go through the Evolver to create the structures
        # for us.
        if '_precord_size' in kwargs and '_precord_buckets' in kwargs:
            return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets'])

        factory_fields = kwargs.pop('_factory_fields', None)
        ignore_extra = kwargs.pop('_ignore_extra', False)

        # Start from the declared initial values (invoking callables so each
        # instance gets a fresh value), then let explicit kwargs override them.
        initial_values = kwargs
        if cls._precord_initial_values:
            initial_values = dict((k, v() if callable(v) else v)
                                  for k, v in cls._precord_initial_values.items())
            initial_values.update(kwargs)

        e = _PRecordEvolver(cls, pmap(pre_size=len(cls._precord_fields)), _factory_fields=factory_fields, _ignore_extra=ignore_extra)
        for k, v in initial_values.items():
            e[k] = v

        return e.persistent()

    def set(self, *args, **kwargs):
        """
        Set a field in the record. This set function differs slightly from that in the PMap
        class. First of all it accepts key-value pairs. Second it accepts multiple key-value
        pairs to perform one, atomic, update of multiple fields.
        """

        # The PRecord set() can accept kwargs since all fields that have been declared are
        # valid python identifiers. Also allow multiple fields to be set in one operation.
        if args:
            return super(PRecord, self).set(args[0], args[1])

        return self.update(kwargs)

    def evolver(self):
        """
        Returns an evolver of this object.
        """
        return _PRecordEvolver(self.__class__, self)

    def __repr__(self):
        return "{0}({1})".format(self.__class__.__name__,
                                 ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items()))

    @classmethod
    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
        """
        Factory method. Will create a new PRecord of the current type and assign the values
        specified in kwargs.

        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
                             in the set of fields on the PRecord.
        """
        # Already the right type: no copying needed, records are immutable.
        if isinstance(kwargs, cls):
            return kwargs

        if ignore_extra:
            kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs}

        return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)

    def serialize(self, format=None):
        """
        Serialize the current PRecord using custom serializer functions for fields where
        such have been supplied.
        """
        return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items())
+
+
class _PRecordEvolver(PMap._Evolver):
    """
    Evolver for PRecord: extends the PMap evolver with per-field type checks,
    factory coercion and invariant validation. Invariant errors and missing
    mandatory fields are accumulated and raised in persistent().
    """
    __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra')

    def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False):
        super(_PRecordEvolver, self).__init__(original_pmap)
        self._destination_cls = cls
        self._invariant_error_codes = []
        self._missing_fields = []
        self._factory_fields = _factory_fields
        self._ignore_extra = _ignore_extra

    def __setitem__(self, key, original_value):
        self.set(key, original_value)

    def set(self, key, original_value):
        # Only declared fields may be set; anything else is an AttributeError.
        field = self._destination_cls._precord_fields.get(key)
        if field:
            if self._factory_fields is None or field in self._factory_fields:
                try:
                    if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra):
                        value = field.factory(original_value, ignore_extra=self._ignore_extra)
                    else:
                        value = field.factory(original_value)
                except InvariantException as e:
                    # Collect rather than raise; persistent() reports them all.
                    self._invariant_error_codes += e.invariant_errors
                    self._missing_fields += e.missing_fields
                    return self
            else:
                value = original_value

            check_type(self._destination_cls, field, key, value)

            is_ok, error_code = field.invariant(value)
            if not is_ok:
                self._invariant_error_codes.append(error_code)

            return super(_PRecordEvolver, self).set(key, value)
        else:
            raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__))

    def persistent(self):
        cls = self._destination_cls
        is_dirty = self.is_dirty()
        pm = super(_PRecordEvolver, self).persistent()
        # Re-wrap the plain PMap in the record type when anything changed (or
        # when the evolver was seeded with a bare pmap at construction time).
        if is_dirty or not isinstance(pm, cls):
            result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size)
        else:
            result = pm

        if cls._precord_mandatory_fields:
            self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f
                                          in (cls._precord_mandatory_fields - set(result.keys())))

        if self._invariant_error_codes or self._missing_fields:
            raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields),
                                     'Field invariant failed')

        check_global_invariants(result, cls._precord_invariants)

        return result
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_pset.py b/contrib/python/pyrsistent/py3/pyrsistent/_pset.py
new file mode 100644
index 0000000000..6247607db5
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_pset.py
@@ -0,0 +1,230 @@
+from collections.abc import Set, Hashable
+import sys
+from typing import TypeVar, Generic
+from pyrsistent._pmap import pmap
+
+T_co = TypeVar('T_co', covariant=True)
+
+
class PSet(Generic[T_co]):
    """
    Persistent set implementation. Built on top of the persistent map. The set supports all operations
    in the Set protocol and is Hashable.

    Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset`
    to create an instance.

    Random access and insert is log32(n) where n is the size of the set.

    Some examples:

    >>> s = pset([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(2)
    >>> s
    pset([1, 2, 3])
    >>> s2
    pset([1, 2, 3, 4])
    >>> s3
    pset([1, 3, 4])
    """
    # _map is a PMap whose keys are the set elements; the mapped values are
    # always True and carry no information.
    __slots__ = ('_map', '__weakref__')

    def __new__(cls, m):
        self = super(PSet, cls).__new__(cls)
        self._map = m
        return self

    def __contains__(self, element):
        return element in self._map

    def __iter__(self):
        return iter(self._map)

    def __len__(self):
        return len(self._map)

    def __repr__(self):
        if not self:
            # Empty set: str(set()) is 'set()', producing 'pset()'.
            return 'p' + str(set(self))

        return 'pset([{0}])'.format(str(set(self))[1:-1])

    def __str__(self):
        return self.__repr__()

    def __hash__(self):
        # Delegates to the underlying pmap's hash.
        return hash(self._map)

    def __reduce__(self):
        # Pickling support
        return pset, (list(self),)

    @classmethod
    def _from_iterable(cls, it, pre_size=8):
        # Elements become pmap keys; duplicates collapse naturally.
        return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size))

    def add(self, element):
        """
        Return a new PSet with element added

        >>> s1 = s(1, 2)
        >>> s1.add(3)
        pset([1, 2, 3])
        """
        return self.evolver().add(element).persistent()

    def update(self, iterable):
        """
        Return a new PSet with elements in iterable added

        >>> s1 = s(1, 2)
        >>> s1.update([3, 4, 4])
        pset([1, 2, 3, 4])
        """
        e = self.evolver()
        for element in iterable:
            e.add(element)

        return e.persistent()

    def remove(self, element):
        """
        Return a new PSet with element removed. Raises KeyError if element is not present.

        >>> s1 = s(1, 2)
        >>> s1.remove(2)
        pset([1])
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()

        raise KeyError("Element '%s' not present in PSet" % repr(element))

    def discard(self, element):
        """
        Return a new PSet with element removed. Returns itself if element is not present.
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()

        return self

    class _Evolver(object):
        # Thin wrapper around the pmap evolver; see PSet.evolver() for usage.
        __slots__ = ('_original_pset', '_pmap_evolver')

        def __init__(self, original_pset):
            self._original_pset = original_pset
            self._pmap_evolver = original_pset._map.evolver()

        def add(self, element):
            self._pmap_evolver[element] = True
            return self

        def remove(self, element):
            del self._pmap_evolver[element]
            return self

        def is_dirty(self):
            return self._pmap_evolver.is_dirty()

        def persistent(self):
            # Return the original pset unchanged when no mutation happened.
            if not self.is_dirty():
                return self._original_pset

            return PSet(self._pmap_evolver.persistent())

        def __len__(self):
            return len(self._pmap_evolver)

    def copy(self):
        # Safe because the structure is immutable; no actual copying required.
        return self

    def evolver(self):
        """
        Create a new evolver for this pset. For a discussion on evolvers in general see the
        documentation for the pvector evolver.

        Create the evolver and perform various mutating updates to it:

        >>> s1 = s(1, 2, 3)
        >>> e = s1.evolver()
        >>> _ = e.add(4)
        >>> len(e)
        4
        >>> _ = e.remove(1)

        The underlying pset remains the same:

        >>> s1
        pset([1, 2, 3])

        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.

        >>> s2 = e.persistent()
        >>> s2
        pset([2, 3, 4])

        The new pset will share data with the original pset in the same way that would have
        been done if only using operations on the pset.
        """
        return PSet._Evolver(self)

    # All the operations and comparisons you would expect on a set.
    #
    # This is not very beautiful. If we avoid inheriting from PSet we can use the
    # __slots__ concepts (which requires a new style class) and hopefully save some memory.
    __le__ = Set.__le__
    __lt__ = Set.__lt__
    __gt__ = Set.__gt__
    __ge__ = Set.__ge__
    __eq__ = Set.__eq__
    __ne__ = Set.__ne__

    __and__ = Set.__and__
    __or__ = Set.__or__
    __sub__ = Set.__sub__
    __xor__ = Set.__xor__

    # Method-name aliases matching the built-in set API.
    issubset = __le__
    issuperset = __ge__
    union = __or__
    intersection = __and__
    difference = __sub__
    symmetric_difference = __xor__

    isdisjoint = Set.isdisjoint
+
# PSet implements the Set/Hashable protocols structurally (via the borrowed
# mixin methods above), so register it as a virtual subclass of the ABCs
# instead of inheriting from them.
Set.register(PSet)
Hashable.register(PSet)

# Shared singleton returned for all empty psets.
_EMPTY_PSET = PSet(pmap())
+
+
def pset(iterable=(), pre_size=8):
    """
    Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that
    used for :py:func:`pmap`.

    >>> s1 = pset([1, 2, 3, 2])
    >>> s1
    pset([1, 2, 3])
    """
    # Non-empty input builds a fresh set; otherwise hand back the shared singleton.
    if iterable:
        return PSet._from_iterable(iterable, pre_size=pre_size)

    return _EMPTY_PSET
+
+
def s(*elements):
    """
    Create a persistent set.

    Takes an arbitrary number of arguments to insert into the new set.

    >>> s1 = s(1, 2, 3, 2)
    >>> s1
    pset([1, 2, 3])
    """
    # The varargs tuple is handed straight to the pset factory.
    return pset(iterable=elements)
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_pvector.py b/contrib/python/pyrsistent/py3/pyrsistent/_pvector.py
new file mode 100644
index 0000000000..51d8a227ba
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_pvector.py
@@ -0,0 +1,715 @@
+from abc import abstractmethod, ABCMeta
+from collections.abc import Sequence, Hashable
+from numbers import Integral
+import operator
+from typing import TypeVar, Generic
+
+from pyrsistent._transformations import transform
+
+T_co = TypeVar('T_co', covariant=True)
+
+
+def _bitcount(val):
+ return bin(val).count("1")
+
# Number of children per trie node; 32 keeps the trie shallow (log32(n) depth).
BRANCH_FACTOR = 32
# Mask extracting a single level's child index (0b11111 for a branch factor of 32).
BIT_MASK = BRANCH_FACTOR - 1
# Bits consumed per trie level when indexing (5 for a branch factor of 32).
SHIFT = _bitcount(BIT_MASK)
+
+
def compare_pvector(v, other, operator):
    """Compare vector *v* with *other* (vector or plain sequence) using *operator*.

    NOTE: the *operator* parameter shadows the operator module at module scope;
    callers pass a callable such as operator.eq.
    """
    lhs = v.tolist()
    rhs = other.tolist() if isinstance(other, PVector) else other
    return operator(lhs, rhs)
+
+
+def _index_or_slice(index, stop):
+ if stop is None:
+ return index
+
+ return slice(index, stop)
+
+
class PythonPVector(object):
    """
    Support structure for PVector that implements structural sharing for vectors using a trie.
    """
    # _count: total number of elements.
    # _shift: bit shift of the trie's top level (depth * SHIFT).
    # _root: nested lists of BRANCH_FACTOR children forming the trie.
    # _tail: plain list holding the last (up to BRANCH_FACTOR) elements.
    # _tail_offset: index of the first element stored in the tail.
    __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')

    def __new__(cls, count, shift, root, tail):
        self = super(PythonPVector, cls).__new__(cls)
        self._count = count
        self._shift = shift
        self._root = root
        self._tail = tail

        # Derived attribute stored for performance
        self._tail_offset = self._count - len(self._tail)
        return self

    def __len__(self):
        return self._count

    def __getitem__(self, index):
        if isinstance(index, slice):
            # There are more conditions than the below where it would be OK to
            # return ourselves, implement those...
            if index.start is None and index.stop is None and index.step is None:
                return self

            # This is a bit nasty realizing the whole structure as a list before
            # slicing it but it is the fastest way I've found to date, and it's easy :-)
            return _EMPTY_PVECTOR.extend(self.tolist()[index])

        if index < 0:
            index += self._count

        return PythonPVector._node_for(self, index)[index & BIT_MASK]

    def __add__(self, other):
        return self.extend(other)

    def __repr__(self):
        return 'pvector({0})'.format(str(self.tolist()))

    def __str__(self):
        return self.__repr__()

    def __iter__(self):
        # This is kind of lazy and will produce some memory overhead but it is the fastest method
        # by far of those tried since it uses the speed of the built in python list directly.
        return iter(self.tolist())

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        # Identity fast path, then length check before the (expensive) element comparison.
        return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)

    def __gt__(self, other):
        return compare_pvector(self, other, operator.gt)

    def __lt__(self, other):
        return compare_pvector(self, other, operator.lt)

    def __ge__(self, other):
        return compare_pvector(self, other, operator.ge)

    def __le__(self, other):
        return compare_pvector(self, other, operator.le)

    def __mul__(self, times):
        if times <= 0 or self is _EMPTY_PVECTOR:
            return _EMPTY_PVECTOR

        if times == 1:
            return self

        return _EMPTY_PVECTOR.extend(times * self.tolist())

    __rmul__ = __mul__

    def _fill_list(self, node, shift, the_list):
        # Depth-first traversal appending all leaf elements to the_list in order.
        if shift:
            shift -= SHIFT
            for n in node:
                self._fill_list(n, shift, the_list)
        else:
            the_list.extend(node)

    def tolist(self):
        """
        The fastest way to convert the vector into a python list.
        """
        the_list = []
        self._fill_list(self._root, self._shift, the_list)
        the_list.extend(self._tail)
        return the_list

    def _totuple(self):
        """
        Returns the content as a python tuple.
        """
        return tuple(self.tolist())

    def __hash__(self):
        # Taking the easy way out again...
        return hash(self._totuple())

    def transform(self, *transformations):
        return transform(self, transformations)

    def __reduce__(self):
        # Pickling support
        return pvector, (self.tolist(),)

    def mset(self, *args):
        if len(args) % 2:
            raise TypeError("mset expected an even number of arguments")

        evolver = self.evolver()
        for i in range(0, len(args), 2):
            evolver[args[i]] = args[i+1]

        return evolver.persistent()

    class Evolver(object):
        # Mutable view over a PythonPVector; shares unmodified trie nodes with
        # the source vector and copies nodes lazily on first write
        # (tracked via _dirty_nodes keyed by id()).
        __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
                     '_extra_tail', '_cached_leafs', '_orig_pvector')

        def __init__(self, v):
            self._reset(v)

        def __getitem__(self, index):
            if not isinstance(index, Integral):
                raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

            if index < 0:
                index += self._count + len(self._extra_tail)

            if self._count <= index < self._count + len(self._extra_tail):
                # Appended-but-not-yet-persisted elements live in _extra_tail.
                return self._extra_tail[index - self._count]

            return PythonPVector._node_for(self, index)[index & BIT_MASK]

        def _reset(self, v):
            self._count = v._count
            self._shift = v._shift
            self._root = v._root
            self._tail = v._tail
            self._tail_offset = v._tail_offset
            self._dirty_nodes = {}
            self._cached_leafs = {}
            self._extra_tail = []
            self._orig_pvector = v

        def append(self, element):
            self._extra_tail.append(element)
            return self

        def extend(self, iterable):
            self._extra_tail.extend(iterable)
            return self

        def set(self, index, val):
            self[index] = val
            return self

        def __setitem__(self, index, val):
            if not isinstance(index, Integral):
                raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

            if index < 0:
                index += self._count + len(self._extra_tail)

            if 0 <= index < self._count:
                node = self._cached_leafs.get(index >> SHIFT)
                if node:
                    # Leaf already privately owned; mutate in place.
                    node[index & BIT_MASK] = val
                elif index >= self._tail_offset:
                    # Copy the tail on first write, then mutate the copy.
                    if id(self._tail) not in self._dirty_nodes:
                        self._tail = list(self._tail)
                        self._dirty_nodes[id(self._tail)] = True
                        self._cached_leafs[index >> SHIFT] = self._tail
                    self._tail[index & BIT_MASK] = val
                else:
                    self._root = self._do_set(self._shift, self._root, index, val)
            elif self._count <= index < self._count + len(self._extra_tail):
                self._extra_tail[index - self._count] = val
            elif index == self._count + len(self._extra_tail):
                # Setting one past the end is an append, matching PVector.set().
                self._extra_tail.append(val)
            else:
                raise IndexError("Index out of range: %s" % (index,))

        def _do_set(self, level, node, i, val):
            # Path-copying write: copy each node along the path unless this
            # evolver already owns it.
            if id(node) in self._dirty_nodes:
                ret = node
            else:
                ret = list(node)
                self._dirty_nodes[id(ret)] = True

            if level == 0:
                ret[i & BIT_MASK] = val
                self._cached_leafs[i >> SHIFT] = ret
            else:
                sub_index = (i >> level) & BIT_MASK  # >>>
                ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)

            return ret

        def delete(self, index):
            del self[index]
            return self

        def __delitem__(self, key):
            if self._orig_pvector:
                # All structural sharing bets are off, base evolver on _extra_tail only
                l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
                l.extend(self._extra_tail)
                self._reset(_EMPTY_PVECTOR)
                self._extra_tail = l

            del self._extra_tail[key]

        def persistent(self):
            result = self._orig_pvector
            if self.is_dirty():
                result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
                self._reset(result)

            return result

        def __len__(self):
            return self._count + len(self._extra_tail)

        def is_dirty(self):
            return bool(self._dirty_nodes or self._extra_tail)

    def evolver(self):
        return PythonPVector.Evolver(self)

    def set(self, i, val):
        # This method could be implemented by a call to mset() but doing so would cause
        # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
        # of PVector) so we're keeping this implementation for now.

        if not isinstance(i, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)

        if i < 0:
            i += self._count

        if 0 <= i < self._count:
            if i >= self._tail_offset:
                # Element lives in the tail; only the tail list needs copying.
                new_tail = list(self._tail)
                new_tail[i & BIT_MASK] = val
                return PythonPVector(self._count, self._shift, self._root, new_tail)

            return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)

        if i == self._count:
            return self.append(val)

        raise IndexError("Index out of range: %s" % (i,))

    def _do_set(self, level, node, i, val):
        # Immutable path-copy: copy every node from the root down to the leaf.
        ret = list(node)
        if level == 0:
            ret[i & BIT_MASK] = val
        else:
            sub_index = (i >> level) & BIT_MASK  # >>>
            ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)

        return ret

    @staticmethod
    def _node_for(pvector_like, i):
        # Works for both vectors and evolvers since both expose
        # _count/_tail_offset/_tail/_root/_shift.
        if 0 <= i < pvector_like._count:
            if i >= pvector_like._tail_offset:
                return pvector_like._tail

            node = pvector_like._root
            for level in range(pvector_like._shift, 0, -SHIFT):
                node = node[(i >> level) & BIT_MASK]  # >>>

            return node

        raise IndexError("Index out of range: %s" % (i,))

    def _create_new_root(self):
        new_shift = self._shift

        # Overflow root?
        if (self._count >> SHIFT) > (1 << self._shift):  # >>>
            new_root = [self._root, self._new_path(self._shift, self._tail)]
            new_shift += SHIFT
        else:
            new_root = self._push_tail(self._shift, self._root, self._tail)

        return new_root, new_shift

    def append(self, val):
        if len(self._tail) < BRANCH_FACTOR:
            new_tail = list(self._tail)
            new_tail.append(val)
            return PythonPVector(self._count + 1, self._shift, self._root, new_tail)

        # Full tail, push into tree
        new_root, new_shift = self._create_new_root()
        return PythonPVector(self._count + 1, new_shift, new_root, [val])

    def _new_path(self, level, node):
        # Build a chain of single-child nodes deep enough to hang node at level 0.
        if level == 0:
            return node

        return [self._new_path(level - SHIFT, node)]

    def _mutating_insert_tail(self):
        self._root, self._shift = self._create_new_root()
        self._tail = []

    def _mutating_fill_tail(self, offset, sequence):
        max_delta_len = BRANCH_FACTOR - len(self._tail)
        delta = sequence[offset:offset + max_delta_len]
        self._tail.extend(delta)
        delta_len = len(delta)
        self._count += delta_len
        return offset + delta_len

    def _mutating_extend(self, sequence):
        offset = 0
        sequence_len = len(sequence)
        while offset < sequence_len:
            offset = self._mutating_fill_tail(offset, sequence)
            if len(self._tail) == BRANCH_FACTOR:
                self._mutating_insert_tail()

        self._tail_offset = self._count - len(self._tail)

    def extend(self, obj):
        # Mutates the new vector directly for efficiency but that's only an
        # implementation detail, once it is returned it should be considered immutable
        l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
        if l:
            new_vector = self.append(l[0])
            new_vector._mutating_extend(l[1:])
            return new_vector

        return self

    def _push_tail(self, level, parent, tail_node):
        """
        if parent is leaf, insert node,
        else does it map to an existing child? ->
             node_to_insert = push node one more level
        else alloc new path

        return node_to_insert placed in copy of parent
        """
        ret = list(parent)

        if level == SHIFT:
            ret.append(tail_node)
            return ret

        sub_index = ((self._count - 1) >> level) & BIT_MASK  # >>>
        if len(parent) > sub_index:
            ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
            return ret

        ret.append(self._new_path(level - SHIFT, tail_node))
        return ret

    def index(self, value, *args, **kwargs):
        return self.tolist().index(value, *args, **kwargs)

    def count(self, value):
        return self.tolist().count(value)

    def delete(self, index, stop=None):
        # Realize as a list, delete, and rebuild; no structural sharing survives.
        l = self.tolist()
        del l[_index_or_slice(index, stop)]
        return _EMPTY_PVECTOR.extend(l)

    def remove(self, value):
        l = self.tolist()
        l.remove(value)
        return _EMPTY_PVECTOR.extend(l)
+
class PVector(Generic[T_co],metaclass=ABCMeta):
    """
    Persistent vector implementation. Meant as a replacement for the cases where you would normally
    use a Python list.

    Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
    create an instance.

    Heavily influenced by the persistent vector available in Clojure. Initially this was more or
    less just a port of the Java code for the Clojure vector. It has since been modified and to
    some extent optimized for usage in Python.

    The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No
    updates are done to the original vector. Structural sharing between vectors are applied where possible to save
    space and to avoid making complete copies.

    This structure corresponds most closely to the built in list type and is intended as a replacement. Where the
    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
    for example assignments.

    The PVector implements the Sequence protocol and is Hashable.

    Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.

    The following are examples of some common operations on persistent vectors:

    >>> p = v(1, 2, 3)
    >>> p2 = p.append(4)
    >>> p3 = p2.extend([5, 6, 7])
    >>> p
    pvector([1, 2, 3])
    >>> p2
    pvector([1, 2, 3, 4])
    >>> p3
    pvector([1, 2, 3, 4, 5, 6, 7])
    >>> p3[5]
    6
    >>> p.set(1, 99)
    pvector([1, 99, 3])
    >>>
    """

    @abstractmethod
    def __len__(self):
        """
        >>> len(v(1, 2, 3))
        3
        """

    @abstractmethod
    def __getitem__(self, index):
        """
        Get value at index. Full slicing support.

        >>> v1 = v(5, 6, 7, 8)
        >>> v1[2]
        7
        >>> v1[1:3]
        pvector([6, 7])
        """

    @abstractmethod
    def __add__(self, other):
        """
        >>> v1 = v(1, 2)
        >>> v2 = v(3, 4)
        >>> v1 + v2
        pvector([1, 2, 3, 4])
        """

    @abstractmethod
    def __mul__(self, times):
        """
        >>> v1 = v(1, 2)
        >>> 3 * v1
        pvector([1, 2, 1, 2, 1, 2])
        """

    @abstractmethod
    def __hash__(self):
        """
        >>> v1 = v(1, 2, 3)
        >>> v2 = v(1, 2, 3)
        >>> hash(v1) == hash(v2)
        True
        """

    @abstractmethod
    def evolver(self):
        """
        Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
        with "transaction like" semantics. No part of the underlying vector is updated, it is still
        fully immutable. Furthermore multiple evolvers created from the same pvector do not
        interfere with each other.

        You may want to use an evolver instead of working directly with the pvector in the
        following cases:

        * Multiple updates are done to the same vector and the intermediate results are of no
          interest. In this case using an evolver may be a more efficient and easier to work with.
        * You need to pass a vector into a legacy function or a function that you have no control
          over which performs in place mutations of lists. In this case pass an evolver instance
          instead and then create a new pvector from the evolver once the function returns.

        The following example illustrates a typical workflow when working with evolvers. It also
        displays most of the API (which I kept small by design, you should not be tempted to
        use evolvers in excess ;-)).

        Create the evolver and perform various mutating updates to it:

        >>> v1 = v(1, 2, 3, 4, 5)
        >>> e = v1.evolver()
        >>> e[1] = 22
        >>> _ = e.append(6)
        >>> _ = e.extend([7, 8, 9])
        >>> e[8] += 1
        >>> len(e)
        9

        The underlying pvector remains the same:

        >>> v1
        pvector([1, 2, 3, 4, 5])

        The changes are kept in the evolver. An updated pvector can be created using the
        persistent() function on the evolver.

        >>> v2 = e.persistent()
        >>> v2
        pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])

        The new pvector will share data with the original pvector in the same way that would have
        been done if only using operations on the pvector.
        """

    @abstractmethod
    def mset(self, *args):
        """
        Return a new vector with elements in specified positions replaced by values (multi set).

        Elements on even positions in the argument list are interpreted as indexes while
        elements on odd positions are considered values.

        >>> v1 = v(1, 2, 3)
        >>> v1.mset(0, 11, 2, 33)
        pvector([11, 2, 33])
        """

    @abstractmethod
    def set(self, i, val):
        """
        Return a new vector with element at position i replaced with val. The original vector remains unchanged.

        Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
        result in an IndexError.

        >>> v1 = v(1, 2, 3)
        >>> v1.set(1, 4)
        pvector([1, 4, 3])
        >>> v1.set(3, 4)
        pvector([1, 2, 3, 4])
        >>> v1.set(-1, 4)
        pvector([1, 2, 4])
        """

    @abstractmethod
    def append(self, val):
        """
        Return a new vector with val appended.

        >>> v1 = v(1, 2)
        >>> v1.append(3)
        pvector([1, 2, 3])
        """

    @abstractmethod
    def extend(self, obj):
        """
        Return a new vector with all values in obj appended to it. Obj may be another
        PVector or any other Iterable.

        >>> v1 = v(1, 2, 3)
        >>> v1.extend([4, 5])
        pvector([1, 2, 3, 4, 5])
        """

    @abstractmethod
    def index(self, value, *args, **kwargs):
        """
        Return first index of value. Additional indexes may be supplied to limit the search to a
        sub range of the vector.

        >>> v1 = v(1, 2, 3, 4, 3)
        >>> v1.index(3)
        2
        >>> v1.index(3, 3, 5)
        4
        """

    @abstractmethod
    def count(self, value):
        """
        Return the number of times that value appears in the vector.

        >>> v1 = v(1, 4, 3, 4)
        >>> v1.count(4)
        2
        """

    @abstractmethod
    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.

        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'

        When nothing has been transformed the original data structure is kept

        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """

    @abstractmethod
    def delete(self, index, stop=None):
        """
        Delete a portion of the vector by index or range.

        >>> v1 = v(1, 2, 3, 4, 5)
        >>> v1.delete(1)
        pvector([1, 3, 4, 5])
        >>> v1.delete(1, 3)
        pvector([1, 4, 5])
        """

    @abstractmethod
    def remove(self, value):
        """
        Remove the first occurrence of a value from the vector.

        >>> v1 = v(1, 2, 3, 2, 1)
        >>> v2 = v1.remove(1)
        >>> v2
        pvector([2, 3, 2, 1])
        >>> v2.remove(1)
        pvector([2, 3, 2])
        """
+
+
# Shared singleton returned for all empty vectors.
_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
# Register virtual subclass relationships instead of inheriting, so the C
# extension implementation (if used) also satisfies isinstance checks.
PVector.register(PythonPVector)
Sequence.register(PVector)
Hashable.register(PVector)
+
def python_pvector(iterable=()):
    """
    Create a new persistent vector containing the elements in iterable.

    >>> v1 = pvector([1, 2, 3])
    >>> v1
    pvector([1, 2, 3])
    """
    # Building on the empty singleton preserves structural sharing semantics.
    empty = _EMPTY_PVECTOR
    return empty.extend(iterable)
+
try:
    # Use the C extension as underlying trie implementation if it is available
    # unless the PYRSISTENT_NO_C_EXTENSION environment variable forces the
    # pure-Python implementation.
    import os
    if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
        pvector = python_pvector
    else:
        from pvectorc import pvector
        # Register the C implementation's concrete type as a PVector too.
        PVector.register(type(pvector()))
except ImportError:
    pvector = python_pvector
+
+
def v(*elements):
    """
    Create a new persistent vector containing all parameters to this function.

    >>> v1 = v(1, 2, 3)
    >>> v1
    pvector([1, 2, 3])
    """
    # Hand the collected arguments to whichever pvector factory is active
    # (C extension or pure Python).
    return pvector(list(elements))
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_toolz.py b/contrib/python/pyrsistent/py3/pyrsistent/_toolz.py
new file mode 100644
index 0000000000..0bf2cb1449
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_toolz.py
@@ -0,0 +1,83 @@
+"""
+Functionality copied from the toolz package to avoid having
+to add toolz as a dependency.
+
+See https://github.com/pytoolz/toolz/.
+
+toolz is released under BSD licence. Below is the licence text
+from toolz as it appeared when copying the code.
+
+--------------------------------------------------------------
+
+Copyright (c) 2013 Matthew Rocklin
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of toolz nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+"""
+import operator
+from functools import reduce
+
+
def get_in(keys, coll, default=None, no_default=False):
    """
    NB: This is a straight copy of the get_in implementation found in
    the toolz library (https://github.com/pytoolz/toolz/). It works
    with persistent data structures as well as the corresponding
    datastructures from the stdlib.

    Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys.

    If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless
    ``no_default`` is specified, then it raises KeyError or IndexError.

    ``get_in`` is a generalization of ``operator.getitem`` for nested data
    structures such as dictionaries and lists.
    >>> from pyrsistent import freeze
    >>> transaction = freeze({'name': 'Alice',
    ...                       'purchase': {'items': ['Apple', 'Orange'],
    ...                                    'costs': [0.50, 1.25]},
    ...                       'credit card': '5555-1234-1234-1234'})
    >>> get_in(['purchase', 'items', 0], transaction)
    'Apple'
    >>> get_in(['name'], transaction)
    'Alice'
    >>> get_in(['purchase', 'total'], transaction)
    >>> get_in(['purchase', 'items', 'apple'], transaction)
    >>> get_in(['purchase', 'items', 10], transaction)
    >>> get_in(['purchase', 'total'], transaction, 0)
    0
    >>> get_in(['y'], {}, no_default=True)
    Traceback (most recent call last):
        ...
    KeyError: 'y'
    """
    # Walk the key path one indexing step at a time; TypeError covers
    # indexing into a non-subscriptable intermediate value.
    current = coll
    try:
        for key in keys:
            current = current[key]
    except (KeyError, IndexError, TypeError):
        if no_default:
            raise
        return default
    return current
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/_transformations.py b/contrib/python/pyrsistent/py3/pyrsistent/_transformations.py
new file mode 100644
index 0000000000..6ef747f07e
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/_transformations.py
@@ -0,0 +1,143 @@
+import re
+try:
+ from inspect import Parameter, signature
+except ImportError:
+ signature = None
+ from inspect import getfullargspec
+
+
# Unique marker distinguishing "key not present" from a stored None value.
_EMPTY_SENTINEL = object()
+
+
def inc(x):
    """ Add one to the current value """
    return 1 + x
+
+
def dec(x):
    """ Subtract one from the current value """
    result = x - 1
    return result
+
+
def discard(evolver, key):
    """ Discard the element and returns a structure without the discarded elements """
    try:
        evolver.__delitem__(key)
    except KeyError:
        # Nothing stored under key; discarding is a no-op.
        pass
+
+
+# Matchers
def rex(expr):
    """ Regular expression matcher to use together with transform functions """
    pattern = re.compile(expr)

    def _matcher(key):
        # Non-string keys never match; otherwise anchor-match at the start.
        return isinstance(key, str) and pattern.match(key)

    return _matcher
+
+
def ny(_):
    """ Matcher that matches any value """
    # Wildcard predicate for transform paths; the key argument is ignored.
    return True
+
+
+# Support functions
+def _chunks(l, n):
+ for i in range(0, len(l), n):
+ yield l[i:i + n]
+
+
def transform(structure, transformations):
    """Apply consecutive (path, command) pairs from *transformations* to *structure*."""
    result = structure
    for path, command in _chunks(transformations, 2):
        result = _do_to_path(result, path, command)
    return result
+
+
def _do_to_path(structure, path, command):
    """Recursively descend *path* in *structure*, applying *command* at the leaves."""
    if path:
        kvs = _get_keys_and_values(structure, path[0])
        return _update_structure(structure, kvs, path[1:], command)

    # End of the path: call the command, or substitute it as a literal value.
    return command(structure) if callable(command) else command
+
+
+def _items(structure):
+ try:
+ return structure.items()
+ except AttributeError:
+ # Support wider range of structures by adding a transform_items() or similar?
+ return list(enumerate(structure))
+
+
+def _get(structure, key, default):
+ try:
+ if hasattr(structure, '__getitem__'):
+ return structure[key]
+
+ return getattr(structure, key)
+
+ except (IndexError, KeyError):
+ return default
+
+
def _get_keys_and_values(structure, key_spec):
    # Resolve one path element to the list of (key, value) pairs it selects.
    # key_spec may be a literal key or a predicate callable.
    if callable(key_spec):
        # Support predicates as callable objects in the path
        arity = _get_arity(key_spec)
        if arity == 1:
            # Unary predicates are called with the "key" of the path
            # - eg a key in a mapping, an index in a sequence.
            return [(k, v) for k, v in _items(structure) if key_spec(k)]
        elif arity == 2:
            # Binary predicates are called with the key and the corresponding
            # value.
            return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
        else:
            # Other arities are an error.
            raise ValueError(
                "callable in transform path must take 1 or 2 arguments"
            )

    # Non-callables are used as-is as a key.
    # _EMPTY_SENTINEL flags keys missing from the structure so the caller can
    # decide whether to expand the structure or skip the key.
    return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
+
+
# Pick an arity probe depending on whether inspect.signature is available
# (see the fallback import at the top of this module).
if signature is None:
    def _get_arity(f):
        # Count positional parameters without defaults.
        argspec = getfullargspec(f)
        return len(argspec.args) - len(argspec.defaults or ())
else:
    def _get_arity(f):
        # Count required positional parameters (no default, positional kind).
        return sum(
            1
            for p
            in signature(f).parameters.values()
            if p.default is Parameter.empty
            and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
        )
+
+
def _update_structure(structure, kvs, path, command):
    # Apply *command* along the remaining *path* for every selected (key, value)
    # pair, writing results back through an evolver over *structure*.
    from pyrsistent._pmap import pmap
    e = structure.evolver()
    if not path and command is discard:
        # Do this in reverse to avoid index problems with vectors. See #92.
        for k, v in reversed(kvs):
            discard(e, k)
    else:
        for k, v in kvs:
            is_empty = False
            if v is _EMPTY_SENTINEL:
                if command is discard:
                    # If nothing there when discarding just move on, do not introduce new nodes
                    continue

                # Allow expansion of structure but make sure to cover the case
                # when an empty pmap is added as leaf node. See #154.
                is_empty = True
                v = pmap()

            result = _do_to_path(v, path, command)
            if result is not v or is_empty:
                e[k] = result

    return e.persistent()
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/py.typed b/contrib/python/pyrsistent/py3/pyrsistent/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/py.typed
diff --git a/contrib/python/pyrsistent/py3/pyrsistent/typing.py b/contrib/python/pyrsistent/py3/pyrsistent/typing.py
new file mode 100644
index 0000000000..c97f9db520
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/pyrsistent/typing.py
@@ -0,0 +1,82 @@
+"""Helpers for use with type annotation.
+
+Use the empty classes in this module when annotating the types of Pyrsistent
+objects, instead of using the actual collection class.
+
+For example,
+
+ from pyrsistent import pvector
+ from pyrsistent.typing import PVector
+
+ myvector: PVector[str] = pvector(['a', 'b', 'c'])
+
+"""
+from __future__ import absolute_import
+
+try:
+ from typing import Container
+ from typing import Hashable
+ from typing import Generic
+ from typing import Iterable
+ from typing import Mapping
+ from typing import Sequence
+ from typing import Sized
+ from typing import TypeVar
+
+ __all__ = [
+ 'CheckedPMap',
+ 'CheckedPSet',
+ 'CheckedPVector',
+ 'PBag',
+ 'PDeque',
+ 'PList',
+ 'PMap',
+ 'PSet',
+ 'PVector',
+ ]
+
+ T = TypeVar('T')
+ T_co = TypeVar('T_co', covariant=True)
+ KT = TypeVar('KT')
+ VT = TypeVar('VT')
+ VT_co = TypeVar('VT_co', covariant=True)
+
+ class CheckedPMap(Mapping[KT, VT_co], Hashable):
+ pass
+
+ # PSet.add and PSet.discard have different type signatures than that of Set.
+ class CheckedPSet(Generic[T_co], Hashable):
+ pass
+
+ class CheckedPVector(Sequence[T_co], Hashable):
+ pass
+
+ class PBag(Container[T_co], Iterable[T_co], Sized, Hashable):
+ pass
+
+ class PDeque(Sequence[T_co], Hashable):
+ pass
+
+ class PList(Sequence[T_co], Hashable):
+ pass
+
+ class PMap(Mapping[KT, VT_co], Hashable):
+ pass
+
+ # PSet.add and PSet.discard have different type signatures than that of Set.
+ class PSet(Generic[T_co], Hashable):
+ pass
+
+ class PVector(Sequence[T_co], Hashable):
+ pass
+
+ class PVectorEvolver(Generic[T]):
+ pass
+
+ class PMapEvolver(Generic[KT, VT]):
+ pass
+
+ class PSetEvolver(Generic[T]):
+ pass
+except ImportError:
+ pass
diff --git a/contrib/python/pyrsistent/py3/tests/bag_test.py b/contrib/python/pyrsistent/py3/tests/bag_test.py
new file mode 100644
index 0000000000..fb80603108
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/bag_test.py
@@ -0,0 +1,150 @@
+import pytest
+
+from pyrsistent import b, pbag
+
+
+def test_literalish_works():
+ assert b(1, 2) == pbag([1, 2])
+
+def test_empty_bag():
+ """
+ creating an empty pbag returns a singleton.
+
+ Note that this should NOT be relied upon in application code.
+ """
+ assert b() is b()
+
+def test_supports_hash():
+ assert hash(b(1, 2)) == hash(b(2, 1))
+
+def test_hash_in_dict():
+ assert {b(1,2,3,3): "hello"}[b(3,3,2,1)] == "hello"
+
+def test_empty_truthiness():
+ assert b(1)
+ assert not b()
+
+
+def test_repr_empty():
+ assert repr(b()) == 'pbag([])'
+
+def test_repr_elements():
+ assert repr(b(1, 2)) in ('pbag([1, 2])', 'pbag([2, 1])')
+
+
+def test_add_empty():
+ assert b().add(1) == b(1)
+
+def test_remove_final():
+ assert b().add(1).remove(1) == b()
+
+def test_remove_nonfinal():
+ assert b().add(1).add(1).remove(1) == b(1)
+
+def test_remove_nonexistent():
+ with pytest.raises(KeyError) as excinfo:
+ b().remove(1)
+ assert str(excinfo.exconly()) == 'KeyError: 1'
+
+
+def test_eq_empty():
+ assert b() == b()
+
+def test_neq():
+ assert b(1) != b()
+
+def test_eq_same_order():
+ assert b(1, 2, 1) == b(1, 2, 1)
+
+def test_eq_different_order():
+ assert b(2, 1, 2) == b(1, 2, 2)
+
+
+def test_count_non_existent():
+ assert b().count(1) == 0
+
+def test_count_unique():
+ assert b(1).count(1) == 1
+
+def test_count_duplicate():
+ assert b(1, 1).count(1) == 2
+
+
+def test_length_empty():
+ assert len(b()) == 0
+
+def test_length_unique():
+ assert len(b(1)) == 1
+
+def test_length_duplicates():
+ assert len(b(1, 1)) == 2
+
+def test_length_multiple_elements():
+ assert len(b(1, 1, 2, 3)) == 4
+
+
+def test_iter_duplicates():
+ assert list(b(1, 1)) == [1, 1]
+
+def test_iter_multiple_elements():
+ assert list(b(1, 2, 2)) in ([1, 2, 2], [2, 2, 1])
+
+def test_contains():
+ assert 1 in b(1)
+
+def test_not_contains():
+ assert 1 not in b(2)
+
+def test_add():
+ assert b(3, 3, 3, 2, 2, 1) + b(4, 3, 2, 1) == b(4,
+ 3, 3, 3, 3,
+ 2, 2, 2,
+ 1, 1)
+
+def test_sub():
+ assert b(1, 2, 3, 3) - b(3, 4) == b(1, 2, 3)
+
+def test_or():
+ assert b(1, 2, 2, 3, 3, 3) | b(1, 2, 3, 4, 4) == b(1,
+ 2, 2,
+ 3, 3, 3,
+ 4, 4)
+
+def test_and():
+ assert b(1, 2, 2, 3, 3, 3) & b(2, 3, 3, 4) == b(2, 3, 3)
+
+
+def test_pbag_is_unorderable():
+ with pytest.raises(TypeError):
+ _ = b(1) < b(2) # type: ignore
+
+ with pytest.raises(TypeError):
+ _ = b(1) <= b(2) # type: ignore
+
+ with pytest.raises(TypeError):
+ _ = b(1) > b(2) # type: ignore
+
+ with pytest.raises(TypeError):
+ _ = b(1) >= b(2) # type: ignore
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(b(1))
+
+
+def test_update():
+ assert pbag([1, 2, 2]).update([3, 3, 4]) == pbag([1, 2, 2, 3, 3, 4])
+
+
+def test_update_no_elements():
+ b = pbag([1, 2, 2])
+ assert b.update([]) is b
+
+
+def test_iterable():
+ """
+ PBags can be created from iterables even though they can't be len() hinted.
+ """
+
+ assert pbag(iter("a")) == pbag(iter("a"))
diff --git a/contrib/python/pyrsistent/py3/tests/checked_map_test.py b/contrib/python/pyrsistent/py3/tests/checked_map_test.py
new file mode 100644
index 0000000000..b0ffbceecf
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/checked_map_test.py
@@ -0,0 +1,152 @@
+import pickle
+import pytest
+from pyrsistent import CheckedPMap, InvariantException, PMap, CheckedType, CheckedPSet, CheckedPVector, \
+ CheckedKeyTypeError, CheckedValueTypeError
+
+
+class FloatToIntMap(CheckedPMap):
+ __key_type__ = float
+ __value_type__ = int
+ __invariant__ = lambda key, value: (int(key) == value, 'Invalid mapping')
+
+def test_instantiate():
+ x = FloatToIntMap({1.25: 1, 2.5: 2})
+
+ assert dict(x.items()) == {1.25: 1, 2.5: 2}
+ assert isinstance(x, FloatToIntMap)
+ assert isinstance(x, PMap)
+ assert isinstance(x, CheckedType)
+
+def test_instantiate_empty():
+ x = FloatToIntMap()
+
+ assert dict(x.items()) == {}
+ assert isinstance(x, FloatToIntMap)
+
+def test_set():
+ x = FloatToIntMap()
+ x2 = x.set(1.0, 1)
+
+ assert x2[1.0] == 1
+ assert isinstance(x2, FloatToIntMap)
+
+def test_invalid_key_type():
+ with pytest.raises(CheckedKeyTypeError):
+ FloatToIntMap({1: 1})
+
+def test_invalid_value_type():
+ with pytest.raises(CheckedValueTypeError):
+ FloatToIntMap({1.0: 1.0})
+
+def test_breaking_invariant():
+ try:
+ FloatToIntMap({1.5: 2})
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Invalid mapping',)
+
+def test_repr():
+ x = FloatToIntMap({1.25: 1})
+
+ assert str(x) == 'FloatToIntMap({1.25: 1})'
+
+def test_default_serialization():
+ x = FloatToIntMap({1.25: 1, 2.5: 2})
+
+ assert x.serialize() == {1.25: 1, 2.5: 2}
+
+class StringFloatToIntMap(FloatToIntMap):
+ @staticmethod
+ def __serializer__(format, key, value):
+ return format.format(key), format.format(value)
+
+def test_custom_serialization():
+ x = StringFloatToIntMap({1.25: 1, 2.5: 2})
+
+ assert x.serialize("{0}") == {"1.25": "1", "2.5": "2"}
+
+class FloatSet(CheckedPSet):
+ __type__ = float
+
+class IntToFloatSetMap(CheckedPMap):
+ __key_type__ = int
+ __value_type__ = FloatSet
+
+
+def test_multi_level_serialization():
+ x = IntToFloatSetMap.create({1: [1.25, 1.50], 2: [2.5, 2.75]})
+
+ assert str(x) == "IntToFloatSetMap({1: FloatSet([1.5, 1.25]), 2: FloatSet([2.75, 2.5])})"
+
+ sx = x.serialize()
+ assert sx == {1: set([1.5, 1.25]), 2: set([2.75, 2.5])}
+ assert isinstance(sx[1], set)
+
+def test_create_non_checked_types():
+ assert FloatToIntMap.create({1.25: 1, 2.5: 2}) == FloatToIntMap({1.25: 1, 2.5: 2})
+
+def test_create_checked_types():
+ class IntSet(CheckedPSet):
+ __type__ = int
+
+ class FloatVector(CheckedPVector):
+ __type__ = float
+
+ class IntSetToFloatVectorMap(CheckedPMap):
+ __key_type__ = IntSet
+ __value_type__ = FloatVector
+
+ x = IntSetToFloatVectorMap.create({frozenset([1, 2]): [1.25, 2.5]})
+
+ assert str(x) == "IntSetToFloatVectorMap({IntSet([1, 2]): FloatVector([1.25, 2.5])})"
+
+def test_evolver_returns_same_instance_when_no_updates():
+ x = FloatToIntMap({1.25: 1, 2.25: 2})
+
+ assert x.evolver().persistent() is x
+
+def test_map_with_no_types_or_invariants():
+ class NoCheckPMap(CheckedPMap):
+ pass
+
+ x = NoCheckPMap({1: 2, 3: 4})
+ assert x[1] == 2
+ assert x[3] == 4
+
+
+def test_pickling():
+ x = FloatToIntMap({1.25: 1, 2.5: 2})
+ y = pickle.loads(pickle.dumps(x, -1))
+
+ assert x == y
+ assert isinstance(y, FloatToIntMap)
+
+
+class FloatVector(CheckedPVector):
+ __type__ = float
+
+
+class VectorToSetMap(CheckedPMap):
+ __key_type__ = '__tests__.checked_map_test.FloatVector'
+ __value_type__ = '__tests__.checked_map_test.FloatSet'
+
+
+def test_type_check_with_string_specification():
+ content = [1.5, 2.0]
+ vec = FloatVector(content)
+ sett = FloatSet(content)
+ map = VectorToSetMap({vec: sett})
+
+ assert map[vec] == sett
+
+
+def test_type_creation_with_string_specification():
+ content = (1.5, 2.0)
+ map = VectorToSetMap.create({content: content})
+
+ assert map[FloatVector(content)] == set(content)
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(VectorToSetMap({}))
diff --git a/contrib/python/pyrsistent/py3/tests/checked_set_test.py b/contrib/python/pyrsistent/py3/tests/checked_set_test.py
new file mode 100644
index 0000000000..f0be4963e2
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/checked_set_test.py
@@ -0,0 +1,85 @@
+import pickle
+import pytest
+from pyrsistent import CheckedPSet, PSet, InvariantException, CheckedType, CheckedPVector, CheckedValueTypeError
+
+
+class Naturals(CheckedPSet):
+ __type__ = int
+ __invariant__ = lambda value: (value >= 0, 'Negative value')
+
+def test_instantiate():
+ x = Naturals([1, 2, 3, 3])
+
+ assert list(x) == [1, 2, 3]
+ assert isinstance(x, Naturals)
+ assert isinstance(x, PSet)
+ assert isinstance(x, CheckedType)
+
+def test_add():
+ x = Naturals()
+ x2 = x.add(1)
+
+ assert list(x2) == [1]
+ assert isinstance(x2, Naturals)
+
+def test_invalid_type():
+ with pytest.raises(CheckedValueTypeError):
+ Naturals([1, 2.0])
+
+def test_breaking_invariant():
+ try:
+ Naturals([1, -1])
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Negative value',)
+
+def test_repr():
+ x = Naturals([1, 2])
+
+ assert str(x) == 'Naturals([1, 2])'
+
+def test_default_serialization():
+ x = Naturals([1, 2])
+
+ assert x.serialize() == set([1, 2])
+
+class StringNaturals(Naturals):
+ @staticmethod
+ def __serializer__(format, value):
+ return format.format(value)
+
+def test_custom_serialization():
+ x = StringNaturals([1, 2])
+
+ assert x.serialize("{0}") == set(["1", "2"])
+
+class NaturalsVector(CheckedPVector):
+ __type__ = Naturals
+
+def test_multi_level_serialization():
+ x = NaturalsVector.create([[1, 2], [3, 4]])
+
+ assert str(x) == "NaturalsVector([Naturals([1, 2]), Naturals([3, 4])])"
+
+ sx = x.serialize()
+ assert sx == [set([1, 2]), set([3, 4])]
+ assert isinstance(sx[0], set)
+
+def test_create():
+ assert Naturals.create([1, 2]) == Naturals([1, 2])
+
+def test_evolver_returns_same_instance_when_no_updates():
+ x = Naturals([1, 2])
+ assert x.evolver().persistent() is x
+
+def test_pickling():
+ x = Naturals([1, 2])
+ y = pickle.loads(pickle.dumps(x, -1))
+
+ assert x == y
+ assert isinstance(y, Naturals)
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(Naturals([1, 2])) \ No newline at end of file
diff --git a/contrib/python/pyrsistent/py3/tests/checked_vector_test.py b/contrib/python/pyrsistent/py3/tests/checked_vector_test.py
new file mode 100644
index 0000000000..b2e3d43cd6
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/checked_vector_test.py
@@ -0,0 +1,213 @@
+import datetime
+import pickle
+import pytest
+from pyrsistent import CheckedPVector, InvariantException, optional, CheckedValueTypeError, PVector
+
+
+class Naturals(CheckedPVector):
+ __type__ = int
+ __invariant__ = lambda value: (value >= 0, 'Negative value')
+
+def test_instantiate():
+ x = Naturals([1, 2, 3])
+
+ assert list(x) == [1, 2, 3]
+ assert isinstance(x, Naturals)
+ assert isinstance(x, PVector)
+
+def test_append():
+ x = Naturals()
+ x2 = x.append(1)
+
+ assert list(x2) == [1]
+ assert isinstance(x2, Naturals)
+
+def test_extend():
+ x = Naturals()
+ x2 = x.extend([1])
+
+ assert list(x2) == [1]
+ assert isinstance(x2, Naturals)
+
+def test_set():
+ x = Naturals([1, 2])
+ x2 = x.set(1, 3)
+
+ assert list(x2) == [1, 3]
+ assert isinstance(x2, Naturals)
+
+
+def test_invalid_type():
+ try:
+ Naturals([1, 2.0])
+ assert False
+ except CheckedValueTypeError as e:
+ assert e.expected_types == (int,)
+ assert e.actual_type is float
+ assert e.actual_value == 2.0
+ assert e.source_class is Naturals
+
+ x = Naturals([1, 2])
+ with pytest.raises(TypeError):
+ x.append(3.0)
+
+ with pytest.raises(TypeError):
+ x.extend([3, 4.0])
+
+ with pytest.raises(TypeError):
+ x.set(1, 2.0)
+
+ with pytest.raises(TypeError):
+ x.evolver()[1] = 2.0
+
+def test_breaking_invariant():
+ try:
+ Naturals([1, -1])
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Negative value',)
+
+ x = Naturals([1, 2])
+ try:
+ x.append(-1)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Negative value',)
+
+ try:
+ x.extend([-1])
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Negative value',)
+
+ try:
+ x.set(1, -1)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Negative value',)
+
+def test_create_base_case():
+ x = Naturals.create([1, 2, 3])
+
+ assert isinstance(x, Naturals)
+ assert x == Naturals([1, 2, 3])
+
+def test_create_with_instance_of_checked_pvector_returns_the_argument():
+ x = Naturals([1, 2, 3])
+
+ assert Naturals.create(x) is x
+
+class OptionalNaturals(CheckedPVector):
+ __type__ = optional(int)
+ __invariant__ = lambda value: (value is None or value >= 0, 'Negative value')
+
+def test_multiple_allowed_types():
+ assert list(OptionalNaturals([1, None, 3])) == [1, None, 3]
+
+class NaturalsVector(CheckedPVector):
+ __type__ = optional(Naturals)
+
+def test_create_of_nested_structure():
+ assert NaturalsVector([Naturals([1, 2]), Naturals([3, 4]), None]) ==\
+ NaturalsVector.create([[1, 2], [3, 4], None])
+
+def test_serialize_default_case():
+ v = CheckedPVector([1, 2, 3])
+ assert v.serialize() == [1, 2, 3]
+
+class Dates(CheckedPVector):
+ __type__ = datetime.date
+
+ @staticmethod
+ def __serializer__(format, d):
+ return d.strftime(format)
+
+def test_serialize_custom_serializer():
+ d = datetime.date
+ v = Dates([d(2015, 2, 2), d(2015, 2, 3)])
+ assert v.serialize(format='%Y-%m-%d') == ['2015-02-02', '2015-02-03']
+
+def test_type_information_is_inherited():
+ class MultiDates(Dates):
+ __type__ = int
+
+ MultiDates([datetime.date(2015, 2, 4), 5])
+
+ with pytest.raises(TypeError):
+ MultiDates([5.0])
+
+def test_invariants_are_inherited():
+ class LimitNaturals(Naturals):
+ __invariant__ = lambda value: (value < 10, 'Too big')
+
+ try:
+ LimitNaturals([10, -1])
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('Too big', 'Negative value')
+
+def test_invariant_must_be_callable():
+ with pytest.raises(TypeError):
+ class InvalidInvariant(CheckedPVector):
+ __invariant__ = 1
+
+def test_type_spec_must_be_type():
+ with pytest.raises(TypeError):
+ class InvalidType(CheckedPVector):
+ __type__ = 1
+
+def test_repr():
+ x = Naturals([1, 2])
+
+ assert str(x) == 'Naturals([1, 2])'
+
+def test_evolver_returns_same_instance_when_no_updates():
+ x = Naturals([1, 2])
+ assert x.evolver().persistent() is x
+
+def test_pickling():
+ x = Naturals([1, 2])
+ y = pickle.loads(pickle.dumps(x, -1))
+
+ assert x == y
+ assert isinstance(y, Naturals)
+
+def test_multiple_optional_types():
+ class Numbers(CheckedPVector):
+ __type__ = optional(int, float)
+
+ numbers = Numbers([1, 2.5, None])
+ assert numbers.serialize() == [1, 2.5, None]
+
+ with pytest.raises(TypeError):
+ numbers.append('foo')
+
+
+class NaturalsVectorStr(CheckedPVector):
+ __type__ = '__tests__.checked_vector_test.Naturals'
+
+
+def test_check_with_string_specification():
+ naturals_list = [Naturals([1, 2]), Naturals([3, 4])]
+ nv = NaturalsVectorStr(naturals_list)
+ assert nv == naturals_list
+
+
+def test_create_with_string_specification():
+ naturals_list = [[1, 2], [3, 4]]
+ nv = NaturalsVectorStr.create(naturals_list)
+ assert nv == naturals_list
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(Naturals([]))
+
+
+def test_create_with_generator_iterator():
+ # See issue #97
+ class Numbers(CheckedPVector):
+ __type__ = int
+
+ n = Numbers(i for i in [1, 2, 3])
+ assert n == Numbers([1, 2, 3]) \ No newline at end of file
diff --git a/contrib/python/pyrsistent/py3/tests/class_test.py b/contrib/python/pyrsistent/py3/tests/class_test.py
new file mode 100644
index 0000000000..5e953965d5
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/class_test.py
@@ -0,0 +1,474 @@
+from collections.abc import Hashable
+import math
+import pickle
+import pytest
+import uuid
+from pyrsistent import (
+ field, InvariantException, PClass, optional, CheckedPVector,
+ pmap_field, pset_field, pvector_field)
+
+
+class Point(PClass):
+ x = field(type=int, mandatory=True, invariant=lambda x: (x >= 0, 'X negative'))
+ y = field(type=int, serializer=lambda formatter, y: formatter(y))
+ z = field(type=int, initial=0)
+
+
+class Hierarchy(PClass):
+ point = field(type=Point)
+
+
+class TypedContainerObj(PClass):
+ map = pmap_field(str, str)
+ set = pset_field(str)
+ vec = pvector_field(str)
+
+
+class UniqueThing(PClass):
+ id = field(type=uuid.UUID, factory=uuid.UUID)
+ x = field(type=int)
+
+
+def test_create_ignore_extra():
+ p = Point.create({'x': 5, 'y': 10, 'z': 15, 'a': 0}, ignore_extra=True)
+ assert p.x == 5
+ assert p.y == 10
+ assert p.z == 15
+ assert isinstance(p, Point)
+
+
+def test_create_ignore_extra_false():
+ with pytest.raises(AttributeError):
+ _ = Point.create({'x': 5, 'y': 10, 'z': 15, 'a': 0})
+
+
+def test_create_ignore_extra_true():
+ h = Hierarchy.create(
+ {'point': {'x': 5, 'y': 10, 'z': 15, 'extra_field_0': 'extra_data_0'}, 'extra_field_1': 'extra_data_1'},
+ ignore_extra=True)
+ assert isinstance(h, Hierarchy)
+
+
+def test_evolve_pclass_instance():
+ p = Point(x=1, y=2)
+ p2 = p.set(x=p.x+2)
+
+ # Original remains
+ assert p.x == 1
+ assert p.y == 2
+
+ # Evolved object updated
+ assert p2.x == 3
+ assert p2.y == 2
+
+ p3 = p2.set('x', 4)
+ assert p3.x == 4
+ assert p3.y == 2
+
+
+def test_direct_assignment_not_possible():
+ p = Point(x=1, y=2)
+
+ with pytest.raises(AttributeError):
+ p.x = 1
+
+ with pytest.raises(AttributeError):
+ setattr(p, 'x', 1)
+
+
+def test_direct_delete_not_possible():
+ p = Point(x=1, y=2)
+ with pytest.raises(AttributeError):
+ del p.x
+
+ with pytest.raises(AttributeError):
+ delattr(p, 'x')
+
+
+def test_cannot_construct_with_undeclared_fields():
+ with pytest.raises(AttributeError):
+ Point(x=1, p=5)
+
+
+def test_cannot_construct_with_wrong_type():
+ with pytest.raises(TypeError):
+ Point(x='a')
+
+
+def test_cannot_construct_without_mandatory_fields():
+ try:
+ Point(y=1)
+ assert False
+ except InvariantException as e:
+ assert "[Point.x]" in str(e)
+
+
+def test_field_invariant_must_hold():
+ try:
+ Point(x=-1)
+ assert False
+ except InvariantException as e:
+ assert "X negative" in str(e)
+
+
+def test_initial_value_set_when_not_present_in_arguments():
+ p = Point(x=1, y=2)
+
+ assert p.z == 0
+
+
+class Line(PClass):
+ p1 = field(type=Point)
+ p2 = field(type=Point)
+
+
+def test_can_create_nested_structures_from_dict_and_serialize_back_to_dict():
+ source = dict(p1=dict(x=1, y=2, z=3), p2=dict(x=10, y=20, z=30))
+ l = Line.create(source)
+
+ assert l.p1.x == 1
+ assert l.p1.y == 2
+ assert l.p1.z == 3
+ assert l.p2.x == 10
+ assert l.p2.y == 20
+ assert l.p2.z == 30
+
+ assert l.serialize(format=lambda val: val) == source
+
+
+def test_can_serialize_with_custom_serializer():
+ p = Point(x=1, y=1, z=1)
+
+ assert p.serialize(format=lambda v: v + 17) == {'x': 1, 'y': 18, 'z': 1}
+
+
+def test_implements_proper_equality_based_on_equality_of_fields():
+ p1 = Point(x=1, y=2)
+ p2 = Point(x=3)
+ p3 = Point(x=1, y=2)
+
+ assert p1 == p3
+ assert not p1 != p3
+ assert p1 != p2
+ assert not p1 == p2
+
+
+def test_is_hashable():
+ p1 = Point(x=1, y=2)
+ p2 = Point(x=3, y=2)
+
+ d = {p1: 'A point', p2: 'Another point'}
+
+ p1_like = Point(x=1, y=2)
+ p2_like = Point(x=3, y=2)
+
+ assert isinstance(p1, Hashable)
+ assert d[p1_like] == 'A point'
+ assert d[p2_like] == 'Another point'
+ assert Point(x=10) not in d
+
+
+def test_supports_nested_transformation():
+ l1 = Line(p1=Point(x=2, y=1), p2=Point(x=20, y=10))
+
+ l2 = l1.transform(['p1', 'x'], 3)
+
+ assert l1.p1.x == 2
+
+ assert l2.p1.x == 3
+ assert l2.p1.y == 1
+ assert l2.p2.x == 20
+ assert l2.p2.y == 10
+
+
+def test_repr():
+ class ARecord(PClass):
+ a = field()
+ b = field()
+
+ assert repr(ARecord(a=1, b=2)) in ('ARecord(a=1, b=2)', 'ARecord(b=2, a=1)')
+
+
+def test_global_invariant_check():
+ class UnitCirclePoint(PClass):
+ __invariant__ = lambda cp: (0.99 < math.sqrt(cp.x*cp.x + cp.y*cp.y) < 1.01,
+ "Point not on unit circle")
+ x = field(type=float)
+ y = field(type=float)
+
+ UnitCirclePoint(x=1.0, y=0.0)
+
+ with pytest.raises(InvariantException):
+ UnitCirclePoint(x=1.0, y=1.0)
+
+
+def test_supports_pickling():
+ p1 = Point(x=2, y=1)
+ p2 = pickle.loads(pickle.dumps(p1, -1))
+
+ assert p1 == p2
+ assert isinstance(p2, Point)
+
+
+def test_supports_pickling_with_typed_container_fields():
+ obj = TypedContainerObj(map={'foo': 'bar'}, set=['hello', 'there'], vec=['a', 'b'])
+ obj2 = pickle.loads(pickle.dumps(obj))
+ assert obj == obj2
+
+
+def test_can_remove_optional_member():
+ p1 = Point(x=1, y=2)
+ p2 = p1.remove('y')
+
+ assert p2 == Point(x=1)
+
+
+def test_cannot_remove_mandatory_member():
+ p1 = Point(x=1, y=2)
+
+ with pytest.raises(InvariantException):
+ p1.remove('x')
+
+
+def test_cannot_remove_non_existing_member():
+ p1 = Point(x=1)
+
+ with pytest.raises(AttributeError):
+ p1.remove('y')
+
+
+def test_evolver_without_evolution_returns_original_instance():
+ p1 = Point(x=1)
+ e = p1.evolver()
+
+ assert e.persistent() is p1
+
+
+def test_evolver_with_evolution_to_same_element_returns_original_instance():
+ p1 = Point(x=1)
+ e = p1.evolver()
+ e.set('x', p1.x)
+
+ assert e.persistent() is p1
+
+
+def test_evolver_supports_chained_set_and_remove():
+ p1 = Point(x=1, y=2)
+
+ assert p1.evolver().set('x', 3).remove('y').persistent() == Point(x=3)
+
+
+def test_evolver_supports_dot_notation_for_setting_and_getting_elements():
+ e = Point(x=1, y=2).evolver()
+
+ e.x = 3
+ assert e.x == 3
+ assert e.persistent() == Point(x=3, y=2)
+
+
+class Numbers(CheckedPVector):
+ __type__ = int
+
+
+class LinkedList(PClass):
+ value = field(type='__tests__.class_test.Numbers')
+ next = field(type=optional('__tests__.class_test.LinkedList'))
+
+
+def test_string_as_type_specifier():
+ l = LinkedList(value=[1, 2], next=LinkedList(value=[3, 4], next=None))
+
+ assert isinstance(l.value, Numbers)
+ assert list(l.value) == [1, 2]
+ assert l.next.next is None
+
+
+def test_multiple_invariants_on_field():
+ # If the invariant returns a list of tests the results of running those tests will be
+ # a tuple containing result data of all failing tests.
+
+ class MultiInvariantField(PClass):
+ one = field(type=int, invariant=lambda x: ((False, 'one_one'),
+ (False, 'one_two'),
+ (True, 'one_three')))
+ two = field(invariant=lambda x: (False, 'two_one'))
+
+ try:
+ MultiInvariantField(one=1, two=2)
+ assert False
+ except InvariantException as e:
+ assert set(e.invariant_errors) == set([('one_one', 'one_two'), 'two_one'])
+
+
+def test_multiple_global_invariants():
+ class MultiInvariantGlobal(PClass):
+ __invariant__ = lambda self: ((False, 'x'), (False, 'y'))
+ one = field()
+
+ try:
+ MultiInvariantGlobal(one=1)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == (('x', 'y'),)
+
+
+def test_inherited_global_invariants():
+ class Distant(object):
+ def __invariant__(self):
+ return [(self.distant, "distant")]
+
+ class Nearby(Distant):
+ def __invariant__(self):
+ return [(self.nearby, "nearby")]
+
+ class MultipleInvariantGlobal(Nearby, PClass):
+ distant = field()
+ nearby = field()
+
+ try:
+ MultipleInvariantGlobal(distant=False, nearby=False)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == (("nearby",), ("distant",),)
+
+
+def test_diamond_inherited_global_invariants():
+ counter = []
+ class Base(object):
+ def __invariant__(self):
+ counter.append(None)
+ return [(False, "base")]
+
+ class Left(Base):
+ pass
+
+ class Right(Base):
+ pass
+
+ class SingleInvariantGlobal(Left, Right, PClass):
+ pass
+
+ try:
+ SingleInvariantGlobal()
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == (("base",),)
+ assert counter == [None]
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(Point(x=1, y=2))
+
+
+def test_supports_weakref_with_multi_level_inheritance():
+ import weakref
+
+ class PPoint(Point):
+ a = field()
+
+ weakref.ref(PPoint(x=1, y=2))
+
+
+def test_supports_lazy_initial_value_for_field():
+ class MyClass(PClass):
+ a = field(int, initial=lambda: 2)
+
+ assert MyClass() == MyClass(a=2)
+
+
+def test_type_checks_lazy_initial_value_for_field():
+ class MyClass(PClass):
+ a = field(int, initial=lambda: "a")
+
+ with pytest.raises(TypeError):
+ MyClass()
+
+
+def test_invariant_checks_lazy_initial_value_for_field():
+ class MyClass(PClass):
+ a = field(int, invariant=lambda x: (x < 5, "Too large"), initial=lambda: 10)
+
+ with pytest.raises(InvariantException):
+ MyClass()
+
+
+def test_invariant_checks_static_initial_value():
+ class MyClass(PClass):
+ a = field(int, invariant=lambda x: (x < 5, "Too large"), initial=10)
+
+ with pytest.raises(InvariantException):
+ MyClass()
+
+
+def test_lazy_invariant_message():
+ class MyClass(PClass):
+ a = field(int, invariant=lambda x: (x < 5, lambda: "{x} is too large".format(x=x)))
+
+ try:
+ MyClass(a=5)
+ assert False
+ except InvariantException as e:
+ assert '5 is too large' in e.invariant_errors
+
+
+def test_enum_key_type():
+ import enum
+ class Foo(enum.Enum):
+ Bar = 1
+ Baz = 2
+
+ # This currently fails because the enum is iterable
+ class MyClass1(PClass):
+ f = pmap_field(key_type=Foo, value_type=int)
+
+ MyClass1()
+
+ # This is OK since it's wrapped in a tuple
+ class MyClass2(PClass):
+ f = pmap_field(key_type=(Foo,), value_type=int)
+
+ MyClass2()
+
+
+def test_pickle_with_one_way_factory():
+ thing = UniqueThing(id='25544626-86da-4bce-b6b6-9186c0804d64')
+ assert pickle.loads(pickle.dumps(thing)) == thing
+
+
+def test_evolver_with_one_way_factory():
+ thing = UniqueThing(id='cc65249a-56fe-4995-8719-ea02e124b234')
+ ev = thing.evolver()
+ ev.x = 5 # necessary to prevent persistent() returning the original
+ assert ev.persistent() == UniqueThing(id=str(thing.id), x=5)
+
+
+def test_set_doesnt_trigger_other_factories():
+ thing = UniqueThing(id='b413b280-de76-4e28-a8e3-5470ca83ea2c')
+ thing.set(x=5)
+
+
+def test_set_does_trigger_factories():
+ class SquaredPoint(PClass):
+ x = field(factory=lambda x: x ** 2)
+ y = field()
+
+ sp = SquaredPoint(x=3, y=10)
+ assert (sp.x, sp.y) == (9, 10)
+
+ sp2 = sp.set(x=4)
+ assert (sp2.x, sp2.y) == (16, 10)
+
+
+def test_value_can_be_overridden_in_subclass_new():
+ class X(PClass):
+ y = pvector_field(int)
+
+ def __new__(cls, **kwargs):
+ items = kwargs.get('y', None)
+ if items is None:
+ kwargs['y'] = ()
+ return super(X, cls).__new__(cls, **kwargs)
+
+ a = X(y=[])
+ b = a.set(y=None)
+ assert a == b
diff --git a/contrib/python/pyrsistent/py3/tests/deque_test.py b/contrib/python/pyrsistent/py3/tests/deque_test.py
new file mode 100644
index 0000000000..7798a75583
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/deque_test.py
@@ -0,0 +1,293 @@
+import pickle
+import pytest
+from pyrsistent import pdeque, dq
+
+
+def test_basic_right_and_left():
+ x = pdeque([1, 2])
+
+ assert x.right == 2
+ assert x.left == 1
+ assert len(x) == 2
+
+
+def test_construction_with_maxlen():
+ assert pdeque([1, 2, 3, 4], maxlen=2) == pdeque([3, 4])
+ assert pdeque([1, 2, 3, 4], maxlen=4) == pdeque([1, 2, 3, 4])
+ assert pdeque([], maxlen=2) == pdeque()
+
+
+def test_construction_with_invalid_maxlen():
+ with pytest.raises(TypeError):
+ pdeque([], maxlen='foo')
+
+ with pytest.raises(ValueError):
+ pdeque([], maxlen=-3)
+
+
+def test_pop():
+ x = pdeque([1, 2, 3, 4]).pop()
+ assert x.right == 3
+ assert x.left == 1
+
+ x = x.pop()
+ assert x.right == 2
+ assert x.left == 1
+
+ x = x.pop()
+ assert x.right == 1
+ assert x.left == 1
+
+ x = x.pop()
+ assert x == pdeque()
+
+ x = pdeque([1, 2]).pop()
+ assert x == pdeque([1])
+
+ x = x.pop()
+ assert x == pdeque()
+
+ assert pdeque().append(1).pop() == pdeque()
+ assert pdeque().appendleft(1).pop() == pdeque()
+
+
+def test_pop_multiple():
+ assert pdeque([1, 2, 3, 4]).pop(3) == pdeque([1])
+ assert pdeque([1, 2]).pop(3) == pdeque()
+
+
+def test_pop_with_negative_index():
+ assert pdeque([1, 2, 3]).pop(-1) == pdeque([1, 2, 3]).popleft(1)
+ assert pdeque([1, 2, 3]).popleft(-1) == pdeque([1, 2, 3]).pop(1)
+
+
+def test_popleft():
+ x = pdeque([1, 2, 3, 4]).popleft()
+ assert x.left == 2
+ assert x.right == 4
+
+ x = x.popleft()
+ assert x.left == 3
+ assert x.right == 4
+
+ x = x.popleft()
+ assert x.right == 4
+ assert x.left == 4
+
+ x = x.popleft()
+ assert x == pdeque()
+
+ x = pdeque([1, 2]).popleft()
+ assert x == pdeque([2])
+
+ x = x.popleft()
+ assert x == pdeque()
+
+ assert pdeque().append(1).popleft() == pdeque()
+ assert pdeque().appendleft(1).popleft() == pdeque()
+
+
+def test_popleft_multiple():
+ assert pdeque([1, 2, 3, 4]).popleft(3) == pdeque([4])
+
+
+def test_left_on_empty_deque():
+ with pytest.raises(IndexError):
+ pdeque().left
+
+
+def test_right_on_empty_deque():
+ with pytest.raises(IndexError):
+ pdeque().right
+
+
+def test_pop_empty_deque_returns_empty_deque():
+ # The other option is to throw an index error, this is what feels best for now though
+ assert pdeque().pop() == pdeque()
+ assert pdeque().popleft() == pdeque()
+
+
+def test_str():
+ assert str(pdeque([1, 2, 3])) == 'pdeque([1, 2, 3])'
+ assert str(pdeque([])) == 'pdeque([])'
+ assert str(pdeque([1, 2], maxlen=4)) == 'pdeque([1, 2], maxlen=4)'
+
+
+def test_append():
+ assert pdeque([1, 2]).append(3).append(4) == pdeque([1, 2, 3, 4])
+
+
+def test_append_with_maxlen():
+ assert pdeque([1, 2], maxlen=2).append(3).append(4) == pdeque([3, 4])
+ assert pdeque([1, 2], maxlen=3).append(3).append(4) == pdeque([2, 3, 4])
+ assert pdeque([], maxlen=0).append(1) == pdeque()
+
+
+def test_appendleft():
+ assert pdeque([2, 1]).appendleft(3).appendleft(4) == pdeque([4, 3, 2, 1])
+
+
+def test_appendleft_with_maxlen():
+ assert pdeque([2, 1], maxlen=2).appendleft(3).appendleft(4) == pdeque([4, 3])
+ assert pdeque([2, 1], maxlen=3).appendleft(3).appendleft(4) == pdeque([4, 3, 2])
+ assert pdeque([], maxlen=0).appendleft(1) == pdeque()
+
+
+def test_extend():
+ assert pdeque([1, 2]).extend([3, 4]) == pdeque([1, 2, 3, 4])
+
+
+def test_extend_with_maxlen():
+ assert pdeque([1, 2], maxlen=3).extend([3, 4]) == pdeque([2, 3, 4])
+ assert pdeque([1, 2], maxlen=2).extend([3, 4]) == pdeque([3, 4])
+ assert pdeque([], maxlen=2).extend([1, 2]) == pdeque([1, 2])
+ assert pdeque([], maxlen=0).extend([1, 2]) == pdeque([])
+
+
+def test_extendleft():
+ assert pdeque([2, 1]).extendleft([3, 4]) == pdeque([4, 3, 2, 1])
+
+
+def test_extendleft_with_maxlen():
+ assert pdeque([1, 2], maxlen=3).extendleft([3, 4]) == pdeque([4, 3, 1])
+ assert pdeque([1, 2], maxlen=2).extendleft([3, 4]) == pdeque([4, 3])
+ assert pdeque([], maxlen=2).extendleft([1, 2]) == pdeque([2, 1])
+ assert pdeque([], maxlen=0).extendleft([1, 2]) == pdeque([])
+
+
+def test_count():
+ x = pdeque([1, 2, 3, 2, 1])
+ assert x.count(1) == 2
+ assert x.count(2) == 2
+
+
+def test_remove():
+ assert pdeque([1, 2, 3, 4]).remove(2) == pdeque([1, 3, 4])
+ assert pdeque([1, 2, 3, 4]).remove(4) == pdeque([1, 2, 3])
+
+ # Right list must be reversed before removing element
+ assert pdeque([1, 2, 3, 3, 4, 5, 4, 6]).remove(4) == pdeque([1, 2, 3, 3, 5, 4, 6])
+
+
+def test_remove_element_missing():
+ with pytest.raises(ValueError):
+ pdeque().remove(2)
+
+ with pytest.raises(ValueError):
+ pdeque([1, 2, 3]).remove(4)
+
+
+def test_reverse():
+ assert pdeque([1, 2, 3, 4]).reverse() == pdeque([4, 3, 2, 1])
+ assert pdeque().reverse() == pdeque()
+
+
+def test_rotate_right():
+ assert pdeque([1, 2, 3, 4, 5]).rotate(2) == pdeque([4, 5, 1, 2, 3])
+ assert pdeque([1, 2]).rotate(0) == pdeque([1, 2])
+ assert pdeque().rotate(2) == pdeque()
+
+
+def test_rotate_left():
+ assert pdeque([1, 2, 3, 4, 5]).rotate(-2) == pdeque([3, 4, 5, 1, 2])
+ assert pdeque().rotate(-2) == pdeque()
+
+
+def test_set_maxlen():
+ x = pdeque([], maxlen=4)
+ assert x.maxlen == 4
+
+ with pytest.raises(AttributeError):
+ x.maxlen = 5
+
+
+def test_comparison():
+ small = pdeque([1, 2])
+ large = pdeque([1, 2, 3])
+
+ assert small < large
+ assert large > small
+ assert not small > large
+ assert not large < small
+ assert large != small
+
+ # Not equal to other types
+ assert small != [1, 2]
+
+
+def test_pickling():
+ input = pdeque([1, 2, 3], maxlen=5)
+ output = pickle.loads(pickle.dumps(input, -1))
+
+ assert output == input
+ assert output.maxlen == input.maxlen
+
+
+def test_indexing():
+ assert pdeque([1, 2, 3])[0] == 1
+ assert pdeque([1, 2, 3])[1] == 2
+ assert pdeque([1, 2, 3])[2] == 3
+ assert pdeque([1, 2, 3])[-1] == 3
+ assert pdeque([1, 2, 3])[-2] == 2
+ assert pdeque([1, 2, 3])[-3] == 1
+
+
+def test_one_element_indexing():
+ assert pdeque([2])[0] == 2
+ assert pdeque([2])[-1] == 2
+
+
+def test_empty_indexing():
+ with pytest.raises(IndexError):
+ assert pdeque([])[0] == 1
+
+
+def test_indexing_out_of_range():
+ with pytest.raises(IndexError):
+ pdeque([1, 2, 3])[-4]
+
+ with pytest.raises(IndexError):
+ pdeque([1, 2, 3])[3]
+
+ with pytest.raises(IndexError):
+ pdeque([2])[-2]
+
+
+def test_indexing_invalid_type():
+ with pytest.raises(TypeError) as e:
+ pdeque([1, 2, 3])['foo']
+
+ assert 'cannot be interpreted' in str(e.value)
+
+
+def test_slicing():
+ assert pdeque([1, 2, 3])[1:2] == pdeque([2])
+ assert pdeque([1, 2, 3])[2:1] == pdeque([])
+ assert pdeque([1, 2, 3])[-2:-1] == pdeque([2])
+ assert pdeque([1, 2, 3])[::2] == pdeque([1, 3])
+
+
+def test_hashing():
+ assert hash(pdeque([1, 2, 3])) == hash(pdeque().append(1).append(2).append(3))
+
+
+def test_index():
+ assert pdeque([1, 2, 3]).index(3) == 2
+
+
+def test_literalish():
+ assert dq(1, 2, 3) == pdeque([1, 2, 3])
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(dq(1, 2))
+
+
+def test_iterable():
+ """
+ PDeques can be created from iterables even though they can't be len()
+ hinted.
+ """
+
+ assert pdeque(iter("a")) == pdeque(iter("a"))
diff --git a/contrib/python/pyrsistent/py3/tests/field_test.py b/contrib/python/pyrsistent/py3/tests/field_test.py
new file mode 100644
index 0000000000..176b64cc6b
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/field_test.py
@@ -0,0 +1,23 @@
+from enum import Enum
+
+from pyrsistent import field, pvector_field
+
+
+class ExampleEnum(Enum):
+ x = 1
+ y = 2
+
+
def test_enum():
    """An Enum class is accepted as a field type."""
    enum_field = field(type=ExampleEnum)

    assert ExampleEnum in enum_field.type
    assert len(enum_field.type) == 1
+
+
+# This is meant to exercise `_seq_field`.
def test_pvector_field_enum_type():
    """pvector_field wraps the enum in a single checked vector type."""
    vec_field = pvector_field(ExampleEnum)

    assert len(vec_field.type) == 1
    assert ExampleEnum is list(vec_field.type)[0].__type__
diff --git a/contrib/python/pyrsistent/py3/tests/freeze_test.py b/contrib/python/pyrsistent/py3/tests/freeze_test.py
new file mode 100644
index 0000000000..158cf5d872
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/freeze_test.py
@@ -0,0 +1,174 @@
+"""Tests for freeze and thaw."""
+import collections
+from pyrsistent import v, m, s, freeze, thaw, PRecord, field, mutant
+
+
+## Freeze (standard)
+
+def test_freeze_basic():
+ assert freeze(1) == 1
+ assert freeze('foo') == 'foo'
+
+def test_freeze_list():
+ assert freeze([1, 2]) == v(1, 2)
+
def test_freeze_dict():
    """Freezing a dict yields an equivalent pmap."""
    result = freeze({'a': 'b'})
    assert result == m(a='b')
    # Check the type of the result we already have instead of freezing a
    # fresh literal a second time.
    assert type(result) is type(m())
+
def test_freeze_defaultdict():
    """Freezing a defaultdict yields an equivalent pmap."""
    test_dict = collections.defaultdict(dict)
    test_dict['a'] = 'b'
    result = freeze(test_dict)
    assert result == m(a='b')
    # Verify the type of the frozen defaultdict itself. The previous
    # version asserted on a freshly frozen literal dict, so the
    # defaultdict result's type was never actually checked.
    assert type(result) is type(m())
+
+def test_freeze_set():
+ result = freeze(set([1, 2, 3]))
+ assert result == s(1, 2, 3)
+ assert type(result) is type(s())
+
+def test_freeze_recurse_in_dictionary_values():
+ result = freeze({'a': [1]})
+ assert result == m(a=v(1))
+ assert type(result['a']) is type(v())
+
+def test_freeze_recurse_in_defaultdict_values():
+ test_dict = collections.defaultdict(dict)
+ test_dict['a'] = [1]
+ result = freeze(test_dict)
+ assert result == m(a=v(1))
+ assert type(result['a']) is type(v())
+
def test_freeze_recurse_in_pmap_values():
    """freeze() recurses into plain dicts nested inside pmap values."""
    # Renamed from `input`, which shadowed the builtin of the same name.
    source = {'a': m(b={'c': 1})}
    result = freeze(source)
    # PMap and PVector are == to their mutable equivalents
    assert result == source
    assert type(result) is type(m())
    assert type(result['a']['b']) is type(m())
+
+def test_freeze_recurse_in_lists():
+ result = freeze(['a', {'b': 3}])
+ assert result == v('a', m(b=3))
+ assert type(result[1]) is type(m())
+
def test_freeze_recurse_in_pvectors():
    """freeze() recurses into plain lists nested inside pvectors."""
    # Renamed from `input`, which shadowed the builtin of the same name.
    source = [1, v(2, [3])]
    result = freeze(source)
    # PMap and PVector are == to their mutable equivalents
    assert result == source
    assert type(result) is type(v())
    assert type(result[1][1]) is type(v())
+
+def test_freeze_recurse_in_tuples():
+ """Values in tuples are recursively frozen."""
+ result = freeze(('a', {}))
+ assert result == ('a', m())
+ assert type(result[1]) is type(m())
+
+
+## Freeze (weak)
+
def test_freeze_nonstrict_no_recurse_in_pmap_values():
    """With strict=False, values already inside a pmap are left as-is."""
    # Renamed from `input`, which shadowed the builtin of the same name.
    source = {'a': m(b={'c': 1})}
    result = freeze(source, strict=False)
    # PMap and PVector are == to their mutable equivalents
    assert result == source
    assert type(result) is type(m())
    assert type(result['a']['b']) is dict
+
def test_freeze_nonstrict_no_recurse_in_pvectors():
    """With strict=False, values already inside a pvector are left as-is."""
    # Renamed from `input`, which shadowed the builtin of the same name.
    source = [1, v(2, [3])]
    result = freeze(source, strict=False)
    # PMap and PVector are == to their mutable equivalents
    assert result == source
    assert type(result) is type(v())
    assert type(result[1][1]) is list
+
+
+## Thaw
+
+def test_thaw_basic():
+ assert thaw(1) == 1
+ assert thaw('foo') == 'foo'
+
+def test_thaw_list():
+ result = thaw(v(1, 2))
+ assert result == [1, 2]
+ assert type(result) is list
+
+def test_thaw_dict():
+ result = thaw(m(a='b'))
+ assert result == {'a': 'b'}
+ assert type(result) is dict
+
+def test_thaw_set():
+ result = thaw(s(1, 2))
+ assert result == set([1, 2])
+ assert type(result) is set
+
+def test_thaw_recurse_in_mapping_values():
+ result = thaw(m(a=v(1)))
+ assert result == {'a': [1]}
+ assert type(result['a']) is list
+
+def test_thaw_recurse_in_dict_values():
+ result = thaw({'a': v(1, m(b=2))})
+ assert result == {'a': [1, {'b': 2}]}
+ assert type(result['a']) is list
+ assert type(result['a'][1]) is dict
+
+def test_thaw_recurse_in_vectors():
+ result = thaw(v('a', m(b=3)))
+ assert result == ['a', {'b': 3}]
+ assert type(result[1]) is dict
+
+def test_thaw_recurse_in_lists():
+ result = thaw(v(['a', m(b=1), v(2)]))
+ assert result == [['a', {'b': 1}, [2]]]
+ assert type(result[0]) is list
+ assert type(result[0][1]) is dict
+
+def test_thaw_recurse_in_tuples():
+ result = thaw(('a', m()))
+ assert result == ('a', {})
+ assert type(result[1]) is dict
+
+def test_thaw_can_handle_subclasses_of_persistent_base_types():
+ class R(PRecord):
+ x = field()
+
+ result = thaw(R(x=1))
+ assert result == {'x': 1}
+ assert type(result) is dict
+
+
+## Thaw (weak)
+
+def test_thaw_non_strict_no_recurse_in_dict_values():
+ result = thaw({'a': v(1, m(b=2))}, strict=False)
+ assert result == {'a': [1, {'b': 2}]}
+ assert type(result['a']) is type(v())
+ assert type(result['a'][1]) is type(m())
+
+def test_thaw_non_strict_no_recurse_in_lists():
+ result = thaw(v(['a', m(b=1), v(2)]), strict=False)
+ assert result == [['a', {'b': 1}, [2]]]
+ assert type(result[0][1]) is type(m())
+
def test_mutant_decorator():
    """@mutant freezes both the arguments and the return values."""
    @mutant
    def fn(a_list, a_dict):
        # Arguments arrive frozen.
        assert a_list == v(1, 2, 3)
        assert isinstance(a_dict, type(m()))
        assert a_dict == {'a': 5}

        return [1, 2, 3], {'a': 3}

    frozen_vec, frozen_map = fn([1, 2, 3], a_dict={'a': 5})

    # Return values come back frozen as well.
    assert frozen_vec == v(1, 2, 3)
    assert frozen_map == m(a=3)
    assert isinstance(frozen_map, type(m()))
diff --git a/contrib/python/pyrsistent/py3/tests/hypothesis_vector_test.py b/contrib/python/pyrsistent/py3/tests/hypothesis_vector_test.py
new file mode 100644
index 0000000000..73e82abf0b
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/hypothesis_vector_test.py
@@ -0,0 +1,304 @@
+"""
+Hypothesis-based tests for pvector.
+"""
+
+import gc
+
+from collections.abc import Iterable
+from functools import wraps
+from pyrsistent import PClass, field
+
+from pytest import fixture
+
+from pyrsistent import pvector, discard
+
+from hypothesis import strategies as st, assume
+from hypothesis.stateful import RuleBasedStateMachine, Bundle, rule
+
+
class RefCountTracker:
    """
    An object that might catch reference count errors sometimes.

    It remembers its own id() at construction time; if the object is ever
    finalized at a different address than it was created at, something has
    gone wrong with reference counting.
    """
    def __init__(self):
        self.id = id(self)

    def __repr__(self):
        return f"<{self.id}>"

    def __del__(self):
        # If self is a dangling memory reference this check might fail. Or
        # segfault :)
        if id(self) != self.id:
            raise RuntimeError()
+
+
@fixture(scope="module")
def gc_when_done(request):
    # Run a full garbage collection once all tests in the module finish,
    # so pending finalizers (e.g. RefCountTracker.__del__) get a chance
    # to fire within the test run.
    request.addfinalizer(gc.collect)
+
+
+def test_setup(gc_when_done):
+ """
+ Ensure we GC when tests finish.
+ """
+
+
+# Pairs of a list and corresponding pvector:
+PVectorAndLists = st.lists(st.builds(RefCountTracker)).map(
+ lambda l: (l, pvector(l)))
+
+
def verify_inputs_unmodified(original):
    """
    Decorator that asserts that the wrapped function does not modify its
    inputs.

    Every keyword argument that is an iterable is assumed to be a
    (list, pvector) pair; a tuple snapshot of each pair is taken before the
    call and compared after it.
    """
    def snapshot(pairs):
        return [(tuple(lst), tuple(vec)) for (lst, vec) in pairs]

    @wraps(original)
    def wrapper(self, **kwargs):
        tracked = [value for value in kwargs.values()
                   if isinstance(value, Iterable)]
        before = snapshot(tracked)
        try:
            return original(self, **kwargs)
        finally:
            # Ensure inputs were unmodified:
            assert snapshot(tracked) == before
    return wrapper
+
+
def assert_equal(l, pv):
    """Assert that list *l* and pvector *pv* are fully equivalent:
    equality, length, every index, every slice, and iteration order."""
    assert l == pv
    size = len(l)
    assert size == len(pv)
    for index in range(size):
        assert l[index] == pv[index]
    for start in range(size):
        for stop in range(start, size):
            assert l[start:stop] == pv[start:stop]
    assert l == list(iter(pv))
+
+
class PVectorBuilder(RuleBasedStateMachine):
    """
    Build a list and matching pvector step-by-step.

    In each step in the state machine we do same operation on a list and
    on a pvector, and then when we're done we compare the two.
    """
    # Bundle of (list, pvector) pairs shared between the rules below.
    sequences = Bundle("sequences")

    @rule(target=sequences, start=PVectorAndLists)
    def initial_value(self, start):
        """
        Some initial values generated by a hypothesis strategy.
        """
        return start

    @rule(target=sequences, former=sequences)
    @verify_inputs_unmodified
    def append(self, former):
        """
        Append an item to the pair of sequences.
        """
        l, pv = former
        obj = RefCountTracker()
        # Copy the list before mutating so the input pair stays unchanged
        # (checked by the decorator above).
        l2 = l[:]
        l2.append(obj)
        return l2, pv.append(obj)

    @rule(target=sequences, start=sequences, end=sequences)
    @verify_inputs_unmodified
    def extend(self, start, end):
        """
        Extend a pair of sequences with another pair of sequences.
        """
        l, pv = start
        l2, pv2 = end
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(l) + len(l2) < 50)
        l3 = l[:]
        l3.extend(l2)
        return l3, pv.extend(pv2)

    @rule(target=sequences, former=sequences, data=st.data())
    @verify_inputs_unmodified
    def remove(self, former, data):
        """
        Remove an item from the sequences.
        """
        l, pv = former
        # Only applicable to non-empty sequences.
        assume(l)
        l2 = l[:]
        i = data.draw(st.sampled_from(range(len(l))))
        del l2[i]
        return l2, pv.delete(i)

    @rule(target=sequences, former=sequences, data=st.data())
    @verify_inputs_unmodified
    def set(self, former, data):
        """
        Overwrite an item in the sequence.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = data.draw(st.sampled_from(range(len(l))))
        obj = RefCountTracker()
        l2[i] = obj
        return l2, pv.set(i, obj)

    @rule(target=sequences, former=sequences, data=st.data())
    @verify_inputs_unmodified
    def transform_set(self, former, data):
        """
        Transform the sequence by setting value.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = data.draw(st.sampled_from(range(len(l))))
        obj = RefCountTracker()
        l2[i] = obj
        return l2, pv.transform([i], obj)

    @rule(target=sequences, former=sequences, data=st.data())
    @verify_inputs_unmodified
    def transform_discard(self, former, data):
        """
        Transform the sequence by discarding a value.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = data.draw(st.sampled_from(range(len(l))))
        del l2[i]
        return l2, pv.transform([i], discard)

    @rule(target=sequences, former=sequences, data=st.data())
    @verify_inputs_unmodified
    def subset(self, former, data):
        """
        A subset of the previous sequence.
        """
        l, pv = former
        assume(l)
        i = data.draw(st.sampled_from(range(len(l))))
        j = data.draw(st.sampled_from(range(len(l))))
        return l[i:j], pv[i:j]

    @rule(pair=sequences)
    @verify_inputs_unmodified
    def compare(self, pair):
        """
        The list and pvector must match.
        """
        l, pv = pair
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(l) < 50)
        assert_equal(l, pv)
+
+
+PVectorBuilderTests = PVectorBuilder.TestCase
+
+
class EvolverItem(PClass):
    # One scenario tracked by the evolver state machine below.
    original_list = field()      # the list the pair started from
    original_pvector = field()   # the pvector the pair started from
    current_list = field()       # mutable list mirroring each evolver operation
    current_evolver = field()    # the pvector evolver under test
+
+
class PVectorEvolverBuilder(RuleBasedStateMachine):
    """
    Build a list and matching pvector evolver step-by-step.

    In each step in the state machine we do same operation on a list and
    on a pvector evolver, and then when we're done we compare the two.
    """
    # Bundle of EvolverItem instances shared between the rules below.
    sequences = Bundle("evolver_sequences")

    @rule(target=sequences, start=PVectorAndLists)
    def initial_value(self, start):
        """
        Some initial values generated by a hypothesis strategy.
        """
        l, pv = start
        return EvolverItem(original_list=l,
                           original_pvector=pv,
                           current_list=l[:],
                           current_evolver=pv.evolver())

    @rule(item=sequences)
    def append(self, item):
        """
        Append an item to the pair of sequences.
        """
        obj = RefCountTracker()
        item.current_list.append(obj)
        item.current_evolver.append(obj)

    @rule(start=sequences, end=sequences)
    def extend(self, start, end):
        """
        Extend a pair of sequences with another pair of sequences.
        """
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(start.current_list) + len(end.current_list) < 50)
        start.current_evolver.extend(end.current_list)
        start.current_list.extend(end.current_list)

    @rule(item=sequences, data=st.data())
    def delete(self, item, data):
        """
        Remove an item from the sequences.
        """
        # Only applicable to non-empty sequences.
        assume(item.current_list)
        i = data.draw(st.sampled_from(range(len(item.current_list))))
        del item.current_list[i]
        del item.current_evolver[i]

    @rule(item=sequences, data=st.data())
    def setitem(self, item, data):
        """
        Overwrite an item in the sequence using ``__setitem__``.
        """
        assume(item.current_list)
        i = data.draw(st.sampled_from(range(len(item.current_list))))
        obj = RefCountTracker()
        item.current_list[i] = obj
        item.current_evolver[i] = obj

    @rule(item=sequences, data=st.data())
    def set(self, item, data):
        """
        Overwrite an item in the sequence using ``set``.
        """
        assume(item.current_list)
        i = data.draw(st.sampled_from(range(len(item.current_list))))
        obj = RefCountTracker()
        item.current_list[i] = obj
        item.current_evolver.set(i, obj)

    @rule(item=sequences)
    def compare(self, item):
        """
        The list and pvector evolver must match.
        """
        item.current_evolver.is_dirty()
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(item.current_list) < 50)
        # original object unmodified
        assert item.original_list == item.original_pvector
        # evolver matches:
        for i in range(len(item.current_evolver)):
            assert item.current_list[i] == item.current_evolver[i]
        # persistent version matches
        assert_equal(item.current_list, item.current_evolver.persistent())
        # original object still unmodified
        assert item.original_list == item.original_pvector
+
+
+PVectorEvolverBuilderTests = PVectorEvolverBuilder.TestCase
diff --git a/contrib/python/pyrsistent/py3/tests/immutable_object_test.py b/contrib/python/pyrsistent/py3/tests/immutable_object_test.py
new file mode 100644
index 0000000000..11ff513cbc
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/immutable_object_test.py
@@ -0,0 +1,67 @@
+import pytest
+from pyrsistent import immutable
+
+class Empty(immutable(verbose=True)):
+ pass
+
+
+class Single(immutable('x')):
+ pass
+
+
+class FrozenMember(immutable('x, y_')):
+ pass
+
+
+class DerivedWithNew(immutable(['x', 'y'])):
+ def __new__(cls, x, y):
+ return super(DerivedWithNew, cls).__new__(cls, x, y)
+
+
+def test_instantiate_object_with_no_members():
+ t = Empty()
+ t2 = t.set()
+
+ assert t is t2
+
+
+def test_assign_non_existing_attribute():
+ t = Empty()
+
+ with pytest.raises(AttributeError):
+ t.set(a=1)
+
+
+def test_basic_instantiation():
+ t = Single(17)
+
+ assert t.x == 17
+ assert str(t) == 'Single(x=17)'
+
+
+def test_cannot_modify_member():
+ t = Single(17)
+
+ with pytest.raises(AttributeError):
+ t.x = 18
+
def test_basic_replace():
    """set() returns a new object; the original keeps its value."""
    original = Single(17)
    replaced = original.set(x=18)

    assert original.x == 17
    assert replaced.x == 18
+
+
+def test_cannot_replace_frozen_member():
+ t = FrozenMember(17, 18)
+
+ with pytest.raises(AttributeError):
+ t.set(y_=18)
+
+
+def test_derived_class_with_new():
+ d = DerivedWithNew(1, 2)
+ d2 = d.set(x=3)
+
+ assert d2.x == 3
diff --git a/contrib/python/pyrsistent/py3/tests/list_test.py b/contrib/python/pyrsistent/py3/tests/list_test.py
new file mode 100644
index 0000000000..ccbd83ba97
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/list_test.py
@@ -0,0 +1,209 @@
+import pickle
+import pytest
+from pyrsistent import plist, l
+
+
+def test_literalish_works():
+ assert l(1, 2, 3) == plist([1, 2, 3])
+
+
+def test_first_and_rest():
+ pl = plist([1, 2])
+ assert pl.first == 1
+ assert pl.rest.first == 2
+ assert pl.rest.rest is plist()
+
+
+def test_instantiate_large_list():
+ assert plist(range(1000)).first == 0
+
+
+def test_iteration():
+ assert list(plist()) == []
+ assert list(plist([1, 2, 3])) == [1, 2, 3]
+
+
+def test_cons():
+ assert plist([1, 2, 3]).cons(0) == plist([0, 1, 2, 3])
+
+
+def test_cons_empty_list():
+ assert plist().cons(0) == plist([0])
+
+
+def test_truthiness():
+ assert plist([1])
+ assert not plist()
+
+
+def test_len():
+ assert len(plist([1, 2, 3])) == 3
+ assert len(plist()) == 0
+
+
+def test_first_illegal_on_empty_list():
+ with pytest.raises(AttributeError):
+ plist().first
+
+
+def test_rest_return_self_on_empty_list():
+ assert plist().rest is plist()
+
+
def test_reverse():
    """Both reverse() and reversed() produce the reversed plist."""
    expected = plist([3, 2, 1])
    assert plist([1, 2, 3]).reverse() == expected
    assert reversed(plist([1, 2, 3])) == expected

    # Empty list reverses to itself.
    assert plist().reverse() == plist()
    assert reversed(plist()) == plist()
+
+
+def test_inequality():
+ assert plist([1, 2]) != plist([1, 3])
+ assert plist([1, 2]) != plist([1, 2, 3])
+ assert plist() != plist([1, 2, 3])
+
+
+def test_repr():
+ assert str(plist()) == "plist([])"
+ assert str(plist([1, 2, 3])) == "plist([1, 2, 3])"
+
+
+def test_indexing():
+ assert plist([1, 2, 3])[2] == 3
+ assert plist([1, 2, 3])[-1] == 3
+
+
+def test_indexing_on_empty_list():
+ with pytest.raises(IndexError):
+ plist()[0]
+
+
+def test_index_out_of_range():
+ with pytest.raises(IndexError):
+ plist([1, 2])[2]
+
+ with pytest.raises(IndexError):
+ plist([1, 2])[-3]
+
+def test_index_invalid_type():
+ with pytest.raises(TypeError) as e:
+ plist([1, 2, 3])['foo'] # type: ignore
+
+ assert 'cannot be interpreted' in str(e.value)
+
+
+def test_slicing_take():
+ assert plist([1, 2, 3])[:2] == plist([1, 2])
+
+
+def test_slicing_take_out_of_range():
+ assert plist([1, 2, 3])[:20] == plist([1, 2, 3])
+
+
+def test_slicing_drop():
+ li = plist([1, 2, 3])
+ assert li[1:] is li.rest
+
+
+def test_slicing_drop_out_of_range():
+ assert plist([1, 2, 3])[3:] is plist()
+
+
+def test_contains():
+ assert 2 in plist([1, 2, 3])
+ assert 4 not in plist([1, 2, 3])
+ assert 1 not in plist()
+
+
+def test_count():
+ assert plist([1, 2, 1]).count(1) == 2
+ assert plist().count(1) == 0
+
+
+def test_index():
+ assert plist([1, 2, 3]).index(3) == 2
+
+
+def test_index_item_not_found():
+ with pytest.raises(ValueError):
+ plist().index(3)
+
+ with pytest.raises(ValueError):
+ plist([1, 2]).index(3)
+
+
+def test_pickling_empty_list():
+ assert pickle.loads(pickle.dumps(plist(), -1)) == plist()
+
+
+def test_pickling_non_empty_list():
+ assert pickle.loads(pickle.dumps(plist([1, 2, 3]), -1)) == plist([1, 2, 3])
+
+
+def test_comparison():
+ assert plist([1, 2]) < plist([1, 2, 3])
+ assert plist([2, 1]) > plist([1, 2, 3])
+ assert plist() < plist([1])
+ assert plist([1]) > plist()
+
+
+def test_comparison_with_other_type():
+ assert plist() != []
+
+
+def test_hashing():
+ assert hash(plist([1, 2])) == hash(plist([1, 2]))
+ assert hash(plist([1, 2])) != hash(plist([2, 1]))
+
+
def test_split():
    """split(n) divides the plist after the first n elements."""
    left, right = plist([1, 2, 3, 4, 5]).split(3)
    assert left == plist([1, 2, 3])
    assert right == plist([4, 5])
+
+
+def test_split_no_split_occurred():
+ x = plist([1, 2])
+ left_list, right_list = x.split(2)
+ assert left_list is x
+ assert right_list is plist()
+
+
+def test_split_empty_list():
+ left_list, right_list = plist().split(2)
+ assert left_list == plist()
+ assert right_list == plist()
+
+
+def test_remove():
+ assert plist([1, 2, 3, 2]).remove(2) == plist([1, 3, 2])
+ assert plist([1, 2, 3]).remove(1) == plist([2, 3])
+ assert plist([1, 2, 3]).remove(3) == plist([1, 2])
+
+
+def test_remove_missing_element():
+ with pytest.raises(ValueError):
+ plist([1, 2]).remove(3)
+
+ with pytest.raises(ValueError):
+ plist().remove(2)
+
+
+def test_mcons():
+ assert plist([1, 2]).mcons([3, 4]) == plist([4, 3, 1, 2])
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(plist())
+ weakref.ref(plist([1, 2]))
+
+
+def test_iterable():
+ """
+ PLists can be created from iterables even though they can't be len()
+ hinted.
+ """
+
+ assert plist(iter("a")) == plist(iter("a"))
diff --git a/contrib/python/pyrsistent/py3/tests/map_test.py b/contrib/python/pyrsistent/py3/tests/map_test.py
new file mode 100644
index 0000000000..ae2317b233
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/map_test.py
@@ -0,0 +1,551 @@
+from collections import namedtuple
+from collections.abc import Mapping, Hashable
+from operator import add
+import pytest
+from pyrsistent import pmap, m
+import pickle
+
+
+def test_instance_of_hashable():
+ assert isinstance(m(), Hashable)
+
+
+def test_instance_of_map():
+ assert isinstance(m(), Mapping)
+
+
+def test_literalish_works():
+ assert m() is pmap()
+ assert m(a=1, b=2) == pmap({'a': 1, 'b': 2})
+
+
+def test_empty_initialization():
+ a_map = pmap()
+ assert len(a_map) == 0
+
+
+def test_initialization_with_one_element():
+ the_map = pmap({'a': 2})
+ assert len(the_map) == 1
+ assert the_map['a'] == 2
+ assert the_map.a == 2
+ assert 'a' in the_map
+
+ assert the_map is the_map.discard('b')
+
+ empty_map = the_map.remove('a')
+ assert len(empty_map) == 0
+ assert 'a' not in empty_map
+
+
+def test_get_non_existing_raises_key_error():
+ m1 = m()
+ with pytest.raises(KeyError) as error:
+ m1['foo']
+
+ assert str(error.value) == "'foo'"
+
+
+def test_remove_non_existing_element_raises_key_error():
+ m1 = m(a=1)
+
+ with pytest.raises(KeyError) as error:
+ m1.remove('b')
+
+ assert str(error.value) == "'b'"
+
+
+def test_various_iterations():
+ assert {'a', 'b'} == set(m(a=1, b=2))
+ assert ['a', 'b'] == sorted(m(a=1, b=2).keys())
+
+ assert {1, 2} == set(m(a=1, b=2).itervalues())
+ assert [1, 2] == sorted(m(a=1, b=2).values())
+
+ assert {('a', 1), ('b', 2)} == set(m(a=1, b=2).iteritems())
+ assert {('a', 1), ('b', 2)} == set(m(a=1, b=2).items())
+
+ pm = pmap({k: k for k in range(100)})
+ assert len(pm) == len(pm.keys())
+ assert len(pm) == len(pm.values())
+ assert len(pm) == len(pm.items())
+ ks = pm.keys()
+ assert all(k in pm for k in ks)
+ assert all(k in ks for k in ks)
+ us = pm.items()
+ assert all(pm[k] == v for (k, v) in us)
+ vs = pm.values()
+ assert all(v in vs for v in vs)
+
+
+def test_initialization_with_two_elements():
+ map1 = pmap({'a': 2, 'b': 3})
+ assert len(map1) == 2
+ assert map1['a'] == 2
+ assert map1['b'] == 3
+
+ map2 = map1.remove('a')
+ assert 'a' not in map2
+ assert map2['b'] == 3
+
+
+def test_initialization_with_many_elements():
+ init_dict = dict([(str(x), x) for x in range(1700)])
+ the_map = pmap(init_dict)
+
+ assert len(the_map) == 1700
+ assert the_map['16'] == 16
+ assert the_map['1699'] == 1699
+ assert the_map.set('256', 256) is the_map
+
+ new_map = the_map.remove('1600')
+ assert len(new_map) == 1699
+ assert '1600' not in new_map
+ assert new_map['1601'] == 1601
+
+ # Some NOP properties
+ assert new_map.discard('18888') is new_map
+ assert '19999' not in new_map
+ assert new_map['1500'] == 1500
+ assert new_map.set('1500', new_map['1500']) is new_map
+
+
+def test_access_non_existing_element():
+ map1 = pmap()
+ assert len(map1) == 0
+
+ map2 = map1.set('1', 1)
+ assert '1' not in map1
+ assert map2['1'] == 1
+ assert '2' not in map2
+
+
+def test_overwrite_existing_element():
+ map1 = pmap({'a': 2})
+ map2 = map1.set('a', 3)
+
+ assert len(map2) == 1
+ assert map2['a'] == 3
+
+
+def test_hash():
+ x = m(a=1, b=2, c=3)
+ y = m(a=1, b=2, c=3)
+
+ assert hash(x) == hash(y)
+
+
+def test_same_hash_when_content_the_same_but_underlying_vector_size_differs():
+ x = pmap(dict((x, x) for x in range(1000)))
+ y = pmap({10: 10, 200: 200, 700: 700})
+
+ for z in x:
+ if z not in y:
+ x = x.remove(z)
+
+ assert x == y
+ assert hash(x) == hash(y)
+
+
class HashabilityControlled(object):
    """Test helper whose hashability can be toggled via the `hashable` flag."""
    hashable = True

    def __hash__(self):
        if not self.hashable:
            raise ValueError("I am not currently hashable.")
        return 4  # Proven random
+
+
+def test_map_does_not_hash_values_on_second_hash_invocation():
+ hashable = HashabilityControlled()
+ x = pmap(dict(el=hashable))
+ hash(x)
+ hashable.hashable = False
+ hash(x)
+
+
+def test_equal():
+ x = m(a=1, b=2, c=3)
+ y = m(a=1, b=2, c=3)
+
+ assert x == y
+ assert not (x != y)
+
+ assert y == x
+ assert not (y != x)
+
+
+def test_equal_to_dict():
+ x = m(a=1, b=2, c=3)
+ y = dict(a=1, b=2, c=3)
+
+ assert x == y
+ assert not (x != y)
+
+ assert y == x
+ assert not (y != x)
+
+
+def test_equal_with_different_bucket_sizes():
+ x = pmap({'a': 1, 'b': 2}, 50)
+ y = pmap({'a': 1, 'b': 2}, 10)
+
+ assert x == y
+ assert not (x != y)
+
+ assert y == x
+ assert not (y != x)
+
+
+def test_equal_with_different_insertion_order():
+ x = pmap([(i, i) for i in range(50)], 10)
+ y = pmap([(i, i) for i in range(49, -1, -1)], 10)
+
+ assert x == y
+ assert not (x != y)
+
+ assert y == x
+ assert not (y != x)
+
+
+def test_not_equal():
+ x = m(a=1, b=2, c=3)
+ y = m(a=1, b=2)
+
+ assert x != y
+ assert not (x == y)
+
+ assert y != x
+ assert not (y == x)
+
+
+def test_not_equal_to_dict():
+ x = m(a=1, b=2, c=3)
+ y = dict(a=1, b=2, d=4)
+
+ assert x != y
+ assert not (x == y)
+
+ assert y != x
+ assert not (y == x)
+
+
+def test_update_with_multiple_arguments():
+ # If same value is present in multiple sources, the rightmost is used.
+ x = m(a=1, b=2, c=3)
+ y = x.update(m(b=4, c=5), {'c': 6})
+
+ assert y == m(a=1, b=4, c=6)
+
+
+def test_update_one_argument():
+ x = m(a=1)
+
+ assert x.update(m(b=2)) == m(a=1, b=2)
+
+
+def test_update_no_arguments():
+ x = m(a=1)
+
+ assert x.update() is x
+
+
+def test_addition():
+ assert m(x=1, y=2) + m(y=3, z=4) == m(x=1, y=3, z=4)
+
+
+def test_union_operator():
+ assert m(x=1, y=2) | m(y=3, z=4) == m(x=1, y=3, z=4)
+
+
+def test_transform_base_case():
+ # Works as set when called with only one key
+ x = m(a=1, b=2)
+
+ assert x.transform(['a'], 3) == m(a=3, b=2)
+
+
+def test_transform_nested_maps():
+ x = m(a=1, b=m(c=3, d=m(e=6, f=7)))
+
+ assert x.transform(['b', 'd', 'e'], 999) == m(a=1, b=m(c=3, d=m(e=999, f=7)))
+
+
+def test_transform_levels_missing():
+ x = m(a=1, b=m(c=3))
+
+ assert x.transform(['b', 'd', 'e'], 999) == m(a=1, b=m(c=3, d=m(e=999)))
+
+
class HashDummy(object):
    """Test helper whose instances all share one hash but compare by identity,
    forcing hash collisions in the map under test."""

    def __hash__(self):
        return 6528039219058920  # Hash of '33'

    def __eq__(self, other):
        # Identity equality: distinct instances never compare equal.
        return self is other
+
+
+def test_hash_collision_is_correctly_resolved():
+ dummy1 = HashDummy()
+ dummy2 = HashDummy()
+ dummy3 = HashDummy()
+ dummy4 = HashDummy()
+
+ map1 = pmap({dummy1: 1, dummy2: 2, dummy3: 3})
+ assert map1[dummy1] == 1
+ assert map1[dummy2] == 2
+ assert map1[dummy3] == 3
+ assert dummy4 not in map1
+
+ keys = set()
+ values = set()
+ for k, v in map1.iteritems():
+ keys.add(k)
+ values.add(v)
+
+ assert keys == {dummy1, dummy2, dummy3}
+ assert values == {1, 2, 3}
+
+ map2 = map1.set(dummy1, 11)
+ assert map2[dummy1] == 11
+
+ # Re-use existing structure when inserted element is the same
+ assert map2.set(dummy1, 11) is map2
+
+ map3 = map1.set('a', 22)
+ assert map3['a'] == 22
+ assert map3[dummy3] == 3
+
+ # Remove elements
+ map4 = map1.discard(dummy2)
+ assert len(map4) == 2
+ assert map4[dummy1] == 1
+ assert dummy2 not in map4
+ assert map4[dummy3] == 3
+
+ assert map1.discard(dummy4) is map1
+
+ # Empty map handling
+ empty_map = map4.remove(dummy1).remove(dummy3)
+ assert len(empty_map) == 0
+ assert empty_map.discard(dummy1) is empty_map
+
+
+def test_bitmap_indexed_iteration():
+ a_map = pmap({'a': 2, 'b': 1})
+ keys = set()
+ values = set()
+
+ count = 0
+ for k, v in a_map.iteritems():
+ count += 1
+ keys.add(k)
+ values.add(v)
+
+ assert count == 2
+ assert keys == {'a', 'b'}
+ assert values == {2, 1}
+
+
+def test_iteration_with_many_elements():
+ values = list(range(0, 2000))
+ keys = [str(x) for x in values]
+ init_dict = dict(zip(keys, values))
+
+ hash_dummy1 = HashDummy()
+ hash_dummy2 = HashDummy()
+
+ # Throw in a couple of hash collision nodes to tests
+ # those properly as well
+ init_dict[hash_dummy1] = 12345
+ init_dict[hash_dummy2] = 54321
+ a_map = pmap(init_dict)
+
+ actual_values = set()
+ actual_keys = set()
+
+ for k, v in a_map.iteritems():
+ actual_values.add(v)
+ actual_keys.add(k)
+
+ assert actual_keys == set(keys + [hash_dummy1, hash_dummy2])
+ assert actual_values == set(values + [12345, 54321])
+
+
+def test_str():
+ assert str(pmap({1: 2, 3: 4})) == "pmap({1: 2, 3: 4})"
+
+
+def test_empty_truthiness():
+ assert m(a=1)
+ assert not m()
+
+
+def test_update_with():
+ assert m(a=1).update_with(add, m(a=2, b=4)) == m(a=3, b=4)
+ assert m(a=1).update_with(lambda l, r: l, m(a=2, b=4)) == m(a=1, b=4)
+
+ def map_add(l, r):
+ return dict(list(l.items()) + list(r.items()))
+
+ assert m(a={'c': 3}).update_with(map_add, m(a={'d': 4})) == m(a={'c': 3, 'd': 4})
+
+
+def test_pickling_empty_map():
+ assert pickle.loads(pickle.dumps(m(), -1)) == m()
+
+
+def test_pickling_non_empty_map():
+ assert pickle.loads(pickle.dumps(m(a=1, b=2), -1)) == m(a=1, b=2)
+
+
+def test_set_with_relocation():
+ x = pmap({'a': 1000}, pre_size=1)
+ x = x.set('b', 3000)
+ x = x.set('c', 4000)
+ x = x.set('d', 5000)
+ x = x.set('d', 6000)
+
+ assert len(x) == 4
+ assert x == pmap({'a': 1000, 'b': 3000, 'c': 4000, 'd': 6000})
+
+
+def test_evolver_simple_update():
+ x = m(a=1000, b=2000)
+ e = x.evolver()
+ e['b'] = 3000
+
+ assert e['b'] == 3000
+ assert e.persistent()['b'] == 3000
+ assert x['b'] == 2000
+
+
+def test_evolver_update_with_relocation():
+ x = pmap({'a': 1000}, pre_size=1)
+ e = x.evolver()
+ e['b'] = 3000
+ e['c'] = 4000
+ e['d'] = 5000
+ e['d'] = 6000
+
+ assert len(e) == 4
+ assert e.persistent() == pmap({'a': 1000, 'b': 3000, 'c': 4000, 'd': 6000})
+
+
+def test_evolver_set_with_reallocation_edge_case():
+ # Demonstrates a bug in evolver that also affects updates. Under certain
+ # circumstances, the result of `x.update(y)` will **not** have all the
+ # keys from `y`.
+ foo = object()
+ x = pmap({'a': foo}, pre_size=1)
+ e = x.evolver()
+ e['b'] = 3000
+ # Bug is triggered when we do a reallocation and the new value is
+ # identical to the old one.
+ e['a'] = foo
+
+ y = e.persistent()
+ assert 'b' in y
+ assert y is e.persistent()
+
+
+def test_evolver_remove_element():
+ e = m(a=1000, b=2000).evolver()
+ assert 'a' in e
+
+ del e['a']
+ assert 'a' not in e
+
+
+def test_evolver_remove_element_not_present():
+ e = m(a=1000, b=2000).evolver()
+
+ with pytest.raises(KeyError) as error:
+ del e['c']
+
+ assert str(error.value) == "'c'"
+
+
+def test_copy_returns_reference_to_self():
+ m1 = m(a=10)
+ assert m1.copy() is m1
+
+
+def test_dot_access_of_non_existing_element_raises_attribute_error():
+ m1 = m(a=10)
+
+ with pytest.raises(AttributeError) as error:
+ m1.b
+
+ error_message = str(error.value)
+
+ assert "'b'" in error_message
+ assert type(m1).__name__ in error_message
+
+
+def test_pmap_unorderable():
+ with pytest.raises(TypeError):
+ _ = m(a=1) < m(b=2)
+
+ with pytest.raises(TypeError):
+ _ = m(a=1) <= m(b=2)
+
+ with pytest.raises(TypeError):
+ _ = m(a=1) > m(b=2)
+
+ with pytest.raises(TypeError):
+ _ = m(a=1) >= m(b=2)
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(m(a=1))
+
+
+def test_insert_and_get_many_elements():
+ # This test case triggers reallocation of the underlying bucket structure.
+ a_map = m()
+ for x in range(1000):
+ a_map = a_map.set(str(x), x)
+
+ assert len(a_map) == 1000
+ for x in range(1000):
+ assert a_map[str(x)] == x, x
+
+
+def test_iterable():
+ """
+ PMaps can be created from iterables even though they can't be len() hinted.
+ """
+
+ assert pmap(iter([("a", "b")])) == pmap([("a", "b")])
+
+
class BrokenPerson(namedtuple('Person', 'name')):
    # Deliberately "broken": defines __eq__ without a matching __ne__,
    # used to reproduce pyrsistent issue #268.
    def __eq__(self, other):
        same_class = self.__class__ == other.__class__
        return same_class and self.name == other.name

    def __hash__(self):
        return hash(self.name)
+
+
class BrokenItem(namedtuple('Item', 'name')):
    # Same deliberate asymmetry as BrokenPerson: __eq__ without __ne__.
    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        return self.name == other.name

    def __hash__(self):
        return hash(self.name)
+
+
+def test_pmap_removal_with_broken_classes_deriving_from_namedtuple():
+ """
+ The two classes above implement __eq__ but also would need to implement __ne__ to compare
+ consistently. See issue https://github.com/tobgu/pyrsistent/issues/268 for details.
+ """
+ s = pmap({BrokenPerson('X'): 2, BrokenItem('X'): 3})
+ s = s.remove(BrokenPerson('X'))
+
+ # Both items are removed due to how they are compared for inequality
+ assert BrokenPerson('X') not in s
+ assert BrokenItem('X') in s
+ assert len(s) == 1
diff --git a/contrib/python/pyrsistent/py3/tests/memory_profiling.py b/contrib/python/pyrsistent/py3/tests/memory_profiling.py
new file mode 100644
index 0000000000..69036520cd
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/memory_profiling.py
@@ -0,0 +1,48 @@
+"""
+Script to try do detect any memory leaks that may be lurking in the C implementation of the PVector.
+"""
+import inspect
+import sys
+import time
+import memory_profiler
+import vector_test
+from pyrsistent import pvector
+
+try:
+ import pvectorc
+except ImportError:
+ print("No C implementation of PVector available, terminating")
+ sys.exit()
+
+
+PROFILING_DURATION = 2.0
+
+
+def run_function(fn):
+ stop = time.time() + PROFILING_DURATION
+ while time.time() < stop:
+ fn(pvector)
+
+
+def detect_memory_leak(samples):
+ # Do not allow a memory usage difference larger than 5% between the beginning and the end.
+ # Skip the first samples to get rid of the build up period and the last sample since it seems
+ # a little less precise
+ return abs(1 - (sum(samples[5:8]) / sum(samples[-4:-1]))) > 0.05
+
+
+def profile_tests():
+    # Run every ``test_*`` function from vector_test that accepts the
+    # ``pvector`` fixture argument under memory_profiler and assert that no
+    # leak is detected while it loops for PROFILING_DURATION seconds.
+    test_functions = [fn for fn in inspect.getmembers(vector_test, inspect.isfunction)
+                      if fn[0].startswith('test_')]
+
+    for name, fn in test_functions:
+        # There are a couple of tests that are not run for the C implementation, skip those
+        fn_args = inspect.getfullargspec(fn)[0]
+        if 'pvector' in fn_args:
+            print('Executing %s' % name)
+            # Sample memory usage every 100 ms while run_function drives the test.
+            result = memory_profiler.memory_usage((run_function, (fn,), {}), interval=.1)
+            assert not detect_memory_leak(result), (name, result)
+
+
+if __name__ == "__main__":
+ profile_tests() \ No newline at end of file
diff --git a/contrib/python/pyrsistent/py3/tests/record_test.py b/contrib/python/pyrsistent/py3/tests/record_test.py
new file mode 100644
index 0000000000..95fc55b8f1
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/record_test.py
@@ -0,0 +1,878 @@
+import pickle
+import datetime
+import pytest
+import uuid
+from pyrsistent import (
+ PRecord, field, InvariantException, ny, pset, PSet, CheckedPVector,
+ PTypeError, pset_field, pvector_field, pmap_field, pmap, PMap,
+ pvector, PVector, v, m)
+
+
+class ARecord(PRecord):
+ x = field(type=(int, float))
+ y = field()
+
+
+class Hierarchy(PRecord):
+ point1 = field(ARecord)
+ point2 = field(ARecord)
+ points = pvector_field(ARecord)
+
+
+class RecordContainingContainers(PRecord):
+ map = pmap_field(str, str)
+ vec = pvector_field(str)
+ set = pset_field(str)
+
+
+class UniqueThing(PRecord):
+ id = field(type=uuid.UUID, factory=uuid.UUID)
+
+
+class Something(object):
+ pass
+
+class Another(object):
+ pass
+
+def test_create_ignore_extra_true():
+ h = Hierarchy.create(
+ {'point1': {'x': 1, 'y': 'foo', 'extra_field_0': 'extra_data_0'},
+ 'point2': {'x': 1, 'y': 'foo', 'extra_field_1': 'extra_data_1'},
+ 'extra_field_2': 'extra_data_2',
+ }, ignore_extra=True
+ )
+ assert h
+
+
+def test_create_ignore_extra_true_sequence_hierarchy():
+ h = Hierarchy.create(
+ {'point1': {'x': 1, 'y': 'foo', 'extra_field_0': 'extra_data_0'},
+ 'point2': {'x': 1, 'y': 'foo', 'extra_field_1': 'extra_data_1'},
+ 'points': [{'x': 1, 'y': 'foo', 'extra_field_2': 'extra_data_2'},
+ {'x': 1, 'y': 'foo', 'extra_field_3': 'extra_data_3'}],
+ 'extra_field____': 'extra_data_2',
+ }, ignore_extra=True
+ )
+ assert h
+
+
+def test_ignore_extra_for_pvector_field():
+ class HierarchyA(PRecord):
+ points = pvector_field(ARecord, optional=False)
+
+ class HierarchyB(PRecord):
+ points = pvector_field(ARecord, optional=True)
+
+ point_object = {'x': 1, 'y': 'foo', 'extra_field': 69}
+
+ h = HierarchyA.create({'points': [point_object]}, ignore_extra=True)
+ assert h
+ h = HierarchyB.create({'points': [point_object]}, ignore_extra=True)
+ assert h
+
+
+def test_create():
+ r = ARecord(x=1, y='foo')
+ assert r.x == 1
+ assert r.y == 'foo'
+ assert isinstance(r, ARecord)
+
+
+def test_create_ignore_extra():
+ r = ARecord.create({'x': 1, 'y': 'foo', 'z': None}, ignore_extra=True)
+ assert r.x == 1
+ assert r.y == 'foo'
+ assert isinstance(r, ARecord)
+
+
+def test_create_ignore_extra_false():
+ with pytest.raises(AttributeError):
+ _ = ARecord.create({'x': 1, 'y': 'foo', 'z': None})
+
+
+def test_correct_assignment():
+ r = ARecord(x=1, y='foo')
+ r2 = r.set('x', 2.0)
+ r3 = r2.set('y', 'bar')
+
+ assert r2 == {'x': 2.0, 'y': 'foo'}
+ assert r3 == {'x': 2.0, 'y': 'bar'}
+ assert isinstance(r3, ARecord)
+
+
+def test_direct_assignment_not_possible():
+ with pytest.raises(AttributeError):
+ ARecord().x = 1
+
+
+def test_cannot_assign_undeclared_fields():
+ with pytest.raises(AttributeError):
+ ARecord().set('z', 5)
+
+
+def test_cannot_assign_wrong_type_to_fields():
+ try:
+ ARecord().set('x', 'foo')
+ assert False
+ except PTypeError as e:
+ assert e.source_class == ARecord
+ assert e.field == 'x'
+ assert e.expected_types == set([int, float])
+ assert e.actual_type is type('foo')
+
+
+def test_cannot_construct_with_undeclared_fields():
+ with pytest.raises(AttributeError):
+ ARecord(z=5)
+
+
+def test_cannot_construct_with_fields_of_wrong_type():
+ with pytest.raises(TypeError):
+ ARecord(x='foo')
+
+
+def test_support_record_inheritance():
+ class BRecord(ARecord):
+ z = field()
+
+ r = BRecord(x=1, y='foo', z='bar')
+
+ assert isinstance(r, BRecord)
+ assert isinstance(r, ARecord)
+ assert r == {'x': 1, 'y': 'foo', 'z': 'bar'}
+
+
+def test_single_type_spec():
+ class A(PRecord):
+ x = field(type=int)
+
+ r = A(x=1)
+ assert r.x == 1
+
+ with pytest.raises(TypeError):
+ r.set('x', 'foo')
+
+
+def test_remove():
+ r = ARecord(x=1, y='foo')
+ r2 = r.remove('y')
+
+ assert isinstance(r2, ARecord)
+ assert r2 == {'x': 1}
+
+
+def test_remove_non_existing_member():
+ r = ARecord(x=1, y='foo')
+
+ with pytest.raises(KeyError):
+ r.remove('z')
+
+
+def test_field_invariant_must_hold():
+ class BRecord(PRecord):
+ x = field(invariant=lambda x: (x > 1, 'x too small'))
+ y = field(mandatory=True)
+
+ try:
+ BRecord(x=1)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('x too small',)
+ assert e.missing_fields == ('BRecord.y',)
+
+
+def test_global_invariant_must_hold():
+ class BRecord(PRecord):
+ __invariant__ = lambda r: (r.x <= r.y, 'y smaller than x')
+ x = field()
+ y = field()
+
+ BRecord(x=1, y=2)
+
+ try:
+ BRecord(x=2, y=1)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('y smaller than x',)
+ assert e.missing_fields == ()
+
+
+def test_set_multiple_fields():
+ a = ARecord(x=1, y='foo')
+ b = a.set(x=2, y='bar')
+
+ assert b == {'x': 2, 'y': 'bar'}
+
+
+def test_initial_value():
+ class BRecord(PRecord):
+ x = field(initial=1)
+ y = field(initial=2)
+
+ a = BRecord()
+ assert a.x == 1
+ assert a.y == 2
+
+
+def test_enum_field():
+ try:
+ from enum import Enum
+ except ImportError:
+ return # Enum not supported in this environment
+
+ class ExampleEnum(Enum):
+ x = 1
+ y = 2
+
+ class RecordContainingEnum(PRecord):
+ enum_field = field(type=ExampleEnum)
+
+ r = RecordContainingEnum(enum_field=ExampleEnum.x)
+ assert r.enum_field == ExampleEnum.x
+
+def test_type_specification_must_be_a_type():
+ with pytest.raises(TypeError):
+ class BRecord(PRecord):
+ x = field(type=1)
+
+
+def test_initial_must_be_of_correct_type():
+ with pytest.raises(TypeError):
+ class BRecord(PRecord):
+ x = field(type=int, initial='foo')
+
+
+def test_invariant_must_be_callable():
+ with pytest.raises(TypeError):
+ class BRecord(PRecord):
+ x = field(invariant='foo') # type: ignore
+
+
+def test_global_invariants_are_inherited():
+ class BRecord(PRecord):
+ __invariant__ = lambda r: (r.x % r.y == 0, 'modulo')
+ x = field()
+ y = field()
+
+ class CRecord(BRecord):
+ __invariant__ = lambda r: (r.x > r.y, 'size')
+
+ try:
+ CRecord(x=5, y=3)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == ('modulo',)
+
+
+def test_global_invariants_must_be_callable():
+ with pytest.raises(TypeError):
+ class CRecord(PRecord):
+ __invariant__ = 1
+
+
+def test_repr():
+ r = ARecord(x=1, y=2)
+ assert repr(r) == 'ARecord(x=1, y=2)' or repr(r) == 'ARecord(y=2, x=1)'
+
+
+def test_factory():
+ class BRecord(PRecord):
+ x = field(type=int, factory=int)
+
+ assert BRecord(x=2.5) == {'x': 2}
+
+
+def test_factory_must_be_callable():
+ with pytest.raises(TypeError):
+ class BRecord(PRecord):
+ x = field(type=int, factory=1) # type: ignore
+
+
+def test_nested_record_construction():
+ class BRecord(PRecord):
+ x = field(int, factory=int)
+
+ class CRecord(PRecord):
+ a = field()
+ b = field(type=BRecord)
+
+ r = CRecord.create({'a': 'foo', 'b': {'x': '5'}})
+ assert isinstance(r, CRecord)
+ assert isinstance(r.b, BRecord)
+ assert r == {'a': 'foo', 'b': {'x': 5}}
+
+
+def test_pickling():
+ x = ARecord(x=2.0, y='bar')
+ y = pickle.loads(pickle.dumps(x, -1))
+
+ assert x == y
+ assert isinstance(y, ARecord)
+
+def test_supports_pickling_with_typed_container_fields():
+ obj = RecordContainingContainers(
+ map={'foo': 'bar'}, set=['hello', 'there'], vec=['a', 'b'])
+ obj2 = pickle.loads(pickle.dumps(obj))
+ assert obj == obj2
+
+def test_all_invariant_errors_reported():
+ class BRecord(PRecord):
+ x = field(factory=int, invariant=lambda x: (x >= 0, 'x negative'))
+ y = field(mandatory=True)
+
+ class CRecord(PRecord):
+ a = field(invariant=lambda x: (x != 0, 'a zero'))
+ b = field(type=BRecord)
+
+ try:
+ CRecord.create({'a': 0, 'b': {'x': -5}})
+ assert False
+ except InvariantException as e:
+ assert set(e.invariant_errors) == set(['x negative', 'a zero'])
+ assert e.missing_fields == ('BRecord.y',)
+
+
+def test_precord_factory_method_is_idempotent():
+ class BRecord(PRecord):
+ x = field()
+ y = field()
+
+ r = BRecord(x=1, y=2)
+ assert BRecord.create(r) is r
+
+
+def test_serialize():
+ class BRecord(PRecord):
+ d = field(type=datetime.date,
+ factory=lambda d: datetime.datetime.strptime(d, "%d%m%Y").date(),
+ serializer=lambda format, d: d.strftime('%Y-%m-%d') if format == 'ISO' else d.strftime('%d%m%Y'))
+
+ assert BRecord(d='14012015').serialize('ISO') == {'d': '2015-01-14'}
+ assert BRecord(d='14012015').serialize('other') == {'d': '14012015'}
+
+
+def test_nested_serialize():
+ class BRecord(PRecord):
+ d = field(serializer=lambda format, d: format)
+
+ class CRecord(PRecord):
+ b = field()
+
+ serialized = CRecord(b=BRecord(d='foo')).serialize('bar')
+
+ assert serialized == {'b': {'d': 'bar'}}
+ assert isinstance(serialized, dict)
+
+
+def test_serializer_must_be_callable():
+ with pytest.raises(TypeError):
+ class CRecord(PRecord):
+ x = field(serializer=1) # type: ignore
+
+
+def test_transform_without_update_returns_same_precord():
+ r = ARecord(x=2.0, y='bar')
+ assert r.transform([ny], lambda x: x) is r
+
+
+class Application(PRecord):
+ name = field(type=str)
+ image = field(type=str)
+
+
+class ApplicationVector(CheckedPVector):
+ __type__ = Application
+
+
+class Node(PRecord):
+ applications = field(type=ApplicationVector)
+
+
+def test_nested_create_serialize():
+ node = Node(applications=[Application(name='myapp', image='myimage'),
+ Application(name='b', image='c')])
+
+ node2 = Node.create({'applications': [{'name': 'myapp', 'image': 'myimage'},
+ {'name': 'b', 'image': 'c'}]})
+
+ assert node == node2
+
+ serialized = node.serialize()
+ restored = Node.create(serialized)
+
+ assert restored == node
+
+
+def test_pset_field_initial_value():
+ """
+ ``pset_field`` results in initial value that is empty.
+ """
+ class Record(PRecord):
+ value = pset_field(int)
+ assert Record() == Record(value=[])
+
+def test_pset_field_custom_initial():
+ """
+ A custom initial value can be passed in.
+ """
+ class Record(PRecord):
+ value = pset_field(int, initial=(1, 2))
+ assert Record() == Record(value=[1, 2])
+
+def test_pset_field_factory():
+ """
+ ``pset_field`` has a factory that creates a ``PSet``.
+ """
+ class Record(PRecord):
+ value = pset_field(int)
+ record = Record(value=[1, 2])
+ assert isinstance(record.value, PSet)
+
+def test_pset_field_checked_set():
+ """
+ ``pset_field`` results in a set that enforces its type.
+ """
+ class Record(PRecord):
+ value = pset_field(int)
+ record = Record(value=[1, 2])
+ with pytest.raises(TypeError):
+ record.value.add("hello") # type: ignore
+
+def test_pset_field_checked_vector_multiple_types():
+ """
+ ``pset_field`` results in a vector that enforces its types.
+ """
+ class Record(PRecord):
+ value = pset_field((int, str))
+ record = Record(value=[1, 2, "hello"])
+ with pytest.raises(TypeError):
+ record.value.add(object())
+
+def test_pset_field_type():
+ """
+ ``pset_field`` enforces its type.
+ """
+ class Record(PRecord):
+ value = pset_field(int)
+ record = Record()
+ with pytest.raises(TypeError):
+ record.set("value", None)
+
+def test_pset_field_mandatory():
+ """
+ ``pset_field`` is a mandatory field.
+ """
+ class Record(PRecord):
+ value = pset_field(int)
+ record = Record(value=[1])
+ with pytest.raises(InvariantException):
+ record.remove("value")
+
+def test_pset_field_default_non_optional():
+ """
+ By default ``pset_field`` is non-optional, i.e. does not allow
+ ``None``.
+ """
+ class Record(PRecord):
+ value = pset_field(int)
+ with pytest.raises(TypeError):
+ Record(value=None)
+
+def test_pset_field_explicit_non_optional():
+ """
+ If ``optional`` argument is ``False`` then ``pset_field`` is
+ non-optional, i.e. does not allow ``None``.
+ """
+ class Record(PRecord):
+ value = pset_field(int, optional=False)
+ with pytest.raises(TypeError):
+ Record(value=None)
+
+def test_pset_field_optional():
+ """
+ If ``optional`` argument is true, ``None`` is acceptable alternative
+ to a set.
+ """
+ class Record(PRecord):
+ value = pset_field(int, optional=True)
+ assert ((Record(value=[1, 2]).value, Record(value=None).value) ==
+ (pset([1, 2]), None))
+
+def test_pset_field_name():
+ """
+ The created set class name is based on the type of items in the set.
+ """
+ class Record(PRecord):
+ value = pset_field(Something)
+ value2 = pset_field(int)
+ assert ((Record().value.__class__.__name__,
+ Record().value2.__class__.__name__) ==
+ ("SomethingPSet", "IntPSet"))
+
+def test_pset_multiple_types_field_name():
+ """
+ The created set class name is based on the multiple given types of
+ items in the set.
+ """
+ class Record(PRecord):
+ value = pset_field((Something, int))
+
+ assert (Record().value.__class__.__name__ ==
+ "SomethingIntPSet")
+
+def test_pset_field_name_string_type():
+ """
+ The created set class name is based on the type of items specified by name
+ """
+ class Record(PRecord):
+ value = pset_field("record_test.Something")
+ assert Record().value.__class__.__name__ == "SomethingPSet"
+
+
+def test_pset_multiple_string_types_field_name():
+ """
+ The created set class name is based on the multiple given types of
+ items in the set specified by name
+ """
+ class Record(PRecord):
+ value = pset_field(("record_test.Something", "record_test.Another"))
+
+ assert Record().value.__class__.__name__ == "SomethingAnotherPSet"
+
+def test_pvector_field_initial_value():
+ """
+ ``pvector_field`` results in initial value that is empty.
+ """
+ class Record(PRecord):
+ value = pvector_field(int)
+ assert Record() == Record(value=[])
+
+def test_pvector_field_custom_initial():
+ """
+ A custom initial value can be passed in.
+ """
+ class Record(PRecord):
+ value = pvector_field(int, initial=(1, 2))
+ assert Record() == Record(value=[1, 2])
+
+def test_pvector_field_factory():
+ """
+ ``pvector_field`` has a factory that creates a ``PVector``.
+ """
+ class Record(PRecord):
+ value = pvector_field(int)
+ record = Record(value=[1, 2])
+ assert isinstance(record.value, PVector)
+
+def test_pvector_field_checked_vector():
+ """
+ ``pvector_field`` results in a vector that enforces its type.
+ """
+ class Record(PRecord):
+ value = pvector_field(int)
+ record = Record(value=[1, 2])
+ with pytest.raises(TypeError):
+ record.value.append("hello") # type: ignore
+
+def test_pvector_field_checked_vector_multiple_types():
+ """
+ ``pvector_field`` results in a vector that enforces its types.
+ """
+ class Record(PRecord):
+ value = pvector_field((int, str))
+ record = Record(value=[1, 2, "hello"])
+ with pytest.raises(TypeError):
+ record.value.append(object())
+
+def test_pvector_field_type():
+ """
+ ``pvector_field`` enforces its type.
+ """
+ class Record(PRecord):
+ value = pvector_field(int)
+ record = Record()
+ with pytest.raises(TypeError):
+ record.set("value", None)
+
+def test_pvector_field_mandatory():
+ """
+ ``pvector_field`` is a mandatory field.
+ """
+ class Record(PRecord):
+ value = pvector_field(int)
+ record = Record(value=[1])
+ with pytest.raises(InvariantException):
+ record.remove("value")
+
+def test_pvector_field_default_non_optional():
+ """
+ By default ``pvector_field`` is non-optional, i.e. does not allow
+ ``None``.
+ """
+ class Record(PRecord):
+ value = pvector_field(int)
+ with pytest.raises(TypeError):
+ Record(value=None)
+
+def test_pvector_field_explicit_non_optional():
+ """
+ If ``optional`` argument is ``False`` then ``pvector_field`` is
+ non-optional, i.e. does not allow ``None``.
+ """
+ class Record(PRecord):
+ value = pvector_field(int, optional=False)
+ with pytest.raises(TypeError):
+ Record(value=None)
+
+def test_pvector_field_optional():
+ """
+ If ``optional`` argument is true, ``None`` is acceptable alternative
+ to a sequence.
+ """
+ class Record(PRecord):
+ value = pvector_field(int, optional=True)
+ assert ((Record(value=[1, 2]).value, Record(value=None).value) ==
+ (pvector([1, 2]), None))
+
+def test_pvector_field_name():
+ """
+ The created set class name is based on the type of items in the set.
+ """
+ class Record(PRecord):
+ value = pvector_field(Something)
+ value2 = pvector_field(int)
+ assert ((Record().value.__class__.__name__,
+ Record().value2.__class__.__name__) ==
+ ("SomethingPVector", "IntPVector"))
+
+def test_pvector_multiple_types_field_name():
+ """
+ The created vector class name is based on the multiple given types of
+ items in the vector.
+ """
+ class Record(PRecord):
+ value = pvector_field((Something, int))
+
+ assert (Record().value.__class__.__name__ ==
+ "SomethingIntPVector")
+
+def test_pvector_field_name_string_type():
+ """
+ The created set class name is based on the type of items in the set
+ specified by name.
+ """
+ class Record(PRecord):
+ value = pvector_field("record_test.Something")
+ assert Record().value.__class__.__name__ == "SomethingPVector"
+
+def test_pvector_multiple_string_types_field_name():
+ """
+ The created vector class name is based on the multiple given types of
+ items in the vector.
+ """
+ class Record(PRecord):
+ value = pvector_field(("record_test.Something", "record_test.Another"))
+
+ assert Record().value.__class__.__name__ == "SomethingAnotherPVector"
+
+def test_pvector_field_create_from_nested_serialized_data():
+ class Foo(PRecord):
+ foo = field(type=str)
+
+ class Bar(PRecord):
+ bar = pvector_field(Foo)
+
+ data = Bar(bar=v(Foo(foo="foo")))
+ Bar.create(data.serialize()) == data
+
+def test_pmap_field_initial_value():
+ """
+ ``pmap_field`` results in initial value that is empty.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, int)
+ assert Record() == Record(value={})
+
+def test_pmap_field_factory():
+ """
+ ``pmap_field`` has a factory that creates a ``PMap``.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, int)
+ record = Record(value={1: 1234})
+ assert isinstance(record.value, PMap)
+
+def test_pmap_field_checked_map_key():
+ """
+ ``pmap_field`` results in a map that enforces its key type.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, type(None))
+ record = Record(value={1: None})
+ with pytest.raises(TypeError):
+ record.value.set("hello", None) # type: ignore
+
+def test_pmap_field_checked_map_value():
+ """
+ ``pmap_field`` results in a map that enforces its value type.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, type(None))
+ record = Record(value={1: None})
+ with pytest.raises(TypeError):
+ record.value.set(2, 4) # type: ignore
+
+def test_pmap_field_checked_map_key_multiple_types():
+ """
+ ``pmap_field`` results in a map that enforces its key types.
+ """
+ class Record(PRecord):
+ value = pmap_field((int, str), type(None))
+ record = Record(value={1: None, "hello": None})
+ with pytest.raises(TypeError):
+ record.value.set(object(), None)
+
+def test_pmap_field_checked_map_value_multiple_types():
+ """
+ ``pmap_field`` results in a map that enforces its value types.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, (str, type(None)))
+ record = Record(value={1: None, 3: "hello"})
+ with pytest.raises(TypeError):
+ record.value.set(2, 4)
+
+def test_pmap_field_mandatory():
+ """
+ ``pmap_field`` is a mandatory field.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, int)
+ record = Record()
+ with pytest.raises(InvariantException):
+ record.remove("value")
+
+def test_pmap_field_default_non_optional():
+ """
+ By default ``pmap_field`` is non-optional, i.e. does not allow
+ ``None``.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, int)
+ # Ought to be TypeError, but pyrsistent doesn't quite allow that:
+ with pytest.raises(AttributeError):
+ Record(value=None)
+
+def test_pmap_field_explicit_non_optional():
+ """
+ If ``optional`` argument is ``False`` then ``pmap_field`` is
+ non-optional, i.e. does not allow ``None``.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, int, optional=False)
+ # Ought to be TypeError, but pyrsistent doesn't quite allow that:
+ with pytest.raises(AttributeError):
+ Record(value=None)
+
+def test_pmap_field_optional():
+ """
+ If ``optional`` argument is true, ``None`` is acceptable alternative
+ to a set.
+ """
+ class Record(PRecord):
+ value = pmap_field(int, int, optional=True)
+ assert (Record(value={1: 2}).value, Record(value=None).value) == \
+ (pmap({1: 2}), None)
+
+def test_pmap_field_name():
+ """
+ The created map class name is based on the types of items in the map.
+ """
+ class Record(PRecord):
+ value = pmap_field(Something, Another)
+ value2 = pmap_field(int, float)
+ assert ((Record().value.__class__.__name__,
+ Record().value2.__class__.__name__) ==
+ ("SomethingToAnotherPMap", "IntToFloatPMap"))
+
+def test_pmap_field_name_multiple_types():
+ """
+ The created map class name is based on the types of items in the map,
+ including when there are multiple supported types.
+ """
+ class Record(PRecord):
+ value = pmap_field((Something, Another), int)
+ value2 = pmap_field(str, (int, float))
+ assert ((Record().value.__class__.__name__,
+ Record().value2.__class__.__name__) ==
+ ("SomethingAnotherToIntPMap", "StrToIntFloatPMap"))
+
+def test_pmap_field_name_string_type():
+ """
+ The created map class name is based on the types of items in the map
+ specified by name.
+ """
+ class Record(PRecord):
+ value = pmap_field("record_test.Something", "record_test.Another")
+ assert Record().value.__class__.__name__ == "SomethingToAnotherPMap"
+
+def test_pmap_field_name_multiple_string_types():
+ """
+ The created map class name is based on the types of items in the map,
+ including when there are multiple supported types.
+ """
+ class Record(PRecord):
+ value = pmap_field(("record_test.Something", "record_test.Another"), int)
+ value2 = pmap_field(str, ("record_test.Something", "record_test.Another"))
+ assert ((Record().value.__class__.__name__,
+ Record().value2.__class__.__name__) ==
+ ("SomethingAnotherToIntPMap", "StrToSomethingAnotherPMap"))
+
+def test_pmap_field_invariant():
+ """
+ The ``invariant`` parameter is passed through to ``field``.
+ """
+ class Record(PRecord):
+ value = pmap_field(
+ int, int,
+ invariant=(
+ lambda pmap: (len(pmap) == 1, "Exactly one item required.")
+ )
+ )
+ with pytest.raises(InvariantException):
+ Record(value={})
+ with pytest.raises(InvariantException):
+ Record(value={1: 2, 3: 4})
+ assert Record(value={1: 2}).value == {1: 2}
+
+
+def test_pmap_field_create_from_nested_serialized_data():
+ class Foo(PRecord):
+ foo = field(type=str)
+
+ class Bar(PRecord):
+ bar = pmap_field(str, Foo)
+
+ data = Bar(bar=m(foo_key=Foo(foo="foo")))
+ Bar.create(data.serialize()) == data
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(ARecord(x=1, y=2))
+
+
+def test_supports_lazy_initial_value_for_field():
+ class MyRecord(PRecord):
+ a = field(int, initial=lambda: 2)
+
+ assert MyRecord() == MyRecord(a=2)
+
+
+def test_pickle_with_one_way_factory():
+ """
+ A field factory isn't called when restoring from pickle.
+ """
+ thing = UniqueThing(id='25544626-86da-4bce-b6b6-9186c0804d64')
+ assert thing == pickle.loads(pickle.dumps(thing))
diff --git a/contrib/python/pyrsistent/py3/tests/regression_test.py b/contrib/python/pyrsistent/py3/tests/regression_test.py
new file mode 100644
index 0000000000..f8c1133834
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/regression_test.py
@@ -0,0 +1,30 @@
+from pyrsistent import pmap
+import random
+
+import gc
+
+
+def test_segfault_issue_52():
+ threshold = None
+ if hasattr(gc, 'get_threshold'):
+ # PyPy is lacking these functions
+ threshold = gc.get_threshold()
+ gc.set_threshold(1, 1, 1) # fail fast
+
+ v = [pmap()]
+
+ def step():
+ depth = random.randint(1, 10)
+ path = random.sample(range(100000), depth)
+ v[0] = v[0].transform(path, "foo")
+
+ for i in range(1000): # usually crashes after 10-20 steps
+ while True:
+ try:
+ step()
+ break
+ except AttributeError: # evolver on string
+ continue
+
+ if threshold:
+ gc.set_threshold(*threshold)
diff --git a/contrib/python/pyrsistent/py3/tests/set_test.py b/contrib/python/pyrsistent/py3/tests/set_test.py
new file mode 100644
index 0000000000..f605ee0d5e
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/set_test.py
@@ -0,0 +1,181 @@
+from pyrsistent import pset, s
+import pytest
+import pickle
+
+def test_key_is_tuple():
+ with pytest.raises(KeyError):
+ pset().remove((1,1))
+
+def test_literalish_works():
+ assert s() is pset()
+ assert s(1, 2) == pset([1, 2])
+
+
+def test_supports_hash():
+ assert hash(s(1, 2)) == hash(s(1, 2))
+
+
+def test_empty_truthiness():
+ assert s(1)
+ assert not s()
+
+
+def test_contains_elements_that_it_was_initialized_with():
+ initial = [1, 2, 3]
+ s = pset(initial)
+
+ assert set(s) == set(initial)
+ assert len(s) == len(set(initial))
+
+
+def test_is_immutable():
+ s1 = pset([1])
+ s2 = s1.add(2)
+
+ assert s1 == pset([1])
+ assert s2 == pset([1, 2])
+
+ s3 = s2.remove(1)
+ assert s2 == pset([1, 2])
+ assert s3 == pset([2])
+
+
+def test_remove_when_not_present():
+ s1 = s(1, 2, 3)
+ with pytest.raises(KeyError):
+ s1.remove(4)
+
+
+def test_discard():
+ s1 = s(1, 2, 3)
+ assert s1.discard(3) == s(1, 2)
+ assert s1.discard(4) is s1
+
+
+def test_is_iterable():
+ assert sum(pset([1, 2, 3])) == 6
+
+
+def test_contains():
+ s = pset([1, 2, 3])
+
+ assert 2 in s
+ assert 4 not in s
+
+
+def test_supports_set_operations():
+ s1 = pset([1, 2, 3])
+ s2 = pset([3, 4, 5])
+
+ assert s1 | s2 == s(1, 2, 3, 4, 5)
+ assert s1.union(s2) == s1 | s2
+
+ assert s1 & s2 == s(3)
+ assert s1.intersection(s2) == s1 & s2
+
+ assert s1 - s2 == s(1, 2)
+ assert s1.difference(s2) == s1 - s2
+
+ assert s1 ^ s2 == s(1, 2, 4, 5)
+ assert s1.symmetric_difference(s2) == s1 ^ s2
+
+
+def test_supports_set_comparisons():
+ s1 = s(1, 2, 3)
+ s3 = s(1, 2)
+ s4 = s(1, 2, 3)
+
+ assert s(1, 2, 3, 3, 5) == s(1, 2, 3, 5)
+ assert s1 != s3
+
+ assert s3 < s1
+ assert s3 <= s1
+ assert s3 <= s4
+
+ assert s1 > s3
+ assert s1 >= s3
+ assert s4 >= s3
+
+
+def test_str():
+ rep = str(pset([1, 2, 3]))
+ assert rep == "pset([1, 2, 3])"
+
+
+def test_is_disjoint():
+ s1 = pset([1, 2, 3])
+ s2 = pset([3, 4, 5])
+ s3 = pset([4, 5])
+
+ assert not s1.isdisjoint(s2)
+ assert s1.isdisjoint(s3)
+
+
+def test_evolver_simple_add():
+ x = s(1, 2, 3)
+ e = x.evolver()
+ assert not e.is_dirty()
+
+ e.add(4)
+ assert e.is_dirty()
+
+ x2 = e.persistent()
+ assert not e.is_dirty()
+ assert x2 == s(1, 2, 3, 4)
+ assert x == s(1, 2, 3)
+
+def test_evolver_simple_remove():
+ x = s(1, 2, 3)
+ e = x.evolver()
+ e.remove(2)
+
+ x2 = e.persistent()
+ assert x2 == s(1, 3)
+ assert x == s(1, 2, 3)
+
+
+def test_evolver_no_update_produces_same_pset():
+ x = s(1, 2, 3)
+ e = x.evolver()
+ assert e.persistent() is x
+
+
+def test_evolver_len():
+ x = s(1, 2, 3)
+ e = x.evolver()
+ assert len(e) == 3
+
+
+def test_copy_returns_reference_to_self():
+ s1 = s(10)
+ assert s1.copy() is s1
+
+
+def test_pickling_empty_set():
+ assert pickle.loads(pickle.dumps(s(), -1)) == s()
+
+
+def test_pickling_non_empty_map():
+ assert pickle.loads(pickle.dumps(s(1, 2), -1)) == s(1, 2)
+
+
+def test_supports_weakref():
+ import weakref
+ weakref.ref(s(1))
+
+
+def test_update():
+ assert s(1, 2, 3).update([3, 4, 4, 5]) == s(1, 2, 3, 4, 5)
+
+
+def test_update_no_elements():
+ s1 = s(1, 2)
+ assert s1.update([]) is s1
+
+
+def test_iterable():
+ """
+ PSets can be created from iterables even though they can't be len() hinted.
+ """
+
+ assert pset(iter("a")) == pset(iter("a"))
diff --git a/contrib/python/pyrsistent/py3/tests/toolz_test.py b/contrib/python/pyrsistent/py3/tests/toolz_test.py
new file mode 100644
index 0000000000..d145704b86
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/toolz_test.py
@@ -0,0 +1,6 @@
+from pyrsistent import get_in, m, v
+
+
+def test_get_in():
+ # This is not an extensive test. The doctest covers that fairly good though.
+ get_in(m(a=v(1, 2, 3)), ['m', 1]) == 2
diff --git a/contrib/python/pyrsistent/py3/tests/transform_test.py b/contrib/python/pyrsistent/py3/tests/transform_test.py
new file mode 100644
index 0000000000..d133d14f65
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/transform_test.py
@@ -0,0 +1,122 @@
+from pyrsistent import freeze, inc, discard, rex, ny, field, PClass, pmap
+
+
+def test_callable_command():
+ m = freeze({'foo': {'bar': {'baz': 1}}})
+ assert m.transform(['foo', 'bar', 'baz'], inc) == {'foo': {'bar': {'baz': 2}}}
+
+
+def test_predicate():
+ m = freeze({'foo': {'bar': {'baz': 1}, 'qux': {'baz': 1}}})
+ assert m.transform(['foo', lambda x: x.startswith('b'), 'baz'], inc) == {'foo': {'bar': {'baz': 2}, 'qux': {'baz': 1}}}
+
+
+def test_broken_predicate():
+ broken_predicates = [
+ lambda: None,
+ lambda a, b, c: None,
+ lambda a, b, c, d=None: None,
+ lambda *args: None,
+ lambda **kwargs: None,
+ ]
+ for pred in broken_predicates:
+ try:
+ freeze({}).transform([pred], None)
+ assert False
+ except ValueError as e:
+ assert str(e) == "callable in transform path must take 1 or 2 arguments"
+
+
+def test_key_value_predicate():
+ m = freeze({
+ 'foo': 1,
+ 'bar': 2,
+ })
+ assert m.transform([
+ lambda k, v: (k, v) == ('foo', 1),
+ ], lambda v: v * 3) == {"foo": 3, "bar": 2}
+
+
+def test_remove():
+ m = freeze({'foo': {'bar': {'baz': 1}}})
+ assert m.transform(['foo', 'bar', 'baz'], discard) == {'foo': {'bar': {}}}
+
+
+def test_remove_pvector():
+ m = freeze({'foo': [1, 2, 3]})
+ assert m.transform(['foo', 1], discard) == {'foo': [1, 3]}
+
+
+def test_remove_pclass():
+ class MyClass(PClass):
+ a = field()
+ b = field()
+
+ m = freeze({'foo': MyClass(a=1, b=2)})
+ assert m.transform(['foo', 'b'], discard) == {'foo': MyClass(a=1)}
+
+
+def test_predicate_no_match():
+ m = freeze({'foo': {'bar': {'baz': 1}}})
+ assert m.transform(['foo', lambda x: x.startswith('c'), 'baz'], inc) == m
+
+
+def test_rex_predicate():
+ m = freeze({'foo': {'bar': {'baz': 1},
+ 'bof': {'baz': 1}}})
+ assert m.transform(['foo', rex('^bo.*'), 'baz'], inc) == {'foo': {'bar': {'baz': 1},
+ 'bof': {'baz': 2}}}
+
+
+def test_rex_with_non_string_key():
+ m = freeze({'foo': 1, 5: 2})
+ assert m.transform([rex(".*")], 5) == {'foo': 5, 5: 2}
+
+
+def test_ny_predicated_matches_any_key():
+ m = freeze({'foo': 1, 5: 2})
+ assert m.transform([ny], 5) == {'foo': 5, 5: 5}
+
+
+def test_new_elements_created_when_missing():
+ m = freeze({})
+ assert m.transform(['foo', 'bar', 'baz'], 7) == {'foo': {'bar': {'baz': 7}}}
+
+
+def test_mixed_vector_and_map():
+ m = freeze({'foo': [1, 2, 3]})
+ assert m.transform(['foo', 1], 5) == freeze({'foo': [1, 5, 3]})
+
+
+def test_vector_predicate_callable_command():
+ v = freeze([1, 2, 3, 4, 5])
+ assert v.transform([lambda i: 0 < i < 4], inc) == freeze(freeze([1, 3, 4, 5, 5]))
+
+
+def test_vector_insert_map_one_step_beyond_end():
+ v = freeze([1, 2])
+ assert v.transform([2, 'foo'], 3) == freeze([1, 2, {'foo': 3}])
+
+
+def test_multiple_transformations():
+ v = freeze([1, 2])
+ assert v.transform([2, 'foo'], 3, [2, 'foo'], inc) == freeze([1, 2, {'foo': 4}])
+
+
+def test_no_transformation_returns_the_same_structure():
+ v = freeze([{'foo': 1}, {'bar': 2}])
+ assert v.transform([ny, ny], lambda x: x) is v
+
+
+def test_discard_multiple_elements_in_pvector():
+ assert freeze([0, 1, 2, 3, 4]).transform([lambda i: i % 2], discard) == freeze([0, 2, 4])
+
+
+def test_transform_insert_empty_pmap():
+ m = pmap().transform(['123'], pmap())
+ assert m == pmap({'123': pmap()})
+
+
+def test_discard_does_not_insert_nodes():
+ m = freeze({}).transform(['foo', 'bar'], discard)
+ assert m == pmap({})
diff --git a/contrib/python/pyrsistent/py3/tests/vector_test.py b/contrib/python/pyrsistent/py3/tests/vector_test.py
new file mode 100644
index 0000000000..e5c4bf69c3
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/vector_test.py
@@ -0,0 +1,934 @@
+from collections.abc import Hashable, Sequence
+import os
+import pickle
+import pytest
+
+from pyrsistent._pvector import python_pvector
+
+
+@pytest.fixture(scope='session', params=['pyrsistent._pvector', 'pvectorc'])
+def pvector(request):
+ if request.param == 'pvectorc' and os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
+ pytest.skip('Configured to not run tests for C extension')
+
+ m = pytest.importorskip(request.param)
+ if request.param == 'pyrsistent._pvector':
+ return m.python_pvector
+ return m.pvector
+
+
+def test_literalish_works():
+ from pyrsistent import pvector, v
+ assert v() is pvector()
+ assert v(1, 2) == pvector([1, 2])
+
+
+def test_empty_initialization(pvector):
+ seq = pvector()
+ assert len(seq) == 0
+
+ with pytest.raises(IndexError) as error:
+ x = seq[0]
+ assert str(error.value) == 'Index out of range: 0'
+
+
+def test_initialization_with_one_element(pvector):
+ seq = pvector([3])
+ assert len(seq) == 1
+ assert seq[0] == 3
+
+
+def test_append_works_and_does_not_affect_original_within_tail(pvector):
+ seq1 = pvector([3])
+ seq2 = seq1.append(2)
+
+ assert len(seq1) == 1
+ assert seq1[0] == 3
+
+ assert len(seq2) == 2
+ assert seq2[0] == 3
+ assert seq2[1] == 2
+
+
+def test_append_works_and_does_not_affect_original_outside_tail(pvector):
+ original = pvector([])
+ seq = original
+
+ for x in range(33):
+ seq = seq.append(x)
+
+ assert len(seq) == 33
+ assert seq[0] == 0
+ assert seq[31] == 31
+ assert seq[32] == 32
+
+ assert len(original) == 0
+
+
+def test_append_when_root_overflows(pvector):
+ seq = pvector([])
+
+ for x in range(32 * 33):
+ seq = seq.append(x)
+
+ seq = seq.append(10001)
+
+ for i in range(32 * 33):
+ assert seq[i] == i
+
+ assert seq[32 * 33] == 10001
+
+
+def test_multi_level_sequence(pvector):
+ seq = pvector(range(8000))
+ seq2 = seq.append(11)
+
+ assert seq[5] == 5
+ assert seq2[7373] == 7373
+ assert seq2[8000] == 11
+
+
+def test_multi_level_sequence_from_iterator(pvector):
+ seq = pvector(iter(range(8000)))
+ seq2 = seq.append(11)
+
+ assert seq[5] == 5
+ assert seq2[7373] == 7373
+ assert seq2[8000] == 11
+
+
+def test_random_insert_within_tail(pvector):
+ seq = pvector([1, 2, 3])
+
+ seq2 = seq.set(1, 4)
+
+ assert seq2[1] == 4
+ assert seq[1] == 2
+
+
+def test_random_insert_outside_tail(pvector):
+ seq = pvector(range(20000))
+
+ seq2 = seq.set(19000, 4)
+
+ assert seq2[19000] == 4
+ assert seq[19000] == 19000
+
+
+def test_insert_beyond_end(pvector):
+ seq = pvector(range(2))
+ seq2 = seq.set(2, 50)
+ assert seq2[2] == 50
+
+ with pytest.raises(IndexError) as error:
+ seq2.set(19, 4)
+
+ assert str(error.value) == 'Index out of range: 19'
+
+
+def test_insert_with_index_from_the_end(pvector):
+ x = pvector([1, 2, 3, 4])
+
+ assert x.set(-2, 5) == pvector([1, 2, 5, 4])
+
+
+def test_insert_with_too_negative_index(pvector):
+ x = pvector([1, 2, 3, 4])
+
+ with pytest.raises(IndexError):
+ x.set(-5, 17)
+
+
+def test_iteration(pvector):
+ y = 0
+ seq = pvector(range(2000))
+ for x in seq:
+ assert x == y
+ y += 1
+
+ assert y == 2000
+
+
+def test_zero_extend(pvector):
+ the_list = []
+ seq = pvector()
+ seq2 = seq.extend(the_list)
+ assert seq == seq2
+
+
+def test_short_extend(pvector):
+ # Extend within tail length
+ the_list = [1, 2]
+ seq = pvector()
+ seq2 = seq.extend(the_list)
+
+ assert len(seq2) == len(the_list)
+ assert seq2[0] == the_list[0]
+ assert seq2[1] == the_list[1]
+
+
+def test_long_extend(pvector):
+ # Multi level extend
+ seq = pvector()
+ length = 2137
+
+ # Extend from scratch
+ seq2 = seq.extend(range(length))
+ assert len(seq2) == length
+ for i in range(length):
+ assert seq2[i] == i
+
+ # Extend already filled vector
+ seq3 = seq2.extend(range(length, length + 5))
+ assert len(seq3) == length + 5
+ for i in range(length + 5):
+ assert seq3[i] == i
+
+ # Check that the original vector is still intact
+ assert len(seq2) == length
+ for i in range(length):
+ assert seq2[i] == i
+
+
+def test_slicing_zero_length_range(pvector):
+ seq = pvector(range(10))
+ seq2 = seq[2:2]
+
+ assert len(seq2) == 0
+
+
+def test_slicing_range(pvector):
+ seq = pvector(range(10))
+ seq2 = seq[2:4]
+
+ assert list(seq2) == [2, 3]
+
+
+def test_slice_identity(pvector):
+ # Pvector is immutable, no need to make a copy!
+ seq = pvector(range(10))
+
+ assert seq is seq[::]
+
+
+def test_slicing_range_with_step(pvector):
+ seq = pvector(range(100))
+ seq2 = seq[2:12:3]
+
+ assert list(seq2) == [2, 5, 8, 11]
+
+
+def test_slicing_no_range_but_step(pvector):
+ seq = pvector(range(10))
+ seq2 = seq[::2]
+
+ assert list(seq2) == [0, 2, 4, 6, 8]
+
+
+def test_slicing_reverse(pvector):
+ seq = pvector(range(10))
+ seq2 = seq[::-1]
+
+ assert seq2[0] == 9
+ assert seq2[1] == 8
+ assert len(seq2) == 10
+
+ seq3 = seq[-3: -7: -1]
+ assert seq3[0] == 7
+ assert seq3[3] == 4
+ assert len(seq3) == 4
+
+
+def test_delete_index(pvector):
+ seq = pvector([1, 2, 3])
+ assert seq.delete(0) == pvector([2, 3])
+ assert seq.delete(1) == pvector([1, 3])
+ assert seq.delete(2) == pvector([1, 2])
+ assert seq.delete(-1) == pvector([1, 2])
+ assert seq.delete(-2) == pvector([1, 3])
+ assert seq.delete(-3) == pvector([2, 3])
+
+
+def test_delete_index_out_of_bounds(pvector):
+ with pytest.raises(IndexError):
+ pvector([]).delete(0)
+ with pytest.raises(IndexError):
+ pvector([]).delete(-1)
+
+
+def test_delete_index_malformed(pvector):
+ with pytest.raises(TypeError):
+ pvector([]).delete('a')
+
+
+def test_delete_slice(pvector):
+ seq = pvector(range(5))
+ assert seq.delete(1, 4) == pvector([0, 4])
+ assert seq.delete(4, 1) == seq
+ assert seq.delete(0, 1) == pvector([1, 2, 3, 4])
+ assert seq.delete(6, 8) == seq
+ assert seq.delete(-1, 1) == seq
+ assert seq.delete(1, -1) == pvector([0, 4])
+
+
+def test_remove(pvector):
+ seq = pvector(range(5))
+ assert seq.remove(3) == pvector([0, 1, 2, 4])
+
+
+def test_remove_first_only(pvector):
+ seq = pvector([1, 2, 3, 2, 1])
+ assert seq.remove(2) == pvector([1, 3, 2, 1])
+
+
+def test_remove_index_out_of_bounds(pvector):
+ seq = pvector(range(5))
+ with pytest.raises(ValueError) as err:
+ seq.remove(5)
+ assert 'not in' in str(err.value)
+
+
+def test_addition(pvector):
+ v = pvector([1, 2]) + pvector([3, 4])
+
+ assert list(v) == [1, 2, 3, 4]
+
+
+def test_sorted(pvector):
+ seq = pvector([5, 2, 3, 1])
+ assert [1, 2, 3, 5] == sorted(seq)
+
+
+def test_boolean_conversion(pvector):
+ assert not bool(pvector())
+ assert bool(pvector([1]))
+
+
+def test_access_with_negative_index(pvector):
+ seq = pvector([1, 2, 3, 4])
+
+ assert seq[-1] == 4
+ assert seq[-4] == 1
+
+
+def test_index_error_positive(pvector):
+ with pytest.raises(IndexError):
+ pvector([1, 2, 3])[3]
+
+
+def test_index_error_negative(pvector):
+ with pytest.raises(IndexError):
+ pvector([1, 2, 3])[-4]
+
+
+def test_is_sequence(pvector):
+ assert isinstance(pvector(), Sequence)
+
+
+def test_empty_repr(pvector):
+ assert str(pvector()) == "pvector([])"
+
+
+def test_non_empty_repr(pvector):
+ v = pvector([1, 2, 3])
+ assert str(v) == "pvector([1, 2, 3])"
+
+ # There's some state that needs to be reset between calls in the native version,
+ # test that multiple invocations work.
+ assert str(v) == "pvector([1, 2, 3])"
+
+
+def test_repr_when_contained_object_contains_reference_to_self(pvector):
+ x = [1, 2, 3]
+ v = pvector([1, 2, x])
+ x.append(v)
+ assert str(v) == 'pvector([1, 2, [1, 2, 3, pvector([1, 2, [...]])]])'
+
+ # Run a GC to provoke any potential misbehavior
+ import gc
+ gc.collect()
+
+
+def test_is_hashable(pvector):
+
+ v = pvector([1, 2, 3])
+ v2 = pvector([1, 2, 3])
+
+ assert hash(v) == hash(v2)
+ assert isinstance(pvector(), Hashable)
+
+
+def test_refuses_to_hash_when_members_are_unhashable(pvector):
+ v = pvector([1, 2, [1, 2]])
+
+ with pytest.raises(TypeError):
+ hash(v)
+
+
+def test_compare_same_vectors(pvector):
+ v = pvector([1, 2])
+ assert v == v
+ assert pvector() == pvector()
+
+
+def test_compare_with_other_type_of_object(pvector):
+ assert pvector([1, 2]) != 'foo'
+
+
+def test_compare_equal_vectors(pvector):
+ v1 = pvector([1, 2])
+ v2 = pvector([1, 2])
+ assert v1 == v2
+ assert v1 >= v2
+ assert v1 <= v2
+
+
+def test_compare_different_vectors_same_size(pvector):
+ v1 = pvector([1, 2])
+ v2 = pvector([1, 3])
+ assert v1 != v2
+
+
+def test_compare_different_vectors_different_sizes(pvector):
+ v1 = pvector([1, 2])
+ v2 = pvector([1, 2, 3])
+ assert v1 != v2
+
+
+def test_compare_lt_gt(pvector):
+ v1 = pvector([1, 2])
+ v2 = pvector([1, 2, 3])
+ assert v1 < v2
+ assert v2 > v1
+
+
+def test_repeat(pvector):
+ v = pvector([1, 2])
+ assert 5 * pvector() is pvector()
+ assert v is 1 * v
+ assert 0 * v is pvector()
+ assert 2 * pvector([1, 2]) == pvector([1, 2, 1, 2])
+ assert -3 * pvector([1, 2]) is pvector()
+
+
+def test_transform_zero_key_length(pvector):
+ x = pvector([1, 2])
+
+ assert x.transform([], 3) == 3
+
+
+def test_transform_base_case(pvector):
+ x = pvector([1, 2])
+
+ assert x.transform([1], 3) == pvector([1, 3])
+
+
+def test_transform_nested_vectors(pvector):
+ x = pvector([1, 2, pvector([3, 4]), 5])
+
+ assert x.transform([2, 0], 999) == pvector([1, 2, pvector([999, 4]), 5])
+
+
+def test_transform_when_appending(pvector):
+ from pyrsistent import m
+ x = pvector([1, 2])
+
+ assert x.transform([2, 'd'], 999) == pvector([1, 2, m(d=999)])
+
+
+def test_transform_index_error_out_range(pvector):
+ x = pvector([1, 2, pvector([3, 4]), 5])
+
+ with pytest.raises(IndexError):
+ x.transform([2, 10], 999)
+
+
+def test_transform_index_error_wrong_type(pvector):
+ x = pvector([1, 2, pvector([3, 4]), 5])
+
+ with pytest.raises(TypeError):
+ x.transform([2, 'foo'], 999)
+
+
+def test_transform_non_setable_type(pvector):
+ x = pvector([1, 2, 5])
+
+ with pytest.raises(TypeError):
+ x.transform([2, 3], 999)
+
+
+def test_reverse(pvector):
+ x = pvector([1, 2, 5])
+
+ assert list(reversed(x)) == [5, 2, 1]
+
+
+def test_contains(pvector):
+ x = pvector([1, 2, 5])
+
+ assert 2 in x
+ assert 3 not in x
+
+
+def test_index(pvector):
+ x = pvector([1, 2, 5])
+
+ assert x.index(5) == 2
+
+
+def test_index_not_found(pvector):
+ x = pvector([1, 2, 5])
+
+ with pytest.raises(ValueError):
+ x.index(7)
+
+
+def test_index_not_found_with_limits(pvector):
+ x = pvector([1, 2, 5, 1])
+
+ with pytest.raises(ValueError):
+ x.index(1, 1, 3)
+
+
+def test_count(pvector):
+ x = pvector([1, 2, 5, 1])
+
+ assert x.count(1) == 2
+ assert x.count(4) == 0
+
+
+def test_empty_truthiness(pvector):
+ assert pvector([1])
+ assert not pvector([])
+
+
+def test_pickling_empty_vector(pvector):
+ assert pickle.loads(pickle.dumps(pvector(), -1)) == pvector()
+
+
+def test_pickling_non_empty_vector(pvector):
+ assert pickle.loads(pickle.dumps(pvector([1, 'a']), -1)) == pvector([1, 'a'])
+
+
+def test_mset_basic_assignments(pvector):
+ v1 = pvector(range(2000))
+ v2 = v1.mset(1, -1, 505, -505, 1998, -1998)
+
+ # Original not changed
+ assert v1[1] == 1
+ assert v1[505] == 505
+ assert v1[1998] == 1998
+
+ # Other updated
+ assert v2[1] == -1
+ assert v2[505] == -505
+ assert v2[1998] == -1998
+
+
+def test_mset_odd_number_of_arguments(pvector):
+ v = pvector([0, 1])
+
+ with pytest.raises(TypeError):
+ v.mset(0, 10, 1)
+
+
+def test_mset_index_out_of_range(pvector):
+ v = pvector([0, 1])
+
+ with pytest.raises(IndexError):
+ v.mset(3, 10)
+
+
+def test_evolver_no_update(pvector):
+ # This is mostly a test against memory leaks in the C implementation
+ v = pvector(range(40))
+
+ assert v.evolver().persistent() == v
+
+
+def test_evolver_deallocate_dirty_evolver(pvector):
+ # Ref count handling in native implementation
+ v = pvector(range(3220))
+ e = v.evolver()
+ e[10] = -10
+ e[3220] = -3220
+
+
+def test_evolver_simple_update_in_tree(pvector):
+ v = pvector(range(35))
+ e = v.evolver()
+ e[10] = -10
+
+ assert e[10] == -10
+ assert e.persistent()[10] == -10
+
+
+def test_evolver_set_out_of_range(pvector):
+ v = pvector([0])
+ e = v.evolver()
+ with pytest.raises(IndexError) as error:
+ e[10] = 1
+ assert str(error.value) == "Index out of range: 10"
+
+def test_evolver_multi_level_multi_update_in_tree(pvector):
+ # This test is mostly to detect memory/ref count issues in the native implementation
+ v = pvector(range(3500))
+ e = v.evolver()
+
+ # Update differs between first and second time since the
+ # corresponding node will be marked as dirty the first time only.
+ e[10] = -10
+ e[11] = -11
+ e[10] = -1000
+
+ # Update in neighbour node
+ e[50] = -50
+ e[50] = -5000
+
+ # Update in node in other half of vector
+ e[3000] = -3000
+ e[3000] = -30000
+
+ # Before freezing
+ assert e[10] == -1000
+ assert e[11] == -11
+ assert e[50] == -5000
+ assert e[3000] == -30000
+
+ # Run a GC to provoke any potential misbehavior
+ import gc
+ gc.collect()
+
+ v2 = e.persistent()
+ assert v2[10] == -1000
+ assert v2[50] == -5000
+ assert v2[3000] == -30000
+
+ # Run a GC to provoke any potential misbehavior
+ gc.collect()
+
+ # After freezing
+ assert e[10] == -1000
+ assert e[11] == -11
+ assert e[50] == -5000
+ assert e[3000] == -30000
+
+ # Original stays the same
+ assert v[10] == 10
+ assert v[50] == 50
+ assert v[3000] == 3000
+
+
+def test_evolver_simple_update_in_tail(pvector):
+ v = pvector(range(35))
+ e = v.evolver()
+ e[33] = -33
+
+ assert e[33] == -33
+ assert e.persistent()[33] == -33
+ assert v[33] == 33
+
+
+def test_evolver_simple_update_just_outside_vector(pvector):
+ v = pvector()
+ e = v.evolver()
+ e[0] = 1
+
+ assert e[0] == 1
+ assert e.persistent()[0] == 1
+ assert len(v) == 0
+
+
+def test_evolver_append(pvector):
+ v = pvector()
+ e = v.evolver()
+ e.append(1000)
+ assert e[0] == 1000
+
+ e[0] = 2000
+ assert e[0] == 2000
+ assert list(e.persistent()) == [2000]
+ assert list(v) == []
+
+
+def test_evolver_extend(pvector):
+ v = pvector([1000])
+ e = v.evolver()
+ e.extend([2000, 3000])
+ e[2] = 20000
+
+ assert list(e.persistent()) == [1000, 2000, 20000]
+ assert list(v) == [1000]
+
+
+def test_evolver_assign_and_read_with_negative_indices(pvector):
+ v = pvector([1, 2, 3])
+ e = v.evolver()
+ e[-1] = 4
+ e.extend([11, 12, 13])
+ e[-1] = 33
+
+ assert e[-1] == 33
+ assert list(e.persistent()) == [1, 2, 4, 11, 12, 33]
+
+
+def test_evolver_non_integral_access(pvector):
+ e = pvector([1]).evolver()
+
+ with pytest.raises(TypeError):
+ x = e['foo']
+
+
+def test_evolver_non_integral_assignment(pvector):
+ e = pvector([1]).evolver()
+
+ with pytest.raises(TypeError):
+ e['foo'] = 1
+
+
+def test_evolver_out_of_bounds_access(pvector):
+ e = pvector([1]).evolver()
+
+ with pytest.raises(IndexError):
+ x = e[1]
+
+
+def test_evolver_out_of_bounds_assignment(pvector):
+ e = pvector([1]).evolver()
+
+ with pytest.raises(IndexError):
+ e[2] = 1
+
+
+def test_no_dependencies_between_evolvers_from_the_same_pvector(pvector):
+ original_list = list(range(40))
+ v = pvector(original_list)
+ e1 = v.evolver()
+ e2 = v.evolver()
+
+ e1.extend([1, 2, 3])
+ e1[2] = 20
+ e1[35] = 350
+
+ e2.extend([-1, -2, -3])
+ e2[2] = -20
+ e2[35] = -350
+
+ e1_expected = original_list + [1, 2, 3]
+ e1_expected[2] = 20
+ e1_expected[35] = 350
+ assert list(e1.persistent()) == e1_expected
+
+ e2_expected = original_list + [-1, -2, -3]
+ e2_expected[2] = -20
+ e2_expected[35] = -350
+ assert list(e2.persistent()) == e2_expected
+
+
+def test_pvectors_produced_from_the_same_evolver_do_not_interfere(pvector):
+ original_list = list(range(40))
+ v = pvector(original_list)
+ e = v.evolver()
+
+ e.extend([1, 2, 3])
+ e[2] = 20
+ e[35] = 350
+
+ v1 = e.persistent()
+ v1_expected = original_list + [1, 2, 3]
+ v1_expected[2] = 20
+ v1_expected[35] = 350
+
+ e.extend([-1, -2, -3])
+ e[3] = -30
+ e[36] = -360
+
+ v2 = e.persistent()
+ v2_expected = v1_expected + [-1, -2, -3]
+ v2_expected[3] = -30
+ v2_expected[36] = -360
+
+ assert list(v1) == v1_expected
+ assert list(v2) == v2_expected
+
+
+def test_evolver_len(pvector):
+ e = pvector([1, 2, 3]).evolver()
+ e.extend([4, 5])
+
+ assert len(e) == 5
+
+
+def test_evolver_is_dirty(pvector):
+ e = pvector([1, 2, 3]).evolver()
+ assert not e.is_dirty()
+
+ e.append(4)
+    assert e.is_dirty()
+
+ e.persistent()
+ assert not e.is_dirty()
+
+ e[2] = 2000
+    assert e.is_dirty()
+
+ e.persistent()
+ assert not e.is_dirty()
+
+
+def test_vector_insert_one_step_beyond_end(pvector):
+ # This test exists to get the transform functionality under memory
+ # leak supervision. Most of the transformation tests are in test_transform.py.
+ v = pvector([1, 2])
+ assert v.transform([2], 3) == pvector([1, 2, 3])
+
+
+def test_evolver_with_no_updates_returns_same_pvector(pvector):
+ v = pvector([1, 2])
+ assert v.evolver().persistent() is v
+
+
+def test_evolver_returns_itself_on_evolving_operations(pvector):
+ # Does this to be able to chain operations
+ v = pvector([1, 2])
+ assert v.evolver().append(3).extend([4, 5]).set(1, 6).persistent() == pvector([1, 6, 3, 4, 5])
+
+
+def test_evolver_delete_by_index(pvector):
+ e = pvector([1, 2, 3]).evolver()
+
+ del e[0]
+
+ assert e.persistent() == python_pvector([2, 3])
+ assert e.append(4).persistent() == python_pvector([2, 3, 4])
+
+
+def test_evolver_delete_function_by_index(pvector):
+ e = pvector([1, 2, 3]).evolver()
+
+ assert e.delete(1).persistent() == python_pvector([1, 3])
+
+
+def test_evolver_delete_function_by_index_multiple_times(pvector):
+ SIZE = 40
+ e = pvector(range(SIZE)).evolver()
+ for i in range(SIZE):
+ assert e[0] == i
+ assert list(e.persistent()) == list(range(i, SIZE))
+ del e[0]
+
+ assert e.persistent() == list()
+
+
+def test_evolver_delete_function_invalid_index(pvector):
+ e = pvector([1, 2]).evolver()
+
+ with pytest.raises(TypeError):
+ del e["e"]
+
+
+def test_delete_of_non_existing_element(pvector):
+ e = pvector([1, 2]).evolver()
+
+ with pytest.raises(IndexError):
+ del e[2]
+
+ del e[0]
+ del e[0]
+
+ with pytest.raises(IndexError):
+ del e[0]
+
+ assert e.persistent() == pvector()
+
+
+def test_append_followed_by_delete(pvector):
+ e = pvector([1, 2]).evolver()
+
+ e.append(3)
+
+ del e[2]
+
+
+def test_evolver_set_followed_by_delete(pvector):
+ evolver = pvector([1, 2]).evolver()
+ evolver[1] = 3
+
+ assert [evolver[i] for i in range(len(evolver))] == [1, 3]
+
+ del evolver[0]
+
+ assert evolver.persistent() == pvector([3])
+
+
+def test_compare_with_list(pvector):
+ v = pvector([1, 2, 3])
+
+ assert v == [1, 2, 3]
+ assert v != [1, 2]
+ assert v > [1, 2]
+ assert v < [2, 2]
+ assert [1, 2] < v
+ assert v <= [1, 2, 3]
+ assert v <= [1, 2, 4]
+ assert v >= [1, 2, 3]
+ assert v >= [1, 2]
+
+
+def test_compare_with_non_iterable(pvector):
+ assert pvector([1, 2, 3]) != 5
+ assert not (pvector([1, 2, 3]) == 5)
+
+
+def test_python_no_c_extension_with_environment_variable():
+ from importlib import reload as reload_module
+ import pyrsistent._pvector
+ import pyrsistent
+ import os
+
+ os.environ['PYRSISTENT_NO_C_EXTENSION'] = 'TRUE'
+
+ reload_module(pyrsistent._pvector)
+ reload_module(pyrsistent)
+
+ assert type(pyrsistent.pvector()) is pyrsistent._pvector.PythonPVector
+
+ del os.environ['PYRSISTENT_NO_C_EXTENSION']
+
+ reload_module(pyrsistent._pvector)
+ reload_module(pyrsistent)
+
+
+def test_supports_weakref(pvector):
+ import weakref
+ weakref.ref(pvector())
+
+def test_get_evolver_referents(pvector):
+ """The C implementation of the evolver should expose the original PVector
+ to the gc only once.
+ """
+ if pvector.__module__ == 'pyrsistent._pvector':
+ pytest.skip("This test only applies to pvectorc")
+ import gc
+ v = pvector([1, 2, 3])
+ e = v.evolver()
+ assert len([x for x in gc.get_referents(e) if x is v]) == 1
+
+
+def test_failing_repr(pvector):
+ # See https://github.com/tobgu/pyrsistent/issues/84
+ class A(object):
+ def __repr__(self):
+ raise ValueError('oh no!')
+
+ with pytest.raises(ValueError):
+ repr(pvector([A()]))
+
+
+def test_iterable(pvector):
+ """
+ PVectors can be created from iterables even though they can't be len()
+ hinted.
+ """
+
+ assert pvector(iter("a")) == pvector(iter("a"))
diff --git a/contrib/python/pyrsistent/py3/tests/ya.make b/contrib/python/pyrsistent/py3/tests/ya.make
new file mode 100644
index 0000000000..8bb63ae559
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/tests/ya.make
@@ -0,0 +1,27 @@
+PY3TEST()
+
+PEERDIR(
+ contrib/python/pyrsistent
+)
+
+TEST_SRCS(
+ bag_test.py
+ checked_map_test.py
+ checked_set_test.py
+ checked_vector_test.py
+ class_test.py
+ deque_test.py
+ field_test.py
+ freeze_test.py
+ immutable_object_test.py
+ list_test.py
+ map_test.py
+ record_test.py
+ regression_test.py
+ set_test.py
+ toolz_test.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/pyrsistent/py3/ya.make b/contrib/python/pyrsistent/py3/ya.make
new file mode 100644
index 0000000000..bec491d8be
--- /dev/null
+++ b/contrib/python/pyrsistent/py3/ya.make
@@ -0,0 +1,47 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+PROVIDES(python_pyrsistent)
+
+VERSION(0.20.0)
+
+LICENSE(MIT)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ _pyrsistent_version.py
+ pyrsistent/__init__.py
+ pyrsistent/__init__.pyi
+ pyrsistent/_checked_types.py
+ pyrsistent/_field_common.py
+ pyrsistent/_helpers.py
+ pyrsistent/_immutable.py
+ pyrsistent/_pbag.py
+ pyrsistent/_pclass.py
+ pyrsistent/_pdeque.py
+ pyrsistent/_plist.py
+ pyrsistent/_pmap.py
+ pyrsistent/_precord.py
+ pyrsistent/_pset.py
+ pyrsistent/_pvector.py
+ pyrsistent/_toolz.py
+ pyrsistent/_transformations.py
+ pyrsistent/typing.py
+ pyrsistent/typing.pyi
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/pyrsistent/py3/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ pyrsistent/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)