author     vvvv <vvvv@ydb.tech>  2023-08-30 20:49:53 +0300
committer  vvvv <vvvv@ydb.tech>  2023-08-30 21:17:44 +0300
commit     f154e22342f327342effe873b0a00ad80c975e76 (patch)
tree       fff231496c10fbfcff025ed953b512bf2a82d7c0  /library/python/cyson/ut
parent     4ebafdd49d8b0706c5af76ef7c2d0b3b498d0310 (diff)
Moved udf_test and refactored test_framework
The test that failed locally passes when re-run:
%%
vvvv@mr-nvme-testing-08:~/repo/arcadia/statbox/nile/tests/yql/py2/part_2$ arc checkout move_udf_test_and_refactor_tf
Switched to branch 'move_udf_test_and_refactor_tf'
vvvv@mr-nvme-testing-08:~/repo/arcadia/statbox/nile/tests/yql/py2/part_2$ ya make -tA -F '*test_unchanged_table*'
Warn[-WPluginErr]: in $B/statbox/nile/tests/yql/py2/part_2/libpy2-part_2.so: Requirement cpu is redefined 2 -> 4
Warn[-WPluginErr]: in $B/statbox/nile/tests/yql/py2/part_2/libpy2-part_2.so: Requirement ram is redefined 16 -> 9
Number of suites skipped by name: 2, by filter *test_unchanged_table*
Total 1 suite: 1 - GOOD
Total 4 tests: 4 - GOOD
Ok
%%
Judging by the error, the test is flaky.
Diffstat (limited to 'library/python/cyson/ut')
-rw-r--r--  library/python/cyson/ut/test_control_attributes.py  258
-rw-r--r--  library/python/cyson/ut/test_input_stream.py          82
-rw-r--r--  library/python/cyson/ut/test_py_reader_writer.py      70
-rw-r--r--  library/python/cyson/ut/test_reader_writer.py        251
-rw-r--r--  library/python/cyson/ut/test_unsigned_long.py        222
-rw-r--r--  library/python/cyson/ut/ya.make                        21
6 files changed, 904 insertions, 0 deletions
diff --git a/library/python/cyson/ut/test_control_attributes.py b/library/python/cyson/ut/test_control_attributes.py
new file mode 100644
index 0000000000..221542b12d
--- /dev/null
+++ b/library/python/cyson/ut/test_control_attributes.py
@@ -0,0 +1,258 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function, absolute_import, division
+
+import itertools
+from functools import partial
+
+import pytest
+import six
+
+from cyson import (
+    YsonEntity, InputStream,
+    list_fragments, key_switched_list_fragments,
+    Reader, UnicodeReader
+)
+
+
+def filter_control_records(list):
+    return [
+        _ for _ in list
+        if not isinstance(_[2], YsonEntity)
+    ]
+
+
+def canonize(val, as_unicode):
+    _canonize = partial(canonize, as_unicode=as_unicode)
+
+    if isinstance(val, six.binary_type) and as_unicode:
+        return val.decode('utf8')
+    elif isinstance(val, six.text_type) and not as_unicode:
+        return val.encode('utf8')
+    elif isinstance(val, (list, tuple)):
+        return [_canonize(elem) for elem in val]
+    elif isinstance(val, dict):
+        return {_canonize(k): _canonize(v) for k, v in val.items()}
+    return val
+
+
+@pytest.mark.parametrize(
+    'reader, as_unicode', [
+        [Reader, False],
+        [UnicodeReader, True],
+    ],
+)
+@pytest.mark.parametrize(
+    'keep_control_records', [True, False]
+)
+def test_row_index(keep_control_records, reader, as_unicode):
+    _ = partial(canonize, as_unicode=as_unicode)
+
+    data = b"""
+    <row_index=0>#;
+    {a=1;b=2};
+    {a=2;b=3};
+    {a=3;b=4};
+    <row_index=10000>#;
+    {a=-1;b=-1};
+    {a=-2;b=-2};
+    """
+
+    iter = list_fragments(
+        stream=InputStream.from_string(data),
+        Reader=reader,
+        process_attributes=True,
+        keep_control_records=keep_control_records,
+    )
+    records = [(iter.range_index, iter.row_index, __) for __ in iter]
+
+    etalon = [
+        (None, -1, YsonEntity(attributes={b'row_index': 0})),
+        (None, 0, _({b'a': 1, b'b': 2})),
+        (None, 1, _({b'a': 2, b'b': 3})),
+        (None, 2, _({b'a': 3, b'b': 4})),
+        (None, 9999, YsonEntity(attributes={b'row_index': 10000})),
+        (None, 10000, _({b'a': -1, b'b': -1})),
+        (None, 10001, _({b'a': -2, b'b': -2})),
+    ]
+
+    if not keep_control_records:
+        etalon = filter_control_records(etalon)
+
+    assert records == etalon
+
+
+@pytest.mark.parametrize(
+    'reader, as_unicode', [
+        [Reader, False],
+        [UnicodeReader, True],
+    ]
+)
+@pytest.mark.parametrize(
+    'keep_control_records', [True, False],
+)
+@pytest.mark.parametrize(
+    'parameter_name',
+    ['process_attributes', 'process_table_index']
+)
+def test_range_index(parameter_name, keep_control_records, reader, as_unicode):
+    _ = partial(canonize, as_unicode=as_unicode)
+
+    data = b"""
+    <range_index=2; row_index=0>#;
+    {a=1;b=2};
+    {a=2;b=3};
+    {a=3;b=4};
+    <range_index=0; row_index=10000>#;
+    {a=-1;b=-1};
+    {a=-2;b=-2};
+    """
+
+    iter = list_fragments(
+        stream=InputStream.from_string(data),
+        Reader=reader,
+        **{parameter_name: True, 'keep_control_records': keep_control_records}
+    )
+    records = [(iter.range_index, iter.row_index, __) for __ in iter]
+
+    etalon = [
+        (2, -1, YsonEntity(attributes={b'range_index': 2, b'row_index': 0})),
+        (2, 0, _({b'a': 1, b'b': 2})),
+        (2, 1, _({b'a': 2, b'b': 3})),
+        (2, 2, _({b'a': 3, b'b': 4})),
+        (0, 9999, YsonEntity(attributes={b'range_index': 0, b'row_index': 10000})),
+        (0, 10000, _({b'a': -1, b'b': -1})),
+        (0, 10001, _({b'a': -2, b'b': -2})),
+    ]
+
+    if not keep_control_records:
+        etalon = filter_control_records(etalon)
+
+    assert records == etalon
+
+
+@pytest.mark.parametrize(
+    'reader, as_unicode', [
+        [Reader, False],
+        [UnicodeReader, True],
+    ]
+)
+def test_key_switch_first(reader, as_unicode):
+    _ = partial(canonize, as_unicode=as_unicode)
+
+    data = b"""
+    <key_switch=True>#;
+    {k=1;a=1;b=2};
+    {k=1;a=2;b=3};
+    {k=1;a=3;b=4};
+    <key_switch=True>#;
+    {k=2;a=-1;b=-1};
+    {k=2;a=-2;b=-2};
+    """
+
+    iter = key_switched_list_fragments(
+        stream=InputStream.from_string(data),
+        Reader=reader,
+    )
+    records = [list(__) for __ in iter]
+
+    assert records == [
+        [
+            _({b'k': 1, b'a': 1, b'b': 2}),
+            _({b'k': 1, b'a': 2, b'b': 3}),
+            _({b'k': 1, b'a': 3, b'b': 4}),
+        ],
+        [
+            _({b'k': 2, b'a': -1, b'b': -1}),
+            _({b'k': 2, b'a': -2, b'b': -2}),
+        ]
+    ]
+
+
+@pytest.mark.parametrize(
+    'reader, as_unicode', [
+        [Reader, False],
+        [UnicodeReader, True],
+    ]
+)
+def test_key_switch_nofirst(reader, as_unicode):
+    _ = partial(canonize, as_unicode=as_unicode)
+
+    data = b"""
+    {k=1;a=1;b=2};
+    {k=1;a=2;b=3};
+    {k=1;a=3;b=4};
+    <key_switch=True>#;
+    {k=2;a=-1;b=-1};
+    {k=2;a=-2;b=-2};
+    """
+
+    iter = key_switched_list_fragments(
+        stream=InputStream.from_string(data),
+        Reader=reader
+    )
+    records = [list(__) for __ in iter]
+
+    assert records == [
+        [
+            _({b'k': 1, b'a': 1, b'b': 2}),
+            _({b'k': 1, b'a': 2, b'b': 3}),
+            _({b'k': 1, b'a': 3, b'b': 4}),
+        ],
+        [
+            _({b'k': 2, b'a': -1, b'b': -1}),
+            _({b'k': 2, b'a': -2, b'b': -2}),
+        ]
+    ]
+
+
+@pytest.mark.parametrize(
+    'reader, as_unicode', [
+        [Reader, False],
+        [UnicodeReader, True],
+    ]
+)
+def test_key_switch_exhaust_unused_records(reader, as_unicode):
+    _ = partial(canonize, as_unicode=as_unicode)
+
+    data = b"""
+    {k=1;a=1;b=2};
+    {k=1;a=2;b=3};
+    {k=1;a=3;b=4};
+    <key_switch=True>#;
+    {k=2;a=-1;b=-1};
+    {k=2;a=-2;b=-2};
+    """
+
+    iter = key_switched_list_fragments(
+        stream=InputStream.from_string(data),
+        Reader=reader,
+    )
+
+    records = []
+
+    for group in iter:
+        records.append(
+            list(itertools.islice(group, 2))
+        )
+
+    assert records == [
+        [
+            _({b'k': 1, b'a': 1, b'b': 2}),
+            _({b'k': 1, b'a': 2, b'b': 3}),
+        ],
+        [
+            _({b'k': 2, b'a': -1, b'b': -1}),
+            _({b'k': 2, b'a': -2, b'b': -2}),
+        ]
+    ]
+
+
+@pytest.mark.parametrize('reader', [Reader, UnicodeReader])
+def test_key_switch_empty(reader):
+    assert list(
+        key_switched_list_fragments(
+            stream=InputStream.from_string(""),
+            Reader=reader,
+        )
+    ) == []
diff --git a/library/python/cyson/ut/test_input_stream.py b/library/python/cyson/ut/test_input_stream.py
new file mode 100644
index 0000000000..ae7c0e8f1e
--- /dev/null
+++ b/library/python/cyson/ut/test_input_stream.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function, absolute_import, division
+
+import atexit
+import io
+import os
+import tempfile
+
+import pytest
+import six
+
+from cyson import Reader, InputStream, dumps
+
+
+def prepare_file(string):
+    filepath = tempfile.mktemp()
+
+    with open(filepath, 'wb') as sink:
+        sink.write(string)
+
+    atexit.register(os.remove, filepath)
+
+    return filepath
+
+
+def prepare_bytesio(string, klass):
+    obj = klass()
+    obj.write(b'?:!;*')
+    obj.write(string)
+    obj.seek(5)
+
+    return obj
+
+
+def slice_string(string):
+    index = 0
+    length = len(string)
+
+    while index < length:
+        yield string[index:index + 2]
+        index += 2
+
+
+# <method name>, <input constructor>
+CASES = (
+    ('from_string', lambda x: x),
+    ('from_iter', slice_string),
+    ('from_file', lambda x: prepare_bytesio(x, io.BytesIO)),
+    ('from_file', lambda x: open(prepare_file(x), 'rb')),
+    ('from_fd', lambda x: os.open(prepare_file(x), os.O_RDONLY)),
+)
+
+if six.PY2:
+    import StringIO
+    import cStringIO
+
+    CASES += (
+        ('from_file', lambda x: prepare_bytesio(x, StringIO.StringIO)),
+        ('from_file', lambda x: prepare_bytesio(x, cStringIO.StringIO)),
+    )
+
+
+DATA = {u'a': [1, u'word', 3], b'b': b'xyz', u'c': None}
+ETALON = {b'a': [1, b'word', 3], b'b': b'xyz', b'c': None}
+
+
+@pytest.fixture(scope='module')
+def serialized_data():
+    return dumps(DATA, format='binary')
+
+
+def test_serialized_data(serialized_data):
+    assert type(serialized_data) is bytes
+
+
+@pytest.mark.parametrize('method_name,make_input', CASES)
+def test_input_streams(method_name, make_input, serialized_data):
+    method = getattr(InputStream, method_name)
+    input_stream = method(make_input(serialized_data))
+
+    assert Reader(input_stream).node() == ETALON
diff --git a/library/python/cyson/ut/test_py_reader_writer.py b/library/python/cyson/ut/test_py_reader_writer.py
new file mode 100644
index 0000000000..0238040f50
--- /dev/null
+++ b/library/python/cyson/ut/test_py_reader_writer.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function, absolute_import, division
+
+import pytest
+import six
+
+from cyson import PyWriter, PyReader, dumps, loads, dumps_into
+
+
+if six.PY3:
+    unicode = str
+
+
+def switch_string_type(string):
+    if isinstance(string, unicode):
+        return string.encode('utf8')
+    elif isinstance(string, bytes):
+        return string.decode('utf8')
+
+    raise TypeError('expected bytes or unicode, got {!r}'.format(string))
+
+
+CASES = [
+    None,
+    # int
+    0, 1, -1, 2**63, -2**63, 2**64 - 1,
+    # float
+    0.0, 100.0, -100.0,
+    # long
+    10**100, 2**300, -7**100,
+    # bytes
+    b'', b'hello', u'Привет'.encode('utf8'),
+    # unicode
+    u'', u'hello', u'Привет',
+    # tuple
+    (), (0,), (1, 'hello'), (17, 'q') * 100,
+    # list
+    [], [0], ['hello', set([1, 2, 3])], [17, 'q'] * 100,
+    # dict
+    {}, {'a': 'b'}, {'a': 17}, {'a': frozenset([1, 2, 3])}, {b'a': 1, u'b': 2},
+    {1: 2, 3: 4, 5: None}, {(1, 2, 3): (1, 4, 9), None: 0},
+    # set
+    set(), {1, 2, 3}, {'hello', 'world'},
+    # frozenset
+    frozenset(), frozenset([1, 2, 3]), frozenset(['hello', 'world']),
+]
+
+
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+@pytest.mark.parametrize('value', CASES)
+def test_roundtrip(value, format):
+    encoded = dumps(value, format=format, Writer=PyWriter)
+    decoded = loads(encoded, Reader=PyReader)
+    assert encoded == dumps(value, format=switch_string_type(format), Writer=PyWriter)
+    assert type(decoded) is type(value)
+    assert decoded == value
+
+
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+@pytest.mark.parametrize('value', CASES)
+def test_roundtrip_bytearray(value, format):
+    encoded1 = bytearray()
+    encoded2 = bytearray()
+    dumps_into(encoded1, value, format=format, Writer=PyWriter)
+    dumps_into(encoded2, value, format=switch_string_type(format), Writer=PyWriter)
+    decoded = loads(encoded1, Reader=PyReader)
+    assert decoded == loads(encoded2, Reader=PyReader)
+    assert type(decoded) is type(value)
+    assert decoded == value
diff --git a/library/python/cyson/ut/test_reader_writer.py b/library/python/cyson/ut/test_reader_writer.py
new file mode 100644
index 0000000000..6428ea0b56
--- /dev/null
+++ b/library/python/cyson/ut/test_reader_writer.py
@@ -0,0 +1,251 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function, absolute_import, division
+
+import io
+import math
+import pytest
+import six
+import sys
+
+from functools import partial
+
+from cyson import (
+    dumps, loads, YsonInt64, YsonUInt64, UInt, Writer, OutputStream,
+    UnicodeReader,
+)
+
+
+if six.PY2:
+    NativeUInt = long  # noqa: F821
+elif six.PY3:
+    NativeUInt = UInt
+    unicode = str
+    long = int
+else:
+    raise RuntimeError('Unsupported Python version')
+
+
+def canonize(value, as_unicode=False):
+    _canonize = partial(canonize, as_unicode=as_unicode)
+
+    if isinstance(value, (list, tuple)):
+        return [_canonize(_) for _ in value]
+    elif isinstance(value, dict):
+        return {_canonize(k): _canonize(value[k]) for k in value}
+    elif isinstance(value, unicode) and not as_unicode:
+        return value.encode('utf8')
+    elif isinstance(value, bytes) and as_unicode:
+        return value.decode('utf8')
+
+    return value
+
+
+def switch_string_type(string):
+    if isinstance(string, bytes):
+        return string.decode('utf8')
+    elif isinstance(string, unicode):
+        return string.encode('utf8')
+
+    raise TypeError('expected unicode or bytes, got {!r}'.format(string))
+
+
+def coerce(obj, to, via=None):
+    if via is None:
+        via = to
+
+    if isinstance(obj, to):
+        return obj
+
+    return via(obj)
+
+
+SKIP_PY3 = pytest.mark.skipif(six.PY3, reason='Makes no sense for Python3')
+
+
+if six.PY3 and sys.platform == 'win32':
+    NUMPY_CASES = []
+else:
+    import numpy as np
+
+    NUMPY_CASES = [
+        # numpy int
+        np.int8(2 ** 7 - 1), np.int16(2 ** 15 - 1),
+        np.int32(2 ** 31 - 1), np.int64(2 ** 63 - 1),
+        # numpy uint
+        np.uint8(2 ** 8 - 1), np.uint16(2 ** 16 - 1),
+        np.uint32(2 ** 32 - 1), np.uint64(2 ** 64 - 1),
+        # numpy float
+        np.float16(100.0), np.float32(100.0), np.float64(100.0),
+    ]
+
+
+CASES = [
+    # NoneType
+    None,
+    # boolean
+    True, False,
+    # int
+    0, 1, -1, int(2 ** 63 - 1), int(-2 ** 63),
+    # float
+    0.0, 100.0, -100.0, float('inf'), float('-inf'),
+    # bytes
+    b'', b'hello', u'Привет'.encode('utf8'),
+    # unicode
+    u'', u'hello', u'Привет',
+    # list
+    [], [0], [1, 'hello'], [17, 'q'] * 100, [b'bytes'],
+    # tuple
+    (), (0,), (1, 'hello'), (17, 'q') * 100, (b'bytes',),
+    # dict
+    {}, {'a': 'b'}, {'a': 17}, {'a': [1, 2, 3]}, {b'a': 1, u'b': b'a'}
+] + NUMPY_CASES
+
+
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+@pytest.mark.parametrize('value', CASES)
+def test_roundtrip(value, format):
+    encoded = dumps(value, format)
+    decoded = loads(encoded)
+    assert encoded == dumps(value, switch_string_type(format))
+    assert decoded == canonize(value)
+
+
+# NOTE: roundtrip test doesn't work for NaN (NaN != NaN)
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+def test_nan(format):
+    encoded = dumps(float('nan'), format)
+    decoded = loads(encoded)
+    assert encoded == dumps(float('nan'), switch_string_type(format))
+    assert math.isnan(decoded)
+
+
+@SKIP_PY3
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+@pytest.mark.parametrize(
+    'value', [long(0), long(1), long(2 ** 63), long(2 ** 64 - 1)]
+)
+def test_long_roundtrip(value, format):
+    encoded = dumps(value, format)
+    decoded = loads(encoded)
+    assert encoded == dumps(value, switch_string_type(format))
+    assert decoded == value
+
+
+@pytest.mark.parametrize(
+    'value', [NativeUInt(0), NativeUInt(111), NativeUInt(2 ** 63), NativeUInt(2 ** 64 - 1)]
+)
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+def test_readwrite_uint64(value, format):
+    dumped_uint64 = dumps(coerce(value, YsonUInt64), format=format)
+    loaded_uint64 = loads(dumped_uint64)
+
+    assert type(value) is NativeUInt
+    assert type(loaded_uint64) is NativeUInt
+    assert dumps(value, format=format) == dumped_uint64
+
+
+@pytest.mark.parametrize('value', [int(-2 ** 63), -111, 0, 111, int(2 ** 63 - 1)])
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+def test_readwrite_int64(value, format):
+    dumped_int64 = dumps(YsonInt64(value), format=format)
+    loaded_int64 = loads(dumped_int64)
+
+    assert type(value) is int
+    assert type(loaded_int64) is int
+    assert dumps(value, format=format) == dumped_int64
+
+
+@SKIP_PY3
+def test_long_overflow():
+    with pytest.raises(OverflowError):
+        dumps(long(-1))
+
+    with pytest.raises(OverflowError):
+        dumps(long(2**64))
+
+
+@pytest.mark.parametrize('value', [2 ** 63, -2 ** 63 - 1])
+def test_int64_overflow(value):
+    with pytest.raises(OverflowError):
+        int64_value = YsonInt64(value)
+        dumps(int64_value)
+
+    if six.PY3:
+        with pytest.raises(OverflowError):
+            dumps(value)
+
+
+@pytest.mark.parametrize('value', [2 ** 64, 2 ** 100])
+def test_uint64_overflow(value):
+    with pytest.raises(OverflowError):
+        uint64_value = YsonUInt64(value)
+        dumps(uint64_value)
+
+
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+def test_force_write_sequence(format):
+    class Sequence(object):
+        def __init__(self, seq):
+            self._seq = seq
+
+        def __getitem__(self, index):
+            return self._seq[index]
+
+        def __len__(self):
+            return len(self._seq)
+
+    sequence = [1, 1.1, None, b'xyz']
+
+    sink = io.BytesIO()
+    writer = Writer(OutputStream.from_file(sink), format=format)
+
+    writer.begin_stream()
+    writer.list(Sequence(sequence))
+    writer.end_stream()
+
+    assert sink.getvalue() == dumps(sequence, format)
+
+
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+def test_force_write_mapping(format):
+    class Mapping(object):
+        def __init__(self, mapping):
+            self._mapping = mapping
+
+        def __getitem__(self, key):
+            return self._mapping[key]
+
+        def keys(self):
+            return self._mapping.keys()
+
+    mapping = {b'a': 1, b'b': 1.1, b'c': None, b'd': b'some'}
+
+    sink = io.BytesIO()
+    writer = Writer(OutputStream.from_file(sink), format=format)
+
+    writer.begin_stream()
+    writer.map(Mapping(mapping))
+    writer.end_stream()
+
+    assert sink.getvalue() == dumps(mapping, format)
+
+
+@pytest.mark.parametrize('format', ['binary', 'text', 'pretty'])
+@pytest.mark.parametrize('value', CASES)
+def test_unicode_reader(value, format):
+    expected = canonize(value, as_unicode=True)
+    got = loads(dumps(value, format), UnicodeReader)
+    assert expected == got
+
+
+def test_unicode_reader_raises_unicode_decode_error():
+    not_decodable = b'\x80\x81'
+    with pytest.raises(UnicodeDecodeError):
+        loads(dumps(not_decodable, format='binary'), UnicodeReader)
+
+
+def test_unicode_reader_decodes_object_with_attributes():
+    data = b'{"a" = "b"; "c" = <"foo" = "bar">"d"}'
+    expected = {u"a": u"b", u"c": u"d"}
+    assert loads(data, UnicodeReader) == expected
diff --git a/library/python/cyson/ut/test_unsigned_long.py b/library/python/cyson/ut/test_unsigned_long.py
new file mode 100644
index 0000000000..3cd4ffe440
--- /dev/null
+++ b/library/python/cyson/ut/test_unsigned_long.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+
+from __future__ import division
+
+import pytest
+import six
+
+from cyson import UInt
+
+
+if six.PY3:
+    long = int
+
+
+def equals_with_type(data, etalon):
+    return type(data) is type(etalon) and data == etalon
+
+
+def equals_as_uint(data, etalon):
+    return type(data) is UInt and data == etalon
+
+
+N = long(12)
+UN = UInt(N)
+
+
+def test_uint64_initialization():
+    assert UInt(2**63 - 1) == 2**63 - 1
+    assert UInt() == UInt(0) == 0
+    assert UInt(long(78)) == 78
+    assert UInt(23.57) == 23
+    assert UInt('111') == 111
+
+    with pytest.raises(OverflowError):
+        UInt(-10)
+
+
+def test_add():
+    assert equals_as_uint(UN + 1, N + 1)
+    assert equals_as_uint(UN + long(1), N + 1)
+    assert equals_as_uint(UN + UInt(1), N + 1)
+    assert equals_as_uint(1 + UN, 1 + N)
+    assert equals_as_uint(long(1) + UN, long(1) + N)
+    assert equals_as_uint(UInt(1) + UN, 1 + N)
+    assert equals_with_type(UN + 1.1, N + 1.1)
+    assert equals_with_type(1.1 + UN, 1.1 + N)
+    assert equals_with_type(UN + int(-N - 1), N + int(-N - 1))
+    assert equals_with_type(UN + long(-N - 1), N + long(-N - 1))
+    assert equals_with_type(int(-N - 1) + UN, int(-N - 1) + N)
+    assert equals_with_type(long(-N - 1) + UN, long(-N - 1) + N)
+
+
+def test_sub():
+    assert equals_as_uint(UN - 1, N - 1)
+    assert equals_as_uint(UN - long(1), N - long(1))
+    assert equals_as_uint(UN - UInt(1), N - 1)
+    assert equals_as_uint(13 - UN, 13 - N)
+    assert equals_as_uint(UInt(13) - UN, long(13) - N)
+    assert equals_as_uint(long(13) - UN, long(13) - N)
+    assert equals_with_type(UN - 0.1, N - 0.1)
+    assert equals_with_type(13.1 - UN, 13.1 - N)
+    assert equals_with_type(1 - UN, long(1) - N)
+    assert equals_with_type(long(1) - UN, long(1) - N)
+    assert equals_with_type(UInt(1) - UN, long(1) - N)
+    assert equals_with_type(UN - int(UN + 1), N - int(UN + 1))
+    assert equals_with_type(UN - long(UN + 1), N - long(UN + 1))
+    assert equals_with_type(UN - UInt(UN + 1), N - long(UN + 1))
+
+
+def test_mul():
+    assert equals_as_uint(UN * 2, N * 2)
+    assert equals_as_uint(UN * long(2), N * long(2))
+    assert equals_as_uint(UN * UInt(2), N * long(2))
+    assert equals_as_uint(2 * UN, 2 * N)
+    assert equals_as_uint(long(2) * UN, long(2) * N)
+    assert equals_as_uint(UInt(2) * UN, long(2) * N)
+    assert equals_with_type(-3 * UN, -3 * N)
+    assert equals_with_type(long(-3) * UN, long(-3) * N)
+    assert equals_with_type(UN * -3, N * -3)
+    assert equals_with_type(UN * long(-3), N * long(-3))
+    assert equals_with_type(UN * 1.1, N * 1.1)
+    assert equals_with_type(1.1 * UN, 1.1 * N)
+
+
+def test_truediv():
+    assert equals_with_type(UN / 1, N / long(1))
+    assert equals_with_type(UN / UInt(1), N / long(1))
+    assert equals_with_type(1 / UN, long(1) / N)
+    assert equals_with_type(UInt(1) / UN, long(1) / N)
+    assert equals_with_type(UN / N, N / long(N))
+    assert equals_with_type(UN / UInt(N), N / long(N))
+    assert equals_with_type(UN / -1, N / long(-1))
+    assert equals_with_type(-1 / UN, long(-1) / N)
+    assert equals_with_type(UN / 1.1, N / 1.1)
+    assert equals_with_type(1.1 / UN, 1.1 / N)
+
+
+def test_floordiv():
+    # floor division (__floordiv__)
+    assert equals_as_uint(UN // 1, N // 1)
+    assert equals_as_uint(UN // long(1), N // long(1))
+    assert equals_as_uint(UN // UInt(1), N // long(1))
+    assert equals_as_uint(1 // UN, 1 // N)
+    assert equals_as_uint(long(1) // UN, long(1) // N)
+    assert equals_as_uint(UInt(1) // UN, long(1) // N)
+    assert equals_as_uint(UN // N, N // N)
+    assert equals_as_uint(UN // UN, N // N)
+    assert equals_with_type(UN // -1, N // long(-1))
+    assert equals_with_type(UN // long(-1), N // long(-1))
+    assert equals_with_type(-1 // UN, -long(1) // N)
+    assert equals_with_type(long(-1) // UN, long(-1) // N)
+    assert equals_with_type(UN // 1.1, N // 1.1)
+    assert equals_with_type(1.1 // UN, 1.1 // N)
+
+
+def test_mod():
+    assert equals_as_uint(UN % 7, N % 7)
+    assert equals_as_uint(UN % long(7), N % long(7))
+    assert equals_as_uint(UN % UInt(7), N % long(7))
+    assert equals_as_uint(23 % UN, 23 % N)
+    assert equals_as_uint(long(23) % UN, long(23) % N)
+    assert equals_as_uint(UInt(23) % UN, long(23) % N)
+    assert equals_as_uint(-23 % UN, -23 % N)
+    assert equals_as_uint(long(-23) % UN, long(-23) % N)
+    assert equals_with_type(UN % -11, N % long(-11))
+    assert equals_with_type(UN % long(-11), N % long(-11))
+
+
+def test_pow():
+    assert equals_as_uint(UN ** 2, N ** 2)
+    assert equals_as_uint(UN ** long(2), N ** long(2))
+    assert equals_as_uint(UN ** UInt(2), N ** long(2))
+    assert equals_as_uint(2 ** UN, 2 ** N)
+    assert equals_as_uint(long(2) ** UN, long(2) ** N)
+    assert equals_as_uint(UInt(2) ** UN, long(2) ** N)
+    assert equals_with_type(UN ** -1, N ** long(-1))
+    assert equals_with_type(UN ** long(-1), N ** -long(1))
+    assert equals_with_type(UN ** 1.1, N ** 1.1)
+    assert equals_with_type(UN ** -1.1, N ** -1.1)
+    assert equals_with_type(1.1 ** UN, 1.1 ** N)
+    assert equals_with_type(UN ** 0.5, N ** 0.5)
+    assert equals_with_type(0.5 ** UN, 0.5 ** N)
+
+
+def test_neg():
+    assert equals_with_type(-UN, -N)
+    assert equals_with_type(-UInt(0), long(0))
+
+
+def test_pos():
+    assert equals_as_uint(+UN, N)
+    assert equals_as_uint(+UInt(0), 0)
+
+
+def test_abs():
+    assert equals_as_uint(abs(UN), N)
+    assert abs(UN) is UN
+
+
+def test_invert():
+    assert equals_with_type(~UN, ~N)
+    assert equals_with_type(~UInt(0), ~long(0))
+
+
+def test_lshift():
+    assert equals_as_uint(1 << UN, 1 << N)
+    assert equals_as_uint(long(1) << UN, long(1) << N)
+    assert equals_as_uint(UInt(1) << UN, long(1) << N)
+    assert equals_as_uint(UN << 2, N << 2)
+    assert equals_as_uint(UN << long(2), N << 2)
+    assert equals_as_uint(UN << UInt(2), N << 2)
+    assert equals_with_type(-1 << UN, -1 << N)
+    assert equals_with_type(long(-1) << UN, -long(1) << N)
+
+    with pytest.raises(TypeError):
+        UN << 1.1
+    with pytest.raises(TypeError):
+        1.1 << UN
+    with pytest.raises(ValueError):
+        UN << -1
+
+
+def test_rshift():
+    assert equals_as_uint(10000 >> UN, 10000 >> N)
+    assert equals_as_uint(long(10000) >> UN, long(10000) >> N)
+    assert equals_as_uint(UInt(10000) >> UN, long(10000) >> N)
+    assert equals_as_uint(UN >> 2, N >> 2)
+    assert equals_as_uint(UN >> long(2), N >> long(2))
+    assert equals_as_uint(UN >> UInt(2), N >> long(2))
+    assert equals_with_type(-10000 >> UN, -10000 >> N)
+    assert equals_with_type(long(-10000) >> UN, long(-10000) >> N)
+
+    with pytest.raises(TypeError):
+        UN >> 1.1
+    with pytest.raises(TypeError):
+        1.1 >> UN
+    with pytest.raises(ValueError):
+        UN >> -1
+
+
+def test_and():
+    assert equals_as_uint(UN & 15, N & 15)
+    assert equals_as_uint(UN & long(15), N & long(15))
+
+    with pytest.raises(TypeError):
+        UN & 1.1
+
+
+def test_or():
+    assert equals_as_uint(UN | 15, N | 15)
+    assert equals_as_uint(UN | long(15), N | long(15))
+
+    with pytest.raises(TypeError):
+        UN | 1.1
+
+
+def test_xor():
+    assert equals_as_uint(UN ^ 9, N ^ 9)
+    assert equals_as_uint(UN ^ long(9), N ^ long(9))
+
+    with pytest.raises(TypeError):
+        UN ^ 1.1
diff --git a/library/python/cyson/ut/ya.make b/library/python/cyson/ut/ya.make
new file mode 100644
index 0000000000..1af753735f
--- /dev/null
+++ b/library/python/cyson/ut/ya.make
@@ -0,0 +1,21 @@
+PY23_TEST()
+
+PEERDIR(
+    library/python/cyson
+)
+
+IF(NOT OS_WINDOWS)
+    PEERDIR(
+        contrib/python/numpy
+    )
+ENDIF()
+
+TEST_SRCS(
+    test_control_attributes.py
+    test_input_stream.py
+    test_py_reader_writer.py
+    test_reader_writer.py
+    test_unsigned_long.py
+)
+
+END()